├── .gitignore
├── README.md
├── build.gradle
├── demo
├── .gitignore
├── build.gradle
├── proguard-rules.pro
└── src
│ └── main
│ ├── AndroidManifest.xml
│ ├── java
│ └── org
│ │ └── buyun
│ │ └── alpr
│ │ ├── AlprVideoSequentialActivity.java
│ │ └── common
│ │ ├── AlprActivity.java
│ │ ├── AlprBackgroundTask.java
│ │ ├── AlprCameraFragment.java
│ │ ├── AlprGLSurfaceView.java
│ │ ├── AlprImage.java
│ │ ├── AlprPlateView.java
│ │ └── AlprUtils.java
│ └── res
│ ├── drawable-hdpi
│ ├── ic_action_info.png
│ ├── ic_launcher.png
│ ├── ic_launcher_round.png
│ └── tile.9.png
│ ├── drawable-mdpi
│ ├── ic_action_info.png
│ ├── ic_launcher.png
│ └── ic_launcher_round.png
│ ├── drawable-xhdpi
│ ├── ic_action_info.png
│ ├── ic_launcher.png
│ └── ic_launcher_round.png
│ ├── drawable-xxhdpi
│ ├── ic_action_info.png
│ ├── ic_launcher.png
│ └── ic_launcher_round.png
│ ├── layout-land
│ └── fragment_camera.xml
│ ├── layout
│ ├── activity_main.xml
│ └── fragment_camera.xml
│ ├── values-sw600dp
│ ├── template-dimens.xml
│ └── template-styles.xml
│ ├── values-v11
│ └── template-styles.xml
│ ├── values-v21
│ ├── base-colors.xml
│ └── base-template-styles.xml
│ └── values
│ ├── base-strings.xml
│ ├── colors.xml
│ ├── strings.xml
│ ├── styles.xml
│ ├── template-dimens.xml
│ └── template-styles.xml
├── gradle.properties
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/caches
5 | /.idea/libraries
6 | /.idea/modules.xml
7 | /.idea/workspace.xml
8 | /.idea/navEditor.xml
9 | /.idea/assetWizardSettings.xml
10 | .DS_Store
11 | /build
12 | /captures
13 | .externalNativeBuild
14 | .cxx
15 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
18 | Robust, Realtime, On-Device License Plate Recognition SDK For Android
19 |
20 | It can not only recognize number plates, but also detect the vehicle's model, color and country.
21 |
22 | ## :tada: Try It Yourself
23 |
24 |
25 |
26 |
27 |
28 |
29 | https://user-images.githubusercontent.com/91896009/186433213-6bb1bda3-6b1b-4f71-b950-85e7d233ddff.mp4
30 |
31 |
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    repositories {
        google()
        // NOTE(review): JCenter has been read-only since 2021; consider migrating to mavenCentral().
        jcenter()

    }
    dependencies {
        // Android Gradle Plugin driving all module builds.
        classpath 'com.android.tools.build:gradle:3.5.1'

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        google()
        // NOTE(review): JCenter has been read-only since 2021; consider migrating to mavenCentral().
        jcenter()

    }
}

// Deletes the root project's build directory on `gradle clean`.
task clean(type: Delete) {
    delete rootProject.buildDir
}
28 |
--------------------------------------------------------------------------------
/demo/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/demo/build.gradle:
--------------------------------------------------------------------------------
apply plugin: 'com.android.application'

android {
    compileSdkVersion 28


    defaultConfig {
        applicationId "org.buyun.alpr"
        minSdkVersion 21
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"

        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
    }

    buildTypes {
        release {
            // Shrinking is on; the ProGuard rules keep the SDK classes (see proguard-rules.pro).
            minifyEnabled true
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }

    // Pull the shared sources/resources of the sibling "common" directory into this module.
    sourceSets {
        main {
            java.srcDirs += ['../common/src/main/java']
            res.srcDirs += ['../common/src/main/res']
        }
    }

}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])

    implementation 'androidx.appcompat:appcompat:1.1.0'
    implementation 'androidx.constraintlayout:constraintlayout:1.1.3'

    // The ALPR SDK module providing org.buyun.alpr.sdk.*.
    implementation project(":sdk")
}
41 |
--------------------------------------------------------------------------------
/demo/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 | -keep class org.buyun.alpr.sdk.** {*;}
19 |
20 | # If you keep the line number information, uncomment this to
21 | # hide the original source file name.
22 | #-renamesourcefileattribute SourceFile
23 |
--------------------------------------------------------------------------------
/demo/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
15 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/demo/src/main/java/org/buyun/alpr/AlprVideoSequentialActivity.java:
--------------------------------------------------------------------------------
1 | package org.buyun.alpr;
2 |
3 | import android.os.Bundle;
4 | import android.util.Log;
5 | import android.util.Size;
6 |
7 | import org.buyun.alpr.common.AlprActivity;
8 | import org.buyun.alpr.common.AlprCameraFragment;
9 | import org.json.JSONArray;
10 | import org.json.JSONException;
11 | import org.json.JSONObject;
12 |
13 | import java.util.Arrays;
14 | import java.util.List;
15 |
/**
 * Main activity.
 *
 * Runs the ALPR engine in sequential delivery mode (see
 * {@link #isParallelDeliveryEnabled()}): each frame's result is returned by the
 * process() call itself rather than by a deferred callback. All engine tuning
 * lives in the CONFIG_* constants below and is serialized to JSON by
 * {@link #getJsonConfig()}.
 *
 * NOTE(review): generic type parameters (e.g. List&lt;Float&gt;) appear to have been
 * stripped from this listing — confirm against the original sources.
 */
public class AlprVideoSequentialActivity extends AlprActivity {

    static final String TAG = AlprVideoSequentialActivity.class.getCanonicalName();

    // Preferred camera preview size in pixels; the camera fragment picks the
    // closest size the device actually supports.
    static final Size PREFERRED_SIZE = new Size(1280, 720);

    // --- Engine configuration; see getJsonConfig() for the JSON keys these feed. ---
    static final String CONFIG_DEBUG_LEVEL = "info";
    static final boolean CONFIG_DEBUG_WRITE_INPUT_IMAGE = false; // must be false unless you're debugging the code
    static final int CONFIG_NUM_THREADS = -1;
    static final boolean CONFIG_GPGPU_ENABLED = true;
    static final int CONFIG_MAX_LATENCY = -1;
    static final String CONFIG_CHARSET = "latin";
    static final boolean CONFIG_IENV_ENABLED = false;
    static final boolean CONFIG_OPENVINO_ENABLED = true;
    static final String CONFIG_OPENVINO_DEVICE = "CPU";
    static final double CONFIG_DETECT_MINSCORE = 0.1; // 10%
    static final boolean CONFIG_CAR_NOPLATE_DETECT_ENABLED = false;
    static final double CONFIG_CAR_NOPLATE_DETECT_MINSCORE = 0.8; // 80%
    // Detection region of interest; all zeros means "use the whole frame".
    static final List CONFIG_DETECT_ROI = Arrays.asList(0.f, 0.f, 0.f, 0.f);
    static final boolean CONFIG_PYRAMIDAL_SEARCH_ENABLED = true;
    static final double CONFIG_PYRAMIDAL_SEARCH_SENSITIVITY = 0.28; // 28%
    static final double CONFIG_PYRAMIDAL_SEARCH_MINSCORE = 0.5; // 50%
    static final int CONFIG_PYRAMIDAL_SEARCH_MIN_IMAGE_SIZE_INPIXELS = 800; // pixels
    static final boolean CONFIG_KLASS_LPCI_ENABLED = true;
    static final boolean CONFIG_KLASS_VCR_ENABLED = true;
    static final boolean CONFIG_KLASS_VMMR_ENABLED = true;
    static final boolean CONFIG_KLASS_VBSR_ENABLED = false;
    static final double CONFIG_KLASS_VCR_GAMMA = 1.5;
    static final double CONFIG_RECOGN_MINSCORE = 0.4; // 40%
    static final String CONFIG_RECOGN_SCORE_TYPE = "min";
    static final boolean CONFIG_RECOGN_RECTIFY_ENABLED = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        Log.i(TAG, "onCreate " + this);
        super.onCreate(savedInstanceState);

        // Host the camera fragment; this activity acts as the frame sink.
        getSupportFragmentManager().beginTransaction()
                .replace(R.id.container, AlprCameraFragment.newInstance(PREFERRED_SIZE, this))
                .commit();
    }

    @Override
    public void onResume() {
        super.onResume();

    }

    @Override
    public void onDestroy() {
        Log.i(TAG, "onDestroy " + this);
        super.onDestroy();
    }

    /** @return the layout resource hosting the camera fragment container. */
    @Override
    protected int getLayoutResId() {
        return R.layout.activity_main;
    }

    /**
     * Builds the JSON configuration handed to AlprSdk.init() from the CONFIG_*
     * constants above.
     *
     * @return the JSON config; on JSONException the partially-filled object is
     *         returned and the error is printed (best-effort behavior).
     */
    @Override
    protected JSONObject getJsonConfig() {

        JSONObject config = new JSONObject();
        try {
            config.put("debug_level", CONFIG_DEBUG_LEVEL);
            config.put("debug_write_input_image_enabled", CONFIG_DEBUG_WRITE_INPUT_IMAGE);
            config.put("debug_internal_data_path", getDebugInternalDataPath());

            config.put("num_threads", CONFIG_NUM_THREADS);
            config.put("gpgpu_enabled", CONFIG_GPGPU_ENABLED);
            config.put("charset", CONFIG_CHARSET);
            config.put("max_latency", CONFIG_MAX_LATENCY);
            config.put("ienv_enabled", CONFIG_IENV_ENABLED);
            config.put("openvino_enabled", CONFIG_OPENVINO_ENABLED);
            config.put("openvino_device", CONFIG_OPENVINO_DEVICE);

            config.put("detect_minscore", CONFIG_DETECT_MINSCORE);
            config.put("detect_roi", new JSONArray(getDetectROI()));

            config.put("car_noplate_detect_enabled", CONFIG_CAR_NOPLATE_DETECT_ENABLED);
            config.put("car_noplate_detect_min_score", CONFIG_CAR_NOPLATE_DETECT_MINSCORE);

            config.put("pyramidal_search_enabled", CONFIG_PYRAMIDAL_SEARCH_ENABLED);
            config.put("pyramidal_search_sensitivity", CONFIG_PYRAMIDAL_SEARCH_SENSITIVITY);
            config.put("pyramidal_search_minscore", CONFIG_PYRAMIDAL_SEARCH_MINSCORE);
            config.put("pyramidal_search_min_image_size_inpixels", CONFIG_PYRAMIDAL_SEARCH_MIN_IMAGE_SIZE_INPIXELS);

            config.put("klass_lpci_enabled", CONFIG_KLASS_LPCI_ENABLED);
            config.put("klass_vcr_enabled", CONFIG_KLASS_VCR_ENABLED);
            config.put("klass_vmmr_enabled", CONFIG_KLASS_VMMR_ENABLED);
            config.put("klass_vbsr_enabled", CONFIG_KLASS_VBSR_ENABLED);
            config.put("klass_vcr_gamma", CONFIG_KLASS_VCR_GAMMA);

            config.put("recogn_minscore", CONFIG_RECOGN_MINSCORE);
            config.put("recogn_score_type", CONFIG_RECOGN_SCORE_TYPE);
            config.put("recogn_rectify_enabled", CONFIG_RECOGN_RECTIFY_ENABLED);
        }
        catch (JSONException e) {
            e.printStackTrace();
        }
        return config;
    }


    /** Sequential mode: we deliberately disable parallel delivery. */
    @Override
    protected boolean isParallelDeliveryEnabled() { return false; }

    /**
     * @return the detection ROI passed to the engine and the overlay;
     *         presumably [left, right, top, bottom] in relative coordinates
     *         (that is how AlprActivity.setAlprPlateView maps it) — TODO confirm.
     */
    @Override
    protected List getDetectROI() { return CONFIG_DETECT_ROI; }
}
127 |
--------------------------------------------------------------------------------
/demo/src/main/java/org/buyun/alpr/common/AlprActivity.java:
--------------------------------------------------------------------------------
1 | package org.buyun.alpr.common;
2 |
3 | import android.graphics.RectF;
4 | import android.media.ExifInterface;
5 | import android.media.Image;
6 | import android.os.Bundle;
7 | import android.os.Environment;
8 | import android.os.SystemClock;
9 | import android.util.Log;
10 | import android.util.Size;
11 | import android.view.Window;
12 | import android.view.WindowManager;
13 |
14 | import androidx.annotation.NonNull;
15 | import androidx.appcompat.app.AppCompatActivity;
16 |
17 | import org.buyun.alpr.sdk.SDK_IMAGE_TYPE;
18 | import org.buyun.alpr.sdk.AlprSdk;
19 | import org.buyun.alpr.sdk.AlprCallback;
20 | import org.buyun.alpr.sdk.AlprResult;
21 | import org.json.JSONException;
22 | import org.json.JSONObject;
23 |
24 | import java.io.File;
25 | import java.util.List;
26 |
/**
 * Base activity to subclass to make our life easier.
 *
 * Owns the ALPR engine lifecycle: AlprSdk.init() in onCreate(), AlprSdk.deInit()
 * in onDestroy(). Implements the camera fragment's sink: every frame received via
 * {@link #setImage} is fed to AlprSdk.process() and the result is pushed to the
 * {@link AlprPlateView} overlay — directly in sequential mode, or through the
 * parallel delivery callback when {@link #isParallelDeliveryEnabled()} is true.
 *
 * NOTE(review): generic type parameters (e.g. List&lt;Float&gt;) appear to have been
 * stripped from this listing — confirm against the original sources.
 */
public abstract class AlprActivity extends AppCompatActivity implements AlprCameraFragment.AlprCameraFragmentSink {

    static final String TAG = AlprActivity.class.getCanonicalName();

    // Writable folder handed to the engine for debug dumps (see getJsonConfig() in subclasses).
    private String mDebugInternalDataPath = null;

    // True while AlprSdk.process() is running; frames arriving meanwhile are dropped.
    private boolean mIsProcessing = false;
    // True between onPause() and onResume(); frames arriving meanwhile are dropped.
    private boolean mIsPaused = true;

    /**
     * Parallel callback delivery function used by the engine to notify for new deferred results.
     * The engine invokes onNewResult() on its own thread once a deferred result is ready.
     */
    static class MyUltAlprSdkParallelDeliveryCallback extends AlprCallback {
        static final String TAG = MyUltAlprSdkParallelDeliveryCallback.class.getCanonicalName();

        AlprPlateView mAlprPlateView; // overlay to draw results on; null until the view is attached
        Size mImageSize;              // upright size of the last submitted frame
        long mTotalDuration = 0;      // duration of the last process() call, in milliseconds
        int mOrientation = 0;         // JPEG orientation of the last submitted frame, in degrees

        void setAlprPlateView(@NonNull final AlprPlateView view) {
            mAlprPlateView = view;
        }

        // NOTE(review): @NonNull on a primitive int is a no-op.
        void setImageSize(@NonNull final Size imageSize, @NonNull final int orientation) {
            mImageSize = imageSize;
            mOrientation = orientation;
        }

        void setDurationTime(final long totalDuration) {
            mTotalDuration = totalDuration;
        }

        /** Engine callback: forward the deferred result to the overlay, if attached. */
        @Override
        public void onNewResult(AlprResult result) {
            Log.d(TAG, AlprUtils.resultToString(result));
            if (mAlprPlateView != null) {
                mAlprPlateView.setResult(result, mImageSize, mTotalDuration, mOrientation);
            }
        }

        static MyUltAlprSdkParallelDeliveryCallback newInstance() {
            return new MyUltAlprSdkParallelDeliveryCallback();
        }
    }

    /**
     * The parallel delivery callback. Set to null to disable parallel mode
     * and enforce sequential mode.
     */
    private MyUltAlprSdkParallelDeliveryCallback mParallelDeliveryCallback;

    // Overlay on which recognition results are rendered.
    private AlprPlateView mAlprPlateView;

    @Override
    protected void onCreate(final Bundle savedInstanceState) {
        Log.i(TAG, "onCreate " + this);
        super.onCreate(savedInstanceState);
        // Full-screen, always-on display for the camera preview.
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(getLayoutResId());

        // Create folder to dump input images for debugging
        File dummyFile = new File(getExternalFilesDir(null), "dummyFile");
        if (!dummyFile.getParentFile().exists() && !dummyFile.getParentFile().mkdirs()) {
            Log.e(TAG, "mkdir failed: " + dummyFile.getParentFile().getAbsolutePath());
        }
        // Fall back to the shared external storage root if the app folder is unusable.
        mDebugInternalDataPath = dummyFile.getParentFile().exists() ? dummyFile.getParent() : Environment.getExternalStorageDirectory().getAbsolutePath();
        dummyFile.delete();

        // Create the parallel delivery callback only when the subclass enables parallel mode.
        mParallelDeliveryCallback = isParallelDeliveryEnabled() ? MyUltAlprSdkParallelDeliveryCallback.newInstance() : null;

        // Initialize the engine with the subclass-provided JSON configuration.
        final JSONObject config = getJsonConfig();
        final AlprResult alprResult = AlprUtils.assertIsOk(AlprSdk.init(
                getAssets(),
                config.toString(),
                mParallelDeliveryCallback
        ));
        Log.i(TAG, "ALPR engine initialized: " + AlprUtils.resultToString(alprResult));
    }

    @Override
    public void onDestroy() {
        Log.i(TAG, "onDestroy " + this);
        // Release the engine; assertIsOk aborts if deinitialization failed.
        final AlprResult result = AlprUtils.assertIsOk(AlprSdk.deInit());
        Log.i(TAG, "ALPR engine deInitialized: " + AlprUtils.resultToString(result));

        super.onDestroy();
    }

    // Synchronized with onPause() so the paused flag flips atomically w.r.t. setImage().
    @Override
    public synchronized void onResume() {
        super.onResume();

        mIsPaused = false;
    }

    @Override
    public synchronized void onPause() {
        mIsPaused = true;

        super.onPause();
    }

    /**
     * Sink callback: attaches the result overlay and pushes the detection ROI to it.
     * The ROI list is presumably [left, right, top, bottom] in relative coordinates
     * (indices 0,2,1,3 map to RectF(left, top, right, bottom)) — TODO confirm
     * against the SDK documentation.
     */
    @Override
    public void setAlprPlateView(@NonNull final AlprPlateView view) {
        mAlprPlateView = view;
        if (mParallelDeliveryCallback != null) {
            mParallelDeliveryCallback.setAlprPlateView(view);
        }
        final List roi = getDetectROI();
        assert (roi.size() == 4);
        mAlprPlateView.setDetectROI(
                new RectF(
                        roi.get(0).floatValue(),
                        roi.get(2).floatValue(),
                        roi.get(1).floatValue(),
                        roi.get(3).floatValue()
                )
        );
    }

    /**
     * Sink callback: runs ALPR inference on one camera frame.
     * Always closes {@code image}, including on the early-drop path.
     *
     * @param image           YUV_420_888 frame from the ImageReader
     * @param jpegOrientation clockwise rotation in degrees (0/90/180/270) needed to upright the frame
     */
    @Override
    public void setImage(@NonNull final Image image, final int jpegOrientation) {

        // Drop the frame if the previous inference hasn't returned yet or we're paused.
        if (mIsProcessing || mIsPaused) {
            Log.d(TAG, "Inference function not returned yet: Processing or paused");
            image.close();
            return;
        }

        mIsProcessing = true;

        final Size imageSize = new Size(image.getWidth(), image.getHeight());

        // Orientation
        // Convert from degrees to the corresponding EXIF orientation constant.
        int exifOrientation;
        switch (jpegOrientation) {
            case 90: exifOrientation = ExifInterface.ORIENTATION_ROTATE_90; break;
            case 180: exifOrientation = ExifInterface.ORIENTATION_ROTATE_180; break;
            case 270: exifOrientation = ExifInterface.ORIENTATION_ROTATE_270; break;
            case 0: default: exifOrientation = ExifInterface.ORIENTATION_NORMAL; break;
        }

        // Update image metadata for the async callback. For 90/270 rotations the
        // upright size has width and height swapped.
        if (mParallelDeliveryCallback != null) {
            mParallelDeliveryCallback.setImageSize((jpegOrientation % 180) == 0 ? imageSize : new Size(imageSize.getHeight(), imageSize.getWidth()), jpegOrientation);
        }

        // The actual ALPR inference is done here.
        // Do not worry about the time taken to perform the inference, the caller
        // (most likely the camera fragment) set the current image using a background thread.
        final Image.Plane[] planes = image.getPlanes();
        final long startTimeInMillis = SystemClock.uptimeMillis();
        final AlprResult result = /*AlprUtils.assertIsOk*/(AlprSdk.process(
                SDK_IMAGE_TYPE.ULTALPR_SDK_IMAGE_TYPE_YUV420P,
                planes[0].getBuffer(),
                planes[1].getBuffer(),
                planes[2].getBuffer(),
                imageSize.getWidth(),
                imageSize.getHeight(),
                planes[0].getRowStride(),
                planes[1].getRowStride(),
                planes[2].getRowStride(),
                planes[1].getPixelStride(),
                exifOrientation
        ));
        final long durationInMillis = SystemClock.uptimeMillis() - startTimeInMillis; // Total time: Inference + image processing (chroma conversion, rotation...)

        if (mParallelDeliveryCallback != null) {
            mParallelDeliveryCallback.setDurationTime(durationInMillis);
        }

        // Release the image and signal the inference process is finished
        image.close();

        mIsProcessing = false;

        if (result.isOK()) {
            Log.d(TAG, AlprUtils.resultToString(result));
        } else {
            Log.e(TAG, AlprUtils.resultToString(result));
        }

        // Display the result if sequential mode. Otherwise, let the parallel callback
        // display the result when provided.
        // Starting version 3.2 the callback will be called even if the result is empty
        if (mAlprPlateView != null && (mParallelDeliveryCallback == null || (result.numPlates() == 0 && result.numCars() == 0))) { // means sequential call or no plate/car to expect from the parallel delivery callback
            mAlprPlateView.setResult(result, (jpegOrientation % 180) == 0 ? imageSize : new Size(imageSize.getHeight(), imageSize.getWidth()), durationInMillis, jpegOrientation);
        }
    }

    /**
     * Gets the base folder defining a path where the application can write private
     * data.
     * @return The path
     */
    protected String getDebugInternalDataPath() {
        return mDebugInternalDataPath;
    }

    /**
     * Gets the server url used to activate the license. Please contact us to get the correct URL.
     * e.g. https://localhost:3600
     * @return The URL
     */
    protected String getActivationServerUrl() {
        return "";
    }

    /** @return the master/slave activation key; empty by default. */
    protected String getActivationMasterOrSlaveKey() {
        return "";
    }

    /**
     * Returns the layout Id for the activity
     * @return The layout resource id
     */
    protected abstract int getLayoutResId();

    /**
     * Returns JSON config to be used to initialize the ALPR/ANPR SDK.
     * @return The JSON config
     */
    protected abstract JSONObject getJsonConfig();

    /**
     * @return true to enable parallel (deferred callback) delivery, false for sequential mode.
     */
    protected abstract boolean isParallelDeliveryEnabled();

    /** @return the 4-element detection ROI (relative coordinates); all zeros = whole frame. */
    protected abstract List getDetectROI();
}
--------------------------------------------------------------------------------
/demo/src/main/java/org/buyun/alpr/common/AlprBackgroundTask.java:
--------------------------------------------------------------------------------
1 | package org.buyun.alpr.common;
2 |
3 |
4 | import android.os.Handler;
5 | import android.os.HandlerThread;
6 |
7 | public class AlprBackgroundTask {
8 |
9 | private Handler mHandler;
10 | private HandlerThread mThread;
11 |
12 | public synchronized final Handler getHandler() {
13 | return mHandler;
14 | }
15 | public synchronized final boolean isRunning() { return mHandler != null; }
16 |
17 | public synchronized void start(final String threadName) {
18 | if (mThread != null) {
19 | return;
20 | }
21 | mThread = new HandlerThread(threadName);
22 | mThread.start();
23 | mHandler = new Handler(mThread.getLooper());
24 | }
25 |
26 | public synchronized void stop() {
27 | if (mThread == null) {
28 | return;
29 | }
30 | mThread.quitSafely();
31 | try {
32 | mThread.join();
33 | mThread = null;
34 | mHandler = null;
35 | } catch (InterruptedException e) {
36 | e.printStackTrace();
37 | }
38 | }
39 |
40 | public synchronized void post(final Runnable r) {
41 | if (mHandler != null) {
42 | mHandler.post(r);
43 | }
44 | }
45 | }
--------------------------------------------------------------------------------
/demo/src/main/java/org/buyun/alpr/common/AlprCameraFragment.java:
--------------------------------------------------------------------------------
1 | package org.buyun.alpr.common;
2 |
3 | import android.Manifest;
4 | import android.app.Activity;
5 | import android.app.AlertDialog;
6 | import android.app.Dialog;
7 | import android.content.Context;
8 | import android.content.DialogInterface;
9 | import android.content.pm.PackageManager;
10 | import android.content.res.Configuration;
11 | import android.graphics.Color;
12 | import android.graphics.ImageFormat;
13 | import android.graphics.SurfaceTexture;
14 | import android.hardware.camera2.CameraAccessException;
15 | import android.hardware.camera2.CameraCaptureSession;
16 | import android.hardware.camera2.CameraCharacteristics;
17 | import android.hardware.camera2.CameraDevice;
18 | import android.hardware.camera2.CameraManager;
19 | import android.hardware.camera2.CaptureRequest;
20 | import android.hardware.camera2.params.StreamConfigurationMap;
21 | import android.media.Image;
22 | import android.media.ImageReader;
23 | import android.os.Bundle;
24 |
25 | import androidx.annotation.NonNull;
26 | import androidx.core.app.ActivityCompat;
27 | import androidx.fragment.app.DialogFragment;
28 | import androidx.fragment.app.Fragment;
29 | import androidx.core.content.ContextCompat;
30 |
31 | import android.text.TextUtils;
32 | import android.util.Log;
33 | import android.util.Size;
34 | import android.util.SparseIntArray;
35 | import android.view.LayoutInflater;
36 | import android.view.Surface;
37 | import android.view.View;
38 | import android.view.ViewGroup;
39 | import android.widget.Toast;
40 |
41 | import java.util.ArrayList;
42 | import java.util.Arrays;
43 | import java.util.Collections;
44 | import java.util.Comparator;
45 | import java.util.List;
46 | import java.util.concurrent.Semaphore;
47 | import java.util.concurrent.TimeUnit;
48 |
49 | import org.buyun.alpr.R; // FIXME(dmi): must remove
50 |
51 |
52 | public class AlprCameraFragment extends Fragment
53 | implements ActivityCompat.OnRequestPermissionsResultCallback {
54 |
55 | static final int REQUEST_CAMERA_PERMISSION = 1;
56 |
57 | static final String FRAGMENT_DIALOG = "dialog";
58 |
59 | static final String TAG = AlprCameraFragment.class.getCanonicalName();
60 |
61 | static final int VIDEO_FORMAT = ImageFormat.YUV_420_888; // All Android devices are required to support this format
62 |
    // Maps the display rotation (Surface.ROTATION_*) to the JPEG orientation,
    // in degrees, used to upright captured frames.
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 90);
        ORIENTATIONS.append(Surface.ROTATION_90, 0);
        ORIENTATIONS.append(Surface.ROTATION_180, 270);
        ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }
70 |
71 | /**
72 | * Using #2: processing and pending.
73 | */
74 | static final int MAX_IMAGES = 2;
75 |
76 | /**
77 | * The camera preview size will be chosen to be the smallest frame by pixel size capable of
78 | * containing a DESIRED_SIZE x DESIRED_SIZE square.
79 | */
80 | static final int MINIMUM_PREVIEW_SIZE = 320;
81 |
82 | private Size mPreferredSize = null;
83 |
84 | /**
85 | * ID of the current {@link CameraDevice}.
86 | */
87 | private String mCameraId;
88 |
89 | private int mJpegOrientation = 1;
90 |
91 | /**
92 | * An {@link AlprGLSurfaceView} for camera preview.
93 | */
94 | private AlprGLSurfaceView mGLSurfaceView;
95 |
96 | private AlprPlateView mPlateView;
97 |
98 | /**
99 | * A {@link CameraCaptureSession } for camera preview.
100 | */
101 | private CameraCaptureSession mCaptureSession;
102 |
103 | /**
104 | * A reference to the opened {@link CameraDevice}.
105 | */
106 | private CameraDevice mCameraDevice;
107 |
108 | /**
109 | * The {@link android.util.Size} of camera preview.
110 | */
111 | private Size mPreviewSize;
112 |
113 | private AlprCameraFragmentSink mSink;
114 |
115 | private final AlprBackgroundTask mBackgroundTaskCamera = new AlprBackgroundTask();
116 | private final AlprBackgroundTask mBackgroundTaskDrawing = new AlprBackgroundTask();
117 | private final AlprBackgroundTask mBackgroundTaskInference = new AlprBackgroundTask();
118 |
    /**
     * {@link CameraDevice.StateCallback} invoked as the {@link CameraDevice} changes state.
     * Every callback releases the open/close semaphore first, so a concurrent
     * closeCamera() waiting on mCameraOpenCloseLock can never deadlock.
     */
    private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {

        @Override
        public void onOpened(@NonNull CameraDevice cameraDevice) {
            // This method is called when the camera is opened. We start camera preview here.
            mCameraOpenCloseLock.release();
            mCameraDevice = cameraDevice;
            createCameraCaptureSession();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice cameraDevice) {
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
        }

        @Override
        public void onError(@NonNull CameraDevice cameraDevice, int error) {
            // Unrecoverable camera error: release everything and finish the activity.
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
            Activity activity = getActivity();
            if (null != activity) {
                activity.finish();
            }
        }
    };
150 |
151 | private boolean mClosingCamera = false;
152 |
153 | /**
154 | * An {@link ImageReader} that handles still image capture.
155 | */
156 | private ImageReader mImageReaderInference;
157 |
158 | private ImageReader mImageReaderDrawing;
159 |
160 |
    /**
     * Callback object for both {@link ImageReader}s: fired when a new camera frame
     * is available. Frames coming from mImageReaderDrawing are routed to the GL
     * preview surface; frames from the inference reader are routed to the sink.
     *
     * NOTE(review): the posts to the background tasks are commented out, so the
     * consumers run directly on the ImageReader's handler thread — confirm that
     * is intentional.
     */
    private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
            = new ImageReader.OnImageAvailableListener() {

        @Override
        public void onImageAvailable(ImageReader reader) {
            if (mClosingCamera) {
                Log.d(TAG, "Closing camera");
                return;
            }
            try {
                // acquireLatestImage() drops stale frames and may legitimately return null.
                final Image image = reader.acquireLatestImage();
                if (image == null) {
                    return;
                }

                // Route the frame depending on which reader produced it.
                final boolean isForDrawing = (reader.getSurface() == mImageReaderDrawing.getSurface());
                if (isForDrawing) {
                    /*mBackgroundTaskDrawing.post(() ->*/ mGLSurfaceView.setImage(image, mJpegOrientation)/*)*/;
                }
                else {
                    /*mBackgroundTaskInference.post(() ->*/ mSink.setImage(image, mJpegOrientation)/*)*/;
                }

            } catch (final Exception e) {
                e.printStackTrace();
                Log.e(TAG, e.toString());
            }
        }
    };
194 |
195 | private CaptureRequest.Builder mCaptureRequestBuilder;
196 |
197 | /**
198 | * {@link CaptureRequest} generated by {@link #mCaptureRequestBuilder}
199 | */
200 | private CaptureRequest mCaptureRequest;
201 |
202 | /**
203 | * A {@link Semaphore} to prevent the app from exiting before closing the camera.
204 | */
205 | private Semaphore mCameraOpenCloseLock = new Semaphore(1);
206 |
207 | /**
208 | * Orientation of the camera sensor
209 | */
210 | private int mSensorOrientation;
211 |
    /**
     * Default constructor automatically called when the fragment is recreated
     * by the framework (e.g. after a configuration change). Required:
     * https://stackoverflow.com/questions/51831053/could-not-find-fragment-constructor
     * NOTE(review): recreation through this path leaves mPreferredSize and mSink
     * null — verify the fragment tolerates that.
     */
    public AlprCameraFragment() {
        // nothing special here
    }

    // Internal constructor used by newInstance(): wires the preferred preview
    // size and the sink that will receive camera frames.
    private AlprCameraFragment(final Size preferredSize, final AlprCameraFragmentSink sink) {
        mPreferredSize = preferredSize;
        mSink = sink;
    }
224 |
    /**
     * Public factory to be called to create the fragment.
     * @param preferredSize preferred camera preview size (closest supported size is used)
     * @param sink receiver of camera frames and the plate overlay (typically the host activity)
     * @return a new fragment instance
     */
    public static AlprCameraFragment newInstance(final Size preferredSize, final AlprCameraFragmentSink sink) {
        return new AlprCameraFragment(preferredSize, sink);
    }
233 |
    /** Inflates the camera fragment layout. */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_camera, container, false);
    }
239 |
240 | @Override
241 | public void onViewCreated(final View view, Bundle savedInstanceState) {
242 | mGLSurfaceView = (AlprGLSurfaceView) view.findViewById(R.id.glSurfaceView);
243 | mPlateView = (AlprPlateView) view.findViewById(R.id.plateView);
244 | //mPlateView.setBackgroundColor(Color.RED);
245 | }
246 |
    // Redundant override kept for symmetry with the other lifecycle callbacks;
    // simply delegates to the superclass.
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
    }
251 |
    /**
     * Lifecycle: (re)starts the worker threads, forwards the plate overlay to the
     * sink, then opens the camera sized to the current GL surface. Synchronized
     * against onPause() so camera open/close cannot interleave.
     */
    @Override
    public synchronized void onResume() {
        super.onResume();
        startBackgroundThreads();

        // Forward the plateView to the sink
        if (mSink != null && mPlateView != null) {
            mSink.setAlprPlateView(mPlateView);
        }

        // Open the camera
        openCamera(mGLSurfaceView.getWidth(), mGLSurfaceView.getHeight());
    }
265 |
    /**
     * Lifecycle: closes the camera before stopping the worker threads (reverse
     * order of onResume()). Synchronized against onResume().
     */
    @Override
    public synchronized void onPause() {
        closeCamera();
        stopBackgroundThreads();
        super.onPause();
    }
272 |
273 | private void requestCameraPermission() {
274 | if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
275 | new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG);
276 | } else {
277 | requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
278 | }
279 | }
280 |
281 | @Override
282 | public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
283 | @NonNull int[] grantResults) {
284 | if (requestCode == REQUEST_CAMERA_PERMISSION) {
285 | if (grantResults.length != 1 || grantResults[0] != PackageManager.PERMISSION_GRANTED) {
286 | ErrorDialog.newInstance(getString(R.string.request_permission))
287 | .show(getChildFragmentManager(), FRAGMENT_DIALOG);
288 | }
289 | } else {
290 | super.onRequestPermissionsResult(requestCode, permissions, grantResults);
291 | }
292 | }
293 |
294 | /**
295 | * Shows a {@link Toast} on the UI thread.
296 | *
297 | * @param text The message to show
298 | */
299 | private void showToast(final String text) {
300 | final Activity activity = getActivity();
301 | if (activity != null) {
302 | activity.runOnUiThread(new Runnable() {
303 | @Override
304 | public void run() {
305 | Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
306 | }
307 | });
308 | }
309 | }
310 |
311 | /**
312 | * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
313 | * width and height are at least as large as the minimum of both, or an exact match if possible.
314 | *
315 | * @param choices The list of sizes that the camera supports for the intended output class
316 | * @param width The minimum desired width
317 | * @param height The minimum desired height
318 | * @return The optimal {@code Size}, or an arbitrary one if none were big enough
319 | */
320 | private static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
321 | final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
322 | final Size desiredSize = new Size(width, height);
323 |
324 | // Collect the supported resolutions that are at least as big as the preview Surface
325 | boolean exactSizeFound = false;
326 | final List bigEnough = new ArrayList();
327 | final List tooSmall = new ArrayList();
328 | for (final Size option : choices) {
329 | if (option.equals(desiredSize)) {
330 | // Set the size but don't return yet so that remaining sizes will still be logged.
331 | exactSizeFound = true;
332 | }
333 |
334 | if (option.getHeight() >= minSize && option.getWidth() >= minSize) {
335 | bigEnough.add(option);
336 | } else {
337 | tooSmall.add(option);
338 | }
339 | }
340 |
341 | Log.i(TAG, "Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize);
342 | Log.i(TAG, "Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]");
343 | Log.i(TAG, "Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]");
344 |
345 | if (exactSizeFound) {
346 | Log.i(TAG, "Exact size match found.");
347 | return desiredSize;
348 | }
349 |
350 | // Pick the smallest of those, assuming we found any
351 | if (bigEnough.size() > 0) {
352 | final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
353 | Log.i(TAG, "Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
354 | return chosenSize;
355 | } else {
356 | Log.e(TAG, "Couldn't find any suitable preview size");
357 | return choices[0];
358 | }
359 | }
360 |
361 | /**
362 | * Sets up member variables related to camera.
363 | *
364 | */
365 | @SuppressWarnings("SuspiciousNameCombination")
366 | private void setUpCameraOutputs() {
367 | Activity activity = getActivity();
368 | CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
369 | try {
370 | for (String cameraId : manager.getCameraIdList()) {
371 | CameraCharacteristics characteristics
372 | = manager.getCameraCharacteristics(cameraId);
373 |
374 | // We don't use a front facing camera in this sample.
375 | Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
376 | if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
377 | continue;
378 | }
379 |
380 | StreamConfigurationMap map = characteristics.get(
381 | CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
382 | if (map == null) {
383 | continue;
384 | }
385 |
386 | mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
387 |
388 | // JPEG orientation
389 | // https://developer.android.com/reference/android/hardware/camera2/CaptureRequest#JPEG_ORIENTATION
390 | int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
391 | mJpegOrientation = (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360;
392 |
393 | // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
394 | // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
395 | // garbage capture data.
396 | mPreviewSize =
397 | chooseOptimalSize(
398 | map.getOutputSizes(SurfaceTexture.class),
399 | mPreferredSize.getWidth(),
400 | mPreferredSize.getHeight());
401 |
402 | // We fit the aspect ratio of TextureView to the size of preview we picked.
403 | final int orientation = getResources().getConfiguration().orientation;
404 | if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
405 | mGLSurfaceView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
406 | mPlateView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
407 | } else {
408 | mGLSurfaceView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
409 | mPlateView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
410 | }
411 |
412 | mCameraId = cameraId;
413 | return;
414 | }
415 | } catch (CameraAccessException e) {
416 | e.printStackTrace();
417 | } catch (NullPointerException e) {
418 | // Currently an NPE is thrown when the Camera2API is used but not supported on the
419 | // device this code runs.
420 | ErrorDialog.newInstance(getString(R.string.camera_error))
421 | .show(getChildFragmentManager(), FRAGMENT_DIALOG);
422 | }
423 | }
424 |
425 | /**
426 | * Opens the camera specified by {@link AlprCameraFragment#mCameraId}.
427 | */
428 | private void openCamera(int width, int height) {
429 | if (ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA)
430 | != PackageManager.PERMISSION_GRANTED) {
431 | requestCameraPermission();
432 | return;
433 | }
434 | setUpCameraOutputs();
435 | Activity activity = getActivity();
436 | CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
437 | try {
438 | if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
439 | throw new RuntimeException("Time out waiting to lock camera opening.");
440 | }
441 | manager.openCamera(mCameraId, mStateCallback, mBackgroundTaskCamera.getHandler());
442 | } catch (CameraAccessException e) {
443 | e.printStackTrace();
444 | } catch (InterruptedException e) {
445 | throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
446 | }
447 | }
448 |
449 | /**
450 | * Closes the current {@link CameraDevice}.
451 | */
452 | private void closeCamera() {
453 | try {
454 | mClosingCamera = true;
455 | mCameraOpenCloseLock.acquire();
456 |
457 | if (null != mCaptureSession) {
458 | mCaptureSession.close();
459 | mCaptureSession = null;
460 | }
461 | if (null != mCameraDevice) {
462 | mCameraDevice.close();
463 | mCameraDevice = null;
464 | }
465 | if (null != mImageReaderInference) {
466 | mImageReaderInference.close();
467 | mImageReaderInference = null;
468 | }
469 | if (null != mImageReaderDrawing) {
470 | mImageReaderDrawing.close();
471 | mImageReaderDrawing = null;
472 | }
473 | } catch (InterruptedException e) {
474 | throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
475 | } finally {
476 | mCameraOpenCloseLock.release();
477 | mClosingCamera = false;
478 | }
479 | }
480 |
481 | /**
482 | * Starts a background threads
483 | */
484 | private void startBackgroundThreads() {
485 | mBackgroundTaskInference.start("InferenceBackgroundThread");
486 | mBackgroundTaskDrawing.start("DrawingBackgroundThread");
487 | mBackgroundTaskCamera.start("CameraBackgroundThread");
488 | }
489 |
490 | /**
491 | * Stops the background threads
492 | */
493 | private void stopBackgroundThreads() {
494 | mBackgroundTaskInference.stop();
495 | mBackgroundTaskDrawing.stop();
496 | mBackgroundTaskCamera.stop();
497 | }
498 |
499 | /**
500 | * Creates a new {@link CameraCaptureSession} for camera preview.
501 | */
502 | private void createCameraCaptureSession() {
503 | try {
504 | // Create Image readers
505 | mImageReaderInference = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
506 | VIDEO_FORMAT, MAX_IMAGES);
507 | mImageReaderInference.setOnImageAvailableListener(
508 | mOnImageAvailableListener, mBackgroundTaskCamera.getHandler());
509 |
510 | mImageReaderDrawing = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
511 | VIDEO_FORMAT, MAX_IMAGES);
512 | mImageReaderDrawing.setOnImageAvailableListener(
513 | mOnImageAvailableListener, mBackgroundTaskCamera.getHandler());
514 |
515 | // We set up a CaptureRequest.Builder with the output Surface to the image reader
516 | mCaptureRequestBuilder
517 | = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
518 | //mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(1, 25));
519 | //mCaptureRequestBuilder.set(CaptureRequest.CONTROL_MODE,
520 | // CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
521 | //mCaptureRequestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE,
522 | // CaptureRequest.CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO);
523 | mCaptureRequestBuilder.addTarget(mImageReaderInference.getSurface());
524 | mCaptureRequestBuilder.addTarget(mImageReaderDrawing.getSurface());
525 |
526 | // Here, we create a CameraCaptureSession
527 | mCameraDevice.createCaptureSession(Arrays.asList(mImageReaderInference.getSurface(), mImageReaderDrawing.getSurface()),
528 | new CameraCaptureSession.StateCallback() {
529 |
530 | @Override
531 | public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
532 | // The camera is already closed
533 | if (null == mCameraDevice) {
534 | return;
535 | }
536 |
537 | // When the session is ready, we start displaying the preview.
538 | mCaptureSession = cameraCaptureSession;
539 | try {
540 | // Auto focus should be continuous
541 | mCaptureRequestBuilder.set(
542 | CaptureRequest.CONTROL_AF_MODE,
543 | CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
544 | // Flash is automatically enabled when necessary.
545 | mCaptureRequestBuilder.set(
546 | CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
547 |
548 | // Finally, we start grabbing the frames
549 | mCaptureRequest = mCaptureRequestBuilder.build();
550 | mCaptureSession.setRepeatingRequest(mCaptureRequest,
551 | null, mBackgroundTaskCamera.getHandler());
552 |
553 | } catch (CameraAccessException e) {
554 | e.printStackTrace();
555 | }
556 | }
557 |
558 | @Override
559 | public void onConfigureFailed(
560 | @NonNull CameraCaptureSession cameraCaptureSession) {
561 | showToast("Failed");
562 | }
563 | }, mBackgroundTaskCamera.getHandler()
564 | );
565 | } catch (CameraAccessException e) {
566 | e.printStackTrace();
567 | }
568 | }
569 |
570 | /**
571 | *
572 | */
573 | public static interface AlprCameraFragmentSink {
574 |
575 | /**
576 | *
577 | * @param view
578 | */
579 | public void setAlprPlateView(@NonNull final AlprPlateView view);
580 |
581 | /**
582 | *
583 | * @param image
584 | * @param jpegOrientation
585 | */
586 | public void setImage(@NonNull final Image image, final int jpegOrientation);
587 | }
588 |
589 | /**
590 | * Compares two {@code Size}s based on their areas.
591 | */
592 | static class CompareSizesByArea implements Comparator {
593 |
594 | @Override
595 | public int compare(Size lhs, Size rhs) {
596 | // We cast here to ensure the multiplications won't overflow
597 | return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
598 | (long) rhs.getWidth() * rhs.getHeight());
599 | }
600 |
601 | }
602 |
603 | /**
604 | * Shows an error message dialog.
605 | */
606 | public static class ErrorDialog extends DialogFragment {
607 |
608 | private static final String ARG_MESSAGE = "message";
609 |
610 | public static ErrorDialog newInstance(String message) {
611 | ErrorDialog dialog = new ErrorDialog();
612 | Bundle args = new Bundle();
613 | args.putString(ARG_MESSAGE, message);
614 | dialog.setArguments(args);
615 | return dialog;
616 | }
617 |
618 | @NonNull
619 | @Override
620 | public Dialog onCreateDialog(Bundle savedInstanceState) {
621 | final Activity activity = getActivity();
622 | return new AlertDialog.Builder(activity)
623 | .setMessage(getArguments().getString(ARG_MESSAGE))
624 | .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
625 | @Override
626 | public void onClick(DialogInterface dialogInterface, int i) {
627 | activity.finish();
628 | }
629 | })
630 | .create();
631 | }
632 |
633 | }
634 |
635 | /**
636 | * Shows OK/Cancel confirmation dialog about camera permission.
637 | */
638 | public static class ConfirmationDialog extends DialogFragment {
639 |
640 | @NonNull
641 | @Override
642 | public Dialog onCreateDialog(Bundle savedInstanceState) {
643 | final Fragment parent = getParentFragment();
644 | return new AlertDialog.Builder(getActivity())
645 | .setMessage(R.string.request_permission)
646 | .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
647 | @Override
648 | public void onClick(DialogInterface dialog, int which) {
649 | parent.requestPermissions(new String[]{Manifest.permission.CAMERA},
650 | REQUEST_CAMERA_PERMISSION);
651 | }
652 | })
653 | .setNegativeButton(android.R.string.cancel,
654 | new DialogInterface.OnClickListener() {
655 | @Override
656 | public void onClick(DialogInterface dialog, int which) {
657 | Activity activity = parent.getActivity();
658 | if (activity != null) {
659 | activity.finish();
660 | }
661 | }
662 | })
663 | .create();
664 | }
665 | }
666 |
667 | }
--------------------------------------------------------------------------------
/demo/src/main/java/org/buyun/alpr/common/AlprGLSurfaceView.java:
--------------------------------------------------------------------------------
1 | package org.buyun.alpr.common;
2 |
3 | import java.nio.ByteBuffer;
4 | import java.nio.ByteOrder;
5 | import java.nio.FloatBuffer;
6 | import java.nio.ShortBuffer;
7 |
8 | import javax.microedition.khronos.egl.EGLConfig;
9 | import javax.microedition.khronos.opengles.GL10;
10 |
11 | import android.graphics.PixelFormat;
12 | import android.media.Image;
13 | import android.opengl.GLES20;
14 | import android.opengl.GLSurfaceView;
15 | import android.util.Log;
16 | import android.view.SurfaceHolder;
17 |
18 |
19 | /**
20 | * GL surface view
21 | */
22 | public class AlprGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer {
23 | private static final String TAG = AlprGLSurfaceView.class.getCanonicalName();
24 |
25 | private static final int FLOAT_SIZE_BYTES = 4;
26 | private static final int SHORT_SIZE_BYTES = 2;
27 | private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
28 | private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
29 | private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
30 |
31 | private static final float[] TRIANGLE_VERTICES_DATA_0 = {
32 | 1, -1, 0, 1, 1, // 0: bottom/right
33 | 1, 1, 0, 1, 0, // 1: top/right
34 | -1, 1, 0, 0, 0, // 2: top/left
35 | -1, -1, 0, 0, 1 // 3: bottom/left
36 | };
37 | private static final short[] INDICES_DATA_0 = {
38 | 0, 1, 2, // triangle #1: bottom/right, top/right, top/left
39 | 2, 3, 0 // triangle #2: top/left, bottom/left, bottom/right
40 | };
41 |
42 | private static final float[] TRIANGLE_VERTICES_DATA_90 = {
43 | 1, -1, 0, 1, 0,
44 | 1, 1, 0, 0, 0,
45 | -1, 1, 0, 0, 1,
46 | -1, -1, 0, 1, 1,
47 | };
48 | private static final short[] INDICES_DATA_90 = {
49 | 3, 0, 1,
50 | 1, 2, 3
51 | };
52 |
53 | private static final float[] TRIANGLE_VERTICES_DATA_180 = {
54 | 1, -1, 0, 0, 0,
55 | 1, 1, 0, 0, 1,
56 | -1, 1, 0, 1, 1,
57 | -1, -1, 0, 1, 0,
58 | };
59 | private static final short[] INDICES_DATA_180 = {
60 | 2, 3, 0,
61 | 0, 1, 2
62 | };
63 |
64 | private static final float[] TRIANGLE_VERTICES_DATA_270 = {
65 | 1, -1, 0, 0, 1,
66 | 1, 1, 0, 1, 1,
67 | -1, 1, 0, 1, 0,
68 | -1, -1, 0, 0, 0,
69 | };
70 | private static final short[] INDICES_DATA_270 = {
71 | 1, 2, 3,
72 | 3, 0, 1
73 | };
74 |
75 | private FloatBuffer mTriangleVertices;
76 | private ShortBuffer mIndices;
77 | private int mJpegOrientation = 0;
78 | private boolean mJpegOrientationChanged = false;
79 |
80 | private static final String VERTEX_SHADER_SOURCE = "precision mediump float;" +
81 | "attribute vec4 aPosition;\n" +
82 | "attribute vec2 aTextureCoord;\n" +
83 | "varying vec2 vTextureCoord;\n" +
84 | "void main() {\n" +
85 | " gl_Position = aPosition;\n" +
86 | " vTextureCoord = aTextureCoord;\n" +
87 | "}\n";
88 |
89 | private static final String FRAGMENT_SHADER_SOURCE = "precision mediump float;" +
90 | "varying vec2 vTextureCoord;" +
91 | "" +
92 | "uniform sampler2D SamplerY; " +
93 | "uniform sampler2D SamplerU;" +
94 | "uniform sampler2D SamplerV;" +
95 | "" +
96 | "const mat3 yuv2rgb = mat3(1.164, 0, 1.596, 1.164, -0.391, -0.813, 1.164, 2.018, 0);" +
97 | "" +
98 | "void main() { " +
99 | " vec3 yuv = vec3(1.1643 * (texture2D(SamplerY, vTextureCoord).r - 0.06274)," +
100 | " texture2D(SamplerU, vTextureCoord).r - 0.5019," +
101 | " texture2D(SamplerV, vTextureCoord).r - 0.5019);" +
102 | " vec3 rgb = yuv * yuv2rgb; " +
103 | " gl_FragColor = vec4(rgb, 1.0);" +
104 | "} ";
105 |
106 | private int mProgram;
107 | private int maPositionHandle;
108 | private int maTextureHandle;
109 | private int muSamplerYHandle;
110 | private int muSamplerUHandle;
111 | private int muSamplerVHandle;
112 | private int[] mTextureY = new int[1];
113 | private int[] mTextureU = new int[1];
114 | private int[] mTextureV = new int[1];
115 |
116 | private boolean mSurfaceCreated;
117 |
118 | private Image mImage = null;
119 | private int mRatioWidth = 0;
120 | private int mRatioHeight = 0;
121 |
122 | public AlprGLSurfaceView(android.content.Context context) {
123 | super(context);
124 | initGL();
125 | }
126 |
127 | public AlprGLSurfaceView(android.content.Context context, android.util.AttributeSet attrs) {
128 | super(context, attrs);
129 | initGL();
130 | }
131 |
132 | private void initGL() {
133 | setEGLContextClientVersion(2);
134 | setEGLConfigChooser(8, 8, 8, 8, 16, 0);
135 | setRenderer(this);
136 | getHolder().setFormat(PixelFormat.TRANSLUCENT);
137 | setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
138 |
139 | mTriangleVertices = ByteBuffer.allocateDirect(TRIANGLE_VERTICES_DATA_0.length
140 | * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
141 | mTriangleVertices.put(TRIANGLE_VERTICES_DATA_0).position(0);
142 |
143 | mIndices = ByteBuffer.allocateDirect(INDICES_DATA_0.length
144 | * SHORT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asShortBuffer();
145 | mIndices.put(INDICES_DATA_0).position(0);
146 | }
147 |
148 | /**
149 | * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
150 | * calculated from the parameters. Note that the actual sizes of parameters don't matter, that
151 | * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
152 | *
153 | * @param width Relative horizontal size
154 | * @param height Relative vertical size
155 | */
156 | public void setAspectRatio(int width, int height) {
157 | if (width < 0 || height < 0) {
158 | throw new IllegalArgumentException("Size cannot be negative.");
159 | }
160 | mRatioWidth = width;
161 | mRatioHeight = height;
162 | requestLayout();
163 | }
164 |
165 | /**
166 | *
167 | * @param
168 | */
169 | public void setImage(final Image image, final int jpegOrientation){
170 | if (!isReady()) {
171 | Log.i(TAG, "Not ready");
172 | image.close();
173 | return;
174 | }
175 | if (mImage != null) {
176 | Log.i(TAG, "Already rendering previous image");
177 | image.close();
178 | return;
179 | }
180 |
181 | // We need to save the image as the rendering is asynchronous
182 | mImage = image;
183 |
184 | if (mJpegOrientation != jpegOrientation) {
185 | Log.i(TAG, "Orientation changed: " + mJpegOrientation + " -> " + jpegOrientation);
186 | mJpegOrientation = jpegOrientation;
187 | mJpegOrientationChanged = true;
188 | }
189 |
190 | // Signal the surface as dirty to force redrawing
191 | requestRender();
192 | }
193 |
194 | public boolean isReady(){
195 | return mSurfaceCreated;
196 | }
197 |
198 | @Override
199 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
200 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
201 | int width = MeasureSpec.getSize(widthMeasureSpec);
202 | int height = MeasureSpec.getSize(heightMeasureSpec);
203 | if (0 == mRatioWidth || 0 == mRatioHeight) {
204 | setMeasuredDimension(width, height);
205 | } else {
206 | if (width < height * mRatioWidth / mRatioHeight) {
207 | setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
208 | } else {
209 | setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
210 | }
211 | }
212 | }
213 |
214 | @Override
215 | public void surfaceCreated(SurfaceHolder holder) {
216 | super.surfaceCreated(holder);
217 |
218 | mSurfaceCreated = true;
219 | }
220 |
221 | @Override
222 | public void surfaceDestroyed(SurfaceHolder holder) {
223 | mSurfaceCreated = false;
224 | if (mImage != null) {
225 | mImage.close();
226 | mImage = null;
227 | }
228 | super.surfaceDestroyed(holder);
229 | }
230 |
231 | @Override
232 | public void onDrawFrame(GL10 glUnused) {
233 | if (mImage == null) {
234 | return;
235 | }
236 |
237 | if (mJpegOrientationChanged) {
238 | updateVertices();
239 | mJpegOrientationChanged = false;
240 | }
241 |
242 | final boolean swapSize = (mJpegOrientation % 180) != 0;
243 | final int imageWidth = mImage.getWidth();
244 | final int imageHeight = mImage.getHeight();
245 |
246 | final AlprUtils.AlprTransformationInfo tInfo = new AlprUtils.AlprTransformationInfo(swapSize ? imageHeight : imageWidth, swapSize ? imageWidth : imageHeight, getWidth(), getHeight());
247 | GLES20.glViewport(tInfo.getXOffset(), tInfo.getYOffset(), tInfo.getWidth(), tInfo.getHeight());
248 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT /*| GLES20.GL_DEPTH_BUFFER_BIT*/);
249 | GLES20.glUseProgram(mProgram);
250 | checkGlError("glUseProgram");
251 |
252 | final Image.Plane[] planes = mImage.getPlanes();
253 |
254 | final ByteBuffer bufferY = planes[0].getBuffer();
255 | final ByteBuffer bufferU = planes[1].getBuffer();
256 | final ByteBuffer bufferV = planes[2].getBuffer();
257 |
258 | final int uvPixelStride = planes[1].getPixelStride();
259 |
260 | final int bufferWidthY = planes[0].getRowStride();
261 | final int bufferHeightY = imageHeight;
262 | final int bufferWidthUV = (planes[1].getRowStride() >> (uvPixelStride - 1));
263 | final int bufferHeightUV = (bufferHeightY >> 1); // Always YUV420_888 -> half-height
264 |
265 | final int uvFormat = uvPixelStride == 1 ? GLES20.GL_LUMINANCE : GLES20.GL_LUMINANCE_ALPHA; // Interleaved UV
266 |
267 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
268 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureY[0]);
269 | GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, bufferWidthY, bufferHeightY, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, bufferY);
270 | GLES20.glUniform1i(muSamplerYHandle, 0);
271 |
272 | GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
273 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureU[0]);
274 | GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, uvFormat, bufferWidthUV, bufferHeightUV, 0, uvFormat, GLES20.GL_UNSIGNED_BYTE, bufferU);
275 | GLES20.glUniform1i(muSamplerUHandle, 1);
276 |
277 | GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
278 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureV[0]);
279 | GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, uvFormat, bufferWidthUV, bufferHeightUV, 0, uvFormat, GLES20.GL_UNSIGNED_BYTE, bufferV);
280 | GLES20.glUniform1i(muSamplerVHandle, 2);
281 |
282 | GLES20.glDrawElements(GLES20.GL_TRIANGLES, INDICES_DATA_0.length, GLES20.GL_UNSIGNED_SHORT, mIndices);
283 |
284 | mImage.close();
285 | mImage = null;
286 | }
287 |
288 | @Override
289 | public void onSurfaceChanged(GL10 glUnused, int width, int height) {
290 | GLES20.glViewport(0, 0, width, height);
291 | // GLU.gluPerspective(glUnused, 45.0f, (float)width/(float)height, 0.1f, 100.0f);
292 | }
293 |
294 | @Override
295 | public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
296 | GLES20.glEnable(GLES20.GL_BLEND);
297 | GLES20.glDisable(GLES20.GL_DEPTH_TEST);
298 | GLES20.glDisable(GLES20.GL_DITHER);
299 | GLES20.glDisable(GLES20.GL_STENCIL_TEST);
300 | GLES20.glDisable(GL10.GL_DITHER);
301 |
302 | String extensions = GLES20.glGetString(GL10.GL_EXTENSIONS);
303 | Log.d(TAG, "OpenGL extensions=" +extensions);
304 |
305 | // Ignore the passed-in GL10 interface, and use the GLES20
306 | // class's static methods instead.
307 | mProgram = createProgram(VERTEX_SHADER_SOURCE, FRAGMENT_SHADER_SOURCE);
308 | if (mProgram == 0) {
309 | return;
310 | }
311 | maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
312 | checkGlError("glGetAttribLocation aPosition");
313 | if (maPositionHandle == -1) {
314 | throw new RuntimeException("Could not get attrib location for aPosition");
315 | }
316 | maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
317 | checkGlError("glGetAttribLocation aTextureCoord");
318 | if (maTextureHandle == -1) {
319 | throw new RuntimeException("Could not get attrib location for aTextureCoord");
320 | }
321 |
322 | muSamplerYHandle = GLES20.glGetUniformLocation(mProgram, "SamplerY");
323 | if (muSamplerYHandle == -1) {
324 | throw new RuntimeException("Could not get uniform location for SamplerY");
325 | }
326 | muSamplerUHandle = GLES20.glGetUniformLocation(mProgram, "SamplerU");
327 | if (muSamplerUHandle == -1) {
328 | throw new RuntimeException("Could not get uniform location for SamplerU");
329 | }
330 | muSamplerVHandle = GLES20.glGetUniformLocation(mProgram, "SamplerV");
331 | if (muSamplerVHandle == -1) {
332 | throw new RuntimeException("Could not get uniform location for SamplerV");
333 | }
334 |
335 | updateVertices();
336 |
337 | GLES20.glGenTextures(1, mTextureY, 0);
338 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureY[0]);
339 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
340 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
341 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
342 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
343 |
344 | GLES20.glGenTextures(1, mTextureU, 0);
345 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureU[0]);
346 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
347 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
348 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
349 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
350 |
351 | GLES20.glGenTextures(1, mTextureV, 0);
352 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureV[0]);
353 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
354 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
355 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
356 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
357 |
358 | GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
359 | }
360 |
361 | private int loadShader(int shaderType, String source) {
362 | int shader = GLES20.glCreateShader(shaderType);
363 | if (shader != 0) {
364 | GLES20.glShaderSource(shader, source);
365 | GLES20.glCompileShader(shader);
366 | int[] compiled = new int[1];
367 | GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
368 | if (compiled[0] == 0) {
369 | Log.e(TAG, "Could not compile shader " + shaderType + ":");
370 | Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
371 | GLES20.glDeleteShader(shader);
372 | shader = 0;
373 | }
374 | }
375 | return shader;
376 | }
377 |
378 | private int createProgram(String vertexSource, String fragmentSource) {
379 | int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
380 | if (vertexShader == 0) {
381 | return 0;
382 | }
383 |
384 | int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
385 | if (pixelShader == 0) {
386 | return 0;
387 | }
388 |
389 | int program = GLES20.glCreateProgram();
390 | if (program != 0) {
391 | GLES20.glAttachShader(program, vertexShader);
392 | checkGlError("glAttachShader");
393 | GLES20.glAttachShader(program, pixelShader);
394 | checkGlError("glAttachShader");
395 | GLES20.glLinkProgram(program);
396 | int[] linkStatus = new int[1];
397 | GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
398 | if (linkStatus[0] != GLES20.GL_TRUE) {
399 | Log.e(TAG, "Could not link program: ");
400 | Log.e(TAG, GLES20.glGetProgramInfoLog(program));
401 | GLES20.glDeleteProgram(program);
402 | program = 0;
403 | }
404 | }
405 | return program;
406 | }
407 |
408 | private void updateVertices() {
409 | mTriangleVertices.rewind();
410 | mIndices.rewind();
411 |
412 | switch (mJpegOrientation) {
413 | case 90:
414 | mTriangleVertices.put(TRIANGLE_VERTICES_DATA_90).position(0);
415 | mIndices.put(INDICES_DATA_90).position(0);
416 | break;
417 | case 180:
418 | mTriangleVertices.put(TRIANGLE_VERTICES_DATA_180).position(0);
419 | mIndices.put(INDICES_DATA_180).position(0);
420 | break;
421 | case 270:
422 | mTriangleVertices.put(TRIANGLE_VERTICES_DATA_270).position(0);
423 | mIndices.put(INDICES_DATA_270).position(0);
424 | break;
425 | case 0:
426 | mTriangleVertices.put(TRIANGLE_VERTICES_DATA_0).position(0);
427 | mIndices.put(INDICES_DATA_0).position(0);
428 | break;
429 | default:
430 | throw new RuntimeException("Invalid orientation:" + mJpegOrientation);
431 | }
432 |
433 | mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
434 | GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
435 | checkGlError("glVertexAttribPointer maPosition");
436 |
437 | mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
438 | GLES20.glEnableVertexAttribArray(maPositionHandle);
439 | checkGlError("glEnableVertexAttribArray maPositionHandle");
440 | GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
441 | checkGlError("glVertexAttribPointer maTextureHandle");
442 | GLES20.glEnableVertexAttribArray(maTextureHandle);
443 | checkGlError("glEnableVertexAttribArray maTextureHandle");
444 | }
445 |
446 | private void checkGlError(String op) {
447 | int error;
448 | while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
449 | Log.e(TAG, op + ": glError " + error);
450 | throw new RuntimeException(op + ": glError " + error);
451 | }
452 | }
453 | }
--------------------------------------------------------------------------------
/demo/src/main/java/org/buyun/alpr/common/AlprImage.java:
--------------------------------------------------------------------------------
1 | package org.buyun.alpr.common;
2 |
3 | import android.media.Image;
4 |
5 | import java.util.concurrent.atomic.AtomicInteger;
6 |
7 | public class AlprImage {
8 |
9 | Image mImage;
10 | final AtomicInteger mRefCount;
11 |
12 | private AlprImage(final Image image) {
13 | assert image != null;
14 | mImage = image;
15 | mRefCount = new AtomicInteger(0);
16 | }
17 |
18 | public static AlprImage newInstance(final Image image) {
19 | return new AlprImage(image);
20 | }
21 |
22 | public final Image getImage() {
23 | assert mRefCount.intValue() >= 0;
24 | return mImage;
25 | }
26 |
27 | public AlprImage takeRef() {
28 | assert mRefCount.intValue() >= 0;
29 | if (mRefCount.intValue() < 0) {
30 | return null;
31 | }
32 | mRefCount.incrementAndGet();
33 | return this;
34 | }
35 |
36 | public void releaseRef() {
37 | assert mRefCount.intValue() >= 0;
38 | final int refCount = mRefCount.decrementAndGet();
39 | if (refCount <= 0) {
40 | mImage.close();
41 | mImage = null;
42 | }
43 | }
44 |
45 | @Override
46 | protected synchronized void finalize() {
47 | if (mImage != null && mRefCount.intValue() < 0) {
48 | mImage.close();
49 | }
50 | }
51 | }
--------------------------------------------------------------------------------
/demo/src/main/java/org/buyun/alpr/common/AlprPlateView.java:
--------------------------------------------------------------------------------
1 | package org.buyun.alpr.common;
2 |
3 | import android.content.Context;
4 | import android.graphics.Canvas;
5 | import android.graphics.Color;
6 | import android.graphics.DashPathEffect;
7 | import android.graphics.Paint;
8 | import android.graphics.Path;
9 | import android.graphics.PointF;
10 | import android.graphics.Rect;
11 | import android.graphics.RectF;
12 | import android.graphics.Typeface;
13 | import android.util.AttributeSet;
14 | import android.util.Log;
15 | import android.util.Size;
16 | import android.util.TypedValue;
17 | import android.view.View;
18 |
19 | import androidx.annotation.NonNull;
20 |
21 | import org.buyun.alpr.sdk.AlprResult;
22 |
23 | import java.util.HashMap;
24 | import java.util.Iterator;
25 | import java.util.List;
26 | import java.util.Map;
27 |
28 | public class AlprPlateView extends View {
29 |
30 | static final String TAG = AlprPlateView.class.getCanonicalName();
31 |
32 | static final float LPCI_MIN_CONFIDENCE = 80.f;
33 | static final float VCR_MIN_CONFIDENCE = 80.f;
34 | static final float VMMR_MIN_CONFIDENCE = 60.f;
35 | static final float VBSR_MIN_CONFIDENCE = 70.f;
36 | static final float VMMR_FUSE_DEFUSE_MIN_CONFIDENCE = 40.f;
37 | static final int VMMR_FUSE_DEFUSE_MIN_OCCURRENCES = 3;
38 |
39 | static final float TEXT_NUMBER_SIZE_DIP = 20;
40 | static final float TEXT_LPCI_SIZE_DIP = 15;
41 | static final float TEXT_CAR_SIZE_DIP = 15;
42 | static final float TEXT_INFERENCE_TIME_SIZE_DIP = 20;
43 | static final int STROKE_WIDTH = 10;
44 |
45 | private final Paint mPaintTextNumber;
46 | private final Paint mPaintTextNumberBackground;
47 | private final Paint mPaintTextLPCI;
48 | private final Paint mPaintTextLPCIBackground;
49 | private final Paint mPaintTextCar;
50 | private final Paint mPaintTextCarBackground;
51 | private final Paint mPaintBorder;
52 | private final Paint mPaintTextDurationTime;
53 | private final Paint mPaintTextDurationTimeBackground;
54 | private final Paint mPaintDetectROI;
55 |
56 | private int mRatioWidth = 0;
57 | private int mRatioHeight = 0;
58 |
59 | private int mOrientation = 0;
60 |
61 | private long mDurationTimeMillis;
62 |
63 | private Size mImageSize;
64 | private List mPlates = null;
65 | private RectF mDetectROI;
66 |
67 | /**
68 | *
69 | * @param context
70 | * @param attrs
71 | */
72 | public AlprPlateView(final Context context, final AttributeSet attrs) {
73 | super(context, attrs);
74 |
75 | // final Typeface fontALPR = Typeface.createFromAsset(context.getAssets(), "GlNummernschildEng-XgWd.ttf");
76 |
77 | mPaintTextNumber = new Paint();
78 | mPaintTextNumber.setTextSize(TypedValue.applyDimension(
79 | TypedValue.COMPLEX_UNIT_DIP, TEXT_NUMBER_SIZE_DIP, getResources().getDisplayMetrics()));
80 | mPaintTextNumber.setColor(Color.BLACK);
81 | mPaintTextNumber.setStyle(Paint.Style.FILL_AND_STROKE);
82 | // mPaintTextNumber.setTypeface(Typeface.create(fontALPR, Typeface.BOLD));
83 |
84 | mPaintTextNumberBackground = new Paint();
85 | mPaintTextNumberBackground.setColor(Color.YELLOW);
86 | mPaintTextNumberBackground.setStrokeWidth(STROKE_WIDTH);
87 | mPaintTextNumberBackground.setStyle(Paint.Style.FILL_AND_STROKE);
88 |
89 | mPaintTextLPCI = new Paint();
90 | mPaintTextLPCI.setTextSize(TypedValue.applyDimension(
91 | TypedValue.COMPLEX_UNIT_DIP, TEXT_LPCI_SIZE_DIP, getResources().getDisplayMetrics()));
92 | mPaintTextLPCI.setColor(Color.WHITE);
93 | mPaintTextLPCI.setStyle(Paint.Style.FILL_AND_STROKE);
94 | // mPaintTextLPCI.setTypeface(Typeface.create(fontALPR, Typeface.BOLD));
95 |
96 | mPaintTextLPCIBackground = new Paint();
97 | mPaintTextLPCIBackground.setColor(Color.BLUE);
98 | mPaintTextLPCIBackground.setStrokeWidth(STROKE_WIDTH);
99 | mPaintTextLPCIBackground.setStyle(Paint.Style.FILL_AND_STROKE);
100 |
101 | mPaintTextCar = new Paint();
102 | mPaintTextCar.setTextSize(TypedValue.applyDimension(
103 | TypedValue.COMPLEX_UNIT_DIP, TEXT_CAR_SIZE_DIP, getResources().getDisplayMetrics()));
104 | mPaintTextCar.setColor(Color.BLACK);
105 | mPaintTextCar.setStyle(Paint.Style.FILL_AND_STROKE);
106 | // mPaintTextCar.setTypeface(Typeface.create(fontALPR, Typeface.BOLD));
107 |
108 | mPaintTextCarBackground = new Paint();
109 | mPaintTextCarBackground.setColor(Color.RED);
110 | mPaintTextCarBackground.setStrokeWidth(STROKE_WIDTH);
111 | mPaintTextCarBackground.setStyle(Paint.Style.FILL_AND_STROKE);
112 |
113 | mPaintBorder = new Paint();
114 | mPaintBorder.setStrokeWidth(STROKE_WIDTH);
115 | mPaintBorder.setPathEffect(null);
116 | mPaintBorder.setColor(Color.YELLOW);
117 | mPaintBorder.setStyle(Paint.Style.STROKE);
118 |
119 | mPaintTextDurationTime = new Paint();
120 | mPaintTextDurationTime.setTextSize(TypedValue.applyDimension(
121 | TypedValue.COMPLEX_UNIT_DIP, TEXT_INFERENCE_TIME_SIZE_DIP, getResources().getDisplayMetrics()));
122 | mPaintTextDurationTime.setColor(Color.WHITE);
123 | mPaintTextDurationTime.setStyle(Paint.Style.FILL_AND_STROKE);
124 | // mPaintTextDurationTime.setTypeface(Typeface.create(fontALPR, Typeface.BOLD));
125 |
126 | mPaintTextDurationTimeBackground = new Paint();
127 | mPaintTextDurationTimeBackground.setColor(Color.BLACK);
128 | mPaintTextDurationTimeBackground.setStrokeWidth(STROKE_WIDTH);
129 | mPaintTextDurationTimeBackground.setStyle(Paint.Style.FILL_AND_STROKE);
130 |
131 | mPaintDetectROI = new Paint();
132 | mPaintDetectROI.setColor(Color.RED);
133 | mPaintDetectROI.setStrokeWidth(STROKE_WIDTH);
134 | mPaintDetectROI.setStyle(Paint.Style.STROKE);
135 | mPaintDetectROI.setPathEffect(new DashPathEffect(new float[] {10,20}, 0));
136 | }
137 |
138 | public void setDetectROI(final RectF roi) { mDetectROI = roi; }
139 |
140 | /**
141 | *
142 | * @param width
143 | * @param height
144 | */
145 | public void setAspectRatio(int width, int height) {
146 | if (width < 0 || height < 0) {
147 | throw new IllegalArgumentException("Size cannot be negative.");
148 | }
149 | mRatioWidth = width;
150 | mRatioHeight = height;
151 | requestLayout();
152 | }
153 |
154 | @Override
155 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
156 | Log.i(TAG, "onMeasure");
157 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
158 | int width = MeasureSpec.getSize(widthMeasureSpec);
159 | int height = MeasureSpec.getSize(heightMeasureSpec);
160 | if (0 == mRatioWidth || 0 == mRatioHeight) {
161 | setMeasuredDimension(width, height);
162 | } else {
163 | if (width < height * mRatioWidth / mRatioHeight) {
164 | setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
165 | } else {
166 | setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
167 | }
168 | }
169 | }
170 |
171 | /**
172 | *
173 | * @param result
174 | * @param imageSize
175 | */
176 | public synchronized void setResult(@NonNull final AlprResult result, @NonNull final Size imageSize, @NonNull final long durationTime, @NonNull final int orientation) {
177 | mPlates = AlprUtils.extractPlates(result);
178 | mImageSize = imageSize;
179 | mDurationTimeMillis = durationTime;
180 | mOrientation = orientation;
181 | postInvalidate();
182 | }
183 |
184 | @Override
185 | public synchronized void draw(final Canvas canvas) {
186 | super.draw(canvas);
187 |
188 | if (mImageSize == null) {
189 | Log.i(TAG, "Not initialized yet");
190 | return;
191 | }
192 |
193 | final String mInferenceTimeMillisString = "Point your camera at a License Plate ";
194 | Rect boundsTextmInferenceTimeMillis = new Rect();
195 | mPaintTextDurationTime.getTextBounds(mInferenceTimeMillisString, 0, mInferenceTimeMillisString.length(), boundsTextmInferenceTimeMillis);
196 |
197 | int left = (canvas.getWidth() - boundsTextmInferenceTimeMillis.width()) / 2;
198 | int top = 20;
199 | canvas.drawRect(left, top, left + boundsTextmInferenceTimeMillis.width() + 5, top + boundsTextmInferenceTimeMillis.height() + 20, mPaintTextDurationTimeBackground);
200 | canvas.drawText(mInferenceTimeMillisString, left, 20 + boundsTextmInferenceTimeMillis.height(), mPaintTextDurationTime);
201 |
202 | // Transformation info
203 | final AlprUtils.AlprTransformationInfo tInfo = new AlprUtils.AlprTransformationInfo(mImageSize.getWidth(), mImageSize.getHeight(), getWidth(), getHeight());
204 |
205 | // ROI
206 | if (mDetectROI != null && !mDetectROI.isEmpty()) {
207 | canvas.drawRect(
208 | new RectF(
209 | tInfo.transformX(mDetectROI.left),
210 | tInfo.transformY(mDetectROI.top),
211 | tInfo.transformX(mDetectROI.right),
212 | tInfo.transformY(mDetectROI.bottom)
213 | ),
214 | mPaintDetectROI
215 | );
216 | }
217 |
218 | // Plates
219 | if (mPlates != null && !mPlates.isEmpty()) {
220 | for (final AlprUtils.Plate plate : mPlates) {
221 | // Transform corners
222 | final float[] plateWarpedBox = plate.getWarpedBox();
223 | final PointF plateCornerA = new PointF(tInfo.transformX(plateWarpedBox[0]), tInfo.transformY(plateWarpedBox[1]));
224 | final PointF plateCornerB = new PointF(tInfo.transformX(plateWarpedBox[2]), tInfo.transformY(plateWarpedBox[3]));
225 | final PointF plateCornerC = new PointF(tInfo.transformX(plateWarpedBox[4]), tInfo.transformY(plateWarpedBox[5]));
226 | final PointF plateCornerD = new PointF(tInfo.transformX(plateWarpedBox[6]), tInfo.transformY(plateWarpedBox[7]));
227 | // Draw border
228 | final Path platePathBorder = new Path();
229 | platePathBorder.moveTo(plateCornerA.x, plateCornerA.y);
230 | platePathBorder.lineTo(plateCornerB.x, plateCornerB.y);
231 | platePathBorder.lineTo(plateCornerC.x, plateCornerC.y);
232 | platePathBorder.lineTo(plateCornerD.x, plateCornerD.y);
233 | platePathBorder.lineTo(plateCornerA.x, plateCornerA.y);
234 | platePathBorder.close();
235 | mPaintBorder.setColor(mPaintTextNumberBackground.getColor());
236 | canvas.drawPath(platePathBorder, mPaintBorder);
237 |
238 | // Draw text number
239 | final String number = plate.getNumber();
240 | if (number != null && !number.isEmpty()) {
241 | Rect boundsTextNumber = new Rect();
242 | mPaintTextNumber.getTextBounds(number, 0, number.length(), boundsTextNumber);
243 | final RectF rectTextNumber = new RectF(
244 | plateCornerA.x,
245 | plateCornerA.y - (boundsTextNumber.height() + 10) * 2,
246 | plateCornerA.x + boundsTextNumber.width(),
247 | plateCornerA.y - (boundsTextNumber.height() + 10)
248 | );
249 | final Path pathTextNumber = new Path();
250 | pathTextNumber.moveTo(plateCornerA.x, plateCornerA.y - rectTextNumber.height() - 10);
251 | pathTextNumber.lineTo(Math.max(plateCornerB.x, (plateCornerA.x + rectTextNumber.width())), plateCornerB.y - rectTextNumber.height() - 10);
252 | pathTextNumber.addRect(rectTextNumber, Path.Direction.CCW);
253 | pathTextNumber.close();
254 | canvas.drawPath(pathTextNumber, mPaintTextNumberBackground);
255 | canvas.drawTextOnPath(number, pathTextNumber, 0, 0, mPaintTextNumber);
256 | }
257 |
258 | // Draw Car
259 | if (plate.getCar() != null) {
260 | final AlprUtils.Car car = plate.getCar();
261 | if (car.getConfidence() >= 80.f) {
262 | String color = null;
263 | if (car.getColors() != null) {
264 | final AlprUtils.Car.Attribute colorObj0 = car.getColors().get(0); // sorted, most higher confidence first
265 | if (colorObj0.getConfidence() >= VCR_MIN_CONFIDENCE) {
266 | color = colorObj0.getName();
267 | }
268 | else if (car.getColors().size() >= 2) {
269 | final AlprUtils.Car.Attribute colorObj1 = car.getColors().get(1);
270 | final String colorMix = colorObj0.getName() + "/" + colorObj1.getName();
271 | float confidence = colorObj0.getConfidence();
272 | if ("white/silver,silver/white,gray/silver,silver/gray".indexOf(colorMix) != -1) {
273 | confidence += colorObj1.getConfidence();
274 | }
275 | if (confidence >= VCR_MIN_CONFIDENCE) {
276 | color = (colorMix.indexOf("white") == -1) ? "DarkSilver" : "LightSilver";
277 | confidence = Math.max(colorObj0.getConfidence(), colorObj1.getConfidence());
278 | }
279 | }
280 | }
281 |
282 | String make = null, model = null;
283 | if (car.getMakesModelsYears() != null) {
284 | final List makesModelsYears = car.getMakesModelsYears();
285 | final AlprUtils.Car.MakeModelYear makeModelYear = makesModelsYears.get(0); // sorted, most higher confidence first
286 | if (makeModelYear.getConfidence() >= VMMR_MIN_CONFIDENCE) {
287 | make = makeModelYear.getMake();
288 | model = makeModelYear.getModel();
289 | }
290 | else {
291 | Map makes = new HashMap<>();
292 | Map occurrences = new HashMap<>();
293 | // Fuse makes
294 | for (final AlprUtils.Car.MakeModelYear mmy : makesModelsYears) {
295 | makes.put(mmy.getMake(), AlprUtils.getOrDefault(makes, mmy.getMake(), 0.f) + mmy.getConfidence()); // Map.getOrDefault requires API level 24
296 | occurrences.put(mmy.getMake(), AlprUtils.getOrDefault(occurrences, mmy.getMake(), 0) + 1); // Map.getOrDefault requires API level 24
297 | }
298 | // Find make with highest confidence. Stream requires Java8
299 | Iterator > itMake = makes.entrySet().iterator();
300 | Map.Entry bestMake = itMake.next();
301 | while (itMake.hasNext()) {
302 | Map.Entry makeE = itMake.next();
303 | if (makeE.getValue() > bestMake.getValue()) {
304 | bestMake = makeE;
305 | }
306 | }
307 | // Model fusion
308 | if (bestMake.getValue() >= VMMR_MIN_CONFIDENCE || (occurrences.get(bestMake.getKey()) >= VMMR_FUSE_DEFUSE_MIN_OCCURRENCES && bestMake.getValue() >= VMMR_FUSE_DEFUSE_MIN_CONFIDENCE)) {
309 | make = bestMake.getKey();
310 |
311 | // Fuse models
312 | Map models = new HashMap<>();
313 | for (final AlprUtils.Car.MakeModelYear mmy : makesModelsYears) {
314 | if (make.equals(mmy.getMake())) {
315 | models.put(mmy.getModel(), AlprUtils.getOrDefault(models, mmy.getModel(), 0.f) + mmy.getConfidence()); // Map.getOrDefault requires API level 24
316 | }
317 | }
318 | // Find model with highest confidence. Stream requires Java8
319 | Iterator > itModel = models.entrySet().iterator();
320 | Map.Entry bestModel = itModel.next();
321 | while (itModel.hasNext()) {
322 | Map.Entry modelE = itModel.next();
323 | if (modelE.getValue() > bestModel.getValue()) {
324 | bestModel = modelE;
325 | }
326 | }
327 | model = bestModel.getKey();
328 | }
329 | }
330 | }
331 |
332 | String bodyStyle = null;
333 | if (car.getBodyStyles() != null) {
334 | final AlprUtils.Car.Attribute vbsr = car.getBodyStyles().get(0); // sorted, most higher confidence first
335 | if (vbsr.getConfidence() >= VBSR_MIN_CONFIDENCE) {
336 | bodyStyle = vbsr.getName();
337 | }
338 | }
339 |
340 | // Transform corners
341 | final float[] carWarpedBox = car.getWarpedBox();
342 | final PointF carCornerA = new PointF(tInfo.transformX(carWarpedBox[0]), tInfo.transformY(carWarpedBox[1]));
343 | final PointF carCornerB = new PointF(tInfo.transformX(carWarpedBox[2]), tInfo.transformY(carWarpedBox[3]));
344 | final PointF carCornerC = new PointF(tInfo.transformX(carWarpedBox[4]), tInfo.transformY(carWarpedBox[5]));
345 | final PointF carCornerD = new PointF(tInfo.transformX(carWarpedBox[6]), tInfo.transformY(carWarpedBox[7]));
346 | // Draw border
347 | final Path carPathBorder = new Path();
348 | carPathBorder.moveTo(carCornerA.x, carCornerA.y);
349 | carPathBorder.lineTo(carCornerB.x, carCornerB.y);
350 | carPathBorder.lineTo(carCornerC.x, carCornerC.y);
351 | carPathBorder.lineTo(carCornerD.x, carCornerD.y);
352 | carPathBorder.lineTo(carCornerA.x, carCornerA.y);
353 | carPathBorder.close();
354 | mPaintBorder.setColor(mPaintTextCarBackground.getColor());
355 | canvas.drawPath(carPathBorder, mPaintBorder);
356 |
357 | // Draw car information
358 | final String carText = String.format(
359 | "%s%s%s%s",
360 | make != null ? make : "Car",
361 | model != null ? ", " + model : "",
362 | color != null ? ", " + color : "",
363 | bodyStyle != null ? ", " + bodyStyle : ""
364 | );
365 | Rect boundsTextCar = new Rect();
366 | mPaintTextNumber.getTextBounds(carText, 0, carText.length(), boundsTextCar);
367 | final RectF rectTextNumber = new RectF(
368 | plateCornerA.x,
369 | plateCornerA.y - (boundsTextCar.height() + 5) * 3,
370 | plateCornerA.x + boundsTextCar.width(),
371 | plateCornerA.y - (boundsTextCar.height() + 5) * 2
372 | );
373 | final Path pathTextCar = new Path();
374 | pathTextCar.moveTo(plateCornerA.x, plateCornerA.y - (rectTextNumber.height() + 5) * 2);
375 | pathTextCar.lineTo(Math.max(plateCornerB.x, (plateCornerA.x + rectTextNumber.width())), plateCornerB.y - (rectTextNumber.height() + 5) * 2);
376 | pathTextCar.addRect(rectTextNumber, Path.Direction.CCW);
377 | pathTextCar.close();
378 | canvas.drawPath(pathTextCar, mPaintTextNumberBackground);
379 | canvas.drawTextOnPath(carText, pathTextCar, 0, 0, mPaintTextNumber);
380 | }
381 | }
382 |
383 | if (plate.getCountries() != null) {
384 | final AlprUtils.Country country = plate.getCountries().get(0); // sorted, most higher confidence first
385 | if (country.getConfidence() >= LPCI_MIN_CONFIDENCE) {
386 | final String countryString = country.getCode();
387 | Rect boundsConfidenceLPCI = new Rect();
388 | mPaintTextNumber.getTextBounds(countryString, 0, countryString.length(), boundsConfidenceLPCI);
389 | final RectF rectTextLPCI = new RectF(
390 | plateCornerA.x,
391 | plateCornerA.y - (boundsConfidenceLPCI.height() + 10),
392 | plateCornerA.x + (boundsConfidenceLPCI.width() + 10),
393 | plateCornerA.y
394 | );
395 | final Path pathTextLPCI = new Path();
396 | pathTextLPCI.moveTo(plateCornerA.x, plateCornerA.y);
397 | pathTextLPCI.lineTo(Math.max(plateCornerB.x, (plateCornerA.x + boundsConfidenceLPCI.width())), plateCornerB.y);
398 | pathTextLPCI.addRect(rectTextLPCI, Path.Direction.CCW);
399 | pathTextLPCI.close();
400 | canvas.drawPath(pathTextLPCI, mPaintTextNumberBackground);
401 | canvas.drawTextOnPath(countryString, pathTextLPCI, 0, 0, mPaintTextNumber);
402 | }
403 | }
404 | }
405 | }
406 | }
407 | }
--------------------------------------------------------------------------------
/demo/src/main/java/org/buyun/alpr/common/AlprUtils.java:
--------------------------------------------------------------------------------
1 | package org.buyun.alpr.common;
2 |
3 | import android.content.res.AssetFileDescriptor;
4 | import android.content.res.AssetManager;
5 | import android.graphics.PointF;
6 | import android.util.Log;
7 |
8 | import androidx.annotation.NonNull;
9 |
10 | import org.buyun.alpr.sdk.AlprResult;
11 | import org.json.JSONArray;
12 | import org.json.JSONException;
13 | import org.json.JSONObject;
14 |
15 | import java.io.FileInputStream;
16 | import java.io.IOException;
17 | import java.nio.channels.FileChannel;
18 | import java.util.LinkedList;
19 | import java.util.List;
20 | import java.util.Map;
21 |
22 | /**
23 | * Utility class
24 | */
25 | public class AlprUtils {
26 | static final String TAG = AlprUtils.class.getCanonicalName();
27 | /**
28 | *
29 | */
30 | public static class AlprTransformationInfo {
31 | final int mXOffset;
32 | final int mYOffset;
33 | final float mRatio;
34 | final int mWidth;
35 | final int mHeight;
36 | public AlprTransformationInfo(final int imageWidth, final int imageHeight, final int canvasWidth, final int canvasHeight) {
37 | final float xRatio = (float)canvasWidth / (float)imageWidth;
38 | final float yRatio = (float)canvasHeight / (float)imageHeight;
39 | mRatio = Math.min( xRatio, yRatio );
40 | mWidth = (int)(imageWidth * mRatio);
41 | mHeight = (int)(imageHeight * mRatio);
42 | mXOffset = (canvasWidth - mWidth) >> 1;
43 | mYOffset = (canvasHeight - mHeight) >> 1;
44 | }
45 | public float transformX(final float x) { return x * mRatio + mXOffset; }
46 | public float transformY(final float y) { return y * mRatio + mYOffset; }
47 | public PointF transform(final PointF p) { return new PointF(transformX(p.x), transformY(p.y)); }
48 | public int getXOffset() { return mXOffset; }
49 | public int getYOffset() { return mYOffset; }
50 | public float getRatio() { return mRatio; }
51 | public int getWidth() { return mWidth; }
52 | public int getHeight() { return mHeight; }
53 | }
54 |
55 | static class Car {
56 | static class Attribute {
57 | private int mKlass;
58 | private String mName;
59 | private float mConfidence;
60 |
61 | public int getKlass() { return mKlass; }
62 | public String getName() { return mName; }
63 | public float getConfidence() { return mConfidence; }
64 | }
65 | static class MakeModelYear {
66 | private int mKlass;
67 | private String mMake;
68 | private String mModel;
69 | private String mYear; // Not integer on purpose, could be interval or...
70 | private float mConfidence;
71 |
72 | public int getKlass() { return mKlass; }
73 | public String getMake() { return mMake; }
74 | public String getModel() { return mModel; }
75 | public String getYear() { return mYear; }
76 | public float getConfidence() { return mConfidence; }
77 | }
78 |
79 | private float mConfidence;
80 | private float mWarpedBox[];
81 | private List mColors;
82 | private List mBodyStyles;
83 | private List mMakesModelsYears;
84 |
85 | public float[] getWarpedBox() { return mWarpedBox; }
86 | public float getConfidence() { return mConfidence; }
87 | public List getColors() { return mColors; }
88 | public List getBodyStyles() { return mBodyStyles; }
89 | public List getMakesModelsYears() { return mMakesModelsYears; }
90 | }
91 |
92 | /**
93 | *
94 | */
95 | static class Country {
96 | private int mKlass;
97 | private String mCode;
98 | private String mName;
99 | private String mState;
100 | private String mOther;
101 | private float mConfidence;
102 |
103 | public int getKlass() { return mKlass; }
104 | public String getCode() { return mCode; }
105 | public String getName() { return mName; }
106 | public String getState() { return mState; }
107 | public String getOther() { return mOther; }
108 | public float getConfidence() { return mConfidence; }
109 | }
110 |
111 | /**
112 | *
113 | */
114 | static class Plate {
115 | private String mNumber;
116 | private float mDetectionConfidence;
117 | private float mRecognitionConfidence;
118 | private float mWarpedBox[];
119 | private List mCountries;
120 | private Car mCar;
121 |
122 | public String getNumber() { return mNumber; }
123 | public float getDetectionConfidence() { return mDetectionConfidence; }
124 | public float getRecognitionConfidence() { return mRecognitionConfidence; }
125 | public float[] getWarpedBox() { return mWarpedBox; }
126 |
127 | public List getCountries() { return mCountries; }
128 | public Car getCar() { return mCar; }
129 | }
130 |
131 | static public final long extractFrameId(final AlprResult result) {
132 | final String jsonString = result.json();
133 | if (jsonString != null) {
134 | try {
135 | final JSONObject jObject = new JSONObject(jsonString);
136 | return jObject.getLong("frame_id");
137 | }
138 | catch (JSONException e) { }
139 | }
140 | return 0;
141 | }
142 |
143 | static public final List extractPlates(final AlprResult result) {
144 | final List plates = new LinkedList<>();
145 | if (!result.isOK() || (result.numPlates() == 0 && result.numCars() == 0)) {
146 | return plates;
147 | }
148 | final String jsonString = result.json();
149 | //final String jsonString = "{\"frame_id\":178,\"lantency\":0,\"plates\":[{\"car\":{\"color\":[{\"confidence\":59.76562,\"klass\":11,\"name\":\"white\"},{\"confidence\":27.73438,\"klass\":0,\"name\":\"black\"},{\"confidence\":11.32812,\"klass\":9,\"name\":\"silver\"},{\"confidence\":0.390625,\"klass\":4,\"name\":\"gray\"},{\"confidence\":0.390625,\"klass\":5,\"name\":\"green\"}],\"confidence\":89.45312,\"makeModelYear\":[{\"confidence\":5.46875,\"klass\":8072,\"make\":\"nissan\",\"model\":\"nv\",\"year\":2012},{\"confidence\":3.90625,\"klass\":4885,\"make\":\"gmc\",\"model\":\"yukon 1500\",\"year\":2007},{\"confidence\":1.953125,\"klass\":3950,\"make\":\"ford\",\"model\":\"f150\",\"year\":2001},{\"confidence\":1.953125,\"klass\":4401,\"make\":\"ford\",\"model\":\"ranger\",\"year\":2008},{\"confidence\":1.953125,\"klass\":3954,\"make\":\"ford\",\"model\":\"f150\",\"year\":2005}],\"warpedBox\":[37.26704,655.171,253.8487,655.171,253.8487,897.6935,37.26704,897.6935]},\"confidences\":[86.99596,99.60938],\"country\":[{\"code\":\"RUS\",\"confidence\":99.60938,\"klass\":65,\"name\":\"Russian Federation\",\"other\":\"Private vehicle\",\"state\":\"Republic of Karelia\"},{\"code\":\"USA\",\"confidence\":0.0,\"klass\":88,\"name\":\"United States of America\",\"state\":\"Iowa\"},{\"code\":\"USA\",\"confidence\":0.0,\"klass\":80,\"name\":\"United States of America\",\"state\":\"Connecticut\"},{\"code\":\"USA\",\"confidence\":0.0,\"klass\":81,\"name\":\"United States of America\",\"state\":\"Delaware\"},{\"code\":\"USA\",\"confidence\":0.0,\"klass\":82,\"name\":\"United States of America\",\"state\":\"Florida\"}],\"text\":\"K643ET10\",\"warpedBox\":[61.73531,819.796,145.57,819.796,145.57,881.916,61.73531,881.916]}]}";
150 | if (jsonString == null) { // No plate
151 | return plates;
152 | }
153 |
154 | try {
155 | final JSONObject jObject = new JSONObject(jsonString);
156 | if (jObject.has("plates")) {
157 | final JSONArray jPlates = jObject.getJSONArray("plates");
158 | for (int i = 0; i < jPlates.length(); ++i) {
159 | final JSONObject jPlate = jPlates.getJSONObject(i);
160 |
161 | // The plate itself (backward-compatible with 2.0.0)
162 | final Plate plate = new Plate();
163 | plate.mWarpedBox = new float[8];
164 | if (jPlate.has("text")) { // Starting 3.2 it's possible to have cars without plates when enabled
165 | final JSONArray jConfidences = jPlate.getJSONArray("confidences");
166 | final JSONArray jWarpedBox = jPlate.getJSONArray("warpedBox");
167 | plate.mNumber = jPlate.getString("text");
168 | for (int j = 0; j < 8; ++j) {
169 | plate.mWarpedBox[j] = (float) jWarpedBox.getDouble(j);
170 | }
171 | plate.mRecognitionConfidence = (float) jConfidences.getDouble(0);
172 | plate.mDetectionConfidence = (float) jConfidences.getDouble(1);
173 | }
174 | else {
175 | plate.mNumber = "";
176 | plate.mRecognitionConfidence = 0.f;
177 | plate.mDetectionConfidence = 0.f;
178 | }
179 |
180 | if (jPlate.has("country")) {
181 | plate.mCountries = new LinkedList<>();
182 | final JSONArray jCountries = jPlate.getJSONArray("country");
183 | for (int k = 0; k < jCountries.length(); ++k) {
184 | final JSONObject jCountry = jCountries.getJSONObject(k);
185 | final Country country = new Country();
186 | country.mKlass = jCountry.getInt("klass");
187 | country.mConfidence = (float) jCountry.getDouble("confidence");
188 | country.mCode = jCountry.getString("code");
189 | country.mName = jCountry.getString("name");
190 | if (jCountry.has("state")) { // optional
191 | country.mState = jCountry.getString("state");
192 | }
193 | if (jCountry.has("other")) { // optional
194 | country.mOther = jCountry.getString("other");
195 | }
196 |
197 | plate.mCountries.add(country);
198 | }
199 | }
200 |
201 | // Car (Added in 3.0.0)
202 | if (jPlate.has("car")) {
203 | final JSONObject jCar = jPlate.getJSONObject("car");
204 | final JSONArray jCarWarpedBox = jCar.getJSONArray("warpedBox");
205 | plate.mCar = new Car();
206 | plate.mCar.mConfidence = (float) jCar.getDouble("confidence");
207 | plate.mCar.mWarpedBox = new float[8];
208 | for (int j = 0; j < 8; ++j) {
209 | plate.mCar.mWarpedBox[j] = (float) jCarWarpedBox.getDouble(j);
210 | }
211 |
212 | if (jCar.has("color")) {
213 | plate.mCar.mColors = new LinkedList<>();
214 | final JSONArray jColors = jCar.getJSONArray("color");
215 | for (int k = 0; k < jColors.length(); ++k) {
216 | final JSONObject jColor = jColors.getJSONObject(k);
217 | final Car.Attribute color = new Car.Attribute();
218 | color.mKlass = jColor.getInt("klass");
219 | color.mConfidence = (float) jColor.getDouble("confidence");
220 | color.mName = jColor.getString("name"); // Name in English
221 |
222 | plate.mCar.mColors.add(color);
223 | }
224 | }
225 |
226 | if (jCar.has("makeModelYear")) {
227 | plate.mCar.mMakesModelsYears = new LinkedList<>();
228 | final JSONArray jMMYs = jCar.getJSONArray("makeModelYear");
229 | for (int k = 0; k < jMMYs.length(); ++k) {
230 | final JSONObject jMMY = jMMYs.getJSONObject(k);
231 | final Car.MakeModelYear mmy = new Car.MakeModelYear();
232 | mmy.mKlass = jMMY.getInt("klass");
233 | mmy.mConfidence = (float) jMMY.getDouble("confidence");
234 | mmy.mMake = jMMY.getString("make");
235 | mmy.mModel = jMMY.getString("model");
236 | mmy.mYear = jMMY.get("year").toString(); // Maybe Integer or String or whatever
237 |
238 | plate.mCar.mMakesModelsYears.add(mmy);
239 | }
240 | }
241 |
242 | if (jCar.has("bodyStyle")) {
243 | plate.mCar.mBodyStyles = new LinkedList<>();
244 | final JSONArray jBodyStyles = jCar.getJSONArray("bodyStyle");
245 | for (int k = 0; k < jBodyStyles.length(); ++k) {
246 | final JSONObject jBodyStyle = jBodyStyles.getJSONObject(k);
247 | final Car.Attribute bodyStyle = new Car.Attribute();
248 | bodyStyle.mKlass = jBodyStyle.getInt("klass");
249 | bodyStyle.mConfidence = (float) jBodyStyle.getDouble("confidence");
250 | bodyStyle.mName = jBodyStyle.getString("name"); // Name in English
251 |
252 | plate.mCar.mBodyStyles.add(bodyStyle);
253 | }
254 | }
255 | }
256 |
257 | plates.add(plate);
258 | }
259 | }
260 | }
261 | catch (JSONException e) {
262 | e.printStackTrace();
263 | Log.e(TAG, e.toString());
264 | }
265 | return plates;
266 | }
267 |
268 | public static V getOrDefault(@NonNull Map map, K key, V defaultValue) {
269 | V v;
270 | return (((v = map.get(key)) != null) || map.containsKey(key))
271 | ? v
272 | : defaultValue;
273 | }
274 |
275 | /**
276 | * Checks if the returned result is success. An assertion will be raised if it's not the case.
277 | * In production you should catch the exception and perform the appropriate action.
278 | * @param result The result to check
279 | * @return The same result
280 | */
281 | static public final AlprResult assertIsOk(final AlprResult result) {
282 | if (!result.isOK()) {
283 | throw new AssertionError("Operation failed: " + result.phrase());
284 | }
285 | return result;
286 | }
287 |
288 | /**
289 | * Converts the result to String.
290 | * @param result
291 | * @return
292 | */
293 | static public final String resultToString(final AlprResult result) {
294 | return "code: " + result.code() + ", phrase: " + result.phrase() + ", numPlates: " + result.numPlates() + ", json: " + result.json();
295 | }
296 |
297 | /**
298 | *
299 | * @param fileName
300 | * @return Must close the returned object
301 | */
302 | static public FileChannel readFileFromAssets(final AssetManager assets, final String fileName) {
303 | FileInputStream inputStream = null;
304 | try {
305 | AssetFileDescriptor fileDescriptor = assets.openFd(fileName);
306 | inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
307 | return inputStream.getChannel();
308 | // To return DirectByteBuffer: fileChannel.map(FileChannel.MapMode.READ_ONLY, fileDescriptor.getStartOffset(), fileDescriptor.getDeclaredLength());
309 | } catch (IOException e) {
310 | e.printStackTrace();
311 | Log.e(TAG, e.toString());
312 | return null;
313 | }
314 | }
315 | }
--------------------------------------------------------------------------------
/demo/src/main/res/drawable-hdpi/ic_action_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FaceOnLive/License-Plate-Recognition-SDK-Android/29a165725855896f9a1de823b9fcaea781886b4d/demo/src/main/res/drawable-hdpi/ic_action_info.png
--------------------------------------------------------------------------------
/demo/src/main/res/drawable-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FaceOnLive/License-Plate-Recognition-SDK-Android/29a165725855896f9a1de823b9fcaea781886b4d/demo/src/main/res/drawable-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/demo/src/main/res/drawable-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FaceOnLive/License-Plate-Recognition-SDK-Android/29a165725855896f9a1de823b9fcaea781886b4d/demo/src/main/res/drawable-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/demo/src/main/res/drawable-hdpi/tile.9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FaceOnLive/License-Plate-Recognition-SDK-Android/29a165725855896f9a1de823b9fcaea781886b4d/demo/src/main/res/drawable-hdpi/tile.9.png
--------------------------------------------------------------------------------
/demo/src/main/res/drawable-mdpi/ic_action_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FaceOnLive/License-Plate-Recognition-SDK-Android/29a165725855896f9a1de823b9fcaea781886b4d/demo/src/main/res/drawable-mdpi/ic_action_info.png
--------------------------------------------------------------------------------
/demo/src/main/res/drawable-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FaceOnLive/License-Plate-Recognition-SDK-Android/29a165725855896f9a1de823b9fcaea781886b4d/demo/src/main/res/drawable-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/demo/src/main/res/drawable-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FaceOnLive/License-Plate-Recognition-SDK-Android/29a165725855896f9a1de823b9fcaea781886b4d/demo/src/main/res/drawable-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/demo/src/main/res/drawable-xhdpi/ic_action_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FaceOnLive/License-Plate-Recognition-SDK-Android/29a165725855896f9a1de823b9fcaea781886b4d/demo/src/main/res/drawable-xhdpi/ic_action_info.png
--------------------------------------------------------------------------------
/demo/src/main/res/drawable-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FaceOnLive/License-Plate-Recognition-SDK-Android/29a165725855896f9a1de823b9fcaea781886b4d/demo/src/main/res/drawable-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/demo/src/main/res/drawable-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FaceOnLive/License-Plate-Recognition-SDK-Android/29a165725855896f9a1de823b9fcaea781886b4d/demo/src/main/res/drawable-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/demo/src/main/res/drawable-xxhdpi/ic_action_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FaceOnLive/License-Plate-Recognition-SDK-Android/29a165725855896f9a1de823b9fcaea781886b4d/demo/src/main/res/drawable-xxhdpi/ic_action_info.png
--------------------------------------------------------------------------------
/demo/src/main/res/drawable-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FaceOnLive/License-Plate-Recognition-SDK-Android/29a165725855896f9a1de823b9fcaea781886b4d/demo/src/main/res/drawable-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/demo/src/main/res/drawable-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FaceOnLive/License-Plate-Recognition-SDK-Android/29a165725855896f9a1de823b9fcaea781886b4d/demo/src/main/res/drawable-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/demo/src/main/res/layout-land/fragment_camera.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
26 |
27 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/demo/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
--------------------------------------------------------------------------------
/demo/src/main/res/layout/fragment_camera.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
26 |
27 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/demo/src/main/res/values-sw600dp/template-dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | @dimen/margin_huge
4 | @dimen/margin_medium
5 |
6 |
7 |
--------------------------------------------------------------------------------
/demo/src/main/res/values-sw600dp/template-styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/demo/src/main/res/values-v11/template-styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/demo/src/main/res/values-v21/base-colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/demo/src/main/res/values-v21/base-template-styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/demo/src/main/res/values/base-strings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Alpr Demo
5 |
6 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/demo/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #cc4285f4
4 |
5 |
--------------------------------------------------------------------------------
/demo/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | Picture
4 | Info
5 | This sample needs camera permission.
6 | This device doesn\'t support Camera2 API.
7 |
8 |
--------------------------------------------------------------------------------
/demo/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
6 |
7 |
--------------------------------------------------------------------------------
/demo/src/main/res/values/template-dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 4dp
4 | 8dp
5 | 16dp
6 | 32dp
7 | 64dp
8 |
9 | @dimen/margin_medium
10 | @dimen/margin_medium
11 |
12 |
13 |
--------------------------------------------------------------------------------
/demo/src/main/res/values/template-styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
18 |
19 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx1536m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | # AndroidX package structure to make it clearer which packages are bundled with the
15 | # Android operating system, and which are packaged with your app's APK
16 | # https://developer.android.com/topic/libraries/support-library/androidx-rn
17 | android.useAndroidX=true
18 | # Automatically convert third-party libraries to use AndroidX
19 | android.enableJetifier=true
20 |
21 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FaceOnLive/License-Plate-Recognition-SDK-Android/29a165725855896f9a1de823b9fcaea781886b4d/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionBase=GRADLE_USER_HOME
2 | distributionPath=wrapper/dists
3 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
#!/usr/bin/env sh
# NOTE(review): standard Gradle wrapper launcher (generated by Gradle's "wrapper" task);
# keep hand edits minimal so future wrapper upgrades remain clean diffs.

#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

# Print a message without aborting.
warn () {
echo "$*"
}

# Print a message and abort with a non-zero exit status.
die () {
echo
echo "$*"
echo
exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac

# The wrapper jar bootstraps the Gradle distribution declared in gradle-wrapper.properties.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar


# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`

JAVACMD=`cygpath --unix "$JAVACMD"`

# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option

if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi

# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

# Replace this shell with the JVM so Gradle's exit status becomes the script's exit status.
exec "$JAVACMD" "$@"
186 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem

@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem NOTE(review): standard wrapper launcher generated by Gradle's "wrapper" task;
@rem keep hand edits minimal so future wrapper upgrades remain clean diffs.

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants

if not "%OS%" == "Windows_NT" goto win9xME_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

@rem %* forwards every command-line argument verbatim.
set CMD_LINE_ARGS=%*

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar


@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega
105 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
// Multi-project build: the ':sdk' and ':demo' modules make up this build.
include ':sdk', ':demo'
rootProject.name='AlprDemo'
3 |
--------------------------------------------------------------------------------