20 | * For now: automatic recognition, photo capture and cropping. 21 | *
22 | */ 23 | public class MainActivity extends AppCompatActivity { 24 | 25 | private PhotoView mPhotoView; 26 | 27 | private Bitmap mTakePictureBitmap; 28 | 29 | private CameraApiFragment mCameraApiFragment; 30 | 31 | /** 32 | * 对图片进行、对比度的加强 33 | */ 34 | private PhotoEnhanceUtil mPhotoEnhance; 35 | 36 | // Used to load the 'native-lib' library on application startup. 37 | static { 38 | System.loadLibrary("native-lib"); 39 | } 40 | 41 | @Override 42 | protected void onCreate(Bundle savedInstanceState) { 43 | super.onCreate(savedInstanceState); 44 | setContentView(R.layout.activity_main); 45 | 46 | // Example of a call to a native method 47 | TextView tv = (TextView) findViewById(R.id.sample_text); 48 | tv.setText(stringFromJNI()); 49 | } 50 | 51 | @Override 52 | protected void onPostCreate(@Nullable Bundle savedInstanceState) { 53 | super.onPostCreate(savedInstanceState); 54 | 55 | final ImageView showImg = findViewById(R.id.show_img); 56 | mPhotoView = findViewById(R.id.photo_view); 57 | 58 | mCameraApiFragment = CameraApiFragment.newInstance(); 59 | 60 | getSupportFragmentManager().beginTransaction().add(R.id.container, mCameraApiFragment, "api").commit(); 61 | 62 | mCameraApiFragment.setTakePictureCallback(new TakePictureCallback() { 63 | @Override 64 | public void call(Bitmap bitmap) { 65 | mTakePictureBitmap = bitmap; 66 | showImg.setImageBitmap(bitmap); 67 | } 68 | }); 69 | 70 | showImg.setOnClickListener(mOnClick); 71 | showImg.setOnLongClickListener(new View.OnLongClickListener() { 72 | @Override 73 | public boolean onLongClick(View v) { 74 | mPhotoView.setImageBitmap(mTakePictureBitmap); 75 | mPhotoView.setVisibility(View.VISIBLE); 76 | return false; 77 | } 78 | }); 79 | 80 | new Handler().post(new Runnable() { 81 | @Override 82 | public void run() { 83 | // cameraApiFragment.setTopViewMarginTop(100); 84 | } 85 | }); 86 | 87 | } 88 | 89 | @Override 90 | protected void onResume() { 91 | super.onResume(); 92 | 93 | mCameraApiFragment.startDetect(); 94 | } 95 | 96 | @Override 97 | protected void onStop() { 98 | super.onStop(); 99 | mCameraApiFragment.stopDetect(); 100 | } 101 | 102 | private View.OnClickListener mOnClick = new View.OnClickListener() { 103 | @Override 104 | public void onClick(View v) { 105 | 106 | if (v.getId() == R.id.show_img) { 107 | if(mTakePictureBitmap == null){ 108 | return; 109 | } 110 | mPhotoEnhance = new PhotoEnhanceUtil(mTakePictureBitmap); 111 | 112 | mPhotoEnhance.setContrast(200); 113 | Bitmap source = mPhotoEnhance.handleImage(mPhotoEnhance.Enhance_Contrast); 114 | mPhotoView.setImageBitmap(source); 115 | mPhotoView.setVisibility(View.VISIBLE); 116 | return; 117 | } 118 | } 119 | }; 120 | 121 | /** 122 | * A native method that is implemented by the 'native-lib' native library, 123 | * which is packaged with this application. 124 | */ 125 | public native String stringFromJNI(); 126 | 127 | @Override 128 | public void onBackPressed() { 129 | if (mPhotoView.getVisibility() == View.VISIBLE) { 130 | mPhotoView.setVisibility(View.GONE); 131 | } else { 132 | super.onBackPressed(); 133 | } 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /app/src/main/res/drawable-v24/ic_launcher_foreground.xml: -------------------------------------------------------------------------------- 1 |14 | * 相机工具类 15 | *
16 | * <p> 17 | * Get the camera ID: {@link #getCameraId()}. 18 | * Turn the flash on/off: {@link #startFlash(Camera, boolean)} [the flash is used in torch mode]. 19 | * <p>
20 | */ 21 | 22 | public final class CameraUtil { 23 | 24 | private static final String TAG_LOG = "CameraUtil"; 25 | 26 | private CameraUtil() { 27 | } 28 | 29 | /** 30 | * 获取CameraID用于打开指定相机 31 | * 32 | * @return cameraId. 33 | */ 34 | public static int getCameraId() { 35 | 36 | Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); 37 | 38 | int numberOfCameras = Camera.getNumberOfCameras(); 39 | 40 | for (int i = 0; i < numberOfCameras; i++) { 41 | Camera.getCameraInfo(i, cameraInfo); 42 | 43 | if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) { 44 | return i; 45 | } 46 | } 47 | 48 | //No camera found 49 | return -1; 50 | } 51 | 52 | public static boolean startShutterSound(Camera camera, boolean isOpen) { 53 | if (camera == null) { 54 | return false; 55 | } 56 | camera.enableShutterSound(isOpen); 57 | return true; 58 | } 59 | 60 | /** 61 | * 闪光灯的开、关 62 | * 63 | * @param open 是否开启闪光灯 64 | * @return 是否成功执行 65 | */ 66 | public static boolean startFlash(Camera camera, boolean open) { 67 | if (camera == null) { 68 | return false; 69 | } 70 | Camera.Parameters parameters = camera.getParameters(); 71 | if (open) { 72 | parameters.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH); 73 | } else { 74 | parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF); 75 | } 76 | camera.setParameters(parameters); 77 | return true; 78 | } 79 | 80 | public static Size chooseOptimalSize(final Size[] choices, final int width, final int height) { 81 | final int minSize = Math.max(Math.min(width, height), 320); 82 | final Size desiredSize = new Size(width, height); 83 | 84 | // Collect the supported resolutions that are at least as big as the preview Surface 85 | boolean exactSizeFound = false; 86 | final List8 | * 图片处理工具类 9 | *
10 | */ 11 | 12 | public final class ImageUtil { 13 | 14 | /** 15 | * Returns a transformation matrix from one reference frame into another. 16 | * Handles cropping (if maintaining aspect ratio is desired) and rotation. 17 | * 18 | * @param sourceWidth Width of source frame. 19 | * @param sourceHeight Height of source frame. 20 | * @param dstWidth Width of destination frame. 21 | * @param dstHeight Height of destination frame. 22 | * @param applyRotation Amount of rotation to apply from one frame to another. 23 | * Must be a multiple of 90. 24 | * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant, 25 | * cropping the image if necessary. 26 | * @return The transformation fulfilling the desired requirements. 27 | */ 28 | public static Matrix getTransformationMatrix( 29 | int sourceWidth 30 | , int sourceHeight 31 | , int dstWidth 32 | , int dstHeight 33 | , int applyRotation 34 | , boolean maintainAspectRatio) { 35 | 36 | final Matrix matrix = new Matrix(); 37 | 38 | if (applyRotation != 0) { 39 | 40 | // Translate so center of image is at origin. 41 | //将图片移动到中心点 42 | matrix.postTranslate(-sourceWidth / 2.0f, -sourceHeight / 2.0f); 43 | 44 | // Rotate around origin. 45 | //将图片旋转 46 | matrix.postRotate(applyRotation); 47 | } 48 | 49 | //如果有的话,说明已经应用的旋转,然后确定每个轴需要多少缩放。 50 | //是否需要转置 51 | final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0; 52 | 53 | //如果需要,那么将宽、高进行转置 54 | final int inWidth = transpose ? sourceHeight : sourceWidth; 55 | final int inHeight = transpose ? sourceWidth : sourceHeight; 56 | 57 | 58 | //Apply scaling if necessary. 59 | //判断原始图片,与需要的图片是否需要进行Scaling操作 60 | if (inWidth != dstWidth || inHeight != dstHeight) { 61 | 62 | final float scaleFactoryX = dstWidth / (float) inWidth; 63 | final float scaleFactoryY = dstHeight / (float) inHeight; 64 | 65 | 66 | if (maintainAspectRatio) { 67 | 68 | // Scale by minimum factor so that dst is filled completely while 69 | // maintaining the aspect ratio. Some image may fall off the edge. 70 | //按最小因子进行缩放,使dst完全填充,同时保持纵横比。有些图像可能会从边缘掉下来。 71 | // TODO: 2018/1/2 以下代码可能会导致图片边缘丢失 72 | final float scaleFactor = Math.max(scaleFactoryX, scaleFactoryY); 73 | matrix.postScale(scaleFactor, scaleFactor); 74 | } else { 75 | // Scale exactly to fill dst from src. 76 | //将原始图片精确地完整填充目标图片 77 | matrix.postScale(scaleFactoryX, scaleFactoryY); 78 | } 79 | 80 | } 81 | 82 | 83 | if (applyRotation != 0) { 84 | // Translate back from origin centered reference to destination frame. 85 | //将原点中心的引用转换为目标帧 86 | matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f); 87 | } 88 | 89 | return matrix; 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/common/util/ImageUtils.java: -------------------------------------------------------------------------------- 1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | package com.rossia.life.scan.common.util; 17 | 18 | import android.graphics.Bitmap; 19 | import android.graphics.Matrix; 20 | import android.os.Environment; 21 | 22 | import com.rossia.life.scan.tensor.env.Logger; 23 | 24 | import java.io.File; 25 | import java.io.FileOutputStream; 26 | 27 | /** 28 | * Utility class for manipulating images. 29 | * @author pd_liu. 30 | **/ 31 | public class ImageUtils { 32 | @SuppressWarnings("unused") 33 | private static final Logger LOGGER = new Logger(); 34 | 35 | static { 36 | try { 37 | System.loadLibrary("tensorflow_demo"); 38 | } catch (UnsatisfiedLinkError e) { 39 | LOGGER.w("Native library not found, native RGB -> YUV conversion may be unavailable."); 40 | } 41 | } 42 | 43 | /** 44 | * Utility method to compute the allocated size in bytes of a YUV420SP image 45 | * of the given dimensions. 46 | */ 47 | public static int getYUVByteSize(final int width, final int height) { 48 | // The luminance plane requires 1 byte per pixel. 49 | final int ySize = width * height; 50 | 51 | // The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up. 52 | // Each 2x2 block takes 2 bytes to encode, one each for U and V. 53 | final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2; 54 | 55 | return ySize + uvSize; 56 | } 57 | 58 | /** 59 | * Saves a Bitmap object to disk for analysis. 60 | * 61 | * @param bitmap The bitmap to save. 62 | */ 63 | public static void saveBitmap(final Bitmap bitmap) { 64 | saveBitmap(bitmap, "preview.png"); 65 | } 66 | 67 | /** 68 | * Saves a Bitmap object to disk for analysis. 69 | * 70 | * @param bitmap The bitmap to save. 71 | * @param filename The location to save the bitmap to. 72 | */ 73 | public static void saveBitmap(final Bitmap bitmap, final String filename) { 74 | final String root = 75 | Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow"; 76 | LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root); 77 | final File myDir = new File(root); 78 | 79 | if (!myDir.mkdirs()) { 80 | LOGGER.i("Make dir failed"); 81 | } 82 | 83 | final String fname = filename; 84 | final File file = new File(myDir, fname); 85 | if (file.exists()) { 86 | file.delete(); 87 | } 88 | try { 89 | final FileOutputStream out = new FileOutputStream(file); 90 | bitmap.compress(Bitmap.CompressFormat.PNG, 99, out); 91 | out.flush(); 92 | out.close(); 93 | } catch (final Exception e) { 94 | LOGGER.e(e, "Exception!"); 95 | } 96 | } 97 | 98 | // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges 99 | // are normalized to eight bits. 100 | static final int kMaxChannelValue = 262143; 101 | 102 | // Always prefer the native implementation if available. 
103 | private static boolean useNativeConversion = true; 104 | 105 | public static void convertYUV420SPToARGB8888( 106 | byte[] input, 107 | int width, 108 | int height, 109 | int[] output) { 110 | if (useNativeConversion) { 111 | try { 112 | ImageUtils.convertYUV420SPToARGB8888(input, output, width, height, false); 113 | return; 114 | } catch (UnsatisfiedLinkError e) { 115 | LOGGER.w( 116 | "Native YUV420SP -> RGB implementation not found, falling back to Java implementation"); 117 | useNativeConversion = false; 118 | } 119 | } 120 | 121 | // Java implementation of YUV420SP to ARGB8888 converting 122 | final int frameSize = width * height; 123 | for (int j = 0, yp = 0; j < height; j++) { 124 | int uvp = frameSize + (j >> 1) * width; 125 | int u = 0; 126 | int v = 0; 127 | 128 | for (int i = 0; i < width; i++, yp++) { 129 | int y = 0xff & input[yp]; 130 | if ((i & 1) == 0) { 131 | v = 0xff & input[uvp++]; 132 | u = 0xff & input[uvp++]; 133 | } 134 | 135 | output[yp] = YUV2RGB(y, u, v); 136 | } 137 | } 138 | } 139 | 140 | private static int YUV2RGB(int y, int u, int v) { 141 | // Adjust and check YUV values 142 | y = (y - 16) < 0 ? 0 : (y - 16); 143 | u -= 128; 144 | v -= 128; 145 | 146 | // This is the floating point equivalent. We do the conversion in integer 147 | // because some Android devices do not have floating point in hardware. 148 | // nR = (int)(1.164 * nY + 2.018 * nU); 149 | // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU); 150 | // nB = (int)(1.164 * nY + 1.596 * nV); 151 | int y1192 = 1192 * y; 152 | int r = (y1192 + 1634 * v); 153 | int g = (y1192 - 833 * v - 400 * u); 154 | int b = (y1192 + 2066 * u); 155 | 156 | // Clipping RGB values to be inside boundaries [ 0 , kMaxChannelValue ] 157 | r = r > kMaxChannelValue ? kMaxChannelValue : (r < 0 ? 0 : r); 158 | g = g > kMaxChannelValue ? kMaxChannelValue : (g < 0 ? 0 : g); 159 | b = b > kMaxChannelValue ? kMaxChannelValue : (b < 0 ? 0 : b); 160 | 161 | return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff); 162 | } 163 | 164 | 165 | public static void convertYUV420ToARGB8888( 166 | byte[] yData, 167 | byte[] uData, 168 | byte[] vData, 169 | int width, 170 | int height, 171 | int yRowStride, 172 | int uvRowStride, 173 | int uvPixelStride, 174 | int[] out) { 175 | if (useNativeConversion) { 176 | try { 177 | convertYUV420ToARGB8888( 178 | yData, uData, vData, out, width, height, yRowStride, uvRowStride, uvPixelStride, false); 179 | return; 180 | } catch (UnsatisfiedLinkError e) { 181 | LOGGER.w( 182 | "Native YUV420 -> RGB implementation not found, falling back to Java implementation"); 183 | useNativeConversion = false; 184 | } 185 | } 186 | 187 | int yp = 0; 188 | for (int j = 0; j < height; j++) { 189 | int pY = yRowStride * j; 190 | int pUV = uvRowStride * (j >> 1); 191 | 192 | for (int i = 0; i < width; i++) { 193 | int uv_offset = pUV + (i >> 1) * uvPixelStride; 194 | 195 | out[yp++] = YUV2RGB( 196 | 0xff & yData[pY + i], 197 | 0xff & uData[uv_offset], 198 | 0xff & vData[uv_offset]); 199 | } 200 | } 201 | } 202 | 203 | 204 | /** 205 | * Converts YUV420 semi-planar data to ARGB 8888 data using the supplied width and height. The 206 | * input and output must already be allocated and non-null. For efficiency, no error checking is 207 | * performed. 208 | * 209 | * @param input The array of YUV 4:2:0 input data. 210 | * @param output A pre-allocated array for the ARGB 8:8:8:8 output data. 211 | * @param width The width of the input image. 212 | * @param height The height of the input image. 
213 | * @param halfSize If true, downsample to 50% in each dimension, otherwise not. 214 | */ 215 | private static native void convertYUV420SPToARGB8888( 216 | byte[] input, int[] output, int width, int height, boolean halfSize); 217 | 218 | /** 219 | * Converts YUV420 semi-planar data to ARGB 8888 data using the supplied width 220 | * and height. The input and output must already be allocated and non-null. 221 | * For efficiency, no error checking is performed. 222 | * 223 | * @param y 224 | * @param u 225 | * @param v 226 | * @param uvPixelStride 227 | * @param width The width of the input image. 228 | * @param height The height of the input image. 229 | * @param halfSize If true, downsample to 50% in each dimension, otherwise not. 230 | * @param output A pre-allocated array for the ARGB 8:8:8:8 output data. 231 | */ 232 | private static native void convertYUV420ToARGB8888( 233 | byte[] y, 234 | byte[] u, 235 | byte[] v, 236 | int[] output, 237 | int width, 238 | int height, 239 | int yRowStride, 240 | int uvRowStride, 241 | int uvPixelStride, 242 | boolean halfSize); 243 | 244 | /** 245 | * Converts YUV420 semi-planar data to RGB 565 data using the supplied width 246 | * and height. The input and output must already be allocated and non-null. 247 | * For efficiency, no error checking is performed. 248 | * 249 | * @param input The array of YUV 4:2:0 input data. 250 | * @param output A pre-allocated array for the RGB 5:6:5 output data. 251 | * @param width The width of the input image. 252 | * @param height The height of the input image. 253 | */ 254 | private static native void convertYUV420SPToRGB565( 255 | byte[] input, byte[] output, int width, int height); 256 | 257 | /** 258 | * Converts 32-bit ARGB8888 image data to YUV420SP data. This is useful, for 259 | * instance, in creating data to feed the classes that rely on raw camera 260 | * preview frames. 261 | * 262 | * @param input An array of input pixels in ARGB8888 format. 263 | * @param output A pre-allocated array for the YUV420SP output data. 264 | * @param width The width of the input image. 265 | * @param height The height of the input image. 266 | */ 267 | private static native void convertARGB8888ToYUV420SP( 268 | int[] input, byte[] output, int width, int height); 269 | 270 | /** 271 | * Converts 16-bit RGB565 image data to YUV420SP data. This is useful, for 272 | * instance, in creating data to feed the classes that rely on raw camera 273 | * preview frames. 274 | * 275 | * @param input An array of input pixels in RGB565 format. 276 | * @param output A pre-allocated array for the YUV420SP output data. 277 | * @param width The width of the input image. 278 | * @param height The height of the input image. 279 | */ 280 | private static native void convertRGB565ToYUV420SP( 281 | byte[] input, byte[] output, int width, int height); 282 | 283 | /** 284 | * Returns a transformation matrix from one reference frame into another. 285 | * Handles cropping (if maintaining aspect ratio is desired) and rotation. 286 | * 287 | * @param srcWidth Width of source frame. 288 | * @param srcHeight Height of source frame. 289 | * @param dstWidth Width of destination frame. 290 | * @param dstHeight Height of destination frame. 291 | * @param applyRotation Amount of rotation to apply from one frame to another. 292 | * Must be a multiple of 90. 293 | * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant, 294 | * cropping the image if necessary. 
295 | * @return The transformation fulfilling the desired requirements. 296 | */ 297 | public static Matrix getTransformationMatrix( 298 | final int srcWidth, 299 | final int srcHeight, 300 | final int dstWidth, 301 | final int dstHeight, 302 | final int applyRotation, 303 | final boolean maintainAspectRatio) { 304 | final Matrix matrix = new Matrix(); 305 | 306 | if (applyRotation != 0) { 307 | if (applyRotation % 90 != 0) { 308 | LOGGER.w("Rotation of %d % 90 != 0", applyRotation); 309 | } 310 | 311 | // Translate so center of image is at origin. 312 | matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f); 313 | 314 | // Rotate around origin. 315 | matrix.postRotate(applyRotation); 316 | } 317 | 318 | // Account for the already applied rotation, if any, and then determine how 319 | // much scaling is needed for each axis. 320 | final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0; 321 | 322 | final int inWidth = transpose ? srcHeight : srcWidth; 323 | final int inHeight = transpose ? srcWidth : srcHeight; 324 | 325 | // Apply scaling if necessary. 326 | if (inWidth != dstWidth || inHeight != dstHeight) { 327 | final float scaleFactorX = dstWidth / (float) inWidth; 328 | final float scaleFactorY = dstHeight / (float) inHeight; 329 | 330 | if (maintainAspectRatio) { 331 | // Scale by minimum factor so that dst is filled completely while 332 | // maintaining the aspect ratio. Some image may fall off the edge. 333 | final float scaleFactor = Math.max(scaleFactorX, scaleFactorY); 334 | matrix.postScale(scaleFactor, scaleFactor); 335 | } else { 336 | // Scale exactly to fill dst from src. 337 | matrix.postScale(scaleFactorX, scaleFactorY); 338 | } 339 | } 340 | 341 | if (applyRotation != 0) { 342 | // Translate back from origin centered reference to destination frame. 343 | matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f); 344 | } 345 | 346 | return matrix; 347 | } 348 | } 349 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/common/util/LogUtil.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.common.util; 2 | 3 | import android.util.Log; 4 | 5 | /** 6 | * @author pd_liu on 2017/12/29. 7 | *8 | * Log日志打印工具类 9 | *
10 | */ 11 | 12 | public final class LogUtil { 13 | 14 | private volatile static boolean sDebug = true; 15 | 16 | private LogUtil() { 17 | } 18 | 19 | public static void setDebug(boolean isDebug) { 20 | sDebug = isDebug; 21 | } 22 | 23 | public static void e(String tag, String message) { 24 | if (sDebug) { 25 | Log.e(tag, message); 26 | } 27 | } 28 | 29 | } 30 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/common/util/PhotoEnhanceUtil.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.common.util; 2 | 3 | import android.graphics.Bitmap; 4 | import android.graphics.Canvas; 5 | import android.graphics.ColorMatrix; 6 | import android.graphics.ColorMatrixColorFilter; 7 | import android.graphics.Paint; 8 | 9 | /** 10 | * @author pd_liu on 2018/1/10. 11 | *12 | * 图片增强 13 | *
14 | * <p> 15 | * Supports adjusting image brightness, contrast and saturation via {@link #handleImage(int)}. 16 | * <p>
17 | */ 18 | 19 | public class PhotoEnhanceUtil { 20 | 21 | /** 22 | * 处理图片的模式:饱和度、亮度、对比度 23 | */ 24 | public final int Enhance_Saturation = 0; 25 | public final int Enhance_Brightness = 1; 26 | public final int Enhance_Contrast = 2; 27 | 28 | /** 29 | * Bitmap 30 | */ 31 | private Bitmap mBitmap; 32 | private float saturationNum = 1.0F; 33 | private float brightNum = 0.0F; 34 | private float contrastNum = 1.0F; 35 | private ColorMatrix mAllMatrix = null; 36 | private ColorMatrix saturationMatrix = null; 37 | private ColorMatrix contrastMatrix = null; 38 | private ColorMatrix brightnessMatrix = null; 39 | 40 | public PhotoEnhanceUtil() { 41 | } 42 | 43 | public PhotoEnhanceUtil(Bitmap bitmap) { 44 | this.mBitmap = bitmap; 45 | } 46 | 47 | public float getSaturation() { 48 | return this.saturationNum; 49 | } 50 | 51 | public void setSaturation(int saturationNum) { 52 | this.saturationNum = (float) saturationNum * 1.0F / 128.0F; 53 | } 54 | 55 | public float getBrightness() { 56 | return this.brightNum; 57 | } 58 | 59 | public void setBrightness(int brightNum) { 60 | this.brightNum = (float) (brightNum - 128); 61 | } 62 | 63 | public float getContrast() { 64 | return this.contrastNum; 65 | } 66 | 67 | public void setContrast(int contrastNum) { 68 | this.contrastNum = (float) ((double) (contrastNum / 2 + 64) / 128.0D); 69 | } 70 | 71 | public Bitmap handleImage(int type) { 72 | Bitmap bmp = Bitmap.createBitmap(this.mBitmap.getWidth(), this.mBitmap.getHeight(), Bitmap.Config.ARGB_8888); 73 | Canvas canvas = new Canvas(bmp); 74 | Paint paint = new Paint(); 75 | paint.setAntiAlias(true); 76 | if (this.mAllMatrix == null) { 77 | this.mAllMatrix = new ColorMatrix(); 78 | } 79 | 80 | if (this.saturationMatrix == null) { 81 | this.saturationMatrix = new ColorMatrix(); 82 | } 83 | 84 | if (this.contrastMatrix == null) { 85 | this.contrastMatrix = new ColorMatrix(); 86 | } 87 | 88 | if (this.brightnessMatrix == null) { 89 | this.brightnessMatrix = new ColorMatrix(); 90 | } 91 | 92 | switch (type) { 93 | case 0: 94 | this.saturationMatrix.reset(); 95 | this.saturationMatrix.setSaturation(this.saturationNum); 96 | break; 97 | case 1: 98 | this.brightnessMatrix.reset(); 99 | this.brightnessMatrix.set(new float[]{1.0F, 0.0F, 0.0F, 0.0F, this.brightNum, 0.0F, 1.0F, 0.0F, 0.0F, this.brightNum, 0.0F, 0.0F, 1.0F, 0.0F, this.brightNum, 0.0F, 0.0F, 0.0F, 1.0F, 0.0F}); 100 | break; 101 | case 2: 102 | float regulateBright = 0.0F; 103 | regulateBright = (1.0F - this.contrastNum) * 128.0F; 104 | this.contrastMatrix.reset(); 105 | this.contrastMatrix.set(new float[]{this.contrastNum, 0.0F, 0.0F, 0.0F, regulateBright, 0.0F, this.contrastNum, 0.0F, 0.0F, regulateBright, 0.0F, 0.0F, this.contrastNum, 0.0F, regulateBright, 0.0F, 0.0F, 0.0F, 1.0F, 0.0F}); 106 | } 107 | 108 | this.mAllMatrix.reset(); 109 | this.mAllMatrix.postConcat(this.saturationMatrix); 110 | this.mAllMatrix.postConcat(this.brightnessMatrix); 111 | this.mAllMatrix.postConcat(this.contrastMatrix); 112 | paint.setColorFilter(new ColorMatrixColorFilter(this.mAllMatrix)); 113 | canvas.drawBitmap(this.mBitmap, 0.0F, 0.0F, paint); 114 | return bmp; 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/common/util/ScreenUtil.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.common.util; 2 | 3 | import android.app.Activity; 4 | import android.view.Surface; 5 | 6 | /** 7 | * @author pd_liu on 2018/1/5. 
8 | */ 9 | 10 | public final class ScreenUtil { 11 | 12 | private static final String TAG_LOG = "ScreenUtil"; 13 | 14 | private ScreenUtil() { 15 | } 16 | 17 | /** 18 | * 获取当前手机旋转的角度,影响因素{portrait、landscape} 19 | * 20 | * @param activity activity. 21 | * @return 当前手机旋转的角度 22 | */ 23 | public static int getScreenOrientation(Activity activity) { 24 | 25 | int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); 26 | 27 | switch (rotation) { 28 | 29 | case Surface.ROTATION_270: 30 | return 270; 31 | 32 | case Surface.ROTATION_180: 33 | return 180; 34 | 35 | case Surface.ROTATION_90: 36 | return 90; 37 | 38 | default: 39 | return 0; 40 | } 41 | } 42 | 43 | /** 44 | * 根据当前手机屏幕的旋转角度,进而计算出Surface预览的角度 45 | * 46 | * @param activity activity. 47 | * @return Camera display orientation. 48 | */ 49 | public static int getDisplayOrientation(Activity activity) { 50 | 51 | int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); 52 | 53 | LogUtil.e(TAG_LOG, "rotation:" + rotation); 54 | switch (rotation) { 55 | 56 | case Surface.ROTATION_0: 57 | return 90; 58 | 59 | case Surface.ROTATION_90: 60 | return 0; 61 | 62 | case Surface.ROTATION_180: 63 | return 270; 64 | 65 | case Surface.ROTATION_270: 66 | return 180; 67 | 68 | default: 69 | return 0; 70 | 71 | } 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/tensor/TensorFlowObjectDetectionAPIModel.java: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | package com.rossia.life.scan.tensor; 17 | 18 | import android.content.res.AssetManager; 19 | import android.graphics.Bitmap; 20 | import android.graphics.RectF; 21 | import android.os.Trace; 22 | 23 | import com.rossia.life.scan.tensor.env.Logger; 24 | import com.rossia.life.scan.tensor.interf.Classifier; 25 | 26 | import org.tensorflow.Graph; 27 | import org.tensorflow.Operation; 28 | import org.tensorflow.contrib.android.TensorFlowInferenceInterface; 29 | 30 | import java.io.BufferedReader; 31 | import java.io.IOException; 32 | import java.io.InputStream; 33 | import java.io.InputStreamReader; 34 | import java.util.ArrayList; 35 | import java.util.Comparator; 36 | import java.util.List; 37 | import java.util.PriorityQueue; 38 | import java.util.Vector; 39 | 40 | /** 41 | * Wrapper for frozen detection models trained using the Tensorflow Object Detection API: 42 | * github.com/tensorflow/models/tree/master/research/object_detection 43 | */ 44 | public class TensorFlowObjectDetectionAPIModel implements Classifier { 45 | private static final Logger LOGGER = new Logger(); 46 | 47 | // Only return this many results. 48 | private static final int MAX_RESULTS = 100; 49 | 50 | // Config values. 
51 | private String inputName; 52 | private int inputSize; 53 | 54 | // Pre-allocated buffers. 55 | private Vectornull, the caller's class name is used as the prefix. 65 | * 66 | * @param tag identifies the source of a log message. 67 | * @param messagePrefix prepended to every message if non-null. If null, the name of the caller is 68 | * being used 69 | */ 70 | public Logger(final String tag, final String messagePrefix) { 71 | this.tag = tag; 72 | final String prefix = messagePrefix == null ? getCallerSimpleName() : messagePrefix; 73 | this.messagePrefix = (prefix.length() > 0) ? prefix + ": " : prefix; 74 | } 75 | 76 | /** 77 | * Creates a Logger using the caller's class name as the message prefix. 78 | */ 79 | public Logger() { 80 | this(DEFAULT_TAG, null); 81 | } 82 | 83 | /** 84 | * Creates a Logger using the caller's class name as the message prefix. 85 | */ 86 | public Logger(final int minLogLevel) { 87 | this(DEFAULT_TAG, null); 88 | this.minLogLevel = minLogLevel; 89 | } 90 | 91 | public void setMinLogLevel(final int minLogLevel) { 92 | this.minLogLevel = minLogLevel; 93 | } 94 | 95 | public boolean isLoggable(final int logLevel) { 96 | return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel); 97 | } 98 | 99 | /** 100 | * Return caller's simple name. 101 | * 102 | * Android getStackTrace() returns an array that looks like this: 103 | * stackTrace[0]: dalvik.system.VMStack 104 | * stackTrace[1]: java.lang.Thread 105 | * stackTrace[2]: com.google.android.apps.unveil.env.UnveilLogger 106 | * stackTrace[3]: com.google.android.apps.unveil.BaseApplication 107 | * 108 | * This function returns the simple version of the first non-filtered name. 109 | * 110 | * @return caller's simple name 111 | */ 112 | private static String getCallerSimpleName() { 113 | // Get the current callstack so we can pull the class of the caller off of it. 114 | final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace(); 115 | 116 | for (final StackTraceElement elem : stackTrace) { 117 | final String className = elem.getClassName(); 118 | if (!IGNORED_CLASS_NAMES.contains(className)) { 119 | // We're only interested in the simple name of the class, not the complete package. 120 | final String[] classParts = className.split("\\."); 121 | return classParts[classParts.length - 1]; 122 | } 123 | } 124 | 125 | return Logger.class.getSimpleName(); 126 | } 127 | 128 | private String toMessage(final String format, final Object... args) { 129 | return messagePrefix + (args.length > 0 ? String.format(format, args) : format); 130 | } 131 | 132 | public void v(final String format, final Object... args) { 133 | if (isLoggable(Log.VERBOSE)) { 134 | Log.v(tag, toMessage(format, args)); 135 | } 136 | } 137 | 138 | public void v(final Throwable t, final String format, final Object... args) { 139 | if (isLoggable(Log.VERBOSE)) { 140 | Log.v(tag, toMessage(format, args), t); 141 | } 142 | } 143 | 144 | public void d(final String format, final Object... args) { 145 | if (isLoggable(Log.DEBUG)) { 146 | Log.d(tag, toMessage(format, args)); 147 | } 148 | } 149 | 150 | public void d(final Throwable t, final String format, final Object... args) { 151 | if (isLoggable(Log.DEBUG)) { 152 | Log.d(tag, toMessage(format, args), t); 153 | } 154 | } 155 | 156 | public void i(final String format, final Object... args) { 157 | if (isLoggable(Log.INFO)) { 158 | Log.i(tag, toMessage(format, args)); 159 | } 160 | } 161 | 162 | public void i(final Throwable t, final String format, final Object... 
args) { 163 | if (isLoggable(Log.INFO)) { 164 | Log.i(tag, toMessage(format, args), t); 165 | } 166 | } 167 | 168 | public void w(final String format, final Object... args) { 169 | if (isLoggable(Log.WARN)) { 170 | Log.w(tag, toMessage(format, args)); 171 | } 172 | } 173 | 174 | public void w(final Throwable t, final String format, final Object... args) { 175 | if (isLoggable(Log.WARN)) { 176 | Log.w(tag, toMessage(format, args), t); 177 | } 178 | } 179 | 180 | public void e(final String format, final Object... args) { 181 | if (isLoggable(Log.ERROR)) { 182 | Log.e(tag, toMessage(format, args)); 183 | } 184 | } 185 | 186 | public void e(final Throwable t, final String format, final Object... args) { 187 | if (isLoggable(Log.ERROR)) { 188 | Log.e(tag, toMessage(format, args), t); 189 | } 190 | } 191 | } 192 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/tensor/env/Size.java: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | package com.rossia.life.scan.tensor.env; 17 | 18 | import android.graphics.Bitmap; 19 | import android.text.TextUtils; 20 | 21 | import java.io.Serializable; 22 | import java.util.ArrayList; 23 | import java.util.List; 24 | 25 | /** 26 | * Size class independent of a Camera object. 27 | */ 28 | public class Size implements Comparable
11 | * Classifier. 12 | * <p>
13 | * <p> 14 | * The recognized results are stored in {@link Recognition}. 15 | * <p>
16 | * Generic interface for interacting with different recognition engines. 17 | */ 18 | public interface Classifier { 19 | /** 20 | * An immutable result returned by a Classifier describing what was recognized. 21 | */ 22 | public class Recognition { 23 | /** 24 | * A unique identifier for what has been recognized. Specific to the class, not the instance of 25 | * the object. 26 | */ 27 | private final String id; 28 | 29 | /** 30 | * Display name for the recognition. 31 | */ 32 | private final String title; 33 | 34 | /** 35 | * A sortable score for how good the recognition is relative to others. Higher should be better. 36 | */ 37 | private final Float confidence; 38 | 39 | /** 40 | * Optional location within the source image for the location of the recognized object. 41 | */ 42 | private RectF location; 43 | 44 | public Recognition( 45 | final String id, final String title, final Float confidence, final RectF location) { 46 | this.id = id; 47 | this.title = title; 48 | this.confidence = confidence; 49 | this.location = location; 50 | } 51 | 52 | public String getId() { 53 | return id; 54 | } 55 | 56 | public String getTitle() { 57 | return title; 58 | } 59 | 60 | public Float getConfidence() { 61 | return confidence; 62 | } 63 | 64 | public RectF getLocation() { 65 | return new RectF(location); 66 | } 67 | 68 | public void setLocation(RectF location) { 69 | this.location = location; 70 | } 71 | 72 | @Override 73 | public String toString() { 74 | String resultString = ""; 75 | if (id != null) { 76 | resultString += "[" + id + "] "; 77 | } 78 | 79 | if (title != null) { 80 | resultString += title + " "; 81 | } 82 | 83 | if (confidence != null) { 84 | resultString += String.format("(%.1f%%) ", confidence * 100.0f); 85 | } 86 | 87 | if (location != null) { 88 | resultString += location + " "; 89 | } 90 | 91 | return resultString.trim(); 92 | } 93 | } 94 | 95 | List33 | * Object detection tracker box. 34 | *
35 | */ 36 | public class MultiBoxTracker { 37 | 38 | private static final String TAG_LOG = "MultiBoxTracker"; 39 | 40 | private final Logger logger = new Logger(); 41 | 42 | private static final float TEXT_SIZE_DIP = 18; 43 | 44 | // Maximum percentage of a box that can be overlapped by another box at detection time. Otherwise 45 | // the lower scored box (new or old) will be removed. 46 | private static final float MAX_OVERLAP = 0.2f; 47 | 48 | private static final float MIN_SIZE = 16.0f; 49 | 50 | // Allow replacement of the tracked box with new results if 51 | // correlation has dropped below this level. 52 | private static final float MARGINAL_CORRELATION = 0.75f; 53 | 54 | // Consider object to be lost if correlation falls below this threshold. 55 | private static final float MIN_CORRELATION = 0.3f; 56 | 57 | /** 58 | * 当绘制Box边框完成后的Callback 59 | */ 60 | private OnDrawRectCompleteCallback mOnDrawRectCompleteCallback; 61 | 62 | /** 63 | * 当为检测出对象时Callback. 64 | */ 65 | private DetectionNothingCallback mDetectionNothingCallback; 66 | /** 67 | * 这里存储了绘制的颜色值 68 | */ 69 | private static final int[] COLORS = { 70 | Color.BLUE, Color.RED, Color.GREEN, Color.YELLOW, Color.CYAN, Color.MAGENTA, Color.WHITE, 71 | Color.parseColor("#55FF55"), Color.parseColor("#FFA500"), Color.parseColor("#FF8888"), 72 | Color.parseColor("#AAAAFF"), Color.parseColor("#FFFFAA"), Color.parseColor("#55AAAA"), 73 | Color.parseColor("#AA33AA"), Color.parseColor("#0D0068") 74 | }; 75 | 76 | private final Queue13 | * 裁剪图片 14 | *
15 | */ 16 | public class CropImageView extends ImageView { 17 | 18 | private static final String TAG = "CropImageView"; 19 | 20 | 21 | public CropImageView(Context context) { 22 | this(context, null); 23 | } 24 | 25 | public CropImageView(Context context, @Nullable AttributeSet attrs) { 26 | this(context, attrs, 0); 27 | } 28 | 29 | public CropImageView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { 30 | this(context, attrs, defStyleAttr, 0); 31 | } 32 | 33 | public CropImageView(Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) { 34 | super(context, attrs, defStyleAttr, defStyleRes); 35 | 36 | 37 | } 38 | 39 | @Override 40 | protected void onDraw(Canvas canvas) { 41 | super.onDraw(canvas); 42 | 43 | 44 | } 45 | 46 | @Override 47 | public boolean onTouchEvent(MotionEvent event) { 48 | 49 | int action = event.getAction(); 50 | 51 | switch (action) { 52 | case MotionEvent.ACTION_DOWN: 53 | 54 | break; 55 | 56 | case MotionEvent.ACTION_MOVE: 57 | break; 58 | 59 | case MotionEvent.ACTION_UP: 60 | 61 | break; 62 | default: 63 | } 64 | 65 | 66 | 67 | return super.onTouchEvent(event); 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/tensor/widget/OverlayView.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.tensor.widget; 2 | 3 | import android.content.Context; 4 | import android.graphics.Canvas; 5 | import android.support.annotation.Nullable; 6 | import android.util.AttributeSet; 7 | import android.view.View; 8 | 9 | import java.util.LinkedList; 10 | import java.util.List; 11 | 12 | /** 13 | * @author pd_liu on 2018/1/2. 14 | *15 | * 覆盖视图 16 | *
17 | * <p> 18 | * 1. Draw callbacks are stored internally. 19 | * 2. When {@link #invalidate()} or {@link #postInvalidate()} is called, the tasks stored in {@link #mCallbacks} are executed. 20 | * <p>
21 | */ 22 | 23 | public class OverlayView extends View { 24 | 25 | private static final String TAG_LOG = "OverlayView"; 26 | 27 | private final List<DrawCallback> mCallbacks = new LinkedList<>(); 46 | *
47 | * The method onFinishInflate() will be called after all children have been
48 | * added.
49 | *
50 | * @param context The Context the view is running in, through which it can
51 | * access the current theme, resources, etc.
52 | * @param attrs The attributes of the XML tag that is inflating the view.
53 | * @see #View(Context, AttributeSet, int)
54 | */
55 | public OverlayView(Context context, @Nullable AttributeSet attrs) {
56 | super(context, attrs);
57 | }
58 |
59 | /**
60 | * Perform inflation from XML and apply a class-specific base style from a
61 | * theme attribute. This constructor of View allows subclasses to use their
62 | * own base style when they are inflating. For example, a Button class's
63 | * constructor would call this version of the super class constructor and
64 | * supply R.attr.buttonStyle for defStyleAttr; this
65 | * allows the theme's button style to modify all of the base view attributes
66 | * (in particular its background) as well as the Button class's attributes.
67 | *
68 | * @param context The Context the view is running in, through which it can
69 | * access the current theme, resources, etc.
70 | * @param attrs The attributes of the XML tag that is inflating the view.
71 | * @param defStyleAttr An attribute in the current theme that contains a
72 | * reference to a style resource that supplies default values for
73 | * the view. Can be 0 to not look for defaults.
74 | * @see #View(Context, AttributeSet)
75 | */
76 | public OverlayView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
77 | super(context, attrs, defStyleAttr);
78 | }
79 |
80 | @Override
81 | protected void onDraw(Canvas canvas) {
82 | super.onDraw(canvas);
83 |
84 | /*
85 | Loop over and execute the stored draw tasks
86 | */
87 | for (DrawCallback callback : mCallbacks) {
88 | callback.callback(canvas);
89 | }
90 | }
91 |
92 | /**
93 | * Interface defining the callback for client classes.
94 | */
95 | public interface DrawCallback {
96 | /**
97 | * Callback
98 | *
99 | * @param canvas the canvas to draw on
100 | */
101 | void callback(Canvas canvas);
102 | }
103 |
104 | public void addCallback(DrawCallback drawCallback) {
105 | mCallbacks.add(drawCallback);
106 | }
107 | }
108 |
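A minimal usage sketch for OverlayView, assuming a host fragment whose inflated layout contains an OverlayView under the hypothetical id R.id.overlay_view; the registration and the postInvalidate() call follow the store-then-redraw mechanism described in the class javadoc.

// Hypothetical caller code (not part of this repository).
private void attachOverlay(View root) {
    final OverlayView overlay = (OverlayView) root.findViewById(R.id.overlay_view);

    // Register a drawing task once; OverlayView keeps it in mCallbacks.
    overlay.addCallback(new OverlayView.DrawCallback() {
        @Override
        public void callback(Canvas canvas) {
            // Draw whatever should sit on top of the camera preview here,
            // e.g. the boxes produced by the detection tracker.
        }
    });

    // Whenever new detection results arrive, schedule a redraw;
    // onDraw() then runs every registered callback.
    overlay.postInvalidate();
}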
--------------------------------------------------------------------------------
/scan/src/main/java/com/rossia/life/scan/tensor/widget/ScanImageView.java:
--------------------------------------------------------------------------------
1 | package com.rossia.life.scan.tensor.widget;
2 |
3 | import android.content.Context;
4 | import android.graphics.Canvas;
5 | import android.graphics.Color;
6 | import android.graphics.LinearGradient;
7 | import android.graphics.Paint;
8 | import android.graphics.Shader;
9 | import android.support.annotation.Nullable;
10 | import android.util.AttributeSet;
11 | import android.widget.ImageView;
12 |
13 | import com.rossia.life.scan.common.util.LogUtil;
14 |
15 | /**
16 | * @author pd_liu on 2018/1/4.
17 | *
18 | * ScanImageView:拓展ImageView 19 | * 不仅拥有ImageView属性,还:实现对图片的上下的扫描效果 20 | *
21 | */ 22 | 23 | public class ScanImageView extends ImageView { 24 | 25 | private static final String TAG_LOG = "ScanImageView"; 26 | 27 | private static final int DEFAULT_REPEAT_COUNT = 1; 28 | 29 | /** 30 | * 绘制扫描效果的画笔 31 | */ 32 | private Paint mScanBarPaint; 33 | 34 | /** 35 | * 需要绘制Line points 36 | */ 37 | float mLineStartX = 0f; 38 | float mLineStartY = 0f; 39 | float mLineStopX = 0f; 40 | float mLineStopY = 0f; 41 | 42 | private int mWidth; 43 | private int mHeight; 44 | 45 | /** 46 | * 扫描的重复次数 47 | */ 48 | private int mRepeatCount = DEFAULT_REPEAT_COUNT; 49 | 50 | /** 51 | * 当前扫描到了几次 52 | */ 53 | private int mCurrentRepeatCount; 54 | 55 | /** 56 | * 是否开启绘制效果 57 | */ 58 | private boolean mOpenScanBarActionFlag; 59 | 60 | /** 61 | * 每次扫描的间隔 62 | */ 63 | private float mPerScanInterval; 64 | 65 | private Shader mPaintShader; 66 | 67 | private ScanCompleteCallback mScanCompleteCallback; 68 | 69 | public ScanImageView(Context context) { 70 | this(context, null); 71 | } 72 | 73 | public ScanImageView(Context context, @Nullable AttributeSet attrs) { 74 | this(context, attrs, 0); 75 | } 76 | 77 | public ScanImageView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { 78 | this(context, attrs, defStyleAttr, 0); 79 | } 80 | 81 | public ScanImageView(Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) { 82 | super(context, attrs, defStyleAttr, defStyleRes); 83 | 84 | mScanBarPaint = new Paint(); 85 | mScanBarPaint.setAntiAlias(true); 86 | mScanBarPaint.setColor(Color.YELLOW); 87 | mScanBarPaint.setStrokeWidth(5f); 88 | 89 | } 90 | 91 | 92 | @Override 93 | protected void onDraw(Canvas canvas) { 94 | super.onDraw(canvas); 95 | 96 | //如果标记为打开扫描效果 97 | if (mOpenScanBarActionFlag) { 98 | 99 | 100 | if (mCurrentRepeatCount < mRepeatCount) { 101 | 102 | /* 103 | 绘制扫描效果 104 | */ 105 | if(mWidth == 0 || mHeight == 0){ 106 | mWidth = getWidth(); 107 | mHeight = getHeight(); 108 | mPerScanInterval = mHeight * 0.01f; 109 | } 110 | 111 | if(mPaintShader == null){ 112 | mPaintShader = new LinearGradient(0,0,0,mScanBarPaint.getStrokeWidth(), Color.YELLOW, Color.BLUE, LinearGradient.TileMode.CLAMP); 113 | } 114 | 115 | mLineStopX = mLineStartX + mWidth; 116 | 117 | if (mLineStartY <= mHeight) { 118 | 119 | //绘制扫描线条 120 | canvas.drawLine(mLineStartX, mLineStartY, mLineStopX, mLineStopY, mScanBarPaint); 121 | 122 | mLineStartY = mLineStartY + mPerScanInterval; 123 | mLineStopY = mLineStopY + mPerScanInterval; 124 | 125 | } else { 126 | mLineStartY = 0f; 127 | mLineStopY = 0f; 128 | mCurrentRepeatCount ++ ; 129 | } 130 | 131 | //invalidate 132 | postInvalidate(); 133 | 134 | } else { 135 | /* 136 | 已经扫描结束 137 | */ 138 | mCurrentRepeatCount = 0; 139 | mLineStartY = 0f; 140 | mLineStopY = 0f; 141 | mWidth = 0; 142 | mHeight = 0; 143 | mOpenScanBarActionFlag = false; 144 | 145 | if (mScanCompleteCallback != null) { 146 | mScanCompleteCallback.complete(); 147 | } 148 | } 149 | 150 | 151 | } 152 | 153 | } 154 | 155 | public void startScan(){ 156 | mCurrentRepeatCount = 0; 157 | mLineStartY = 0f; 158 | mLineStopY = 0f; 159 | mOpenScanBarActionFlag = true; 160 | postInvalidate(); 161 | } 162 | 163 | public void setScanCompleteCallback(ScanCompleteCallback completeCallback) { 164 | mScanCompleteCallback = completeCallback; 165 | } 166 | 167 | public void setopenScanBarAction(boolean openScanBar) { 168 | mOpenScanBarActionFlag = openScanBar; 169 | } 170 | 171 | public boolean isOpenScanBarAction() { 172 | return mOpenScanBarActionFlag; 173 | } 174 | 175 | public interface 
ScanCompleteCallback { 176 | void complete(); 177 | } 178 | } 179 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/transfer/SensorMoveControl.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.transfer; 2 | 3 | import android.content.Context; 4 | import android.hardware.Sensor; 5 | import android.hardware.SensorEvent; 6 | import android.hardware.SensorEventListener; 7 | import android.hardware.SensorManager; 8 | 9 | import com.rossia.life.scan.common.util.LogUtil; 10 | 11 | import java.util.Calendar; 12 | 13 | /** 14 | * @author pd_liu on 2018/1/16. 15 | *16 | * 传感器移动事件控制器 17 | *
18 | */ 19 | 20 | public class SensorMoveControl implements SensorEventListener { 21 | 22 | private static final String TAG_LOG = "SensorMoveControl"; 23 | 24 | private SensorManager mSensorManager; 25 | 26 | private Sensor mSensor; 27 | 28 | private SensorMoveListener mSensorMoveListener; 29 | 30 | private Calendar mCalendar; 31 | 32 | boolean isFocusing = false; 33 | boolean canFocusIn = false; //内部是否能够对焦控制机制 34 | boolean canFocus = false; 35 | 36 | private int mX, mY, mZ; 37 | 38 | public static final int STATUS_NONE = 0; 39 | public static final int STATUS_STATIC = 1; 40 | public static final int STATUS_MOVE = 2; 41 | private int STATUE = STATUS_NONE; 42 | 43 | private long lastStaticStamp = 0; 44 | 45 | /** 46 | * 延迟的时间 47 | */ 48 | public static final int DELEY_DURATION = 500; 49 | 50 | private SensorMoveControl(Context context) { 51 | mSensorManager = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE); 52 | mSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER, true); 53 | } 54 | 55 | public static SensorMoveControl newInstance(Context context) { 56 | return new SensorMoveControl(context); 57 | } 58 | 59 | 60 | @Override 61 | public void onSensorChanged(SensorEvent event) { 62 | if (event.sensor == null) { 63 | return; 64 | } 65 | if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) { 66 | int x = (int) event.values[0]; 67 | int y = (int) event.values[1]; 68 | int z = (int) event.values[2]; 69 | mCalendar = Calendar.getInstance(); 70 | long stamp = mCalendar.getTimeInMillis();// 1393844912 71 | 72 | int second = mCalendar.get(Calendar.SECOND);// 53 73 | 74 | if (STATUE != STATUS_NONE) { 75 | int px = Math.abs(mX - x); 76 | int py = Math.abs(mY - y); 77 | int pz = Math.abs(mZ - z); 78 | // Log.d(TAG, "pX:" + px + " pY:" + py + " pZ:" + pz + " stamp:" 79 | // + stamp + " second:" + second); 80 | double value = Math.sqrt(px * px + py * py + pz * pz); 81 | if (value > 1.0) {//1.4 82 | // textviewF.setText("检测手机在移动.."); 83 | LogUtil.e(TAG_LOG, "检测手机在移动mobile moving"); 84 | STATUE = STATUS_MOVE; 85 | if (mSensorMoveListener != null) { 86 | mSensorMoveListener.onMoving(); 87 | } 88 | } else { 89 | // textviewF.setText("检测手机静止.."); 90 | LogUtil.e(TAG_LOG, "检测手机静止mobile static"); 91 | //上一次状态是move,记录静态时间点 92 | if (STATUE == STATUS_MOVE) { 93 | lastStaticStamp = stamp; 94 | canFocusIn = true; 95 | } 96 | 97 | if (canFocusIn) { 98 | if (stamp - lastStaticStamp > DELEY_DURATION) { 99 | //移动后静止一段时间,可以发生对焦行为 100 | if (!isFocusing) { 101 | canFocusIn = false; 102 | // onCameraFocus(); 103 | if (mSensorMoveListener != null) { 104 | mSensorMoveListener.onStaticing(); 105 | } 106 | } 107 | } 108 | } 109 | 110 | STATUE = STATUS_STATIC; 111 | } 112 | } else { 113 | lastStaticStamp = stamp; 114 | STATUE = STATUS_STATIC; 115 | } 116 | 117 | mX = x; 118 | mY = y; 119 | mZ = z; 120 | } 121 | } 122 | 123 | public void startSensor() { 124 | STATUE = STATUS_NONE; 125 | canFocusIn = false; 126 | mX = 0; 127 | mY = 0; 128 | mZ = 0; 129 | 130 | canFocus = true; 131 | mSensorManager.registerListener(this, mSensor, SensorManager.SENSOR_DELAY_NORMAL); 132 | } 133 | 134 | public void stopSensor() { 135 | mSensorManager.unregisterListener(this, mSensor); 136 | canFocus = false; 137 | } 138 | 139 | @Override 140 | public void onAccuracyChanged(Sensor sensor, int accuracy) { 141 | 142 | } 143 | 144 | public void setSensorMoveListener(SensorMoveListener sensorMoveListener) { 145 | mSensorMoveListener = sensorMoveListener; 146 | } 147 | 148 | public interface SensorMoveListener { 149 | 
void onMoving(); 150 | 151 | void onStaticing(); 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/transfer/TransferSample.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.transfer; 2 | 3 | /** 4 | * Created by pd_liu on 2017/12/29. 5 | */ 6 | 7 | public class TransferSample { 8 | 9 | /** 10 | * 加载C++ 的编译产出,这是必须的代码. 11 | */ 12 | static { 13 | System.loadLibrary("native-lib"); 14 | } 15 | 16 | /** 17 | * 这是对外暴露的接口,在接口内部对JNI进行调用. 18 | * 19 | * @param a 数值 20 | */ 21 | public String convertIntToString(int a) { 22 | 23 | /* 24 | 调用底层 25 | */ 26 | StringClass stringClass = new StringClass(); 27 | jni_string(5, stringClass.value); 28 | 29 | return stringClass.value; 30 | } 31 | 32 | /** 33 | * 这是调用C++代码的JNI方法,String 34 | */ 35 | 36 | private static native int jni_string(int input, String out); 37 | 38 | private static native void jni_2(int input, StringClass output); 39 | 40 | class StringClass { 41 | String value; 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/ui/detector/CameraApi2Fragment.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.ui.detector; 2 | 3 | import android.os.Bundle; 4 | 5 | /** 6 | * @author by pd_liu on 2017/12/29. 7 | *8 | * 支持Camera2包下的最新Api 9 | *
10 | */ 11 | 12 | public class CameraApi2Fragment { 13 | 14 | private static final String TAG_LOG = "CameraApi2Fragment"; 15 | 16 | private String mCameraID; 17 | 18 | private int mFragmentLayoutID; 19 | 20 | private CameraApi2Fragment(final String cameraId, final int layout){ 21 | this.mCameraID = cameraId; 22 | this.mFragmentLayoutID = layout; 23 | } 24 | 25 | public static CameraApi2Fragment newInstance(String cameraId, int layoutId) { 26 | 27 | CameraApi2Fragment fragment = new CameraApi2Fragment(cameraId, layoutId); 28 | return fragment; 29 | } 30 | 31 | 32 | } 33 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/ui/detector/CameraConnectionFragment.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.ui.detector; 2 | 3 | import android.app.Activity; 4 | import android.content.Context; 5 | import android.graphics.SurfaceTexture; 6 | import android.hardware.camera2.CameraAccessException; 7 | import android.hardware.camera2.CameraCharacteristics; 8 | import android.hardware.camera2.CameraManager; 9 | import android.hardware.camera2.params.StreamConfigurationMap; 10 | import android.net.Uri; 11 | import android.os.Bundle; 12 | import android.support.annotation.Nullable; 13 | import android.support.v4.app.Fragment; 14 | import android.util.SparseIntArray; 15 | import android.view.LayoutInflater; 16 | import android.view.Surface; 17 | import android.view.TextureView; 18 | import android.view.View; 19 | import android.view.ViewGroup; 20 | 21 | import com.rossia.life.scan.R; 22 | import com.rossia.life.scan.common.util.LogUtil; 23 | 24 | /** 25 | * @author pd_liu 2017/12/29. 26 | *27 | * 相机 28 | *
29 | * <p> 30 | * Note: 31 | * 1. Capturing frames with the front-facing camera is not supported. 32 | * <p>
33 | * A simple {@link Fragment} subclass. 34 | * Activities that contain this fragment must implement the 35 | * {@link CameraConnectionFragment.OnFragmentInteractionListener} interface 36 | * to handle interaction events. 37 | * Use the {@link CameraConnectionFragment#newInstance} factory method to 38 | * create an instance of this fragment. 39 | */ 40 | public class CameraConnectionFragment extends Fragment { 41 | 42 | private static final String TAG_LOG = "CameraConnectionFragment"; 43 | 44 | // TODO: Rename parameter arguments, choose names that match 45 | // the fragment initialization parameters, e.g. ARG_ITEM_NUMBER 46 | private static final String ARG_PARAM1 = "param1"; 47 | private static final String ARG_PARAM2 = "param2"; 48 | 49 | // TODO: Rename and change types of parameters 50 | private String mParam1; 51 | private String mParam2; 52 | 53 | /** 54 | * Conversion from screen rotation to JPEG orientation. 55 | */ 56 | private static final SparseIntArray ORIENTATIONS = new SparseIntArray(); 57 | 58 | static { 59 | ORIENTATIONS.append(Surface.ROTATION_0, 90); 60 | ORIENTATIONS.append(Surface.ROTATION_90, 0); 61 | ORIENTATIONS.append(Surface.ROTATION_180, 270); 62 | ORIENTATIONS.append(Surface.ROTATION_270, 180); 63 | } 64 | 65 | private OnFragmentInteractionListener mListener; 66 | 67 | /** 68 | * Texture view display the camera output resource of image 69 | */ 70 | private TextureView mTextureView; 71 | 72 | /** 73 | * 当前Fragment显示的Layout的资源ID 74 | */ 75 | private int mFragmentContentLayoutID; 76 | 77 | public CameraConnectionFragment(int layoutId) { 78 | // Required empty public constructor 79 | this.mFragmentContentLayoutID = layoutId; 80 | } 81 | 82 | /** 83 | * Use this factory method to create a new instance of 84 | * this fragment using the provided parameters. 85 | * 86 | * @param layoutId Fragment layout resource id. 87 | * @return A new instance of fragment CameraConnectionFragment. 88 | */ 89 | public static CameraConnectionFragment newInstance(int layoutId) { 90 | CameraConnectionFragment fragment = new CameraConnectionFragment(layoutId); 91 | return fragment; 92 | } 93 | 94 | private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() { 95 | @Override 96 | public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { 97 | 98 | } 99 | 100 | @Override 101 | public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { 102 | 103 | } 104 | 105 | @Override 106 | public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) { 107 | return false; 108 | } 109 | 110 | @Override 111 | public void onSurfaceTextureUpdated(SurfaceTexture surface) { 112 | 113 | } 114 | }; 115 | 116 | @Override 117 | public void onCreate(Bundle savedInstanceState) { 118 | super.onCreate(savedInstanceState); 119 | } 120 | 121 | @Override 122 | public View onCreateView(LayoutInflater inflater, ViewGroup container, 123 | Bundle savedInstanceState) { 124 | // Inflate the layout for this fragment 125 | return inflater.inflate(R.layout.fragment_camera_connection, container, false); 126 | } 127 | 128 | @Override 129 | public void onViewCreated(View view, @Nullable Bundle savedInstanceState) { 130 | super.onViewCreated(view, savedInstanceState); 131 | 132 | //findView. 
133 | mTextureView = view.findViewById(R.id.texture_view);
134 | }
135 |
136 | @Override
137 | public void onResume() {
138 | super.onResume();
139 |
140 | if (mTextureView.isAvailable()) {
141 | startCamera();
142 | } else {
143 | mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
144 | }
145 | }
146 |
147 | @Override
148 | public void onPause() {
149 | super.onPause();
150 | }
151 |
152 | @Override
153 | public void onAttach(Context context) {
154 | super.onAttach(context);
155 | if (context instanceof OnFragmentInteractionListener) {
156 | mListener = (OnFragmentInteractionListener) context;
157 | } else {
158 | throw new RuntimeException(context.toString()
159 | + " must implement OnFragmentInteractionListener");
160 | }
161 | }
162 |
163 | @Override
164 | public void onDetach() {
165 | super.onDetach();
166 | mListener = null;
167 | }
168 |
169 | /**
170 | * Start the camera.
171 | */
172 | private void startCamera() {
173 |
174 | try {
175 | // Current camera ID.
176 | String cameraId = chooseCamera();
177 |
178 | // TODO: open the selected camera (CameraManager#openCamera) and start the preview session.
179 | } catch (CameraAccessException e) {
180 | e.printStackTrace();
181 | }
182 | }
183 |
184 | /**
185 | * @return CameraId
186 | * @throws CameraAccessException if the camera device cannot be queried.
187 | */
188 | private String chooseCamera() throws CameraAccessException {
189 |
190 | final Activity activity = getActivity();
191 |
192 | CameraManager cameraManager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
193 |
194 | // Get the list of all available camera IDs.
195 | String[] cameraIdList = cameraManager.getCameraIdList();
196 |
197 | for (String cameraId : cameraIdList) {
198 |
199 | CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
200 |
201 | // We don't use a front facing camera in this sample.
202 | final Integer facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
203 |
204 | if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
205 | /*
206 | Capturing frames with the front-facing camera is not supported.
207 | */
208 | continue;
209 | }
210 |
211 | // Available stream configurations that this camera device supports, including the minimum frame duration and stall duration for each format/size combination.
212 | StreamConfigurationMap scalerStreamConfigurationMap = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
213 |
214 | if (scalerStreamConfigurationMap == null) {
215 | // Skip camera devices that report no stream configurations instead of aborting the whole search.
216 | continue;
217 | }
218 |
219 | // The camera device is an external camera with no fixed facing relative to the device's screen, or it supports hardware level INFO_SUPPORTED_HARDWARE_LEVEL_FULL.
220 | // if (facing == CameraCharacteristics.LENS_FACING_EXTERNAL || isHardwareLevelSupported(cameraCharacteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL)) {
221 | // LogUtil.e(TAG_LOG, "Camera API level 2 ? : true");
222 | // }
223 |
224 | return cameraId;
225 | }
226 |
227 | return null;
228 | }
229 |
230 | /**
231 | * Returns true if the device supports the required hardware level, or better.
232 | *
233 | * @param characteristics {@link CameraCharacteristics}
234 | * @param requiredLevel the required hardware support level.
235 | * @return whether the device supports the required hardware level.
236 | */
237 | // private boolean isHardwareLevelSupported(CameraCharacteristics characteristics, int requiredLevel) {
238 | //
239 | // int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
240 | //
241 | // // The supported hardware level is a high-level description of the camera device's capabilities, summarizing several capabilities into a single field.
242 | // // Each level adds features on top of the previous one and is always a strict superset of it. The ordering is LEGACY < LIMITED < FULL < LEVEL_3.
243 | // if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
244 | // return requiredLevel == deviceLevel;
245 | // }
246 | //
247 | // return requiredLevel <= deviceLevel;
248 | // }
249 |
250 | /**
251 | * This interface must be implemented by activities that contain this
252 | * fragment to allow an interaction in this fragment to be communicated
253 | * to the activity and potentially other fragments contained in that
254 | * activity.
255 | *
256 | * See the Android Training lesson <a href=
257 | * "http://developer.android.com/training/basics/fragments/communicating.html"
258 | * >Communicating with Other Fragments</a> for more information.
259 | */
260 | public interface OnFragmentInteractionListener {
261 | // TODO: Update argument type and name
262 | void onFragmentInteraction(Uri uri);
263 | }
264 | }
265 |
--------------------------------------------------------------------------------
/scan/src/main/java/com/rossia/life/scan/ui/interf/TakePictureCallback.java:
--------------------------------------------------------------------------------
1 | package com.rossia.life.scan.ui.interf;
2 |
3 | import android.graphics.Bitmap;
4 |
5 | /**
6 | * @author pd_liu on 2018/1/8.
7 | *
8 | * Take-picture callback.
10 | */
11 |
12 | public interface TakePictureCallback {
13 | /**
14 | * Called when a processed picture is available.
15 | *
16 | * @param bitmap the processed bitmap of the captured picture.
17 | */
18 | void call(Bitmap bitmap);
19 | }
20 |
--------------------------------------------------------------------------------
/scan/src/main/java/com/rossia/life/scan/ui/view/DrawColorView.java:
--------------------------------------------------------------------------------
1 | package com.rossia.life.scan.ui.view;
2 |
3 | import android.content.Context;
4 | import android.graphics.Canvas;
5 | import android.support.annotation.Nullable;
6 | import android.util.AttributeSet;
7 | import android.view.View;
8 |
9 | /**
10 | * @author pd_liu on 2018/1/5.
11 | */
12 |
13 | public class DrawColorView extends View {
14 |
15 | private DrawColorListener mDrawColorListener;
16 |
17 | /**
18 | * Simple constructor to use when creating a view from code.
19 | *
20 | * @param context The Context the view is running in, through which it can
21 | * access the current theme, resources, etc.
22 | */
23 | public DrawColorView(Context context) {
24 | super(context);
25 | }
26 |
27 | /**
28 | * Constructor that is called when inflating a view from XML. This is called
29 | * when a view is being constructed from an XML file, supplying attributes
30 | * that were specified in the XML file. This version uses a default style of
31 | * 0, so the only attribute values applied are those in the Context's Theme
32 | * and the given AttributeSet.
33 | *
34 | *
35 | * The method onFinishInflate() will be called after all children have been
36 | * added.
37 | *
38 | * @param context The Context the view is running in, through which it can
39 | * access the current theme, resources, etc.
40 | * @param attrs The attributes of the XML tag that is inflating the view.
41 | * @see #View(Context, AttributeSet, int)
42 | */
43 | public DrawColorView(Context context, @Nullable AttributeSet attrs) {
44 | super(context, attrs);
45 | }
46 |
47 | /**
48 | * Perform inflation from XML and apply a class-specific base style from a
49 | * theme attribute. This constructor of View allows subclasses to use their
50 | * own base style when they are inflating. For example, a Button class's
51 | * constructor would call this version of the super class constructor and
52 | * supply R.attr.buttonStyle for defStyleAttr; this
53 | * allows the theme's button style to modify all of the base view attributes
54 | * (in particular its background) as well as the Button class's attributes.
55 | *
56 | * @param context The Context the view is running in, through which it can
57 | * access the current theme, resources, etc.
58 | * @param attrs The attributes of the XML tag that is inflating the view.
59 | * @param defStyleAttr An attribute in the current theme that contains a
60 | * reference to a style resource that supplies default values for
61 | * the view. Can be 0 to not look for defaults.
62 | * @see #View(Context, AttributeSet)
63 | */
64 | public DrawColorView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
65 | super(context, attrs, defStyleAttr);
66 | }
67 |
68 | /**
69 | * Perform inflation from XML and apply a class-specific base style from a
70 | * theme attribute or style resource. This constructor of View allows
71 | * subclasses to use their own base style when they are inflating.
72 | *
73 | * When determining the final value of a particular attribute, there are
74 | * four inputs that come into play:
75 | * <ol>
76 | * <li>Any attribute values in the given AttributeSet.
77 | * <li>The style resource specified in the AttributeSet (named "style").
78 | * <li>The default style specified by defStyleAttr.
79 | * <li>The default style specified by defStyleRes.
80 | * <li>The base values in this theme.
81 | * </ol>
82 | *
83 | * Each of these inputs is considered in-order, with the first listed taking
84 | * precedence over the following ones. In other words, if in the
85 | * AttributeSet you have supplied <Button * textColor="#ff000000">
86 | * , then the button's text will always be black, regardless of
87 | * what is specified in any of the styles.
88 | *
89 | * @param context The Context the view is running in, through which it can
90 | * access the current theme, resources, etc.
91 | * @param attrs The attributes of the XML tag that is inflating the view.
92 | * @param defStyleAttr An attribute in the current theme that contains a
93 | * reference to a style resource that supplies default values for
94 | * the view. Can be 0 to not look for defaults.
95 | * @param defStyleRes A resource identifier of a style resource that
96 | * supplies default values for the view, used only if
97 | * defStyleAttr is 0 or can not be found in the theme. Can be 0
98 | * to not look for defaults.
99 | * @see #View(Context, AttributeSet, int)
100 | */
101 | public DrawColorView(Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) {
102 | super(context, attrs, defStyleAttr, defStyleRes);
103 | }
104 |
105 | @Override
106 | protected void onDraw(Canvas canvas) {
107 | super.onDraw(canvas);
108 | if(mDrawColorListener != null){
109 | mDrawColorListener.drawColor(canvas);
110 | }
111 | }
112 |
113 | public void draw(DrawColorListener listener) { // Register the listener and schedule a redraw so it receives a Canvas in onDraw.
114 | mDrawColorListener = listener;
115 | postInvalidate();
116 | }
117 |
118 | public interface DrawColorListener{
119 | void drawColor(Canvas canvas);
120 | }
121 | }
122 |
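DrawColorView renders nothing on its own: draw(DrawColorListener) stores the listener and calls postInvalidate(), and the next onDraw pass hands the Canvas to that listener. A minimal caller sketch follows, assuming the view already sits in the inflated layout under a hypothetical id R.id.draw_color_view; the id and the overlay color are illustrative assumptions, not taken from this project:

    // Hypothetical caller, e.g. inside the Activity or Fragment that owns the view.
    DrawColorView drawColorView = findViewById(R.id.draw_color_view); // id is an assumption
    drawColorView.draw(new DrawColorView.DrawColorListener() {
        @Override
        public void drawColor(Canvas canvas) {
            // Paint a translucent highlight over the whole view, e.g. while a document edge is detected.
            canvas.drawColor(Color.parseColor("#5500FF00"));
        }
    });

Because onDraw only delegates to the registered listener, the scanning UI can change what is overlaid (borders, fill colors) without subclassing the view again.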
--------------------------------------------------------------------------------
/scan/src/main/res/drawable/bg_auto_take_pick.xml:
--------------------------------------------------------------------------------
1 |
2 |