├── .gitignore ├── .idea ├── gradle.xml ├── inspectionProfiles │ └── Project_Default.xml ├── misc.xml ├── modules.xml ├── runConfigurations.xml └── vcs.xml ├── README.md ├── app ├── .gitignore ├── CMakeLists.txt ├── build.gradle ├── proguard-rules.pro └── src │ ├── androidTest │ └── java │ │ └── com │ │ └── rossia │ │ └── life │ │ └── documentscan │ │ └── ExampleInstrumentedTest.java │ ├── main │ ├── AndroidManifest.xml │ ├── cpp │ │ └── native-lib.cpp │ ├── java │ │ └── com │ │ │ └── rossia │ │ │ └── life │ │ │ └── documentscan │ │ │ └── MainActivity.java │ └── res │ │ ├── drawable-v24 │ │ └── ic_launcher_foreground.xml │ │ ├── drawable │ │ └── ic_launcher_background.xml │ │ ├── layout │ │ └── activity_main.xml │ │ ├── mipmap-anydpi-v26 │ │ ├── ic_launcher.xml │ │ └── ic_launcher_round.xml │ │ ├── mipmap-hdpi │ │ ├── ic_launcher.png │ │ └── ic_launcher_round.png │ │ ├── mipmap-mdpi │ │ ├── ic_launcher.png │ │ └── ic_launcher_round.png │ │ ├── mipmap-xhdpi │ │ ├── ic_launcher.png │ │ └── ic_launcher_round.png │ │ ├── mipmap-xxhdpi │ │ ├── ic_launcher.png │ │ └── ic_launcher_round.png │ │ ├── mipmap-xxxhdpi │ │ ├── ic_launcher.png │ │ └── ic_launcher_round.png │ │ └── values │ │ ├── colors.xml │ │ ├── strings.xml │ │ └── styles.xml │ └── test │ └── java │ └── com │ └── rossia │ └── life │ └── documentscan │ └── ExampleUnitTest.java ├── build.gradle ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── scan ├── .gitignore ├── CMakeLists.txt ├── build.gradle ├── libs │ └── libtensorflow_demo.so ├── proguard-rules.pro └── src │ ├── androidTest │ └── java │ │ └── com │ │ └── rossia │ │ └── life │ │ └── scan │ │ └── ExampleInstrumentedTest.java │ ├── main │ ├── AndroidManifest.xml │ ├── assets │ │ ├── BUILD │ │ ├── coco_labels_list.txt │ │ └── ssd_mobilenet_v1_android_export.pb │ ├── cpp │ │ ├── CMakeLists.txt │ │ ├── __init__.py │ │ ├── imageutils_jni.cc │ │ ├── include │ │ │ ├── Scanner.h │ 
│ │ └── android_utils.h │ │ ├── native-lib.cpp │ │ ├── rgb2yuv.cc │ │ ├── rgb2yuv.h │ │ ├── yuv2rgb.cc │ │ └── yuv2rgb.h │ ├── java │ │ └── com │ │ │ └── rossia │ │ │ └── life │ │ │ └── scan │ │ │ ├── common │ │ │ └── util │ │ │ │ ├── CameraUtil.java │ │ │ │ ├── ImageUtil.java │ │ │ │ ├── ImageUtils.java │ │ │ │ ├── LogUtil.java │ │ │ │ ├── PhotoEnhanceUtil.java │ │ │ │ └── ScreenUtil.java │ │ │ ├── tensor │ │ │ ├── TensorFlowObjectDetectionAPIModel.java │ │ │ ├── env │ │ │ │ ├── BorderedText.java │ │ │ │ ├── Logger.java │ │ │ │ ├── Size.java │ │ │ │ └── SplitTimer.java │ │ │ ├── interf │ │ │ │ └── Classifier.java │ │ │ ├── tracking │ │ │ │ ├── MultiBoxTracker.java │ │ │ │ └── ObjectTracker.java │ │ │ └── widget │ │ │ │ ├── CropImageView.java │ │ │ │ ├── OverlayView.java │ │ │ │ └── ScanImageView.java │ │ │ ├── transfer │ │ │ ├── SensorMoveControl.java │ │ │ └── TransferSample.java │ │ │ └── ui │ │ │ ├── detector │ │ │ ├── CameraApi2Fragment.java │ │ │ ├── CameraApiFragment.java │ │ │ └── CameraConnectionFragment.java │ │ │ ├── interf │ │ │ └── TakePictureCallback.java │ │ │ └── view │ │ │ └── DrawColorView.java │ └── res │ │ ├── drawable │ │ ├── bg_auto_take_pick.xml │ │ ├── bg_take_picture.xml │ │ └── btn_background.xml │ │ ├── layout │ │ └── fragment_camera_connection.xml │ │ ├── mipmap-xhdpi │ │ ├── camera_icon_shutter.png │ │ ├── camera_icon_shutter_pressed.png │ │ ├── ic_launcher.png │ │ ├── ic_launcher_round.png │ │ ├── scan_flash_off.png │ │ └── scan_flash_on.png │ │ ├── mipmap-xxhdpi │ │ ├── camera_icon_shutter.png │ │ ├── camera_icon_shutter_pressed.png │ │ ├── ic_launcher.png │ │ └── ic_launcher_round.png │ │ └── values │ │ ├── attrs.xml │ │ ├── colors.xml │ │ ├── dimens.xml │ │ └── strings.xml │ └── test │ └── java │ └── com │ └── rossia │ └── life │ └── scan │ └── ExampleUnitTest.java └── settings.gradle /.gitignore: -------------------------------------------------------------------------------- 1 | *.iml 2 | .gradle 3 | /local.properties 4 | 
/.idea/workspace.xml 5 | /.idea/libraries 6 | .DS_Store 7 | /build 8 | /captures 9 | .externalNativeBuild 10 | -------------------------------------------------------------------------------- /.idea/gradle.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 18 | 19 | -------------------------------------------------------------------------------- /.idea/misc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 16 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | Android 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 49 | 50 | 51 | 52 | 53 | 1.8 54 | 55 | 60 | 61 | 62 | 63 | 64 | 65 | -------------------------------------------------------------------------------- /.idea/modules.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /.idea/runConfigurations.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 11 | 12 | -------------------------------------------------------------------------------- /.idea/vcs.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DocumentScan 2 | 仿有道云笔记App端 文档扫描 功能 3 | 4 | # Usage: 5 | 1、Add it in your root build.gradle at the end of repositories: 6 | 7 | allprojects { 8 | repositories { 9 | ... 10 | maven { url 'https://jitpack.io' } 11 | } 12 | } 13 | 2、Add it in your root build.gradle at the end of repositories: 14 | 15 | dependencies { 16 | compile 'com.github.pdliugithub:DocumentScan:+' 17 | } 18 | 19 | 3、Code: 20 | 21 | //Create instance. 
22 | CameraApiFragment cameraApiFragment = CameraApiFragment.newInstance(); 23 | 24 | //add. 25 | getSupportFragmentManager().beginTransaction().add(R.id.container, cameraApiFragment, "api").commit(); 26 | 27 | //take picture callback. 28 | cameraApiFragment.setTakePictureCallback(new TakePictureCallback() { 29 | @Override 30 | public void call(Bitmap bitmap) { 31 | showImg.setImageBitmap(bitmap); 32 | } 33 | }); 34 | 35 | [![](https://jitpack.io/v/pdliugithub/DocumentScan.svg)](https://jitpack.io/#pdliugithub/DocumentScan) 36 | 37 | \\ // 38 | \\ .ooo. // 39 | .@@@@@@@@. 40 | :@@@@@@@@@@@@@: 41 | :@@. '@@@@@' .@@: 42 | @@@@@@@@@@@@@@@@@ 43 | @@@@@@@@@@@@@@@@@ 44 | 45 | :@@ :@@@@@@@@@@@@@@@@@. @@: 46 | @@@ '@@@@@@@@@@@@@@@@@, @@@ 47 | @@@ '@@@@@@@@@@@@@@@@@, @@@ 48 | @@@ '@@@@@@@@@@@@@@@@@, @@@ 49 | @@@ '@@@@@@@@@@@@@@@@@, @@@ 50 | @@@ '@@@@@@@@@@@@@@@@@, @@@ 51 | @@@ '@@@@@@@@@@@@@@@@@, @@@ 52 | @@@@@@@@@@@@@@@@@ 53 | '@@@@@@@@@@@@@@@' 54 | @@@@ @@@@ 55 | @@@@ @@@@ 56 | @@@@ @@@@ 57 | '@@' '@@' 58 | 59 | ___________ 60 | -------------------------------------------------------------------------------- /app/.gitignore: -------------------------------------------------------------------------------- 1 | /build 2 | -------------------------------------------------------------------------------- /app/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | # For more information about using CMake with Android Studio, read the 2 | # documentation: https://d.android.com/studio/projects/add-native-code.html 3 | 4 | # Sets the minimum version of CMake required to build the native library. 5 | 6 | cmake_minimum_required(VERSION 3.4.1) 7 | 8 | # Creates and names a library, sets it as either STATIC 9 | # or SHARED, and provides the relative paths to its source code. 10 | # You can define multiple libraries, and CMake builds them for you. 11 | # Gradle automatically packages shared libraries with your APK. 
12 | 13 | add_library( # Sets the name of the library. 14 | native-lib 15 | 16 | # Sets the library as a shared library. 17 | SHARED 18 | 19 | # Provides a relative path to your source file(s). 20 | src/main/cpp/native-lib.cpp ) 21 | 22 | # Searches for a specified prebuilt library and stores the path as a 23 | # variable. Because CMake includes system libraries in the search path by 24 | # default, you only need to specify the name of the public NDK library 25 | # you want to add. CMake verifies that the library exists before 26 | # completing its build. 27 | 28 | find_library( # Sets the name of the path variable. 29 | log-lib 30 | 31 | # Specifies the name of the NDK library that 32 | # you want CMake to locate. 33 | log ) 34 | 35 | # Specifies libraries CMake should link to your target library. You 36 | # can link multiple libraries, such as libraries you define in this 37 | # build script, prebuilt third-party libraries, or system libraries. 38 | 39 | target_link_libraries( # Specifies the target library. 40 | native-lib 41 | 42 | # Links the target library to the log library 43 | # included in the NDK. 
44 | ${log-lib} ) -------------------------------------------------------------------------------- /app/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'com.android.application' 2 | 3 | android { 4 | compileSdkVersion 26 5 | defaultConfig { 6 | applicationId "com.rossia.life.documentscan" 7 | minSdkVersion 19 8 | targetSdkVersion 26 9 | versionCode 1 10 | versionName "1.0" 11 | testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" 12 | } 13 | buildTypes { 14 | release { 15 | minifyEnabled false 16 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' 17 | } 18 | } 19 | } 20 | 21 | dependencies { 22 | implementation fileTree(include: ['*.jar'], dir: 'libs') 23 | implementation 'com.android.support:appcompat-v7:26.1.0' 24 | implementation 'com.android.support.constraint:constraint-layout:1.0.2' 25 | testImplementation 'junit:junit:4.12' 26 | androidTestImplementation 'com.android.support.test:runner:1.0.1' 27 | androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1' 28 | implementation project(':scan') 29 | compile 'com.github.chrisbanes:PhotoView:2.0.0' 30 | } 31 | -------------------------------------------------------------------------------- /app/proguard-rules.pro: -------------------------------------------------------------------------------- 1 | # Add project specific ProGuard rules here. 2 | # You can control the set of applied configuration files using the 3 | # proguardFiles setting in build.gradle. 
4 | # 5 | # For more details, see 6 | # http://developer.android.com/guide/developing/tools/proguard.html 7 | 8 | # If your project uses WebView with JS, uncomment the following 9 | # and specify the fully qualified class name to the JavaScript interface 10 | # class: 11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview { 12 | # public *; 13 | #} 14 | 15 | # Uncomment this to preserve the line number information for 16 | # debugging stack traces. 17 | #-keepattributes SourceFile,LineNumberTable 18 | 19 | # If you keep the line number information, uncomment this to 20 | # hide the original source file name. 21 | #-renamesourcefileattribute SourceFile 22 | -------------------------------------------------------------------------------- /app/src/androidTest/java/com/rossia/life/documentscan/ExampleInstrumentedTest.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.documentscan; 2 | 3 | import android.content.Context; 4 | import android.support.test.InstrumentationRegistry; 5 | import android.support.test.runner.AndroidJUnit4; 6 | 7 | import org.junit.Test; 8 | import org.junit.runner.RunWith; 9 | 10 | import static org.junit.Assert.*; 11 | 12 | /** 13 | * Instrumented test, which will execute on an Android device. 14 | * 15 | * @see Testing documentation 16 | */ 17 | @RunWith(AndroidJUnit4.class) 18 | public class ExampleInstrumentedTest { 19 | @Test 20 | public void useAppContext() throws Exception { 21 | // Context of the app under test. 
22 | Context appContext = InstrumentationRegistry.getTargetContext(); 23 | 24 | assertEquals("com.rossia.life.documentscan", appContext.getPackageName()); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /app/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 17 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /app/src/main/cpp/native-lib.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | extern "C" 5 | JNIEXPORT jstring 6 | 7 | JNICALL 8 | Java_com_rossia_life_documentscan_MainActivity_stringFromJNI( 9 | JNIEnv *env, 10 | jobject /* this */) { 11 | std::string hello = "Hello from C++"; 12 | return env->NewStringUTF(hello.c_str()); 13 | } 14 | -------------------------------------------------------------------------------- /app/src/main/java/com/rossia/life/documentscan/MainActivity.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.documentscan; 2 | 3 | import android.graphics.Bitmap; 4 | import android.os.Bundle; 5 | import android.os.Handler; 6 | import android.support.annotation.Nullable; 7 | import android.support.v7.app.AppCompatActivity; 8 | import android.view.View; 9 | import android.widget.ImageView; 10 | import android.widget.TextView; 11 | 12 | import com.github.chrisbanes.photoview.PhotoView; 13 | import com.rossia.life.scan.common.util.PhotoEnhanceUtil; 14 | import com.rossia.life.scan.ui.detector.CameraApiFragment; 15 | import com.rossia.life.scan.ui.interf.TakePictureCallback; 16 | 17 | /** 18 | * @author pd_liu 2017/1/10. 19 | *

20 | * 暂时,自动识别拍照、裁剪。 21 | *

22 | */ 23 | public class MainActivity extends AppCompatActivity { 24 | 25 | private PhotoView mPhotoView; 26 | 27 | private Bitmap mTakePictureBitmap; 28 | 29 | private CameraApiFragment mCameraApiFragment; 30 | 31 | /** 32 | * 对图片进行、对比度的加强 33 | */ 34 | private PhotoEnhanceUtil mPhotoEnhance; 35 | 36 | // Used to load the 'native-lib' library on application startup. 37 | static { 38 | System.loadLibrary("native-lib"); 39 | } 40 | 41 | @Override 42 | protected void onCreate(Bundle savedInstanceState) { 43 | super.onCreate(savedInstanceState); 44 | setContentView(R.layout.activity_main); 45 | 46 | // Example of a call to a native method 47 | TextView tv = (TextView) findViewById(R.id.sample_text); 48 | tv.setText(stringFromJNI()); 49 | } 50 | 51 | @Override 52 | protected void onPostCreate(@Nullable Bundle savedInstanceState) { 53 | super.onPostCreate(savedInstanceState); 54 | 55 | final ImageView showImg = findViewById(R.id.show_img); 56 | mPhotoView = findViewById(R.id.photo_view); 57 | 58 | mCameraApiFragment = CameraApiFragment.newInstance(); 59 | 60 | getSupportFragmentManager().beginTransaction().add(R.id.container, mCameraApiFragment, "api").commit(); 61 | 62 | mCameraApiFragment.setTakePictureCallback(new TakePictureCallback() { 63 | @Override 64 | public void call(Bitmap bitmap) { 65 | mTakePictureBitmap = bitmap; 66 | showImg.setImageBitmap(bitmap); 67 | } 68 | }); 69 | 70 | showImg.setOnClickListener(mOnClick); 71 | showImg.setOnLongClickListener(new View.OnLongClickListener() { 72 | @Override 73 | public boolean onLongClick(View v) { 74 | mPhotoView.setImageBitmap(mTakePictureBitmap); 75 | mPhotoView.setVisibility(View.VISIBLE); 76 | return false; 77 | } 78 | }); 79 | 80 | new Handler().post(new Runnable() { 81 | @Override 82 | public void run() { 83 | // cameraApiFragment.setTopViewMarginTop(100); 84 | } 85 | }); 86 | 87 | } 88 | 89 | @Override 90 | protected void onResume() { 91 | super.onResume(); 92 | 93 | mCameraApiFragment.startDetect(); 94 | } 95 
| 96 | @Override 97 | protected void onStop() { 98 | super.onStop(); 99 | mCameraApiFragment.stopDetect(); 100 | } 101 | 102 | private View.OnClickListener mOnClick = new View.OnClickListener() { 103 | @Override 104 | public void onClick(View v) { 105 | 106 | if (v.getId() == R.id.show_img) { 107 | if(mTakePictureBitmap == null){ 108 | return; 109 | } 110 | mPhotoEnhance = new PhotoEnhanceUtil(mTakePictureBitmap); 111 | 112 | mPhotoEnhance.setContrast(200); 113 | Bitmap source = mPhotoEnhance.handleImage(mPhotoEnhance.Enhance_Contrast); 114 | mPhotoView.setImageBitmap(source); 115 | mPhotoView.setVisibility(View.VISIBLE); 116 | return; 117 | } 118 | } 119 | }; 120 | 121 | /** 122 | * A native method that is implemented by the 'native-lib' native library, 123 | * which is packaged with this application. 124 | */ 125 | public native String stringFromJNI(); 126 | 127 | @Override 128 | public void onBackPressed() { 129 | if (mPhotoView.getVisibility() == View.VISIBLE) { 130 | mPhotoView.setVisibility(View.GONE); 131 | } else { 132 | super.onBackPressed(); 133 | } 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /app/src/main/res/drawable-v24/ic_launcher_foreground.xml: -------------------------------------------------------------------------------- 1 | 7 | 12 | 13 | 19 | 22 | 25 | 26 | 27 | 28 | 34 | 35 | -------------------------------------------------------------------------------- /app/src/main/res/drawable/ic_launcher_background.xml: -------------------------------------------------------------------------------- 1 | 2 | 7 | 10 | 15 | 20 | 25 | 30 | 35 | 40 | 45 | 50 | 55 | 60 | 65 | 70 | 75 | 80 | 85 | 90 | 95 | 100 | 105 | 110 | 115 | 120 | 125 | 130 | 135 | 140 | 145 | 150 | 155 | 160 | 165 | 170 | 171 | -------------------------------------------------------------------------------- /app/src/main/res/layout/activity_main.xml: -------------------------------------------------------------------------------- 1 
| 2 | 8 | 9 | 19 | 20 | 24 | 25 | 31 | 32 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /app/src/main/res/mipmap-hdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/app/src/main/res/mipmap-hdpi/ic_launcher.png -------------------------------------------------------------------------------- /app/src/main/res/mipmap-hdpi/ic_launcher_round.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/app/src/main/res/mipmap-hdpi/ic_launcher_round.png -------------------------------------------------------------------------------- /app/src/main/res/mipmap-mdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/app/src/main/res/mipmap-mdpi/ic_launcher.png -------------------------------------------------------------------------------- /app/src/main/res/mipmap-mdpi/ic_launcher_round.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/app/src/main/res/mipmap-mdpi/ic_launcher_round.png 
-------------------------------------------------------------------------------- /app/src/main/res/mipmap-xhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/app/src/main/res/mipmap-xhdpi/ic_launcher.png -------------------------------------------------------------------------------- /app/src/main/res/mipmap-xhdpi/ic_launcher_round.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png -------------------------------------------------------------------------------- /app/src/main/res/mipmap-xxhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/app/src/main/res/mipmap-xxhdpi/ic_launcher.png -------------------------------------------------------------------------------- /app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png -------------------------------------------------------------------------------- /app/src/main/res/mipmap-xxxhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png -------------------------------------------------------------------------------- /app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png -------------------------------------------------------------------------------- /app/src/main/res/values/colors.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | #3F51B5 4 | #303F9F 5 | #FF4081 6 | 7 | -------------------------------------------------------------------------------- /app/src/main/res/values/strings.xml: -------------------------------------------------------------------------------- 1 | 2 | SCM文档识别 3 | 4 | -------------------------------------------------------------------------------- /app/src/main/res/values/styles.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 10 | 11 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /app/src/test/java/com/rossia/life/documentscan/ExampleUnitTest.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.documentscan; 2 | 3 | import org.junit.Test; 4 | 5 | import static org.junit.Assert.*; 6 | 7 | /** 8 | * Example local unit test, which will execute on the development machine (host). 9 | * 10 | * @see Testing documentation 11 | */ 12 | public class ExampleUnitTest { 13 | @Test 14 | public void addition_isCorrect() throws Exception { 15 | assertEquals(4, 2 + 2); 16 | } 17 | } -------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | // Top-level build file where you can add configuration options common to all sub-projects/modules. 
2 | 3 | buildscript { 4 | 5 | repositories { 6 | google() 7 | jcenter() 8 | } 9 | dependencies { 10 | classpath 'com.android.tools.build:gradle:3.0.1' 11 | 12 | 13 | // NOTE: Do not place your application dependencies here; they belong 14 | // in the individual module build.gradle files 15 | } 16 | } 17 | 18 | allprojects { 19 | repositories { 20 | google() 21 | jcenter() 22 | maven { url "https://jitpack.io" } 23 | } 24 | } 25 | 26 | task clean(type: Delete) { 27 | delete rootProject.buildDir 28 | } 29 | -------------------------------------------------------------------------------- /gradle.properties: -------------------------------------------------------------------------------- 1 | # Project-wide Gradle settings. 2 | 3 | # IDE (e.g. Android Studio) users: 4 | # Gradle settings configured through the IDE *will override* 5 | # any settings specified in this file. 6 | 7 | # For more details on how to configure your build environment visit 8 | # http://www.gradle.org/docs/current/userguide/build_environment.html 9 | 10 | # Specifies the JVM arguments used for the daemon process. 11 | # The setting is particularly useful for tweaking memory settings. 12 | org.gradle.jvmargs=-Xmx1536m 13 | 14 | # When configured, Gradle will run in incubating parallel mode. 15 | # This option should only be used with decoupled projects. 
More details, visit 16 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects 17 | # org.gradle.parallel=true 18 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | #Thu Dec 28 17:50:42 CST 2017 2 | distributionBase=GRADLE_USER_HOME 3 | distributionPath=wrapper/dists 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | distributionUrl=https\://services.gradle.org/distributions/gradle-4.1-all.zip 7 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 10 | DEFAULT_JVM_OPTS="" 11 | 12 | APP_NAME="Gradle" 13 | APP_BASE_NAME=`basename "$0"` 14 | 15 | # Use the maximum available, or set MAX_FD != -1 to use that value. 16 | MAX_FD="maximum" 17 | 18 | warn ( ) { 19 | echo "$*" 20 | } 21 | 22 | die ( ) { 23 | echo 24 | echo "$*" 25 | echo 26 | exit 1 27 | } 28 | 29 | # OS specific support (must be 'true' or 'false'). 
30 | cygwin=false 31 | msys=false 32 | darwin=false 33 | case "`uname`" in 34 | CYGWIN* ) 35 | cygwin=true 36 | ;; 37 | Darwin* ) 38 | darwin=true 39 | ;; 40 | MINGW* ) 41 | msys=true 42 | ;; 43 | esac 44 | 45 | # Attempt to set APP_HOME 46 | # Resolve links: $0 may be a link 47 | PRG="$0" 48 | # Need this for relative symlinks. 49 | while [ -h "$PRG" ] ; do 50 | ls=`ls -ld "$PRG"` 51 | link=`expr "$ls" : '.*-> \(.*\)$'` 52 | if expr "$link" : '/.*' > /dev/null; then 53 | PRG="$link" 54 | else 55 | PRG=`dirname "$PRG"`"/$link" 56 | fi 57 | done 58 | SAVED="`pwd`" 59 | cd "`dirname \"$PRG\"`/" >/dev/null 60 | APP_HOME="`pwd -P`" 61 | cd "$SAVED" >/dev/null 62 | 63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 64 | 65 | # Determine the Java command to use to start the JVM. 66 | if [ -n "$JAVA_HOME" ] ; then 67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 68 | # IBM's JDK on AIX uses strange locations for the executables 69 | JAVACMD="$JAVA_HOME/jre/sh/java" 70 | else 71 | JAVACMD="$JAVA_HOME/bin/java" 72 | fi 73 | if [ ! -x "$JAVACMD" ] ; then 74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 75 | 76 | Please set the JAVA_HOME variable in your environment to match the 77 | location of your Java installation." 78 | fi 79 | else 80 | JAVACMD="java" 81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 82 | 83 | Please set the JAVA_HOME variable in your environment to match the 84 | location of your Java installation." 85 | fi 86 | 87 | # Increase the maximum file descriptors if we can. 88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then 89 | MAX_FD_LIMIT=`ulimit -H -n` 90 | if [ $? -eq 0 ] ; then 91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 92 | MAX_FD="$MAX_FD_LIMIT" 93 | fi 94 | ulimit -n $MAX_FD 95 | if [ $? 
-ne 0 ] ; then 96 | warn "Could not set maximum file descriptor limit: $MAX_FD" 97 | fi 98 | else 99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 100 | fi 101 | fi 102 | 103 | # For Darwin, add options to specify how the application appears in the dock 104 | if $darwin; then 105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 106 | fi 107 | 108 | # For Cygwin, switch paths to Windows format before running java 109 | if $cygwin ; then 110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 112 | JAVACMD=`cygpath --unix "$JAVACMD"` 113 | 114 | # We build the pattern for arguments to be converted via cygpath 115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 116 | SEP="" 117 | for dir in $ROOTDIRSRAW ; do 118 | ROOTDIRS="$ROOTDIRS$SEP$dir" 119 | SEP="|" 120 | done 121 | OURCYGPATTERN="(^($ROOTDIRS))" 122 | # Add a user-defined pattern to the cygpath arguments 123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 125 | fi 126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 127 | i=0 128 | for arg in "$@" ; do 129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 131 | 132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 134 | else 135 | eval `echo args$i`="\"$arg\"" 136 | fi 137 | i=$((i+1)) 138 | done 139 | case $i in 140 | (0) set -- ;; 141 | (1) set -- "$args0" ;; 142 | (2) set -- "$args0" "$args1" ;; 143 | (3) set -- "$args0" "$args1" "$args2" ;; 144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 147 | (7) set -- "$args0" "$args1" "$args2" "$args3" 
"$args4" "$args5" "$args6" ;; 148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 150 | esac 151 | fi 152 | 153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules 154 | function splitJvmOpts() { 155 | JVM_OPTS=("$@") 156 | } 157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS 158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" 159 | 160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" 161 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 12 | set DEFAULT_JVM_OPTS= 13 | 14 | set DIRNAME=%~dp0 15 | if "%DIRNAME%" == "" set DIRNAME=. 16 | set APP_BASE_NAME=%~n0 17 | set APP_HOME=%DIRNAME% 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 
31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windowz variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | if "%@eval[2+2]" == "4" goto 4NT_args 53 | 54 | :win9xME_args 55 | @rem Slurp the command line arguments. 56 | set CMD_LINE_ARGS= 57 | set _SKIP=2 58 | 59 | :win9xME_args_slurp 60 | if "x%~1" == "x" goto execute 61 | 62 | set CMD_LINE_ARGS=%* 63 | goto execute 64 | 65 | :4NT_args 66 | @rem Get arguments from the 4NT Shell from JP Software 67 | set CMD_LINE_ARGS=%$ 68 | 69 | :execute 70 | @rem Setup the command line 71 | 72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if "%ERRORLEVEL%"=="0" goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 85 | exit /b 1 86 | 87 | :mainEnd 88 | if "%OS%"=="Windows_NT" endlocal 89 | 90 | :omega 91 | -------------------------------------------------------------------------------- /scan/.gitignore: -------------------------------------------------------------------------------- 1 | /build 2 | -------------------------------------------------------------------------------- /scan/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | # For more information about using CMake with Android Studio, read the 2 | # documentation: https://d.android.com/studio/projects/add-native-code.html 3 | 4 | # Sets the minimum version of CMake required to build the native library. 5 | 6 | cmake_minimum_required(VERSION 3.4.1) 7 | 8 | # Creates and names a library, sets it as either STATIC 9 | # or SHARED, and provides the relative paths to its source code. 10 | # You can define multiple libraries, and CMake builds them for you. 11 | # Gradle automatically packages shared libraries with your APK. 12 | 13 | add_library( # Sets the name of the library. 14 | native-lib 15 | 16 | # Sets the library as a shared library. 17 | SHARED 18 | 19 | # Provides a relative path to your source file(s). 20 | src/main/cpp/native-lib.cpp ) 21 | 22 | # Searches for a specified prebuilt library and stores the path as a 23 | # variable. Because CMake includes system libraries in the search path by 24 | # default, you only need to specify the name of the public NDK library 25 | # you want to add. CMake verifies that the library exists before 26 | # completing its build. 27 | 28 | find_library( # Sets the name of the path variable. 29 | log-lib 30 | 31 | # Specifies the name of the NDK library that 32 | # you want CMake to locate. 33 | log ) 34 | 35 | # Specifies libraries CMake should link to your target library. 
You 36 | # can link multiple libraries, such as libraries you define in this 37 | # build script, prebuilt third-party libraries, or system libraries. 38 | 39 | target_link_libraries( # Specifies the target library. 40 | native-lib 41 | 42 | # Links the target library to the log library 43 | # included in the NDK. 44 | ${log-lib} ) -------------------------------------------------------------------------------- /scan/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'com.android.library' 2 | 3 | android { 4 | compileSdkVersion 26 5 | 6 | defaultConfig { 7 | minSdkVersion 19 8 | targetSdkVersion 26 9 | versionCode 1 10 | versionName "1.0" 11 | testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" 12 | externalNativeBuild { 13 | cmake { 14 | cppFlags "-frtti -fexceptions" 15 | } 16 | } 17 | } 18 | 19 | buildTypes { 20 | release { 21 | minifyEnabled false 22 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' 23 | } 24 | } 25 | 26 | externalNativeBuild { 27 | cmake { 28 | path "CMakeLists.txt" 29 | } 30 | } 31 | 32 | } 33 | 34 | dependencies { 35 | implementation fileTree(include: ['*.jar'], dir: 'libs') 36 | implementation 'com.android.support:appcompat-v7:26.1.0' 37 | implementation 'com.android.support:support-v4:26.1.0' 38 | testImplementation 'junit:junit:4.12' 39 | androidTestImplementation 'com.android.support.test:runner:1.0.1' 40 | androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1' 41 | compile 'org.tensorflow:tensorflow-android:+' 42 | } 43 | -------------------------------------------------------------------------------- /scan/libs/libtensorflow_demo.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/scan/libs/libtensorflow_demo.so 
-------------------------------------------------------------------------------- /scan/proguard-rules.pro: -------------------------------------------------------------------------------- 1 | # Add project specific ProGuard rules here. 2 | # You can control the set of applied configuration files using the 3 | # proguardFiles setting in build.gradle. 4 | # 5 | # For more details, see 6 | # http://developer.android.com/guide/developing/tools/proguard.html 7 | 8 | # If your project uses WebView with JS, uncomment the following 9 | # and specify the fully qualified class name to the JavaScript interface 10 | # class: 11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview { 12 | # public *; 13 | #} 14 | 15 | # Uncomment this to preserve the line number information for 16 | # debugging stack traces. 17 | #-keepattributes SourceFile,LineNumberTable 18 | 19 | # If you keep the line number information, uncomment this to 20 | # hide the original source file name. 21 | #-renamesourcefileattribute SourceFile 22 | -------------------------------------------------------------------------------- /scan/src/androidTest/java/com/rossia/life/scan/ExampleInstrumentedTest.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan; 2 | 3 | import android.content.Context; 4 | import android.support.test.InstrumentationRegistry; 5 | import android.support.test.runner.AndroidJUnit4; 6 | 7 | import org.junit.Test; 8 | import org.junit.runner.RunWith; 9 | 10 | import static org.junit.Assert.*; 11 | 12 | /** 13 | * Instrumented test, which will execute on an Android device. 14 | * 15 | * @see Testing documentation 16 | */ 17 | @RunWith(AndroidJUnit4.class) 18 | public class ExampleInstrumentedTest { 19 | @Test 20 | public void useAppContext() throws Exception { 21 | // Context of the app under test. 
22 | Context appContext = InstrumentationRegistry.getTargetContext(); 23 | 24 | assertEquals("com.rossia.life.scan.test", appContext.getPackageName()); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /scan/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 3 | -------------------------------------------------------------------------------- /scan/src/main/assets/BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) # Apache 2.0 4 | 5 | # It is necessary to use this filegroup rather than globbing the files in this 6 | # folder directly the examples/android:tensorflow_demo target due to the fact 7 | # that assets_dir is necessarily set to "" there (to allow using other 8 | # arbitrary targets as assets). 9 | filegroup( 10 | name = "asset_files", 11 | srcs = glob( 12 | ["**/*"], 13 | exclude = ["BUILD"], 14 | ), 15 | ) 16 | -------------------------------------------------------------------------------- /scan/src/main/assets/coco_labels_list.txt: -------------------------------------------------------------------------------- 1 | ??? 2 | person 3 | bicycle 4 | car 5 | motorcycle 6 | airplane 7 | bus 8 | train 9 | truck 10 | boat 11 | traffic light 12 | fire hydrant 13 | ??? 14 | stop sign 15 | parking meter 16 | bench 17 | bird 18 | cat 19 | dog 20 | horse 21 | sheep 22 | cow 23 | elephant 24 | bear 25 | zebra 26 | giraffe 27 | ??? 28 | backpack 29 | umbrella 30 | ??? 31 | ??? 32 | handbag 33 | tie 34 | suitcase 35 | frisbee 36 | skis 37 | snowboard 38 | sports ball 39 | kite 40 | baseball bat 41 | baseball glove 42 | skateboard 43 | surfboard 44 | tennis racket 45 | bottle 46 | ??? 
47 | wine glass 48 | cup 49 | fork 50 | knife 51 | spoon 52 | bowl 53 | banana 54 | apple 55 | sandwich 56 | orange 57 | broccoli 58 | carrot 59 | hot dog 60 | pizza 61 | donut 62 | cake 63 | chair 64 | couch 65 | potted plant 66 | bed 67 | ??? 68 | dining table 69 | ??? 70 | ??? 71 | toilet 72 | ??? 73 | tv 74 | laptop 75 | mouse 76 | remote 77 | keyboard 78 | cell phone 79 | microwave 80 | oven 81 | toaster 82 | sink 83 | refrigerator 84 | ??? 85 | book 86 | clock 87 | vase 88 | scissors 89 | teddy bear 90 | hair drier 91 | toothbrush 92 | -------------------------------------------------------------------------------- /scan/src/main/assets/ssd_mobilenet_v1_android_export.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/scan/src/main/assets/ssd_mobilenet_v1_android_export.pb -------------------------------------------------------------------------------- /scan/src/main/cpp/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (C) 2016 The Android Open Source Project 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | project(TENSORFLOW_DEMO) 18 | cmake_minimum_required(VERSION 3.4.1) 19 | 20 | set(CMAKE_VERBOSE_MAKEFILE on) 21 | 22 | get_filename_component(TF_SRC_ROOT ${CMAKE_SOURCE_DIR}/../../../.. 
ABSOLUTE) 23 | get_filename_component(SAMPLE_SRC_DIR ${CMAKE_SOURCE_DIR}/.. ABSOLUTE) 24 | 25 | if (ANDROID_ABI MATCHES "^armeabi-v7a$") 26 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mfloat-abi=softfp -mfpu=neon") 27 | elseif(ANDROID_ABI MATCHES "^arm64-v8a") 28 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2 -ftree-vectorize") 29 | endif() 30 | 31 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DSTANDALONE_DEMO_LIB \ 32 | -std=c++11 -fno-exceptions -fno-rtti -O2 -Wno-narrowing \ 33 | -fPIE") 34 | set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} \ 35 | -Wl,--allow-multiple-definition \ 36 | -Wl,--whole-archive -fPIE -v") 37 | 38 | file(GLOB_RECURSE tensorflow_demo_sources ${SAMPLE_SRC_DIR}/jni/*.*) 39 | add_library(tensorflow_demo SHARED 40 | ${tensorflow_demo_sources}) 41 | target_include_directories(tensorflow_demo PRIVATE 42 | ${TF_SRC_ROOT} 43 | ${CMAKE_SOURCE_DIR}) 44 | 45 | target_link_libraries(tensorflow_demo 46 | android 47 | log 48 | jnigraphics 49 | m 50 | atomic 51 | z) 52 | -------------------------------------------------------------------------------- /scan/src/main/cpp/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/scan/src/main/cpp/__init__.py -------------------------------------------------------------------------------- /scan/src/main/cpp/imageutils_jni.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | // This file binds the native image utility code to the Java class 17 | // which exposes them. 18 | 19 | #include 20 | #include 21 | #include 22 | 23 | #include "tensorflow/examples/android/jni/rgb2yuv.h" 24 | #include "tensorflow/examples/android/jni/yuv2rgb.h" 25 | 26 | #define IMAGEUTILS_METHOD(METHOD_NAME) \ 27 | Java_org_tensorflow_demo_env_ImageUtils_##METHOD_NAME // NOLINT 28 | 29 | #ifdef __cplusplus 30 | extern "C" { 31 | #endif 32 | 33 | JNIEXPORT void JNICALL 34 | IMAGEUTILS_METHOD(convertYUV420SPToARGB8888)( 35 | JNIEnv* env, jclass clazz, jbyteArray input, jintArray output, 36 | jint width, jint height, jboolean halfSize); 37 | 38 | JNIEXPORT void JNICALL IMAGEUTILS_METHOD(convertYUV420ToARGB8888)( 39 | JNIEnv* env, jclass clazz, jbyteArray y, jbyteArray u, jbyteArray v, 40 | jintArray output, jint width, jint height, jint y_row_stride, 41 | jint uv_row_stride, jint uv_pixel_stride, jboolean halfSize); 42 | 43 | JNIEXPORT void JNICALL IMAGEUTILS_METHOD(convertYUV420SPToRGB565)( 44 | JNIEnv* env, jclass clazz, jbyteArray input, jbyteArray output, jint width, 45 | jint height); 46 | 47 | JNIEXPORT void JNICALL 48 | IMAGEUTILS_METHOD(convertARGB8888ToYUV420SP)( 49 | JNIEnv* env, jclass clazz, jintArray input, jbyteArray output, 50 | jint width, jint height); 51 | 52 | JNIEXPORT void JNICALL 53 | IMAGEUTILS_METHOD(convertRGB565ToYUV420SP)( 54 | JNIEnv* env, jclass clazz, jbyteArray input, jbyteArray output, 55 | jint width, jint height); 56 
| 57 | #ifdef __cplusplus 58 | } 59 | #endif 60 | 61 | JNIEXPORT void JNICALL 62 | IMAGEUTILS_METHOD(convertYUV420SPToARGB8888)( 63 | JNIEnv* env, jclass clazz, jbyteArray input, jintArray output, 64 | jint width, jint height, jboolean halfSize) { 65 | jboolean inputCopy = JNI_FALSE; 66 | jbyte* const i = env->GetByteArrayElements(input, &inputCopy); 67 | 68 | jboolean outputCopy = JNI_FALSE; 69 | jint* const o = env->GetIntArrayElements(output, &outputCopy); 70 | 71 | if (halfSize) { 72 | ConvertYUV420SPToARGB8888HalfSize(reinterpret_cast(i), 73 | reinterpret_cast(o), width, 74 | height); 75 | } else { 76 | ConvertYUV420SPToARGB8888(reinterpret_cast(i), 77 | reinterpret_cast(i) + width * height, 78 | reinterpret_cast(o), width, height); 79 | } 80 | 81 | env->ReleaseByteArrayElements(input, i, JNI_ABORT); 82 | env->ReleaseIntArrayElements(output, o, 0); 83 | } 84 | 85 | JNIEXPORT void JNICALL IMAGEUTILS_METHOD(convertYUV420ToARGB8888)( 86 | JNIEnv* env, jclass clazz, jbyteArray y, jbyteArray u, jbyteArray v, 87 | jintArray output, jint width, jint height, jint y_row_stride, 88 | jint uv_row_stride, jint uv_pixel_stride, jboolean halfSize) { 89 | jboolean inputCopy = JNI_FALSE; 90 | jbyte* const y_buff = env->GetByteArrayElements(y, &inputCopy); 91 | jboolean outputCopy = JNI_FALSE; 92 | jint* const o = env->GetIntArrayElements(output, &outputCopy); 93 | 94 | if (halfSize) { 95 | ConvertYUV420SPToARGB8888HalfSize(reinterpret_cast(y_buff), 96 | reinterpret_cast(o), width, 97 | height); 98 | } else { 99 | jbyte* const u_buff = env->GetByteArrayElements(u, &inputCopy); 100 | jbyte* const v_buff = env->GetByteArrayElements(v, &inputCopy); 101 | 102 | ConvertYUV420ToARGB8888( 103 | reinterpret_cast(y_buff), reinterpret_cast(u_buff), 104 | reinterpret_cast(v_buff), reinterpret_cast(o), 105 | width, height, y_row_stride, uv_row_stride, uv_pixel_stride); 106 | 107 | env->ReleaseByteArrayElements(u, u_buff, JNI_ABORT); 108 | env->ReleaseByteArrayElements(v, v_buff, 
JNI_ABORT); 109 | } 110 | 111 | env->ReleaseByteArrayElements(y, y_buff, JNI_ABORT); 112 | env->ReleaseIntArrayElements(output, o, 0); 113 | } 114 | 115 | JNIEXPORT void JNICALL IMAGEUTILS_METHOD(convertYUV420SPToRGB565)( 116 | JNIEnv* env, jclass clazz, jbyteArray input, jbyteArray output, jint width, 117 | jint height) { 118 | jboolean inputCopy = JNI_FALSE; 119 | jbyte* const i = env->GetByteArrayElements(input, &inputCopy); 120 | 121 | jboolean outputCopy = JNI_FALSE; 122 | jbyte* const o = env->GetByteArrayElements(output, &outputCopy); 123 | 124 | ConvertYUV420SPToRGB565(reinterpret_cast(i), 125 | reinterpret_cast(o), width, height); 126 | 127 | env->ReleaseByteArrayElements(input, i, JNI_ABORT); 128 | env->ReleaseByteArrayElements(output, o, 0); 129 | } 130 | 131 | JNIEXPORT void JNICALL 132 | IMAGEUTILS_METHOD(convertARGB8888ToYUV420SP)( 133 | JNIEnv* env, jclass clazz, jintArray input, jbyteArray output, 134 | jint width, jint height) { 135 | jboolean inputCopy = JNI_FALSE; 136 | jint* const i = env->GetIntArrayElements(input, &inputCopy); 137 | 138 | jboolean outputCopy = JNI_FALSE; 139 | jbyte* const o = env->GetByteArrayElements(output, &outputCopy); 140 | 141 | ConvertARGB8888ToYUV420SP(reinterpret_cast(i), 142 | reinterpret_cast(o), width, height); 143 | 144 | env->ReleaseIntArrayElements(input, i, JNI_ABORT); 145 | env->ReleaseByteArrayElements(output, o, 0); 146 | } 147 | 148 | JNIEXPORT void JNICALL 149 | IMAGEUTILS_METHOD(convertRGB565ToYUV420SP)( 150 | JNIEnv* env, jclass clazz, jbyteArray input, jbyteArray output, 151 | jint width, jint height) { 152 | jboolean inputCopy = JNI_FALSE; 153 | jbyte* const i = env->GetByteArrayElements(input, &inputCopy); 154 | 155 | jboolean outputCopy = JNI_FALSE; 156 | jbyte* const o = env->GetByteArrayElements(output, &outputCopy); 157 | 158 | ConvertRGB565ToYUV420SP(reinterpret_cast(i), 159 | reinterpret_cast(o), width, height); 160 | 161 | env->ReleaseByteArrayElements(input, i, JNI_ABORT); 162 | 
env->ReleaseByteArrayElements(output, o, 0); 163 | } 164 | -------------------------------------------------------------------------------- /scan/src/main/cpp/include/Scanner.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by qiulinmin on 8/1/17. 3 | // 4 | 5 | #ifndef CROPPER_DOC_SCANNER_H 6 | #define CROPPER_DOC_SCANNER_H 7 | 8 | #include 9 | 10 | namespace scanner{ 11 | 12 | class Scanner { 13 | public: 14 | int resizeThreshold = 500; 15 | 16 | Scanner(cv::Mat& bitmap); 17 | virtual ~Scanner(); 18 | std::vector scanPoint(); 19 | private: 20 | cv::Mat srcBitmap; 21 | float resizeScale = 1.0f; 22 | 23 | cv::Mat resizeImage(); 24 | 25 | cv::Mat preprocessImage(cv::Mat& image); 26 | 27 | std::vector selectPoints(std::vector points, int selectTimes); 28 | 29 | std::vector sortPointClockwise(std::vector vector); 30 | 31 | long long pointSideLine(cv::Point& lineP1, cv::Point& lineP2, cv::Point& point); 32 | }; 33 | 34 | } 35 | 36 | #endif //CROPPER_DOC_SCANNER_H 37 | -------------------------------------------------------------------------------- /scan/src/main/cpp/include/android_utils.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by qiulinmin on 17-5-15. 
3 | // 4 | 5 | #ifndef IMG_ANDROID_UTILS_H 6 | #define IMG_ANDROID_UTILS_H 7 | 8 | #include 9 | #include 10 | 11 | using namespace cv; 12 | 13 | void bitmap_to_mat(JNIEnv *env, jobject &srcBitmap, Mat &srcMat); 14 | 15 | void mat_to_bitmap(JNIEnv *env, Mat &srcMat, jobject &dstBitmap); 16 | 17 | #endif //IMG_ANDROID_UTILS_H 18 | -------------------------------------------------------------------------------- /scan/src/main/cpp/native-lib.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | extern "C" 5 | JNIEXPORT jstring 6 | 7 | JNICALL 8 | Java_com_rossia_life_documentscan_MainActivity_stringFromJNI( 9 | JNIEnv *env, 10 | jobject /* this */) { 11 | std::string hello = "Hello from C++"; 12 | return env->NewStringUTF(hello.c_str()); 13 | } 14 | extern "C" 15 | JNIEXPORT jint JNICALL 16 | Java_com_rossia_life_scan_transfer_TransferSample_jni_1string(JNIEnv *env, jclass type, jint input, 17 | jstring out_) { 18 | const char *out = env->GetStringUTFChars(out_, 0); 19 | 20 | // TODO 21 | 22 | env->ReleaseStringUTFChars(out_, out); 23 | }extern "C" 24 | JNIEXPORT void JNICALL 25 | Java_com_rossia_life_scan_transfer_TransferSample_jni_12(JNIEnv *env, jclass type, jint input, 26 | jobject output) { 27 | 28 | // TODO 29 | 30 | } -------------------------------------------------------------------------------- /scan/src/main/cpp/rgb2yuv.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | // These utility functions allow for the conversion of RGB data to YUV data. 17 | 18 | #include "tensorflow/examples/android/jni/rgb2yuv.h" 19 | 20 | static inline void WriteYUV(const int x, const int y, const int width, 21 | const int r8, const int g8, const int b8, 22 | uint8_t* const pY, uint8_t* const pUV) { 23 | // Using formulas from http://msdn.microsoft.com/en-us/library/ms893078 24 | *pY = ((66 * r8 + 129 * g8 + 25 * b8 + 128) >> 8) + 16; 25 | 26 | // Odd widths get rounded up so that UV blocks on the side don't get cut off. 27 | const int blocks_per_row = (width + 1) / 2; 28 | 29 | // 2 bytes per UV block 30 | const int offset = 2 * (((y / 2) * blocks_per_row + (x / 2))); 31 | 32 | // U and V are the average values of all 4 pixels in the block. 33 | if (!(x & 1) && !(y & 1)) { 34 | // Explicitly clear the block if this is the first pixel in it. 
35 | pUV[offset] = 0; 36 | pUV[offset + 1] = 0; 37 | } 38 | 39 | // V (with divide by 4 factored in) 40 | #ifdef __APPLE__ 41 | const int u_offset = 0; 42 | const int v_offset = 1; 43 | #else 44 | const int u_offset = 1; 45 | const int v_offset = 0; 46 | #endif 47 | pUV[offset + v_offset] += ((112 * r8 - 94 * g8 - 18 * b8 + 128) >> 10) + 32; 48 | 49 | // U (with divide by 4 factored in) 50 | pUV[offset + u_offset] += ((-38 * r8 - 74 * g8 + 112 * b8 + 128) >> 10) + 32; 51 | } 52 | 53 | void ConvertARGB8888ToYUV420SP(const uint32_t* const input, 54 | uint8_t* const output, int width, int height) { 55 | uint8_t* pY = output; 56 | uint8_t* pUV = output + (width * height); 57 | const uint32_t* in = input; 58 | 59 | for (int y = 0; y < height; y++) { 60 | for (int x = 0; x < width; x++) { 61 | const uint32_t rgb = *in++; 62 | #ifdef __APPLE__ 63 | const int nB = (rgb >> 8) & 0xFF; 64 | const int nG = (rgb >> 16) & 0xFF; 65 | const int nR = (rgb >> 24) & 0xFF; 66 | #else 67 | const int nR = (rgb >> 16) & 0xFF; 68 | const int nG = (rgb >> 8) & 0xFF; 69 | const int nB = rgb & 0xFF; 70 | #endif 71 | WriteYUV(x, y, width, nR, nG, nB, pY++, pUV); 72 | } 73 | } 74 | } 75 | 76 | void ConvertRGB565ToYUV420SP(const uint16_t* const input, uint8_t* const output, 77 | const int width, const int height) { 78 | uint8_t* pY = output; 79 | uint8_t* pUV = output + (width * height); 80 | const uint16_t* in = input; 81 | 82 | for (int y = 0; y < height; y++) { 83 | for (int x = 0; x < width; x++) { 84 | const uint32_t rgb = *in++; 85 | 86 | const int r5 = ((rgb >> 11) & 0x1F); 87 | const int g6 = ((rgb >> 5) & 0x3F); 88 | const int b5 = (rgb & 0x1F); 89 | 90 | // Shift left, then fill in the empty low bits with a copy of the high 91 | // bits so we can stretch across the entire 0 - 255 range. 
92 | const int r8 = r5 << 3 | r5 >> 2; 93 | const int g8 = g6 << 2 | g6 >> 4; 94 | const int b8 = b5 << 3 | b5 >> 2; 95 | 96 | WriteYUV(x, y, width, r8, g8, b8, pY++, pUV); 97 | } 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /scan/src/main/cpp/rgb2yuv.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef ORG_TENSORFLOW_JNI_IMAGEUTILS_RGB2YUV_H_ 17 | #define ORG_TENSORFLOW_JNI_IMAGEUTILS_RGB2YUV_H_ 18 | 19 | #include 20 | 21 | #ifdef __cplusplus 22 | extern "C" { 23 | #endif 24 | 25 | void ConvertARGB8888ToYUV420SP(const uint32_t* const input, 26 | uint8_t* const output, int width, int height); 27 | 28 | void ConvertRGB565ToYUV420SP(const uint16_t* const input, uint8_t* const output, 29 | const int width, const int height); 30 | 31 | #ifdef __cplusplus 32 | } 33 | #endif 34 | 35 | #endif // ORG_TENSORFLOW_JNI_IMAGEUTILS_RGB2YUV_H_ 36 | -------------------------------------------------------------------------------- /scan/src/main/cpp/yuv2rgb.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved. 
2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | // This is a collection of routines which converts various YUV image formats 17 | // to ARGB. 18 | 19 | #include "tensorflow/examples/android/jni/yuv2rgb.h" 20 | 21 | #ifndef MAX 22 | #define MAX(a, b) ({__typeof__(a) _a = (a); __typeof__(b) _b = (b); _a > _b ? _a : _b; }) 23 | #define MIN(a, b) ({__typeof__(a) _a = (a); __typeof__(b) _b = (b); _a < _b ? _a : _b; }) 24 | #endif 25 | 26 | // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges 27 | // are normalized to eight bits. 28 | static const int kMaxChannelValue = 262143; 29 | 30 | static inline uint32_t YUV2RGB(int nY, int nU, int nV) { 31 | nY -= 16; 32 | nU -= 128; 33 | nV -= 128; 34 | if (nY < 0) nY = 0; 35 | 36 | // This is the floating point equivalent. We do the conversion in integer 37 | // because some Android devices do not have floating point in hardware. 
38 | // nR = (int)(1.164 * nY + 2.018 * nU); 39 | // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU); 40 | // nB = (int)(1.164 * nY + 1.596 * nV); 41 | 42 | int nR = 1192 * nY + 1634 * nV; 43 | int nG = 1192 * nY - 833 * nV - 400 * nU; 44 | int nB = 1192 * nY + 2066 * nU; 45 | 46 | nR = MIN(kMaxChannelValue, MAX(0, nR)); 47 | nG = MIN(kMaxChannelValue, MAX(0, nG)); 48 | nB = MIN(kMaxChannelValue, MAX(0, nB)); 49 | 50 | nR = (nR >> 10) & 0xff; 51 | nG = (nG >> 10) & 0xff; 52 | nB = (nB >> 10) & 0xff; 53 | 54 | return 0xff000000 | (nR << 16) | (nG << 8) | nB; 55 | } 56 | 57 | // Accepts a YUV 4:2:0 image with a plane of 8 bit Y samples followed by 58 | // separate u and v planes with arbitrary row and column strides, 59 | // containing 8 bit 2x2 subsampled chroma samples. 60 | // Converts to a packed ARGB 32 bit output of the same pixel dimensions. 61 | void ConvertYUV420ToARGB8888(const uint8_t* const yData, 62 | const uint8_t* const uData, 63 | const uint8_t* const vData, uint32_t* const output, 64 | const int width, const int height, 65 | const int y_row_stride, const int uv_row_stride, 66 | const int uv_pixel_stride) { 67 | uint32_t* out = output; 68 | 69 | for (int y = 0; y < height; y++) { 70 | const uint8_t* pY = yData + y_row_stride * y; 71 | 72 | const int uv_row_start = uv_row_stride * (y >> 1); 73 | const uint8_t* pU = uData + uv_row_start; 74 | const uint8_t* pV = vData + uv_row_start; 75 | 76 | for (int x = 0; x < width; x++) { 77 | const int uv_offset = (x >> 1) * uv_pixel_stride; 78 | *out++ = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]); 79 | } 80 | } 81 | } 82 | 83 | // Accepts a YUV 4:2:0 image with a plane of 8 bit Y samples followed by an 84 | // interleaved U/V plane containing 8 bit 2x2 subsampled chroma samples, 85 | // except the interleave order of U and V is reversed. Converts to a packed 86 | // ARGB 32 bit output of the same pixel dimensions. 
87 | void ConvertYUV420SPToARGB8888(const uint8_t* const yData, 88 | const uint8_t* const uvData, 89 | uint32_t* const output, const int width, 90 | const int height) { 91 | const uint8_t* pY = yData; 92 | const uint8_t* pUV = uvData; 93 | uint32_t* out = output; 94 | 95 | for (int y = 0; y < height; y++) { 96 | for (int x = 0; x < width; x++) { 97 | int nY = *pY++; 98 | int offset = (y >> 1) * width + 2 * (x >> 1); 99 | #ifdef __APPLE__ 100 | int nU = pUV[offset]; 101 | int nV = pUV[offset + 1]; 102 | #else 103 | int nV = pUV[offset]; 104 | int nU = pUV[offset + 1]; 105 | #endif 106 | 107 | *out++ = YUV2RGB(nY, nU, nV); 108 | } 109 | } 110 | } 111 | 112 | // The same as above, but downsamples each dimension to half size. 113 | void ConvertYUV420SPToARGB8888HalfSize(const uint8_t* const input, 114 | uint32_t* const output, int width, 115 | int height) { 116 | const uint8_t* pY = input; 117 | const uint8_t* pUV = input + (width * height); 118 | uint32_t* out = output; 119 | int stride = width; 120 | width >>= 1; 121 | height >>= 1; 122 | 123 | for (int y = 0; y < height; y++) { 124 | for (int x = 0; x < width; x++) { 125 | int nY = (pY[0] + pY[1] + pY[stride] + pY[stride + 1]) >> 2; 126 | pY += 2; 127 | #ifdef __APPLE__ 128 | int nU = *pUV++; 129 | int nV = *pUV++; 130 | #else 131 | int nV = *pUV++; 132 | int nU = *pUV++; 133 | #endif 134 | 135 | *out++ = YUV2RGB(nY, nU, nV); 136 | } 137 | pY += stride; 138 | } 139 | } 140 | 141 | // Accepts a YUV 4:2:0 image with a plane of 8 bit Y samples followed by an 142 | // interleaved U/V plane containing 8 bit 2x2 subsampled chroma samples, 143 | // except the interleave order of U and V is reversed. Converts to a packed 144 | // RGB 565 bit output of the same pixel dimensions. 
145 | void ConvertYUV420SPToRGB565(const uint8_t* const input, uint16_t* const output,
146 |                              const int width, const int height) {
147 |   const uint8_t* pY = input;
148 |   const uint8_t* pUV = input + (width * height);
149 |   uint16_t* out = output;
150 |
151 |   for (int y = 0; y < height; y++) {
152 |     for (int x = 0; x < width; x++) {
153 |       int nY = *pY++;
154 |       int offset = (y >> 1) * width + 2 * (x >> 1);
155 | #ifdef __APPLE__
156 |       int nU = pUV[offset];
157 |       int nV = pUV[offset + 1];
158 | #else
159 |       int nV = pUV[offset];
160 |       int nU = pUV[offset + 1];
161 | #endif
162 |
163 |       nY -= 16;
164 |       nU -= 128;
165 |       nV -= 128;
166 |       if (nY < 0) nY = 0;
167 |
168 |       // This is the floating point equivalent. We do the conversion in integer
169 |       // because some Android devices do not have floating point in hardware.
170 |       // nR = (int)(1.164 * nY + 1.596 * nV);
171 |       // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
172 |       // nB = (int)(1.164 * nY + 2.018 * nU);
173 |
174 |       int nR = 1192 * nY + 1634 * nV;
175 |       int nG = 1192 * nY - 833 * nV - 400 * nU;
176 |       int nB = 1192 * nY + 2066 * nU;
177 |
178 |       nR = MIN(kMaxChannelValue, MAX(0, nR));
179 |       nG = MIN(kMaxChannelValue, MAX(0, nG));
180 |       nB = MIN(kMaxChannelValue, MAX(0, nB));
181 |
182 |       // Shift more than for ARGB8888 and apply appropriate bitmask.
183 |       nR = (nR >> 13) & 0x1f;
184 |       nG = (nG >> 12) & 0x3f;
185 |       nB = (nB >> 13) & 0x1f;
186 |
187 |       // R is high 5 bits, G is middle 6 bits, and B is low 5 bits.
188 |       *out++ = (nR << 11) | (nG << 5) | nB;
189 |     }
190 |   }
191 | }
192 |
--------------------------------------------------------------------------------
/scan/src/main/cpp/yuv2rgb.h:
--------------------------------------------------------------------------------
1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | // This is a collection of routines which converts various YUV image formats 17 | // to (A)RGB. 18 | 19 | #ifndef ORG_TENSORFLOW_JNI_IMAGEUTILS_YUV2RGB_H_ 20 | #define ORG_TENSORFLOW_JNI_IMAGEUTILS_YUV2RGB_H_ 21 | 22 | #include 23 | 24 | #ifdef __cplusplus 25 | extern "C" { 26 | #endif 27 | 28 | void ConvertYUV420ToARGB8888(const uint8_t* const yData, 29 | const uint8_t* const uData, 30 | const uint8_t* const vData, uint32_t* const output, 31 | const int width, const int height, 32 | const int y_row_stride, const int uv_row_stride, 33 | const int uv_pixel_stride); 34 | 35 | // Converts YUV420 semi-planar data to ARGB 8888 data using the supplied width 36 | // and height. The input and output must already be allocated and non-null. 37 | // For efficiency, no error checking is performed. 38 | void ConvertYUV420SPToARGB8888(const uint8_t* const pY, 39 | const uint8_t* const pUV, uint32_t* const output, 40 | const int width, const int height); 41 | 42 | // The same as above, but downsamples each dimension to half size. 43 | void ConvertYUV420SPToARGB8888HalfSize(const uint8_t* const input, 44 | uint32_t* const output, int width, 45 | int height); 46 | 47 | // Converts YUV420 semi-planar data to RGB 565 data using the supplied width 48 | // and height. The input and output must already be allocated and non-null. 49 | // For efficiency, no error checking is performed. 
50 | void ConvertYUV420SPToRGB565(const uint8_t* const input, uint16_t* const output, 51 | const int width, const int height); 52 | 53 | #ifdef __cplusplus 54 | } 55 | #endif 56 | 57 | #endif // ORG_TENSORFLOW_JNI_IMAGEUTILS_YUV2RGB_H_ 58 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/common/util/CameraUtil.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.common.util; 2 | 3 | import android.hardware.Camera; 4 | import android.util.Size; 5 | 6 | import java.util.ArrayList; 7 | import java.util.Collections; 8 | import java.util.Comparator; 9 | import java.util.List; 10 | 11 | /** 12 | * @author pd_liu on 2018/1/5. 13 | *

14 | * 相机工具类 15 | *

16 | *

17 | * 获取相机ID {@link #getCameraId()} . 18 | * 开启、关闭闪光灯 {@link #startFlash(Camera, boolean)} [Flash为手电筒模式]. 19 | *

20 | */ 21 | 22 | public final class CameraUtil { 23 | 24 | private static final String TAG_LOG = "CameraUtil"; 25 | 26 | private CameraUtil() { 27 | } 28 | 29 | /** 30 | * 获取CameraID用于打开指定相机 31 | * 32 | * @return cameraId. 33 | */ 34 | public static int getCameraId() { 35 | 36 | Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); 37 | 38 | int numberOfCameras = Camera.getNumberOfCameras(); 39 | 40 | for (int i = 0; i < numberOfCameras; i++) { 41 | Camera.getCameraInfo(i, cameraInfo); 42 | 43 | if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) { 44 | return i; 45 | } 46 | } 47 | 48 | //No camera found 49 | return -1; 50 | } 51 | 52 | public static boolean startShutterSound(Camera camera, boolean isOpen) { 53 | if (camera == null) { 54 | return false; 55 | } 56 | camera.enableShutterSound(isOpen); 57 | return true; 58 | } 59 | 60 | /** 61 | * 闪光灯的开、关 62 | * 63 | * @param open 是否开启闪光灯 64 | * @return 是否成功执行 65 | */ 66 | public static boolean startFlash(Camera camera, boolean open) { 67 | if (camera == null) { 68 | return false; 69 | } 70 | Camera.Parameters parameters = camera.getParameters(); 71 | if (open) { 72 | parameters.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH); 73 | } else { 74 | parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF); 75 | } 76 | camera.setParameters(parameters); 77 | return true; 78 | } 79 | 80 | public static Size chooseOptimalSize(final Size[] choices, final int width, final int height) { 81 | final int minSize = Math.max(Math.min(width, height), 320); 82 | final Size desiredSize = new Size(width, height); 83 | 84 | // Collect the supported resolutions that are at least as big as the preview Surface 85 | boolean exactSizeFound = false; 86 | final List bigEnough = new ArrayList(); 87 | final List tooSmall = new ArrayList(); 88 | for (final Size option : choices) { 89 | if (option.equals(desiredSize)) { 90 | // Set the size but don't return yet so that remaining sizes will still be logged. 
91 | exactSizeFound = true; 92 | } 93 | 94 | if (option.getHeight() >= minSize && option.getWidth() >= minSize) { 95 | bigEnough.add(option); 96 | } else { 97 | tooSmall.add(option); 98 | } 99 | } 100 | 101 | 102 | if (exactSizeFound) { 103 | return desiredSize; 104 | } 105 | 106 | // Pick the smallest of those, assuming we found any 107 | if (bigEnough.size() > 0) { 108 | final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea()); 109 | LogUtil.e(TAG_LOG, "Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight()); 110 | return chosenSize; 111 | } else { 112 | LogUtil.e(TAG_LOG, "Couldn't find any suitable preview size"); 113 | return choices[0]; 114 | } 115 | } 116 | 117 | /** 118 | * Compares two {@code Size}s based on their areas. 119 | */ 120 | static class CompareSizesByArea implements Comparator { 121 | @Override 122 | public int compare(final Size lhs, final Size rhs) { 123 | // We cast here to ensure the multiplications won't overflow 124 | return Long.signum( 125 | (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight()); 126 | } 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/common/util/ImageUtil.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.common.util; 2 | 3 | import android.graphics.Matrix; 4 | 5 | /** 6 | * @author pd_liu on 2018/1/2. 7 | *

8 | * 图片处理工具类 9 | *

10 | */ 11 | 12 | public final class ImageUtil { 13 | 14 | /** 15 | * Returns a transformation matrix from one reference frame into another. 16 | * Handles cropping (if maintaining aspect ratio is desired) and rotation. 17 | * 18 | * @param sourceWidth Width of source frame. 19 | * @param sourceHeight Height of source frame. 20 | * @param dstWidth Width of destination frame. 21 | * @param dstHeight Height of destination frame. 22 | * @param applyRotation Amount of rotation to apply from one frame to another. 23 | * Must be a multiple of 90. 24 | * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant, 25 | * cropping the image if necessary. 26 | * @return The transformation fulfilling the desired requirements. 27 | */ 28 | public static Matrix getTransformationMatrix( 29 | int sourceWidth 30 | , int sourceHeight 31 | , int dstWidth 32 | , int dstHeight 33 | , int applyRotation 34 | , boolean maintainAspectRatio) { 35 | 36 | final Matrix matrix = new Matrix(); 37 | 38 | if (applyRotation != 0) { 39 | 40 | // Translate so center of image is at origin. 41 | //将图片移动到中心点 42 | matrix.postTranslate(-sourceWidth / 2.0f, -sourceHeight / 2.0f); 43 | 44 | // Rotate around origin. 45 | //将图片旋转 46 | matrix.postRotate(applyRotation); 47 | } 48 | 49 | //如果有的话,说明已经应用的旋转,然后确定每个轴需要多少缩放。 50 | //是否需要转置 51 | final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0; 52 | 53 | //如果需要,那么将宽、高进行转置 54 | final int inWidth = transpose ? sourceHeight : sourceWidth; 55 | final int inHeight = transpose ? sourceWidth : sourceHeight; 56 | 57 | 58 | //Apply scaling if necessary. 59 | //判断原始图片,与需要的图片是否需要进行Scaling操作 60 | if (inWidth != dstWidth || inHeight != dstHeight) { 61 | 62 | final float scaleFactoryX = dstWidth / (float) inWidth; 63 | final float scaleFactoryY = dstHeight / (float) inHeight; 64 | 65 | 66 | if (maintainAspectRatio) { 67 | 68 | // Scale by minimum factor so that dst is filled completely while 69 | // maintaining the aspect ratio. 
Some image may fall off the edge. 70 | //按最小因子进行缩放,使dst完全填充,同时保持纵横比。有些图像可能会从边缘掉下来。 71 | // TODO: 2018/1/2 以下代码可能会导致图片边缘丢失 72 | final float scaleFactor = Math.max(scaleFactoryX, scaleFactoryY); 73 | matrix.postScale(scaleFactor, scaleFactor); 74 | } else { 75 | // Scale exactly to fill dst from src. 76 | //将原始图片精确地完整填充目标图片 77 | matrix.postScale(scaleFactoryX, scaleFactoryY); 78 | } 79 | 80 | } 81 | 82 | 83 | if (applyRotation != 0) { 84 | // Translate back from origin centered reference to destination frame. 85 | //将原点中心的引用转换为目标帧 86 | matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f); 87 | } 88 | 89 | return matrix; 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/common/util/ImageUtils.java: -------------------------------------------------------------------------------- 1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | package com.rossia.life.scan.common.util; 17 | 18 | import android.graphics.Bitmap; 19 | import android.graphics.Matrix; 20 | import android.os.Environment; 21 | 22 | import com.rossia.life.scan.tensor.env.Logger; 23 | 24 | import java.io.File; 25 | import java.io.FileOutputStream; 26 | 27 | /** 28 | * Utility class for manipulating images. 29 | * @author pd_liu. 
30 | **/ 31 | public class ImageUtils { 32 | @SuppressWarnings("unused") 33 | private static final Logger LOGGER = new Logger(); 34 | 35 | static { 36 | try { 37 | System.loadLibrary("tensorflow_demo"); 38 | } catch (UnsatisfiedLinkError e) { 39 | LOGGER.w("Native library not found, native RGB -> YUV conversion may be unavailable."); 40 | } 41 | } 42 | 43 | /** 44 | * Utility method to compute the allocated size in bytes of a YUV420SP image 45 | * of the given dimensions. 46 | */ 47 | public static int getYUVByteSize(final int width, final int height) { 48 | // The luminance plane requires 1 byte per pixel. 49 | final int ySize = width * height; 50 | 51 | // The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up. 52 | // Each 2x2 block takes 2 bytes to encode, one each for U and V. 53 | final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2; 54 | 55 | return ySize + uvSize; 56 | } 57 | 58 | /** 59 | * Saves a Bitmap object to disk for analysis. 60 | * 61 | * @param bitmap The bitmap to save. 62 | */ 63 | public static void saveBitmap(final Bitmap bitmap) { 64 | saveBitmap(bitmap, "preview.png"); 65 | } 66 | 67 | /** 68 | * Saves a Bitmap object to disk for analysis. 69 | * 70 | * @param bitmap The bitmap to save. 71 | * @param filename The location to save the bitmap to. 
72 | */ 73 | public static void saveBitmap(final Bitmap bitmap, final String filename) { 74 | final String root = 75 | Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow"; 76 | LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root); 77 | final File myDir = new File(root); 78 | 79 | if (!myDir.mkdirs()) { 80 | LOGGER.i("Make dir failed"); 81 | } 82 | 83 | final String fname = filename; 84 | final File file = new File(myDir, fname); 85 | if (file.exists()) { 86 | file.delete(); 87 | } 88 | try { 89 | final FileOutputStream out = new FileOutputStream(file); 90 | bitmap.compress(Bitmap.CompressFormat.PNG, 99, out); 91 | out.flush(); 92 | out.close(); 93 | } catch (final Exception e) { 94 | LOGGER.e(e, "Exception!"); 95 | } 96 | } 97 | 98 | // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges 99 | // are normalized to eight bits. 100 | static final int kMaxChannelValue = 262143; 101 | 102 | // Always prefer the native implementation if available. 
103 | private static boolean useNativeConversion = true; 104 | 105 | public static void convertYUV420SPToARGB8888( 106 | byte[] input, 107 | int width, 108 | int height, 109 | int[] output) { 110 | if (useNativeConversion) { 111 | try { 112 | ImageUtils.convertYUV420SPToARGB8888(input, output, width, height, false); 113 | return; 114 | } catch (UnsatisfiedLinkError e) { 115 | LOGGER.w( 116 | "Native YUV420SP -> RGB implementation not found, falling back to Java implementation"); 117 | useNativeConversion = false; 118 | } 119 | } 120 | 121 | // Java implementation of YUV420SP to ARGB8888 converting 122 | final int frameSize = width * height; 123 | for (int j = 0, yp = 0; j < height; j++) { 124 | int uvp = frameSize + (j >> 1) * width; 125 | int u = 0; 126 | int v = 0; 127 | 128 | for (int i = 0; i < width; i++, yp++) { 129 | int y = 0xff & input[yp]; 130 | if ((i & 1) == 0) { 131 | v = 0xff & input[uvp++]; 132 | u = 0xff & input[uvp++]; 133 | } 134 | 135 | output[yp] = YUV2RGB(y, u, v); 136 | } 137 | } 138 | } 139 | 140 | private static int YUV2RGB(int y, int u, int v) { 141 | // Adjust and check YUV values 142 | y = (y - 16) < 0 ? 0 : (y - 16); 143 | u -= 128; 144 | v -= 128; 145 | 146 | // This is the floating point equivalent. We do the conversion in integer 147 | // because some Android devices do not have floating point in hardware. 148 | // nR = (int)(1.164 * nY + 2.018 * nU); 149 | // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU); 150 | // nB = (int)(1.164 * nY + 1.596 * nV); 151 | int y1192 = 1192 * y; 152 | int r = (y1192 + 1634 * v); 153 | int g = (y1192 - 833 * v - 400 * u); 154 | int b = (y1192 + 2066 * u); 155 | 156 | // Clipping RGB values to be inside boundaries [ 0 , kMaxChannelValue ] 157 | r = r > kMaxChannelValue ? kMaxChannelValue : (r < 0 ? 0 : r); 158 | g = g > kMaxChannelValue ? kMaxChannelValue : (g < 0 ? 0 : g); 159 | b = b > kMaxChannelValue ? kMaxChannelValue : (b < 0 ? 
0 : b); 160 | 161 | return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff); 162 | } 163 | 164 | 165 | public static void convertYUV420ToARGB8888( 166 | byte[] yData, 167 | byte[] uData, 168 | byte[] vData, 169 | int width, 170 | int height, 171 | int yRowStride, 172 | int uvRowStride, 173 | int uvPixelStride, 174 | int[] out) { 175 | if (useNativeConversion) { 176 | try { 177 | convertYUV420ToARGB8888( 178 | yData, uData, vData, out, width, height, yRowStride, uvRowStride, uvPixelStride, false); 179 | return; 180 | } catch (UnsatisfiedLinkError e) { 181 | LOGGER.w( 182 | "Native YUV420 -> RGB implementation not found, falling back to Java implementation"); 183 | useNativeConversion = false; 184 | } 185 | } 186 | 187 | int yp = 0; 188 | for (int j = 0; j < height; j++) { 189 | int pY = yRowStride * j; 190 | int pUV = uvRowStride * (j >> 1); 191 | 192 | for (int i = 0; i < width; i++) { 193 | int uv_offset = pUV + (i >> 1) * uvPixelStride; 194 | 195 | out[yp++] = YUV2RGB( 196 | 0xff & yData[pY + i], 197 | 0xff & uData[uv_offset], 198 | 0xff & vData[uv_offset]); 199 | } 200 | } 201 | } 202 | 203 | 204 | /** 205 | * Converts YUV420 semi-planar data to ARGB 8888 data using the supplied width and height. The 206 | * input and output must already be allocated and non-null. For efficiency, no error checking is 207 | * performed. 208 | * 209 | * @param input The array of YUV 4:2:0 input data. 210 | * @param output A pre-allocated array for the ARGB 8:8:8:8 output data. 211 | * @param width The width of the input image. 212 | * @param height The height of the input image. 213 | * @param halfSize If true, downsample to 50% in each dimension, otherwise not. 214 | */ 215 | private static native void convertYUV420SPToARGB8888( 216 | byte[] input, int[] output, int width, int height, boolean halfSize); 217 | 218 | /** 219 | * Converts YUV420 semi-planar data to ARGB 8888 data using the supplied width 220 | * and height. 
The input and output must already be allocated and non-null. 221 | * For efficiency, no error checking is performed. 222 | * 223 | * @param y 224 | * @param u 225 | * @param v 226 | * @param uvPixelStride 227 | * @param width The width of the input image. 228 | * @param height The height of the input image. 229 | * @param halfSize If true, downsample to 50% in each dimension, otherwise not. 230 | * @param output A pre-allocated array for the ARGB 8:8:8:8 output data. 231 | */ 232 | private static native void convertYUV420ToARGB8888( 233 | byte[] y, 234 | byte[] u, 235 | byte[] v, 236 | int[] output, 237 | int width, 238 | int height, 239 | int yRowStride, 240 | int uvRowStride, 241 | int uvPixelStride, 242 | boolean halfSize); 243 | 244 | /** 245 | * Converts YUV420 semi-planar data to RGB 565 data using the supplied width 246 | * and height. The input and output must already be allocated and non-null. 247 | * For efficiency, no error checking is performed. 248 | * 249 | * @param input The array of YUV 4:2:0 input data. 250 | * @param output A pre-allocated array for the RGB 5:6:5 output data. 251 | * @param width The width of the input image. 252 | * @param height The height of the input image. 253 | */ 254 | private static native void convertYUV420SPToRGB565( 255 | byte[] input, byte[] output, int width, int height); 256 | 257 | /** 258 | * Converts 32-bit ARGB8888 image data to YUV420SP data. This is useful, for 259 | * instance, in creating data to feed the classes that rely on raw camera 260 | * preview frames. 261 | * 262 | * @param input An array of input pixels in ARGB8888 format. 263 | * @param output A pre-allocated array for the YUV420SP output data. 264 | * @param width The width of the input image. 265 | * @param height The height of the input image. 266 | */ 267 | private static native void convertARGB8888ToYUV420SP( 268 | int[] input, byte[] output, int width, int height); 269 | 270 | /** 271 | * Converts 16-bit RGB565 image data to YUV420SP data. 
This is useful, for 272 | * instance, in creating data to feed the classes that rely on raw camera 273 | * preview frames. 274 | * 275 | * @param input An array of input pixels in RGB565 format. 276 | * @param output A pre-allocated array for the YUV420SP output data. 277 | * @param width The width of the input image. 278 | * @param height The height of the input image. 279 | */ 280 | private static native void convertRGB565ToYUV420SP( 281 | byte[] input, byte[] output, int width, int height); 282 | 283 | /** 284 | * Returns a transformation matrix from one reference frame into another. 285 | * Handles cropping (if maintaining aspect ratio is desired) and rotation. 286 | * 287 | * @param srcWidth Width of source frame. 288 | * @param srcHeight Height of source frame. 289 | * @param dstWidth Width of destination frame. 290 | * @param dstHeight Height of destination frame. 291 | * @param applyRotation Amount of rotation to apply from one frame to another. 292 | * Must be a multiple of 90. 293 | * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant, 294 | * cropping the image if necessary. 295 | * @return The transformation fulfilling the desired requirements. 296 | */ 297 | public static Matrix getTransformationMatrix( 298 | final int srcWidth, 299 | final int srcHeight, 300 | final int dstWidth, 301 | final int dstHeight, 302 | final int applyRotation, 303 | final boolean maintainAspectRatio) { 304 | final Matrix matrix = new Matrix(); 305 | 306 | if (applyRotation != 0) { 307 | if (applyRotation % 90 != 0) { 308 | LOGGER.w("Rotation of %d % 90 != 0", applyRotation); 309 | } 310 | 311 | // Translate so center of image is at origin. 312 | matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f); 313 | 314 | // Rotate around origin. 315 | matrix.postRotate(applyRotation); 316 | } 317 | 318 | // Account for the already applied rotation, if any, and then determine how 319 | // much scaling is needed for each axis. 
320 | final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0; 321 | 322 | final int inWidth = transpose ? srcHeight : srcWidth; 323 | final int inHeight = transpose ? srcWidth : srcHeight; 324 | 325 | // Apply scaling if necessary. 326 | if (inWidth != dstWidth || inHeight != dstHeight) { 327 | final float scaleFactorX = dstWidth / (float) inWidth; 328 | final float scaleFactorY = dstHeight / (float) inHeight; 329 | 330 | if (maintainAspectRatio) { 331 | // Scale by minimum factor so that dst is filled completely while 332 | // maintaining the aspect ratio. Some image may fall off the edge. 333 | final float scaleFactor = Math.max(scaleFactorX, scaleFactorY); 334 | matrix.postScale(scaleFactor, scaleFactor); 335 | } else { 336 | // Scale exactly to fill dst from src. 337 | matrix.postScale(scaleFactorX, scaleFactorY); 338 | } 339 | } 340 | 341 | if (applyRotation != 0) { 342 | // Translate back from origin centered reference to destination frame. 343 | matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f); 344 | } 345 | 346 | return matrix; 347 | } 348 | } 349 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/common/util/LogUtil.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.common.util; 2 | 3 | import android.util.Log; 4 | 5 | /** 6 | * @author pd_liu on 2017/12/29. 7 | *

8 | * Log日志打印工具类 9 | *

10 | */ 11 | 12 | public final class LogUtil { 13 | 14 | private volatile static boolean sDebug = true; 15 | 16 | private LogUtil() { 17 | } 18 | 19 | public static void setDebug(boolean isDebug) { 20 | sDebug = isDebug; 21 | } 22 | 23 | public static void e(String tag, String message) { 24 | if (sDebug) { 25 | Log.e(tag, message); 26 | } 27 | } 28 | 29 | } 30 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/common/util/PhotoEnhanceUtil.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.common.util; 2 | 3 | import android.graphics.Bitmap; 4 | import android.graphics.Canvas; 5 | import android.graphics.ColorMatrix; 6 | import android.graphics.ColorMatrixColorFilter; 7 | import android.graphics.Paint; 8 | 9 | /** 10 | * @author pd_liu on 2018/1/10. 11 | *

12 | * 图片增强 13 | *

14 | *

15 | * 支持图片的亮度、对比度、饱和度调节{@link #handleImage(int)} . 16 | *

17 | */ 18 | 19 | public class PhotoEnhanceUtil { 20 | 21 | /** 22 | * 处理图片的模式:饱和度、亮度、对比度 23 | */ 24 | public final int Enhance_Saturation = 0; 25 | public final int Enhance_Brightness = 1; 26 | public final int Enhance_Contrast = 2; 27 | 28 | /** 29 | * Bitmap 30 | */ 31 | private Bitmap mBitmap; 32 | private float saturationNum = 1.0F; 33 | private float brightNum = 0.0F; 34 | private float contrastNum = 1.0F; 35 | private ColorMatrix mAllMatrix = null; 36 | private ColorMatrix saturationMatrix = null; 37 | private ColorMatrix contrastMatrix = null; 38 | private ColorMatrix brightnessMatrix = null; 39 | 40 | public PhotoEnhanceUtil() { 41 | } 42 | 43 | public PhotoEnhanceUtil(Bitmap bitmap) { 44 | this.mBitmap = bitmap; 45 | } 46 | 47 | public float getSaturation() { 48 | return this.saturationNum; 49 | } 50 | 51 | public void setSaturation(int saturationNum) { 52 | this.saturationNum = (float) saturationNum * 1.0F / 128.0F; 53 | } 54 | 55 | public float getBrightness() { 56 | return this.brightNum; 57 | } 58 | 59 | public void setBrightness(int brightNum) { 60 | this.brightNum = (float) (brightNum - 128); 61 | } 62 | 63 | public float getContrast() { 64 | return this.contrastNum; 65 | } 66 | 67 | public void setContrast(int contrastNum) { 68 | this.contrastNum = (float) ((double) (contrastNum / 2 + 64) / 128.0D); 69 | } 70 | 71 | public Bitmap handleImage(int type) { 72 | Bitmap bmp = Bitmap.createBitmap(this.mBitmap.getWidth(), this.mBitmap.getHeight(), Bitmap.Config.ARGB_8888); 73 | Canvas canvas = new Canvas(bmp); 74 | Paint paint = new Paint(); 75 | paint.setAntiAlias(true); 76 | if (this.mAllMatrix == null) { 77 | this.mAllMatrix = new ColorMatrix(); 78 | } 79 | 80 | if (this.saturationMatrix == null) { 81 | this.saturationMatrix = new ColorMatrix(); 82 | } 83 | 84 | if (this.contrastMatrix == null) { 85 | this.contrastMatrix = new ColorMatrix(); 86 | } 87 | 88 | if (this.brightnessMatrix == null) { 89 | this.brightnessMatrix = new ColorMatrix(); 90 | } 91 
| 92 | switch (type) { 93 | case 0: 94 | this.saturationMatrix.reset(); 95 | this.saturationMatrix.setSaturation(this.saturationNum); 96 | break; 97 | case 1: 98 | this.brightnessMatrix.reset(); 99 | this.brightnessMatrix.set(new float[]{1.0F, 0.0F, 0.0F, 0.0F, this.brightNum, 0.0F, 1.0F, 0.0F, 0.0F, this.brightNum, 0.0F, 0.0F, 1.0F, 0.0F, this.brightNum, 0.0F, 0.0F, 0.0F, 1.0F, 0.0F}); 100 | break; 101 | case 2: 102 | float regulateBright = 0.0F; 103 | regulateBright = (1.0F - this.contrastNum) * 128.0F; 104 | this.contrastMatrix.reset(); 105 | this.contrastMatrix.set(new float[]{this.contrastNum, 0.0F, 0.0F, 0.0F, regulateBright, 0.0F, this.contrastNum, 0.0F, 0.0F, regulateBright, 0.0F, 0.0F, this.contrastNum, 0.0F, regulateBright, 0.0F, 0.0F, 0.0F, 1.0F, 0.0F}); 106 | } 107 | 108 | this.mAllMatrix.reset(); 109 | this.mAllMatrix.postConcat(this.saturationMatrix); 110 | this.mAllMatrix.postConcat(this.brightnessMatrix); 111 | this.mAllMatrix.postConcat(this.contrastMatrix); 112 | paint.setColorFilter(new ColorMatrixColorFilter(this.mAllMatrix)); 113 | canvas.drawBitmap(this.mBitmap, 0.0F, 0.0F, paint); 114 | return bmp; 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/common/util/ScreenUtil.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.common.util; 2 | 3 | import android.app.Activity; 4 | import android.view.Surface; 5 | 6 | /** 7 | * @author pd_liu on 2018/1/5. 8 | */ 9 | 10 | public final class ScreenUtil { 11 | 12 | private static final String TAG_LOG = "ScreenUtil"; 13 | 14 | private ScreenUtil() { 15 | } 16 | 17 | /** 18 | * 获取当前手机旋转的角度,影响因素{portrait、landscape} 19 | * 20 | * @param activity activity. 
21 | * @return 当前手机旋转的角度 22 | */ 23 | public static int getScreenOrientation(Activity activity) { 24 | 25 | int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); 26 | 27 | switch (rotation) { 28 | 29 | case Surface.ROTATION_270: 30 | return 270; 31 | 32 | case Surface.ROTATION_180: 33 | return 180; 34 | 35 | case Surface.ROTATION_90: 36 | return 90; 37 | 38 | default: 39 | return 0; 40 | } 41 | } 42 | 43 | /** 44 | * 根据当前手机屏幕的旋转角度,进而计算出Surface预览的角度 45 | * 46 | * @param activity activity. 47 | * @return Camera display orientation. 48 | */ 49 | public static int getDisplayOrientation(Activity activity) { 50 | 51 | int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); 52 | 53 | LogUtil.e(TAG_LOG, "rotation:" + rotation); 54 | switch (rotation) { 55 | 56 | case Surface.ROTATION_0: 57 | return 90; 58 | 59 | case Surface.ROTATION_90: 60 | return 0; 61 | 62 | case Surface.ROTATION_180: 63 | return 270; 64 | 65 | case Surface.ROTATION_270: 66 | return 180; 67 | 68 | default: 69 | return 0; 70 | 71 | } 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/tensor/TensorFlowObjectDetectionAPIModel.java: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
==============================================================================*/

package com.rossia.life.scan.tensor;

import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.RectF;
import android.os.Trace;

import com.rossia.life.scan.tensor.env.Logger;
import com.rossia.life.scan.tensor.interf.Classifier;

import org.tensorflow.Graph;
import org.tensorflow.Operation;
import org.tensorflow.contrib.android.TensorFlowInferenceInterface;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;
import java.util.Vector;

/**
 * Wrapper for frozen detection models trained using the Tensorflow Object Detection API:
 * github.com/tensorflow/models/tree/master/research/object_detection
 */
public class TensorFlowObjectDetectionAPIModel implements Classifier {
    private static final Logger LOGGER = new Logger();

    // Only return this many results.
    private static final int MAX_RESULTS = 100;

    // Config values.
    private String inputName;
    private int inputSize;

    // Pre-allocated buffers.
    private Vector<String> labels = new Vector<String>();
    private int[] intValues;
    private byte[] byteValues;
    private float[] outputLocations;
    private float[] outputScores;
    private float[] outputClasses;
    private float[] outputNumDetections;
    private String[] outputNames;

    private boolean logStats = false;

    private TensorFlowInferenceInterface inferenceInterface;

    /**
     * Initializes a native TensorFlow session for classifying images.
     *
     * @param assetManager  The asset manager to be used to load assets.
     * @param modelFilename The filepath of the model GraphDef protocol buffer.
     * @param labelFilename The filepath of label file for classes, prefixed with
     *                      "file:///android_asset/".
     * @param inputSize     The side length (pixels) of the square model input.
     * @return a ready-to-use {@link Classifier}.
     * @throws IOException if the label file cannot be read.
     */
    public static Classifier create(
            final AssetManager assetManager,
            final String modelFilename,
            final String labelFilename,
            final int inputSize) throws IOException {
        final TensorFlowObjectDetectionAPIModel d = new TensorFlowObjectDetectionAPIModel();

        // Load the class labels; the reader is closed even if readLine throws.
        final String actualFilename = labelFilename.split("file:///android_asset/")[1];
        final InputStream labelsInput = assetManager.open(actualFilename);
        try (BufferedReader br = new BufferedReader(new InputStreamReader(labelsInput))) {
            String line;
            while ((line = br.readLine()) != null) {
                LOGGER.w(line);
                d.labels.add(line);
            }
        }

        d.inferenceInterface = new TensorFlowInferenceInterface(assetManager, modelFilename);

        final Graph g = d.inferenceInterface.graph();

        d.inputName = "image_tensor";
        // The inputName node has a shape of [N, H, W, C], where
        // N is the batch size
        // H = W are the height and width
        // C is the number of channels (3 for our purposes - RGB)
        final Operation inputOp = g.operation(d.inputName);
        if (inputOp == null) {
            throw new RuntimeException("Failed to find input Node '" + d.inputName + "'");
        }
        d.inputSize = inputSize;
        // The outputScoresName node has a shape of [N, NumLocations], where N
        // is the batch size.
        final Operation outputOp1 = g.operation("detection_scores");
        if (outputOp1 == null) {
            throw new RuntimeException("Failed to find output Node 'detection_scores'");
        }
        final Operation outputOp2 = g.operation("detection_boxes");
        if (outputOp2 == null) {
            throw new RuntimeException("Failed to find output Node 'detection_boxes'");
        }
        final Operation outputOp3 = g.operation("detection_classes");
        if (outputOp3 == null) {
            throw new RuntimeException("Failed to find output Node 'detection_classes'");
        }

        // Pre-allocate buffers.
        d.outputNames = new String[]{"detection_boxes", "detection_scores",
                "detection_classes", "num_detections"};
        d.intValues = new int[d.inputSize * d.inputSize];
        d.byteValues = new byte[d.inputSize * d.inputSize * 3];
        d.outputScores = new float[MAX_RESULTS];
        d.outputLocations = new float[MAX_RESULTS * 4];
        d.outputClasses = new float[MAX_RESULTS];
        d.outputNumDetections = new float[1];
        return d;
    }

    private TensorFlowObjectDetectionAPIModel() {
    }

    /**
     * Runs one inference pass on the given bitmap and returns the detections,
     * sorted by descending confidence and scaled back to model-input pixel
     * coordinates.
     *
     * @param bitmap input image; expected to be inputSize x inputSize.
     * @return up to {@code MAX_RESULTS} recognitions.
     */
    @Override
    public List<Recognition> recognizeImage(final Bitmap bitmap) {
        // Log this method so that it can be analyzed with systrace.
        Trace.beginSection("recognizeImage");

        Trace.beginSection("preprocessBitmap");
        // Preprocess the image data from 0-255 int to normalized float based
        // on the provided parameters.
        bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());

        // Unpack ARGB ints into interleaved RGB bytes (model input layout).
        for (int i = 0; i < intValues.length; ++i) {
            byteValues[i * 3 + 2] = (byte) (intValues[i] & 0xFF);
            byteValues[i * 3 + 1] = (byte) ((intValues[i] >> 8) & 0xFF);
            byteValues[i * 3 + 0] = (byte) ((intValues[i] >> 16) & 0xFF);
        }
        Trace.endSection(); // preprocessBitmap

        // Copy the input data into TensorFlow.
        Trace.beginSection("feed");
        inferenceInterface.feed(inputName, byteValues, 1, inputSize, inputSize, 3);
        Trace.endSection();

        // Run the inference call.
        Trace.beginSection("run");
        inferenceInterface.run(outputNames, logStats);
        Trace.endSection();

        // Copy the output Tensor back into the output array.
        // NOTE(review): outputNumDetections is fetched but not consumed below;
        // all MAX_RESULTS slots are converted regardless of the reported count.
        Trace.beginSection("fetch");
        outputLocations = new float[MAX_RESULTS * 4];
        outputScores = new float[MAX_RESULTS];
        outputClasses = new float[MAX_RESULTS];
        outputNumDetections = new float[1];
        inferenceInterface.fetch(outputNames[0], outputLocations);
        inferenceInterface.fetch(outputNames[1], outputScores);
        inferenceInterface.fetch(outputNames[2], outputClasses);
        inferenceInterface.fetch(outputNames[3], outputNumDetections);
        Trace.endSection();

        // Find the best detections.
        final PriorityQueue<Recognition> pq =
                new PriorityQueue<Recognition>(
                        1,
                        new Comparator<Recognition>() {
                            @Override
                            public int compare(final Recognition lhs, final Recognition rhs) {
                                // Intentionally reversed to put high confidence at the head of the queue.
                                return Float.compare(rhs.getConfidence(), lhs.getConfidence());
                            }
                        });

        // Scale them back to the input size.
        for (int i = 0; i < outputScores.length; ++i) {
            final RectF detection =
                    new RectF(
                            outputLocations[4 * i + 1] * inputSize,
                            outputLocations[4 * i] * inputSize,
                            outputLocations[4 * i + 3] * inputSize,
                            outputLocations[4 * i + 2] * inputSize);
            pq.add(
                    new Recognition("" + i, labels.get((int) outputClasses[i]), outputScores[i], detection));
        }

        // BUG FIX: the loop bound must be snapshotted before polling. Evaluating
        // Math.min(pq.size(), MAX_RESULTS) in the loop condition shrinks the bound
        // on every poll(), silently dropping roughly half of the detections.
        final ArrayList<Recognition> recognitions = new ArrayList<Recognition>();
        final int numResults = Math.min(pq.size(), MAX_RESULTS);
        for (int i = 0; i < numResults; ++i) {
            recognitions.add(pq.poll());
        }
        Trace.endSection(); // "recognizeImage"
        return recognitions;
    }

    @Override
    public void enableStatLogging(final boolean logStats) {
        this.logStats = logStats;
    }

    @Override
    public String getStatString() {
        return inferenceInterface.getStatString();
    }

    @Override
    public void close() {
        inferenceInterface.close();
    }
}
==============================================================================*/

package com.rossia.life.scan.tensor.env;

import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Align;
import android.graphics.Paint.Style;
import android.graphics.Rect;
import android.graphics.Typeface;

import java.util.Vector;

/**
 * A class that encapsulates the tedious bits of rendering legible, bordered text onto a canvas.
 */
public class BorderedText {
    private final Paint interiorPaint;
    private final Paint exteriorPaint;

    private final float textSize;

    /**
     * Creates a left-aligned bordered text object with a white interior, and a black exterior with
     * the specified text size.
     *
     * @param textSize text size in pixels
     */
    public BorderedText(final float textSize) {
        this(Color.WHITE, Color.BLACK, textSize);
    }

    /**
     * Create a bordered text object with the specified interior and exterior colors, text size and
     * alignment.
     *
     * @param interiorColor the interior text color
     * @param exteriorColor the exterior text color
     * @param textSize      text size in pixels
     */
    public BorderedText(final int interiorColor, final int exteriorColor, final float textSize) {
        interiorPaint = new Paint();
        interiorPaint.setTextSize(textSize);
        interiorPaint.setColor(interiorColor);
        interiorPaint.setStyle(Style.FILL);
        interiorPaint.setAntiAlias(false);
        interiorPaint.setAlpha(255);

        exteriorPaint = new Paint();
        exteriorPaint.setTextSize(textSize);
        exteriorPaint.setColor(exteriorColor);
        exteriorPaint.setStyle(Style.FILL_AND_STROKE);
        // The border is drawn as a stroke an eighth of the glyph size wide.
        exteriorPaint.setStrokeWidth(textSize / 8);
        exteriorPaint.setAntiAlias(false);
        exteriorPaint.setAlpha(255);

        this.textSize = textSize;
    }

    public void setTypeface(Typeface typeface) {
        interiorPaint.setTypeface(typeface);
        exteriorPaint.setTypeface(typeface);
    }

    /**
     * Draws a single bordered string: the exterior (border) paint first, then the
     * interior paint on top of it.
     */
    public void drawText(final Canvas canvas, final float posX, final float posY, final String text) {
        canvas.drawText(text, posX, posY, exteriorPaint);
        canvas.drawText(text, posX, posY, interiorPaint);
    }

    /**
     * Draws multiple lines stacked upward so that the LAST line sits at posY.
     * Restored generic type: the raw {@code Vector} did not compile with the
     * enhanced-for over {@code String}.
     */
    public void drawLines(Canvas canvas, final float posX, final float posY, Vector<String> lines) {
        int lineNum = 0;
        for (final String line : lines) {
            drawText(canvas, posX, posY - getTextSize() * (lines.size() - lineNum - 1), line);
            ++lineNum;
        }
    }

    public void setInteriorColor(final int color) {
        interiorPaint.setColor(color);
    }

    public void setExteriorColor(final int color) {
        exteriorPaint.setColor(color);
    }

    public float getTextSize() {
        return textSize;
    }

    public void setAlpha(final int alpha) {
        interiorPaint.setAlpha(alpha);
        exteriorPaint.setAlpha(alpha);
    }

    public void getTextBounds(
            final String line, final int index, final int count, final Rect lineBounds) {
        interiorPaint.getTextBounds(line, index, count, lineBounds);
    }

    public void setTextAlign(final Align align) {
        interiorPaint.setTextAlign(align);
        exteriorPaint.setTextAlign(align);
    }
}
25 | */ 26 | public final class Logger { 27 | private static final String DEFAULT_TAG = "tensorflow"; 28 | private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG; 29 | 30 | // Classes to be ignored when examining the stack trace 31 | private static final Set IGNORED_CLASS_NAMES; 32 | 33 | static { 34 | IGNORED_CLASS_NAMES = new HashSet(3); 35 | IGNORED_CLASS_NAMES.add("dalvik.system.VMStack"); 36 | IGNORED_CLASS_NAMES.add("java.lang.Thread"); 37 | IGNORED_CLASS_NAMES.add(Logger.class.getCanonicalName()); 38 | } 39 | 40 | private final String tag; 41 | private final String messagePrefix; 42 | private int minLogLevel = DEFAULT_MIN_LOG_LEVEL; 43 | 44 | /** 45 | * Creates a Logger using the class name as the message prefix. 46 | * 47 | * @param clazz the simple name of this class is used as the message prefix. 48 | */ 49 | public Logger(final Class clazz) { 50 | this(clazz.getSimpleName()); 51 | } 52 | 53 | /** 54 | * Creates a Logger using the specified message prefix. 55 | * 56 | * @param messagePrefix is prepended to the text of every message. 57 | */ 58 | public Logger(final String messagePrefix) { 59 | this(DEFAULT_TAG, messagePrefix); 60 | } 61 | 62 | /** 63 | * Creates a Logger with a custom tag and a custom message prefix. If the message prefix 64 | * is set to
null
, the caller's class name is used as the prefix. 65 | * 66 | * @param tag identifies the source of a log message. 67 | * @param messagePrefix prepended to every message if non-null. If null, the name of the caller is 68 | * being used 69 | */ 70 | public Logger(final String tag, final String messagePrefix) { 71 | this.tag = tag; 72 | final String prefix = messagePrefix == null ? getCallerSimpleName() : messagePrefix; 73 | this.messagePrefix = (prefix.length() > 0) ? prefix + ": " : prefix; 74 | } 75 | 76 | /** 77 | * Creates a Logger using the caller's class name as the message prefix. 78 | */ 79 | public Logger() { 80 | this(DEFAULT_TAG, null); 81 | } 82 | 83 | /** 84 | * Creates a Logger using the caller's class name as the message prefix. 85 | */ 86 | public Logger(final int minLogLevel) { 87 | this(DEFAULT_TAG, null); 88 | this.minLogLevel = minLogLevel; 89 | } 90 | 91 | public void setMinLogLevel(final int minLogLevel) { 92 | this.minLogLevel = minLogLevel; 93 | } 94 | 95 | public boolean isLoggable(final int logLevel) { 96 | return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel); 97 | } 98 | 99 | /** 100 | * Return caller's simple name. 101 | * 102 | * Android getStackTrace() returns an array that looks like this: 103 | * stackTrace[0]: dalvik.system.VMStack 104 | * stackTrace[1]: java.lang.Thread 105 | * stackTrace[2]: com.google.android.apps.unveil.env.UnveilLogger 106 | * stackTrace[3]: com.google.android.apps.unveil.BaseApplication 107 | * 108 | * This function returns the simple version of the first non-filtered name. 109 | * 110 | * @return caller's simple name 111 | */ 112 | private static String getCallerSimpleName() { 113 | // Get the current callstack so we can pull the class of the caller off of it. 
114 | final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace(); 115 | 116 | for (final StackTraceElement elem : stackTrace) { 117 | final String className = elem.getClassName(); 118 | if (!IGNORED_CLASS_NAMES.contains(className)) { 119 | // We're only interested in the simple name of the class, not the complete package. 120 | final String[] classParts = className.split("\\."); 121 | return classParts[classParts.length - 1]; 122 | } 123 | } 124 | 125 | return Logger.class.getSimpleName(); 126 | } 127 | 128 | private String toMessage(final String format, final Object... args) { 129 | return messagePrefix + (args.length > 0 ? String.format(format, args) : format); 130 | } 131 | 132 | public void v(final String format, final Object... args) { 133 | if (isLoggable(Log.VERBOSE)) { 134 | Log.v(tag, toMessage(format, args)); 135 | } 136 | } 137 | 138 | public void v(final Throwable t, final String format, final Object... args) { 139 | if (isLoggable(Log.VERBOSE)) { 140 | Log.v(tag, toMessage(format, args), t); 141 | } 142 | } 143 | 144 | public void d(final String format, final Object... args) { 145 | if (isLoggable(Log.DEBUG)) { 146 | Log.d(tag, toMessage(format, args)); 147 | } 148 | } 149 | 150 | public void d(final Throwable t, final String format, final Object... args) { 151 | if (isLoggable(Log.DEBUG)) { 152 | Log.d(tag, toMessage(format, args), t); 153 | } 154 | } 155 | 156 | public void i(final String format, final Object... args) { 157 | if (isLoggable(Log.INFO)) { 158 | Log.i(tag, toMessage(format, args)); 159 | } 160 | } 161 | 162 | public void i(final Throwable t, final String format, final Object... args) { 163 | if (isLoggable(Log.INFO)) { 164 | Log.i(tag, toMessage(format, args), t); 165 | } 166 | } 167 | 168 | public void w(final String format, final Object... 
args) { 169 | if (isLoggable(Log.WARN)) { 170 | Log.w(tag, toMessage(format, args)); 171 | } 172 | } 173 | 174 | public void w(final Throwable t, final String format, final Object... args) { 175 | if (isLoggable(Log.WARN)) { 176 | Log.w(tag, toMessage(format, args), t); 177 | } 178 | } 179 | 180 | public void e(final String format, final Object... args) { 181 | if (isLoggable(Log.ERROR)) { 182 | Log.e(tag, toMessage(format, args)); 183 | } 184 | } 185 | 186 | public void e(final Throwable t, final String format, final Object... args) { 187 | if (isLoggable(Log.ERROR)) { 188 | Log.e(tag, toMessage(format, args), t); 189 | } 190 | } 191 | } 192 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/tensor/env/Size.java: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | package com.rossia.life.scan.tensor.env; 17 | 18 | import android.graphics.Bitmap; 19 | import android.text.TextUtils; 20 | 21 | import java.io.Serializable; 22 | import java.util.ArrayList; 23 | import java.util.List; 24 | 25 | /** 26 | * Size class independent of a Camera object. 
27 | */ 28 | public class Size implements Comparable, Serializable { 29 | 30 | // 1.4 went out with this UID so we'll need to maintain it to preserve pending queries when 31 | // upgrading. 32 | public static final long serialVersionUID = 7689808733290872361L; 33 | 34 | public final int width; 35 | public final int height; 36 | 37 | public Size(final int width, final int height) { 38 | this.width = width; 39 | this.height = height; 40 | } 41 | 42 | public Size(final Bitmap bmp) { 43 | this.width = bmp.getWidth(); 44 | this.height = bmp.getHeight(); 45 | } 46 | 47 | /** 48 | * Rotate a size by the given number of degrees. 49 | * @param size Size to rotate. 50 | * @param rotation Degrees {0, 90, 180, 270} to rotate the size. 51 | * @return Rotated size. 52 | */ 53 | public static Size getRotatedSize(final Size size, final int rotation) { 54 | if (rotation % 180 != 0) { 55 | // The phone is portrait, therefore the camera is sideways and frame should be rotated. 56 | return new Size(size.height, size.width); 57 | } 58 | return size; 59 | } 60 | 61 | public static Size parseFromString(String sizeString) { 62 | if (TextUtils.isEmpty(sizeString)) { 63 | return null; 64 | } 65 | 66 | sizeString = sizeString.trim(); 67 | 68 | // The expected format is "x". 
69 | final String[] components = sizeString.split("x"); 70 | if (components.length == 2) { 71 | try { 72 | final int width = Integer.parseInt(components[0]); 73 | final int height = Integer.parseInt(components[1]); 74 | return new Size(width, height); 75 | } catch (final NumberFormatException e) { 76 | return null; 77 | } 78 | } else { 79 | return null; 80 | } 81 | } 82 | 83 | public static List sizeStringToList(final String sizes) { 84 | final List sizeList = new ArrayList(); 85 | if (sizes != null) { 86 | final String[] pairs = sizes.split(","); 87 | for (final String pair : pairs) { 88 | final Size size = Size.parseFromString(pair); 89 | if (size != null) { 90 | sizeList.add(size); 91 | } 92 | } 93 | } 94 | return sizeList; 95 | } 96 | 97 | public static String sizeListToString(final List sizes) { 98 | String sizesString = ""; 99 | if (sizes != null && sizes.size() > 0) { 100 | sizesString = sizes.get(0).toString(); 101 | for (int i = 1; i < sizes.size(); i++) { 102 | sizesString += "," + sizes.get(i).toString(); 103 | } 104 | } 105 | return sizesString; 106 | } 107 | 108 | public final float aspectRatio() { 109 | return (float) width / (float) height; 110 | } 111 | 112 | @Override 113 | public int compareTo(final Size other) { 114 | return width * height - other.width * other.height; 115 | } 116 | 117 | @Override 118 | public boolean equals(final Object other) { 119 | if (other == null) { 120 | return false; 121 | } 122 | 123 | if (!(other instanceof Size)) { 124 | return false; 125 | } 126 | 127 | final Size otherSize = (Size) other; 128 | return (width == otherSize.width && height == otherSize.height); 129 | } 130 | 131 | @Override 132 | public int hashCode() { 133 | return width * 32713 + height; 134 | } 135 | 136 | @Override 137 | public String toString() { 138 | return dimensionsAsString(width, height); 139 | } 140 | 141 | public static final String dimensionsAsString(final int width, final int height) { 142 | return width + "x" + height; 143 | } 144 | } 
145 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/tensor/env/SplitTimer.java: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 The TensorFlow Authors. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | package com.rossia.life.scan.tensor.env; 17 | 18 | import android.os.SystemClock; 19 | 20 | /** 21 | * A simple utility timer for measuring CPU time and wall-clock splits. 
22 | */ 23 | public class SplitTimer { 24 | private final Logger logger; 25 | 26 | private long lastWallTime; 27 | private long lastCpuTime; 28 | 29 | public SplitTimer(final String name) { 30 | logger = new Logger(name); 31 | newSplit(); 32 | } 33 | 34 | public void newSplit() { 35 | lastWallTime = SystemClock.uptimeMillis(); 36 | lastCpuTime = SystemClock.currentThreadTimeMillis(); 37 | } 38 | 39 | public void endSplit(final String splitName) { 40 | final long currWallTime = SystemClock.uptimeMillis(); 41 | final long currCpuTime = SystemClock.currentThreadTimeMillis(); 42 | 43 | logger.i( 44 | "%s: cpu=%dms wall=%dms", 45 | splitName, currCpuTime - lastCpuTime, currWallTime - lastWallTime); 46 | 47 | lastWallTime = currWallTime; 48 | lastCpuTime = currCpuTime; 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/tensor/interf/Classifier.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.tensor.interf; 2 | 3 | import android.graphics.Bitmap; 4 | import android.graphics.RectF; 5 | 6 | import java.util.List; 7 | 8 | /** 9 | * @author pd_liu 2018/1/2. 10 | *

11 | * 分类器 12 | *

13 | *

14 | * 识别出的信息保存在 {@link Recognition} 15 | *

16 | * Generic interface for interacting with different recognition engines. 17 | */ 18 | public interface Classifier { 19 | /** 20 | * An immutable result returned by a Classifier describing what was recognized. 21 | */ 22 | public class Recognition { 23 | /** 24 | * A unique identifier for what has been recognized. Specific to the class, not the instance of 25 | * the object. 26 | */ 27 | private final String id; 28 | 29 | /** 30 | * Display name for the recognition. 31 | */ 32 | private final String title; 33 | 34 | /** 35 | * A sortable score for how good the recognition is relative to others. Higher should be better. 36 | */ 37 | private final Float confidence; 38 | 39 | /** 40 | * Optional location within the source image for the location of the recognized object. 41 | */ 42 | private RectF location; 43 | 44 | public Recognition( 45 | final String id, final String title, final Float confidence, final RectF location) { 46 | this.id = id; 47 | this.title = title; 48 | this.confidence = confidence; 49 | this.location = location; 50 | } 51 | 52 | public String getId() { 53 | return id; 54 | } 55 | 56 | public String getTitle() { 57 | return title; 58 | } 59 | 60 | public Float getConfidence() { 61 | return confidence; 62 | } 63 | 64 | public RectF getLocation() { 65 | return new RectF(location); 66 | } 67 | 68 | public void setLocation(RectF location) { 69 | this.location = location; 70 | } 71 | 72 | @Override 73 | public String toString() { 74 | String resultString = ""; 75 | if (id != null) { 76 | resultString += "[" + id + "] "; 77 | } 78 | 79 | if (title != null) { 80 | resultString += title + " "; 81 | } 82 | 83 | if (confidence != null) { 84 | resultString += String.format("(%.1f%%) ", confidence * 100.0f); 85 | } 86 | 87 | if (location != null) { 88 | resultString += location + " "; 89 | } 90 | 91 | return resultString.trim(); 92 | } 93 | } 94 | 95 | List recognizeImage(Bitmap bitmap); 96 | 97 | void enableStatLogging(final boolean debug); 98 | 99 | String 
getStatString(); 100 | 101 | void close(); 102 | } 103 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/tensor/tracking/MultiBoxTracker.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.tensor.tracking; 2 | 3 | import android.content.Context; 4 | import android.graphics.Canvas; 5 | import android.graphics.Color; 6 | import android.graphics.Matrix; 7 | import android.graphics.Paint; 8 | import android.graphics.Paint.Cap; 9 | import android.graphics.Paint.Join; 10 | import android.graphics.Paint.Style; 11 | import android.graphics.RectF; 12 | import android.text.TextUtils; 13 | import android.util.Pair; 14 | import android.util.TypedValue; 15 | import android.widget.Toast; 16 | 17 | import com.rossia.life.scan.common.util.ImageUtil; 18 | import com.rossia.life.scan.common.util.LogUtil; 19 | import com.rossia.life.scan.tensor.env.BorderedText; 20 | import com.rossia.life.scan.tensor.env.Logger; 21 | import com.rossia.life.scan.tensor.interf.Classifier; 22 | 23 | import java.util.LinkedList; 24 | import java.util.List; 25 | import java.util.Queue; 26 | 27 | /** 28 | * A tracker wrapping ObjectTracker that also handles non-max suppression and matching existing 29 | * objects to new detections. 30 | * 31 | * @author pd_liu 2017/12/15. 32 | *

33 | * Object detection tracker box. 34 | *

35 | */ 36 | public class MultiBoxTracker { 37 | 38 | private static final String TAG_LOG = "MultiBoxTracker"; 39 | 40 | private final Logger logger = new Logger(); 41 | 42 | private static final float TEXT_SIZE_DIP = 18; 43 | 44 | // Maximum percentage of a box that can be overlapped by another box at detection time. Otherwise 45 | // the lower scored box (new or old) will be removed. 46 | private static final float MAX_OVERLAP = 0.2f; 47 | 48 | private static final float MIN_SIZE = 16.0f; 49 | 50 | // Allow replacement of the tracked box with new results if 51 | // correlation has dropped below this level. 52 | private static final float MARGINAL_CORRELATION = 0.75f; 53 | 54 | // Consider object to be lost if correlation falls below this threshold. 55 | private static final float MIN_CORRELATION = 0.3f; 56 | 57 | /** 58 | * 当绘制Box边框完成后的Callback 59 | */ 60 | private OnDrawRectCompleteCallback mOnDrawRectCompleteCallback; 61 | 62 | /** 63 | * 当为检测出对象时Callback. 64 | */ 65 | private DetectionNothingCallback mDetectionNothingCallback; 66 | /** 67 | * 这里存储了绘制的颜色值 68 | */ 69 | private static final int[] COLORS = { 70 | Color.BLUE, Color.RED, Color.GREEN, Color.YELLOW, Color.CYAN, Color.MAGENTA, Color.WHITE, 71 | Color.parseColor("#55FF55"), Color.parseColor("#FFA500"), Color.parseColor("#FF8888"), 72 | Color.parseColor("#AAAAFF"), Color.parseColor("#FFFFAA"), Color.parseColor("#55AAAA"), 73 | Color.parseColor("#AA33AA"), Color.parseColor("#0D0068") 74 | }; 75 | 76 | private final Queue mAvailableColors = new LinkedList(); 77 | 78 | public ObjectTracker mObjectTracker; 79 | 80 | /** 81 | * 检测出的对象在屏幕上展示的Rect区域 82 | */ 83 | final List> mScreenRects = new LinkedList>(); 84 | 85 | private static class TrackedRecognition { 86 | ObjectTracker.TrackedObject trackedObject; 87 | RectF location; 88 | float detectionConfidence; 89 | int color; 90 | String title; 91 | } 92 | 93 | private final List mTrackedObjects = new LinkedList(); 94 | 95 | private final Paint boxPaint = new 
Paint(); 96 | 97 | private final float textSizePx; 98 | private final BorderedText borderedText; 99 | 100 | private Matrix frameToCanvasMatrix; 101 | 102 | private int frameWidth; 103 | private int frameHeight; 104 | 105 | private int sensorOrientation; 106 | private Context context; 107 | 108 | public MultiBoxTracker(final Context context) { 109 | this.context = context; 110 | for (final int color : COLORS) { 111 | mAvailableColors.add(color); 112 | } 113 | 114 | boxPaint.setColor(Color.RED); 115 | boxPaint.setStyle(Style.STROKE); 116 | boxPaint.setStrokeWidth(12.0f); 117 | boxPaint.setStrokeCap(Cap.ROUND); 118 | boxPaint.setStrokeJoin(Join.ROUND); 119 | boxPaint.setStrokeMiter(100); 120 | 121 | textSizePx = 122 | TypedValue.applyDimension( 123 | TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, context.getResources().getDisplayMetrics()); 124 | borderedText = new BorderedText(textSizePx); 125 | } 126 | 127 | private Matrix getFrameToCanvasMatrix() { 128 | return frameToCanvasMatrix; 129 | } 130 | 131 | public synchronized void drawDebug(final Canvas canvas) { 132 | final Paint textPaint = new Paint(); 133 | textPaint.setColor(Color.WHITE); 134 | textPaint.setTextSize(60.0f); 135 | 136 | final Paint boxPaint = new Paint(); 137 | boxPaint.setColor(Color.RED); 138 | boxPaint.setAlpha(200); 139 | boxPaint.setStyle(Style.STROKE); 140 | 141 | for (final Pair detection : mScreenRects) { 142 | final RectF rect = detection.second; 143 | canvas.drawRect(rect, boxPaint); 144 | canvas.drawText("" + detection.first, rect.left, rect.top, textPaint); 145 | borderedText.drawText(canvas, rect.centerX(), rect.centerY(), "" + detection.first); 146 | } 147 | 148 | if (mObjectTracker == null) { 149 | return; 150 | } 151 | 152 | // Draw correlations. 
153 | for (final TrackedRecognition recognition : mTrackedObjects) { 154 | final ObjectTracker.TrackedObject trackedObject = recognition.trackedObject; 155 | 156 | final RectF trackedPos = trackedObject.getTrackedPositionInPreviewFrame(); 157 | 158 | if (getFrameToCanvasMatrix().mapRect(trackedPos)) { 159 | final String labelString = String.format("%.2f", trackedObject.getCurrentCorrelation()); 160 | borderedText.drawText(canvas, trackedPos.right, trackedPos.bottom, labelString); 161 | } 162 | } 163 | 164 | final Matrix matrix = getFrameToCanvasMatrix(); 165 | mObjectTracker.drawDebug(canvas, matrix); 166 | } 167 | 168 | public synchronized void trackResults( 169 | final List results, final byte[] frame, final long timestamp) { 170 | logger.i("Processing %d results from %d", results.size(), timestamp); 171 | processResults(timestamp, results, frame); 172 | } 173 | 174 | /** 175 | * 这里主要是进行绘制 ,检测出来的对象后,进行绘制的边框角度 176 | * 177 | * @param canvas 画布 178 | */ 179 | public synchronized void draw(final Canvas canvas) { 180 | final boolean rotated = sensorOrientation % 180 == 90; 181 | final float multiplier = 182 | Math.min(canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight), 183 | canvas.getWidth() / (float) (rotated ? frameHeight : frameWidth)); 184 | frameToCanvasMatrix = 185 | ImageUtil.getTransformationMatrix( 186 | frameWidth, 187 | frameHeight, 188 | (int) (multiplier * (rotated ? frameHeight : frameWidth)), 189 | (int) (multiplier * (rotated ? frameWidth : frameHeight)), 190 | sensorOrientation, 191 | false); 192 | 193 | for (final TrackedRecognition recognition : mTrackedObjects) { 194 | final RectF trackedPos = 195 | (mObjectTracker != null) 196 | ? 
recognition.trackedObject.getTrackedPositionInPreviewFrame() 197 | : new RectF(recognition.location); 198 | 199 | getFrameToCanvasMatrix().mapRect(trackedPos); 200 | boxPaint.setColor(recognition.color); 201 | 202 | final float cornerSize = Math.min(trackedPos.width(), trackedPos.height()) / 8.0f; 203 | 204 | if ("book".equals(recognition.title) && recognition.detectionConfidence >= 0.60f) { 205 | /* 206 | 这里对识别对象、以及识别可信度进行确定 207 | */ 208 | 209 | /* 210 | 绘制矩形的圆形边角 211 | */ 212 | //canvas.drawRoundRect(trackedPos, cornerSize, cornerSize, boxPaint); 213 | //获取之前的StrokeWidth保存, 214 | float oldStrokeWidth = boxPaint.getStrokeWidth(); 215 | boxPaint.setStrokeWidth(5f); 216 | boxPaint.setColor(Color.YELLOW); 217 | canvas.drawRect(trackedPos, boxPaint); 218 | 219 | //恢复之前设置的StrokeWidth 220 | boxPaint.setStrokeWidth(oldStrokeWidth); 221 | final String labelString = 222 | !TextUtils.isEmpty(recognition.title) 223 | ? String.format("%s %.2f", recognition.title, recognition.detectionConfidence) 224 | : String.format("%.2f", recognition.detectionConfidence); 225 | 226 | /* 227 | 绘制识别出来的 对象Title、confidence 【名称、可信度】 228 | */ 229 | //borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.bottom, labelString); 230 | 231 | if (mOnDrawRectCompleteCallback != null) { 232 | mOnDrawRectCompleteCallback.drawRectComplete(trackedPos); 233 | } 234 | 235 | } 236 | 237 | } 238 | } 239 | 240 | private boolean initialized = false; 241 | 242 | public synchronized void onFrame( 243 | final int w, 244 | final int h, 245 | final int rowStride, 246 | final int sensorOrienation, 247 | final byte[] frame, 248 | final long timestamp) { 249 | if (mObjectTracker == null && !initialized) { 250 | ObjectTracker.clearInstance(); 251 | 252 | logger.i("Initializing ObjectTracker: %dx%d", w, h); 253 | mObjectTracker = ObjectTracker.getInstance(w, h, rowStride, true); 254 | frameWidth = w; 255 | frameHeight = h; 256 | this.sensorOrientation = sensorOrienation; 257 | initialized = true; 258 | 
259 | if (mObjectTracker == null) { 260 | String message = 261 | "Object tracking support not found. " 262 | + "See tensorflow/examples/android/README.md for details."; 263 | // Toast.makeText(context, message, Toast.LENGTH_LONG).show(); 264 | logger.e(message); 265 | } 266 | } 267 | 268 | if (mObjectTracker == null) { 269 | return; 270 | } 271 | 272 | mObjectTracker.nextFrame(frame, null, timestamp, null, true); 273 | 274 | // Clean up any objects not worth tracking any more. 275 | final LinkedList copyList = 276 | new LinkedList(mTrackedObjects); 277 | for (final TrackedRecognition recognition : copyList) { 278 | final ObjectTracker.TrackedObject trackedObject = recognition.trackedObject; 279 | final float correlation = trackedObject.getCurrentCorrelation(); 280 | if (correlation < MIN_CORRELATION) { 281 | logger.v("Removing tracked object %s because NCC is %.2f", trackedObject, correlation); 282 | trackedObject.stopTracking(); 283 | mTrackedObjects.remove(recognition); 284 | 285 | mAvailableColors.add(recognition.color); 286 | } 287 | } 288 | } 289 | 290 | private void processResults( 291 | final long timestamp, final List results, final byte[] originalFrame) { 292 | final List> rectsToTrack = new LinkedList>(); 293 | 294 | mScreenRects.clear(); 295 | final Matrix rgbFrameToScreen = new Matrix(getFrameToCanvasMatrix()); 296 | 297 | for (final Classifier.Recognition result : results) { 298 | if (result.getLocation() == null) { 299 | continue; 300 | } 301 | final RectF detectionFrameRect = new RectF(result.getLocation()); 302 | 303 | final RectF detectionScreenRect = new RectF(); 304 | rgbFrameToScreen.mapRect(detectionScreenRect, detectionFrameRect); 305 | 306 | logger.v( 307 | "Result! Frame: " + result.getLocation() + " mapped to screen:" + detectionScreenRect); 308 | 309 | 310 | LogUtil.e(TAG_LOG, "\tResult! 
Frame: " + result.getLocation() + "mapped to screen:" + detectionScreenRect); 311 | 312 | mScreenRects.add(new Pair(result.getConfidence(), detectionScreenRect)); 313 | 314 | if (detectionFrameRect.width() < MIN_SIZE || detectionFrameRect.height() < MIN_SIZE) { 315 | logger.w("Degenerate rectangle! " + detectionFrameRect); 316 | continue; 317 | } 318 | 319 | rectsToTrack.add(new Pair(result.getConfidence(), result)); 320 | } 321 | 322 | if (rectsToTrack.isEmpty()) { 323 | logger.v("Nothing to track, aborting."); 324 | LogUtil.e(TAG_LOG, "Nothing to track, aborting."); 325 | if (mDetectionNothingCallback != null) { 326 | mDetectionNothingCallback.call(); 327 | } 328 | // TODO: 2018/1/4 注释return仅为测试用 329 | // return; 330 | } 331 | 332 | /* 333 | PS:貌似 mObjectTracker 一直为null 334 | */ 335 | if (mObjectTracker == null) { 336 | LogUtil.e(TAG_LOG, "objectTracker is null:\t " + (mObjectTracker == null)); 337 | mTrackedObjects.clear(); 338 | for (final Pair potential : rectsToTrack) { 339 | final TrackedRecognition trackedRecognition = new TrackedRecognition(); 340 | trackedRecognition.detectionConfidence = potential.first; 341 | trackedRecognition.location = new RectF(potential.second.getLocation()); 342 | trackedRecognition.trackedObject = null; 343 | trackedRecognition.title = potential.second.getTitle(); 344 | trackedRecognition.color = COLORS[mTrackedObjects.size()]; 345 | mTrackedObjects.add(trackedRecognition); 346 | 347 | if (mTrackedObjects.size() >= COLORS.length) { 348 | break; 349 | } 350 | } 351 | return; 352 | } 353 | 354 | logger.i("%d rects to track", rectsToTrack.size()); 355 | for (final Pair potential : rectsToTrack) { 356 | handleDetection(originalFrame, timestamp, potential); 357 | } 358 | } 359 | 360 | private void handleDetection( 361 | final byte[] frameCopy, final long timestamp, final Pair potential) { 362 | final ObjectTracker.TrackedObject potentialObject = 363 | mObjectTracker.trackObject(potential.second.getLocation(), timestamp, frameCopy); 
364 | 365 | final float potentialCorrelation = potentialObject.getCurrentCorrelation(); 366 | logger.v( 367 | "Tracked object went from %s to %s with correlation %.2f", 368 | potential.second, potentialObject.getTrackedPositionInPreviewFrame(), potentialCorrelation); 369 | 370 | if (potentialCorrelation < MARGINAL_CORRELATION) { 371 | logger.v("Correlation too low to begin tracking %s.", potentialObject); 372 | potentialObject.stopTracking(); 373 | return; 374 | } 375 | 376 | final List removeList = new LinkedList(); 377 | 378 | float maxIntersect = 0.0f; 379 | 380 | // This is the current tracked object whose color we will take. If left null we'll take the 381 | // first one from the color queue. 382 | TrackedRecognition recogToReplace = null; 383 | 384 | // Look for intersections that will be overridden by this object or an intersection that would 385 | // prevent this one from being placed. 386 | for (final TrackedRecognition trackedRecognition : mTrackedObjects) { 387 | final RectF a = trackedRecognition.trackedObject.getTrackedPositionInPreviewFrame(); 388 | final RectF b = potentialObject.getTrackedPositionInPreviewFrame(); 389 | final RectF intersection = new RectF(); 390 | //如果矩形a和b相交,则返回true,并将该矩形设置为该交点,否则返回false并且不更改此矩形。不执行检查以查看任一矩形是否为空。为了测试相交,使用intersects() 391 | final boolean intersects = intersection.setIntersect(a, b); 392 | 393 | final float intersectArea = intersection.width() * intersection.height(); 394 | final float totalArea = a.width() * a.height() + b.width() * b.height() - intersectArea; 395 | final float intersectOverUnion = intersectArea / totalArea; 396 | 397 | // If there is an intersection with this currently tracked box above the maximum overlap 398 | // percentage allowed, either the new recognition needs to be dismissed or the old 399 | // recognition needs to be removed and possibly replaced with the new one. 
400 | if (intersects && intersectOverUnion > MAX_OVERLAP) { 401 | if (potential.first < trackedRecognition.detectionConfidence 402 | && trackedRecognition.trackedObject.getCurrentCorrelation() > MARGINAL_CORRELATION) { 403 | // If track for the existing object is still going strong and the detection score was 404 | // good, reject this new object. 405 | potentialObject.stopTracking(); 406 | return; 407 | } else { 408 | removeList.add(trackedRecognition); 409 | 410 | // Let the previously tracked object with max intersection amount donate its color to 411 | // the new object. 412 | if (intersectOverUnion > maxIntersect) { 413 | maxIntersect = intersectOverUnion; 414 | recogToReplace = trackedRecognition; 415 | } 416 | } 417 | } 418 | } 419 | 420 | // If we're already tracking the max object and no intersections were found to bump off, 421 | // pick the worst current tracked object to remove, if it's also worse than this candidate 422 | // object. 423 | if (mAvailableColors.isEmpty() && removeList.isEmpty()) { 424 | for (final TrackedRecognition candidate : mTrackedObjects) { 425 | if (candidate.detectionConfidence < potential.first) { 426 | if (recogToReplace == null 427 | || candidate.detectionConfidence < recogToReplace.detectionConfidence) { 428 | // Save it so that we use this color for the new object. 429 | recogToReplace = candidate; 430 | } 431 | } 432 | } 433 | if (recogToReplace != null) { 434 | logger.v("Found non-intersecting object to remove."); 435 | removeList.add(recogToReplace); 436 | } else { 437 | logger.v("No non-intersecting object found to remove"); 438 | } 439 | } 440 | 441 | // Remove everything that got intersected. 
442 | for (final TrackedRecognition trackedRecognition : removeList) { 443 | logger.v( 444 | "Removing tracked object %s with detection confidence %.2f, correlation %.2f", 445 | trackedRecognition.trackedObject, 446 | trackedRecognition.detectionConfidence, 447 | trackedRecognition.trackedObject.getCurrentCorrelation()); 448 | trackedRecognition.trackedObject.stopTracking(); 449 | mTrackedObjects.remove(trackedRecognition); 450 | if (trackedRecognition != recogToReplace) { 451 | mAvailableColors.add(trackedRecognition.color); 452 | } 453 | } 454 | 455 | if (recogToReplace == null && mAvailableColors.isEmpty()) { 456 | logger.e("No room to track this object, aborting."); 457 | potentialObject.stopTracking(); 458 | return; 459 | } 460 | 461 | // Finally safe to say we can track this object. 462 | logger.v( 463 | "Tracking object %s (%s) with detection confidence %.2f at position %s", 464 | potentialObject, 465 | potential.second.getTitle(), 466 | potential.first, 467 | potential.second.getLocation()); 468 | final TrackedRecognition trackedRecognition = new TrackedRecognition(); 469 | trackedRecognition.detectionConfidence = potential.first; 470 | trackedRecognition.trackedObject = potentialObject; 471 | trackedRecognition.title = potential.second.getTitle(); 472 | 473 | // Use the color from a replaced object before taking one from the color queue. 474 | trackedRecognition.color = 475 | recogToReplace != null ? 
recogToReplace.color : mAvailableColors.poll(); 476 | mTrackedObjects.add(trackedRecognition); 477 | } 478 | 479 | public void setOnDrawRectCompleteCallback(OnDrawRectCompleteCallback onDrawRectCompleteCallback) { 480 | this.mOnDrawRectCompleteCallback = onDrawRectCompleteCallback; 481 | } 482 | 483 | public void setDetectionNothingCallback(DetectionNothingCallback detectionNothingCallback) { 484 | mDetectionNothingCallback = detectionNothingCallback; 485 | } 486 | 487 | /** 488 | * 此类绘制完成所识别的边框后Callback 489 | */ 490 | public interface OnDrawRectCompleteCallback { 491 | /** 492 | * 绘制Rect矩形区域完成后的Callback. 493 | * 494 | * @param rectF {@link RectF} 495 | * @return null 496 | */ 497 | boolean drawRectComplete(RectF rectF); 498 | } 499 | 500 | public interface DetectionNothingCallback { 501 | boolean call(); 502 | } 503 | } 504 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/tensor/widget/CropImageView.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.tensor.widget; 2 | 3 | import android.content.Context; 4 | import android.graphics.Canvas; 5 | import android.support.annotation.Nullable; 6 | import android.util.AttributeSet; 7 | import android.view.MotionEvent; 8 | import android.widget.ImageView; 9 | 10 | /** 11 | * @author pd_liu 2018/1/5. 12 | *

13 | * 裁剪图片 14 | *

15 | */ 16 | public class CropImageView extends ImageView { 17 | 18 | private static final String TAG = "CropImageView"; 19 | 20 | 21 | public CropImageView(Context context) { 22 | this(context, null); 23 | } 24 | 25 | public CropImageView(Context context, @Nullable AttributeSet attrs) { 26 | this(context, attrs, 0); 27 | } 28 | 29 | public CropImageView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { 30 | this(context, attrs, defStyleAttr, 0); 31 | } 32 | 33 | public CropImageView(Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) { 34 | super(context, attrs, defStyleAttr, defStyleRes); 35 | 36 | 37 | } 38 | 39 | @Override 40 | protected void onDraw(Canvas canvas) { 41 | super.onDraw(canvas); 42 | 43 | 44 | } 45 | 46 | @Override 47 | public boolean onTouchEvent(MotionEvent event) { 48 | 49 | int action = event.getAction(); 50 | 51 | switch (action) { 52 | case MotionEvent.ACTION_DOWN: 53 | 54 | break; 55 | 56 | case MotionEvent.ACTION_MOVE: 57 | break; 58 | 59 | case MotionEvent.ACTION_UP: 60 | 61 | break; 62 | default: 63 | } 64 | 65 | 66 | 67 | return super.onTouchEvent(event); 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/tensor/widget/OverlayView.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.tensor.widget; 2 | 3 | import android.content.Context; 4 | import android.graphics.Canvas; 5 | import android.support.annotation.Nullable; 6 | import android.util.AttributeSet; 7 | import android.view.View; 8 | 9 | import java.util.LinkedList; 10 | import java.util.List; 11 | 12 | /** 13 | * @author pd_liu on 2018/1/2. 14 | *

15 | * 覆盖视图 16 | *

17 | *

18 | * 1、内部实现保存机制 19 | * 2、当调用{@link #invalidate()} {@link #postInvalidate()}时,开始执行存储中的任务{@link #mCallbacks} 20 | *

21 | */ 22 | 23 | public class OverlayView extends View { 24 | 25 | private static final String TAG_LOG = "OverlayView"; 26 | 27 | private final List mCallbacks = new LinkedList<>(); 28 | 29 | /** 30 | * Simple constructor to use when creating a view from code. 31 | * 32 | * @param context The Context the view is running in, through which it can 33 | * access the current theme, resources, etc. 34 | */ 35 | public OverlayView(Context context) { 36 | super(context); 37 | } 38 | 39 | /** 40 | * Constructor that is called when inflating a view from XML. This is called 41 | * when a view is being constructed from an XML file, supplying attributes 42 | * that were specified in the XML file. This version uses a default style of 43 | * 0, so the only attribute values applied are those in the Context's Theme 44 | * and the given AttributeSet. 45 | *

46 | *

47 | * The method onFinishInflate() will be called after all children have been 48 | * added. 49 | * 50 | * @param context The Context the view is running in, through which it can 51 | * access the current theme, resources, etc. 52 | * @param attrs The attributes of the XML tag that is inflating the view. 53 | * @see #View(Context, AttributeSet, int) 54 | */ 55 | public OverlayView(Context context, @Nullable AttributeSet attrs) { 56 | super(context, attrs); 57 | } 58 | 59 | /** 60 | * Perform inflation from XML and apply a class-specific base style from a 61 | * theme attribute. This constructor of View allows subclasses to use their 62 | * own base style when they are inflating. For example, a Button class's 63 | * constructor would call this version of the super class constructor and 64 | * supply R.attr.buttonStyle for defStyleAttr; this 65 | * allows the theme's button style to modify all of the base view attributes 66 | * (in particular its background) as well as the Button class's attributes. 67 | * 68 | * @param context The Context the view is running in, through which it can 69 | * access the current theme, resources, etc. 70 | * @param attrs The attributes of the XML tag that is inflating the view. 71 | * @param defStyleAttr An attribute in the current theme that contains a 72 | * reference to a style resource that supplies default values for 73 | * the view. Can be 0 to not look for defaults. 74 | * @see #View(Context, AttributeSet) 75 | */ 76 | public OverlayView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { 77 | super(context, attrs, defStyleAttr); 78 | } 79 | 80 | @Override 81 | protected void onDraw(Canvas canvas) { 82 | super.onDraw(canvas); 83 | 84 | /* 85 | 循环并执行绘制任务 86 | */ 87 | for (DrawCallback callback : mCallbacks) { 88 | callback.callback(canvas); 89 | } 90 | } 91 | 92 | /** 93 | * Interface defining the callback for client classes. 
94 | */ 95 | public interface DrawCallback { 96 | /** 97 | * Callback 98 | * 99 | * @param canvas 画布 100 | */ 101 | void callback(Canvas canvas); 102 | } 103 | 104 | public void addCallback(DrawCallback drawCallback) { 105 | mCallbacks.add(drawCallback); 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/tensor/widget/ScanImageView.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.tensor.widget; 2 | 3 | import android.content.Context; 4 | import android.graphics.Canvas; 5 | import android.graphics.Color; 6 | import android.graphics.LinearGradient; 7 | import android.graphics.Paint; 8 | import android.graphics.Shader; 9 | import android.support.annotation.Nullable; 10 | import android.util.AttributeSet; 11 | import android.widget.ImageView; 12 | 13 | import com.rossia.life.scan.common.util.LogUtil; 14 | 15 | /** 16 | * @author pd_liu on 2018/1/4. 17 | *

18 | * ScanImageView:拓展ImageView 19 | * 不仅拥有ImageView属性,还:实现对图片的上下的扫描效果 20 | *

21 | */ 22 | 23 | public class ScanImageView extends ImageView { 24 | 25 | private static final String TAG_LOG = "ScanImageView"; 26 | 27 | private static final int DEFAULT_REPEAT_COUNT = 1; 28 | 29 | /** 30 | * 绘制扫描效果的画笔 31 | */ 32 | private Paint mScanBarPaint; 33 | 34 | /** 35 | * 需要绘制Line points 36 | */ 37 | float mLineStartX = 0f; 38 | float mLineStartY = 0f; 39 | float mLineStopX = 0f; 40 | float mLineStopY = 0f; 41 | 42 | private int mWidth; 43 | private int mHeight; 44 | 45 | /** 46 | * 扫描的重复次数 47 | */ 48 | private int mRepeatCount = DEFAULT_REPEAT_COUNT; 49 | 50 | /** 51 | * 当前扫描到了几次 52 | */ 53 | private int mCurrentRepeatCount; 54 | 55 | /** 56 | * 是否开启绘制效果 57 | */ 58 | private boolean mOpenScanBarActionFlag; 59 | 60 | /** 61 | * 每次扫描的间隔 62 | */ 63 | private float mPerScanInterval; 64 | 65 | private Shader mPaintShader; 66 | 67 | private ScanCompleteCallback mScanCompleteCallback; 68 | 69 | public ScanImageView(Context context) { 70 | this(context, null); 71 | } 72 | 73 | public ScanImageView(Context context, @Nullable AttributeSet attrs) { 74 | this(context, attrs, 0); 75 | } 76 | 77 | public ScanImageView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { 78 | this(context, attrs, defStyleAttr, 0); 79 | } 80 | 81 | public ScanImageView(Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) { 82 | super(context, attrs, defStyleAttr, defStyleRes); 83 | 84 | mScanBarPaint = new Paint(); 85 | mScanBarPaint.setAntiAlias(true); 86 | mScanBarPaint.setColor(Color.YELLOW); 87 | mScanBarPaint.setStrokeWidth(5f); 88 | 89 | } 90 | 91 | 92 | @Override 93 | protected void onDraw(Canvas canvas) { 94 | super.onDraw(canvas); 95 | 96 | //如果标记为打开扫描效果 97 | if (mOpenScanBarActionFlag) { 98 | 99 | 100 | if (mCurrentRepeatCount < mRepeatCount) { 101 | 102 | /* 103 | 绘制扫描效果 104 | */ 105 | if(mWidth == 0 || mHeight == 0){ 106 | mWidth = getWidth(); 107 | mHeight = getHeight(); 108 | mPerScanInterval = mHeight * 0.01f; 109 | } 110 | 111 
| if(mPaintShader == null){ 112 | mPaintShader = new LinearGradient(0,0,0,mScanBarPaint.getStrokeWidth(), Color.YELLOW, Color.BLUE, LinearGradient.TileMode.CLAMP); 113 | } 114 | 115 | mLineStopX = mLineStartX + mWidth; 116 | 117 | if (mLineStartY <= mHeight) { 118 | 119 | //绘制扫描线条 120 | canvas.drawLine(mLineStartX, mLineStartY, mLineStopX, mLineStopY, mScanBarPaint); 121 | 122 | mLineStartY = mLineStartY + mPerScanInterval; 123 | mLineStopY = mLineStopY + mPerScanInterval; 124 | 125 | } else { 126 | mLineStartY = 0f; 127 | mLineStopY = 0f; 128 | mCurrentRepeatCount ++ ; 129 | } 130 | 131 | //invalidate 132 | postInvalidate(); 133 | 134 | } else { 135 | /* 136 | 已经扫描结束 137 | */ 138 | mCurrentRepeatCount = 0; 139 | mLineStartY = 0f; 140 | mLineStopY = 0f; 141 | mWidth = 0; 142 | mHeight = 0; 143 | mOpenScanBarActionFlag = false; 144 | 145 | if (mScanCompleteCallback != null) { 146 | mScanCompleteCallback.complete(); 147 | } 148 | } 149 | 150 | 151 | } 152 | 153 | } 154 | 155 | public void startScan(){ 156 | mCurrentRepeatCount = 0; 157 | mLineStartY = 0f; 158 | mLineStopY = 0f; 159 | mOpenScanBarActionFlag = true; 160 | postInvalidate(); 161 | } 162 | 163 | public void setScanCompleteCallback(ScanCompleteCallback completeCallback) { 164 | mScanCompleteCallback = completeCallback; 165 | } 166 | 167 | public void setopenScanBarAction(boolean openScanBar) { 168 | mOpenScanBarActionFlag = openScanBar; 169 | } 170 | 171 | public boolean isOpenScanBarAction() { 172 | return mOpenScanBarActionFlag; 173 | } 174 | 175 | public interface ScanCompleteCallback { 176 | void complete(); 177 | } 178 | } 179 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/transfer/SensorMoveControl.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.transfer; 2 | 3 | import android.content.Context; 4 | import android.hardware.Sensor; 5 | import 
android.hardware.SensorEvent; 6 | import android.hardware.SensorEventListener; 7 | import android.hardware.SensorManager; 8 | 9 | import com.rossia.life.scan.common.util.LogUtil; 10 | 11 | import java.util.Calendar; 12 | 13 | /** 14 | * @author pd_liu on 2018/1/16. 15 | *

16 | * 传感器移动事件控制器 17 | *

18 | */ 19 | 20 | public class SensorMoveControl implements SensorEventListener { 21 | 22 | private static final String TAG_LOG = "SensorMoveControl"; 23 | 24 | private SensorManager mSensorManager; 25 | 26 | private Sensor mSensor; 27 | 28 | private SensorMoveListener mSensorMoveListener; 29 | 30 | private Calendar mCalendar; 31 | 32 | boolean isFocusing = false; 33 | boolean canFocusIn = false; //内部是否能够对焦控制机制 34 | boolean canFocus = false; 35 | 36 | private int mX, mY, mZ; 37 | 38 | public static final int STATUS_NONE = 0; 39 | public static final int STATUS_STATIC = 1; 40 | public static final int STATUS_MOVE = 2; 41 | private int STATUE = STATUS_NONE; 42 | 43 | private long lastStaticStamp = 0; 44 | 45 | /** 46 | * 延迟的时间 47 | */ 48 | public static final int DELEY_DURATION = 500; 49 | 50 | private SensorMoveControl(Context context) { 51 | mSensorManager = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE); 52 | mSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER, true); 53 | } 54 | 55 | public static SensorMoveControl newInstance(Context context) { 56 | return new SensorMoveControl(context); 57 | } 58 | 59 | 60 | @Override 61 | public void onSensorChanged(SensorEvent event) { 62 | if (event.sensor == null) { 63 | return; 64 | } 65 | if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) { 66 | int x = (int) event.values[0]; 67 | int y = (int) event.values[1]; 68 | int z = (int) event.values[2]; 69 | mCalendar = Calendar.getInstance(); 70 | long stamp = mCalendar.getTimeInMillis();// 1393844912 71 | 72 | int second = mCalendar.get(Calendar.SECOND);// 53 73 | 74 | if (STATUE != STATUS_NONE) { 75 | int px = Math.abs(mX - x); 76 | int py = Math.abs(mY - y); 77 | int pz = Math.abs(mZ - z); 78 | // Log.d(TAG, "pX:" + px + " pY:" + py + " pZ:" + pz + " stamp:" 79 | // + stamp + " second:" + second); 80 | double value = Math.sqrt(px * px + py * py + pz * pz); 81 | if (value > 1.0) {//1.4 82 | // textviewF.setText("检测手机在移动.."); 83 | 
LogUtil.e(TAG_LOG, "检测手机在移动mobile moving"); 84 | STATUE = STATUS_MOVE; 85 | if (mSensorMoveListener != null) { 86 | mSensorMoveListener.onMoving(); 87 | } 88 | } else { 89 | // textviewF.setText("检测手机静止.."); 90 | LogUtil.e(TAG_LOG, "检测手机静止mobile static"); 91 | //上一次状态是move,记录静态时间点 92 | if (STATUE == STATUS_MOVE) { 93 | lastStaticStamp = stamp; 94 | canFocusIn = true; 95 | } 96 | 97 | if (canFocusIn) { 98 | if (stamp - lastStaticStamp > DELEY_DURATION) { 99 | //移动后静止一段时间,可以发生对焦行为 100 | if (!isFocusing) { 101 | canFocusIn = false; 102 | // onCameraFocus(); 103 | if (mSensorMoveListener != null) { 104 | mSensorMoveListener.onStaticing(); 105 | } 106 | } 107 | } 108 | } 109 | 110 | STATUE = STATUS_STATIC; 111 | } 112 | } else { 113 | lastStaticStamp = stamp; 114 | STATUE = STATUS_STATIC; 115 | } 116 | 117 | mX = x; 118 | mY = y; 119 | mZ = z; 120 | } 121 | } 122 | 123 | public void startSensor() { 124 | STATUE = STATUS_NONE; 125 | canFocusIn = false; 126 | mX = 0; 127 | mY = 0; 128 | mZ = 0; 129 | 130 | canFocus = true; 131 | mSensorManager.registerListener(this, mSensor, SensorManager.SENSOR_DELAY_NORMAL); 132 | } 133 | 134 | public void stopSensor() { 135 | mSensorManager.unregisterListener(this, mSensor); 136 | canFocus = false; 137 | } 138 | 139 | @Override 140 | public void onAccuracyChanged(Sensor sensor, int accuracy) { 141 | 142 | } 143 | 144 | public void setSensorMoveListener(SensorMoveListener sensorMoveListener) { 145 | mSensorMoveListener = sensorMoveListener; 146 | } 147 | 148 | public interface SensorMoveListener { 149 | void onMoving(); 150 | 151 | void onStaticing(); 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/transfer/TransferSample.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.transfer; 2 | 3 | /** 4 | * Created by pd_liu on 2017/12/29. 
5 | */ 6 | 7 | public class TransferSample { 8 | 9 | /** 10 | * 加载C++ 的编译产出,这是必须的代码. 11 | */ 12 | static { 13 | System.loadLibrary("native-lib"); 14 | } 15 | 16 | /** 17 | * 这是对外暴露的接口,在接口内部对JNI进行调用. 18 | * 19 | * @param a 数值 20 | */ 21 | public String convertIntToString(int a) { 22 | 23 | /* 24 | 调用底层 25 | */ 26 | StringClass stringClass = new StringClass(); 27 | jni_string(5, stringClass.value); 28 | 29 | return stringClass.value; 30 | } 31 | 32 | /** 33 | * 这是调用C++代码的JNI方法,String 34 | */ 35 | 36 | private static native int jni_string(int input, String out); 37 | 38 | private static native void jni_2(int input, StringClass output); 39 | 40 | class StringClass { 41 | String value; 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/ui/detector/CameraApi2Fragment.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.ui.detector; 2 | 3 | import android.os.Bundle; 4 | 5 | /** 6 | * @author by pd_liu on 2017/12/29. 7 | *

8 | * 支持Camera2包下的最新Api 9 | *

/**
 * @author by pd_liu on 2017/12/29.
 * <p>
 * 支持Camera2包下的最新Api — thin holder for the camera id and the layout
 * resource used by the camera2-based capture UI (skeleton implementation).
 * <p>
 */
public class CameraApi2Fragment {

    private static final String TAG_LOG = "CameraApi2Fragment";

    /** Identifier of the camera device this fragment will open. */
    private String mCameraID;

    /** Layout resource id used to inflate this fragment's content view. */
    private int mFragmentLayoutID;

    private CameraApi2Fragment(final String cameraId, final int layout) {
        mCameraID = cameraId;
        mFragmentLayoutID = layout;
    }

    /**
     * Static factory.
     *
     * @param cameraId identifier of the camera device to use
     * @param layoutId layout resource id for the fragment's content view
     * @return a freshly constructed instance
     */
    public static CameraApi2Fragment newInstance(String cameraId, int layoutId) {
        return new CameraApi2Fragment(cameraId, layoutId);
    }

}

27 | * 相机 28 | *

29 | *

30 | * Note: 31 | * 1、不支持前置摄像头进行捕捉画面 32 | *

33 | * A simple {@link Fragment} subclass. 34 | * Activities that contain this fragment must implement the 35 | * {@link CameraConnectionFragment.OnFragmentInteractionListener} interface 36 | * to handle interaction events. 37 | * Use the {@link CameraConnectionFragment#newInstance} factory method to 38 | * create an instance of this fragment. 39 | */ 40 | public class CameraConnectionFragment extends Fragment { 41 | 42 | private static final String TAG_LOG = "CameraConnectionFragment"; 43 | 44 | // TODO: Rename parameter arguments, choose names that match 45 | // the fragment initialization parameters, e.g. ARG_ITEM_NUMBER 46 | private static final String ARG_PARAM1 = "param1"; 47 | private static final String ARG_PARAM2 = "param2"; 48 | 49 | // TODO: Rename and change types of parameters 50 | private String mParam1; 51 | private String mParam2; 52 | 53 | /** 54 | * Conversion from screen rotation to JPEG orientation. 55 | */ 56 | private static final SparseIntArray ORIENTATIONS = new SparseIntArray(); 57 | 58 | static { 59 | ORIENTATIONS.append(Surface.ROTATION_0, 90); 60 | ORIENTATIONS.append(Surface.ROTATION_90, 0); 61 | ORIENTATIONS.append(Surface.ROTATION_180, 270); 62 | ORIENTATIONS.append(Surface.ROTATION_270, 180); 63 | } 64 | 65 | private OnFragmentInteractionListener mListener; 66 | 67 | /** 68 | * Texture view display the camera output resource of image 69 | */ 70 | private TextureView mTextureView; 71 | 72 | /** 73 | * 当前Fragment显示的Layout的资源ID 74 | */ 75 | private int mFragmentContentLayoutID; 76 | 77 | public CameraConnectionFragment(int layoutId) { 78 | // Required empty public constructor 79 | this.mFragmentContentLayoutID = layoutId; 80 | } 81 | 82 | /** 83 | * Use this factory method to create a new instance of 84 | * this fragment using the provided parameters. 85 | * 86 | * @param layoutId Fragment layout resource id. 87 | * @return A new instance of fragment CameraConnectionFragment. 
88 | */ 89 | public static CameraConnectionFragment newInstance(int layoutId) { 90 | CameraConnectionFragment fragment = new CameraConnectionFragment(layoutId); 91 | return fragment; 92 | } 93 | 94 | private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() { 95 | @Override 96 | public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { 97 | 98 | } 99 | 100 | @Override 101 | public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { 102 | 103 | } 104 | 105 | @Override 106 | public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) { 107 | return false; 108 | } 109 | 110 | @Override 111 | public void onSurfaceTextureUpdated(SurfaceTexture surface) { 112 | 113 | } 114 | }; 115 | 116 | @Override 117 | public void onCreate(Bundle savedInstanceState) { 118 | super.onCreate(savedInstanceState); 119 | } 120 | 121 | @Override 122 | public View onCreateView(LayoutInflater inflater, ViewGroup container, 123 | Bundle savedInstanceState) { 124 | // Inflate the layout for this fragment 125 | return inflater.inflate(R.layout.fragment_camera_connection, container, false); 126 | } 127 | 128 | @Override 129 | public void onViewCreated(View view, @Nullable Bundle savedInstanceState) { 130 | super.onViewCreated(view, savedInstanceState); 131 | 132 | //findView. 
133 | mTextureView = view.findViewById(R.id.texture_view); 134 | } 135 | 136 | @Override 137 | public void onResume() { 138 | super.onResume(); 139 | 140 | if (mTextureView.isAvailable()) { 141 | startCamera(); 142 | } else { 143 | mTextureView.setSurfaceTextureListener(mSurfaceTextureListener); 144 | } 145 | } 146 | 147 | @Override 148 | public void onPause() { 149 | super.onPause(); 150 | } 151 | 152 | @Override 153 | public void onAttach(Context context) { 154 | super.onAttach(context); 155 | if (context instanceof OnFragmentInteractionListener) { 156 | mListener = (OnFragmentInteractionListener) context; 157 | } else { 158 | throw new RuntimeException(context.toString() 159 | + " must implement OnFragmentInteractionListener"); 160 | } 161 | } 162 | 163 | @Override 164 | public void onDetach() { 165 | super.onDetach(); 166 | mListener = null; 167 | } 168 | 169 | /** 170 | * 启动相机 171 | */ 172 | private void startCamera() { 173 | 174 | try { 175 | //当前的CameraID 176 | String cameraId = chooseCamera(); 177 | 178 | 179 | } catch (CameraAccessException e) { 180 | e.printStackTrace(); 181 | } 182 | } 183 | 184 | /** 185 | * @return CameraId 186 | * @throws CameraAccessException 187 | */ 188 | private String chooseCamera() throws CameraAccessException { 189 | 190 | final Activity activity = getActivity(); 191 | 192 | CameraManager cameraManager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); 193 | 194 | //获取所有的相机ID列表 195 | String[] cameraIdList = cameraManager.getCameraIdList(); 196 | 197 | for (String cameraId : cameraIdList) { 198 | 199 | CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId); 200 | 201 | // We don't use a front facing camera in this sample. 
202 | final Integer facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING); 203 | 204 | if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) { 205 | /* 206 | 不支持前置摄像头进行捕捉画面 207 | */ 208 | continue; 209 | } 210 | 211 | //此相机设备支持的可用流配置; 还包括每种格式/尺寸组合的最小帧持续时间和停顿持续时间。 212 | StreamConfigurationMap scalerStreamConfigurationMap = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); 213 | 214 | if (scalerStreamConfigurationMap == null) { 215 | // TODO: 2018/1/2 216 | return null; 217 | } 218 | 219 | //相机设备是外部相机,相对于设备的屏幕没有固定的朝向。或则支持硬件等级为INFO_SUPPORTED_HARDWARE_LEVEL_FULL 220 | // if (facing == CameraCharacteristics.LENS_FACING_EXTERNAL || isHardwareLevelSupported(cameraCharacteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL)) { 221 | // LogUtil.e(TAG_LOG, "Camera API level 2 ? : true"); 222 | // } 223 | 224 | return cameraId; 225 | } 226 | 227 | return null; 228 | } 229 | 230 | /** 231 | * Returns true if the device supports the required hardware level, or better. 232 | * 233 | * @param characteristics {@link CameraCharacteristics} 234 | * @param requiredLevel requiredLevel. 235 | * @return whether the device supports the required hardware level. 
236 | */ 237 | // private boolean isHardwareLevelSupported(CameraCharacteristics characteristics, int requiredLevel) { 238 | // 239 | // int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3); 240 | // 241 | // //支持的硬件级别是摄像机设备功能的高级描述,将多个功能汇总到一个领域。 242 | // // 每个级别都增加了前一级的附加功能,并且始终是前一级的严格超集。排序是LEGACY < LIMITED < FULL < LEVEL_3。 243 | // if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) { 244 | // return requiredLevel == deviceLevel; 245 | // } 246 | // 247 | // return requiredLevel <= deviceLevel; 248 | // } 249 | 250 | /** 251 | * This interface must be implemented by activities that contain this 252 | * fragment to allow an interaction in this fragment to be communicated 253 | * to the activity and potentially other fragments contained in that 254 | * activity. 255 | *

256 | * See the Android Training lesson Communicating with Other Fragments for more information. 259 | */ 260 | public interface OnFragmentInteractionListener { 261 | // TODO: Update argument type and name 262 | void onFragmentInteraction(Uri uri); 263 | } 264 | } 265 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/ui/interf/TakePictureCallback.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.ui.interf; 2 | 3 | import android.graphics.Bitmap; 4 | 5 | /** 6 | * @author pd_liu on 2018/1/8. 7 | *

8 | * 拍照Callback 9 | *

10 | */ 11 | 12 | public interface TakePictureCallback { 13 | /** 14 | * Call 15 | * 16 | * @param bitmap 拍照后处理过的图片位图 17 | */ 18 | void call(Bitmap bitmap); 19 | } 20 | -------------------------------------------------------------------------------- /scan/src/main/java/com/rossia/life/scan/ui/view/DrawColorView.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan.ui.view; 2 | 3 | import android.content.Context; 4 | import android.graphics.Canvas; 5 | import android.support.annotation.Nullable; 6 | import android.util.AttributeSet; 7 | import android.view.View; 8 | 9 | /** 10 | * @author pd_liu on 2018/1/5. 11 | */ 12 | 13 | public class DrawColorView extends View { 14 | 15 | private DrawColorListener mDrawColorListener; 16 | 17 | /** 18 | * Simple constructor to use when creating a view from code. 19 | * 20 | * @param context The Context the view is running in, through which it can 21 | * access the current theme, resources, etc. 22 | */ 23 | public DrawColorView(Context context) { 24 | super(context); 25 | } 26 | 27 | /** 28 | * Constructor that is called when inflating a view from XML. This is called 29 | * when a view is being constructed from an XML file, supplying attributes 30 | * that were specified in the XML file. This version uses a default style of 31 | * 0, so the only attribute values applied are those in the Context's Theme 32 | * and the given AttributeSet. 33 | *

34 | *

35 | * The method onFinishInflate() will be called after all children have been 36 | * added. 37 | * 38 | * @param context The Context the view is running in, through which it can 39 | * access the current theme, resources, etc. 40 | * @param attrs The attributes of the XML tag that is inflating the view. 41 | * @see #View(Context, AttributeSet, int) 42 | */ 43 | public DrawColorView(Context context, @Nullable AttributeSet attrs) { 44 | super(context, attrs); 45 | } 46 | 47 | /** 48 | * Perform inflation from XML and apply a class-specific base style from a 49 | * theme attribute. This constructor of View allows subclasses to use their 50 | * own base style when they are inflating. For example, a Button class's 51 | * constructor would call this version of the super class constructor and 52 | * supply R.attr.buttonStyle for defStyleAttr; this 53 | * allows the theme's button style to modify all of the base view attributes 54 | * (in particular its background) as well as the Button class's attributes. 55 | * 56 | * @param context The Context the view is running in, through which it can 57 | * access the current theme, resources, etc. 58 | * @param attrs The attributes of the XML tag that is inflating the view. 59 | * @param defStyleAttr An attribute in the current theme that contains a 60 | * reference to a style resource that supplies default values for 61 | * the view. Can be 0 to not look for defaults. 62 | * @see #View(Context, AttributeSet) 63 | */ 64 | public DrawColorView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { 65 | super(context, attrs, defStyleAttr); 66 | } 67 | 68 | /** 69 | * Perform inflation from XML and apply a class-specific base style from a 70 | * theme attribute or style resource. This constructor of View allows 71 | * subclasses to use their own base style when they are inflating. 72 | *

73 | * When determining the final value of a particular attribute, there are 74 | * four inputs that come into play: 75 | *

    76 | *
  1. Any attribute values in the given AttributeSet. 77 | *
  2. The style resource specified in the AttributeSet (named "style"). 78 | *
  3. The default style specified by defStyleAttr. 79 | *
  4. The default style specified by defStyleRes. 80 | *
  5. The base values in this theme. 81 | *
82 | *

83 | * Each of these inputs is considered in-order, with the first listed taking 84 | * precedence over the following ones. In other words, if in the 85 | * AttributeSet you have supplied <Button * textColor="#ff000000"> 86 | * , then the button's text will always be black, regardless of 87 | * what is specified in any of the styles. 88 | * 89 | * @param context The Context the view is running in, through which it can 90 | * access the current theme, resources, etc. 91 | * @param attrs The attributes of the XML tag that is inflating the view. 92 | * @param defStyleAttr An attribute in the current theme that contains a 93 | * reference to a style resource that supplies default values for 94 | * the view. Can be 0 to not look for defaults. 95 | * @param defStyleRes A resource identifier of a style resource that 96 | * supplies default values for the view, used only if 97 | * defStyleAttr is 0 or can not be found in the theme. Can be 0 98 | * to not look for defaults. 99 | * @see #View(Context, AttributeSet, int) 100 | */ 101 | public DrawColorView(Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) { 102 | super(context, attrs, defStyleAttr, defStyleRes); 103 | } 104 | 105 | @Override 106 | protected void onDraw(Canvas canvas) { 107 | super.onDraw(canvas); 108 | if(mDrawColorListener != null){ 109 | mDrawColorListener.drawColor(canvas); 110 | } 111 | } 112 | 113 | public void draw(DrawColorListener listener){ 114 | mDrawColorListener = listener; 115 | postInvalidate(); 116 | } 117 | 118 | public interface DrawColorListener{ 119 | void drawColor(Canvas canvas); 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /scan/src/main/res/drawable/bg_auto_take_pick.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /scan/src/main/res/drawable/bg_take_picture.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /scan/src/main/res/drawable/btn_background.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /scan/src/main/res/layout/fragment_camera_connection.xml: -------------------------------------------------------------------------------- 1 | 5 | 6 | 7 | 12 | 13 | 14 | 18 | 19 | 20 | 24 | 25 | 26 | 34 | 35 | 43 | 44 | 55 | 56 | 57 | 58 | 59 | 60 | 66 | 67 | 68 | 74 | 75 | -------------------------------------------------------------------------------- /scan/src/main/res/mipmap-xhdpi/camera_icon_shutter.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/scan/src/main/res/mipmap-xhdpi/camera_icon_shutter.png -------------------------------------------------------------------------------- /scan/src/main/res/mipmap-xhdpi/camera_icon_shutter_pressed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/scan/src/main/res/mipmap-xhdpi/camera_icon_shutter_pressed.png -------------------------------------------------------------------------------- /scan/src/main/res/mipmap-xhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/scan/src/main/res/mipmap-xhdpi/ic_launcher.png -------------------------------------------------------------------------------- /scan/src/main/res/mipmap-xhdpi/ic_launcher_round.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/scan/src/main/res/mipmap-xhdpi/ic_launcher_round.png -------------------------------------------------------------------------------- /scan/src/main/res/mipmap-xhdpi/scan_flash_off.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/scan/src/main/res/mipmap-xhdpi/scan_flash_off.png -------------------------------------------------------------------------------- /scan/src/main/res/mipmap-xhdpi/scan_flash_on.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/scan/src/main/res/mipmap-xhdpi/scan_flash_on.png -------------------------------------------------------------------------------- /scan/src/main/res/mipmap-xxhdpi/camera_icon_shutter.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/scan/src/main/res/mipmap-xxhdpi/camera_icon_shutter.png -------------------------------------------------------------------------------- /scan/src/main/res/mipmap-xxhdpi/camera_icon_shutter_pressed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/scan/src/main/res/mipmap-xxhdpi/camera_icon_shutter_pressed.png -------------------------------------------------------------------------------- /scan/src/main/res/mipmap-xxhdpi/ic_launcher.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/scan/src/main/res/mipmap-xxhdpi/ic_launcher.png -------------------------------------------------------------------------------- /scan/src/main/res/mipmap-xxhdpi/ic_launcher_round.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pdliugithub/DocumentScan/84a1d02b2b2d104eb70b403938e37c3264b8b5ef/scan/src/main/res/mipmap-xxhdpi/ic_launcher_round.png -------------------------------------------------------------------------------- /scan/src/main/res/values/attrs.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /scan/src/main/res/values/colors.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | #FFFFFFFF 4 | #FFFF00FF 5 | #FFFFFF00 6 | -------------------------------------------------------------------------------- /scan/src/main/res/values/dimens.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5dp 4 | 10dp 5 | 15dp 6 | 20dp 7 | 30dp 8 | 40dp 9 | 50dp 10 | 60dp 11 | 70dp 12 | 80dp 13 | -------------------------------------------------------------------------------- /scan/src/main/res/values/strings.xml: -------------------------------------------------------------------------------- 1 | 2 | Scan 3 | 4 | Hello blank fragment 5 | 6 | -------------------------------------------------------------------------------- /scan/src/test/java/com/rossia/life/scan/ExampleUnitTest.java: -------------------------------------------------------------------------------- 1 | package com.rossia.life.scan; 2 | 3 | import org.junit.Test; 4 | 5 | import static org.junit.Assert.*; 6 | 7 | /** 8 | * Example local unit test, which will 
execute on the development machine (host). 9 | * 10 | * @see Testing documentation 11 | */ 12 | public class ExampleUnitTest { 13 | @Test 14 | public void addition_isCorrect() throws Exception { 15 | assertEquals(4, 2 + 2); 16 | } 17 | } -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | include ':app', ':scan' 2 | --------------------------------------------------------------------------------