├── .gitignore ├── .google └── packaging.yaml ├── .idea ├── encodings.xml ├── misc.xml ├── modules.xml ├── runConfigurations.xml └── vcs.xml ├── Application ├── build.gradle ├── src │ └── main │ │ ├── AndroidManifest.xml │ │ ├── cpp │ │ ├── CMakeLists.txt │ │ └── yuv420.cpp │ │ ├── java │ │ ├── cc │ │ │ └── rome753 │ │ │ │ └── yuvtools │ │ │ │ ├── ImageBytes.java │ │ │ │ ├── MainActivity.java │ │ │ │ ├── YUVDetectView.java │ │ │ │ └── YUVTools.java │ │ └── com │ │ │ └── example │ │ │ └── android │ │ │ ├── camera1basic │ │ │ ├── Camera1Activity.java │ │ │ └── Camera1Preview.java │ │ │ ├── camera2basic │ │ │ ├── AutoFitTextureView.java │ │ │ ├── Camera2Activity.java │ │ │ ├── Camera2BasicFragment.java │ │ │ └── Camera2Fragment.java │ │ │ └── camerax │ │ │ └── CameraXActivity.java │ │ └── res │ │ ├── drawable-hdpi │ │ ├── ic_action_info.png │ │ ├── ic_launcher.png │ │ └── tile.9.png │ │ ├── drawable-mdpi │ │ ├── ic_action_info.png │ │ └── ic_launcher.png │ │ ├── drawable-xhdpi │ │ ├── ic_action_info.png │ │ └── ic_launcher.png │ │ ├── drawable-xxhdpi │ │ ├── ic_action_info.png │ │ └── ic_launcher.png │ │ ├── layout │ │ ├── activity_camera1.xml │ │ ├── activity_camera2.xml │ │ ├── activity_camerax.xml │ │ ├── activity_main.xml │ │ ├── fragment_camera2_basic.xml │ │ └── view_yuv_detect.xml │ │ ├── values-sw600dp │ │ ├── template-dimens.xml │ │ └── template-styles.xml │ │ ├── values-v11 │ │ └── template-styles.xml │ │ ├── values-v21 │ │ ├── base-colors.xml │ │ └── base-template-styles.xml │ │ └── values │ │ ├── base-strings.xml │ │ ├── colors.xml │ │ ├── strings.xml │ │ ├── styles.xml │ │ ├── template-dimens.xml │ │ └── template-styles.xml └── tests │ ├── AndroidManifest.xml │ └── src │ └── com │ └── example │ └── android │ └── camera2basic │ └── tests │ └── SampleTests.java ├── CONTRIB.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── build.gradle ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── 
gradlew.bat ├── packaging.yaml └── settings.gradle /.gitignore: -------------------------------------------------------------------------------- 1 | # Built application files 2 | *.apk 3 | *.ap_ 4 | 5 | # Files for the ART/Dalvik VM 6 | *.dex 7 | 8 | # Java class files 9 | *.class 10 | 11 | # Generated files 12 | bin/ 13 | gen/ 14 | out/ 15 | 16 | # Gradle files 17 | .gradle/ 18 | build/ 19 | .cxx/ 20 | .externalNativeBuild/ 21 | 22 | # Local configuration file (sdk path, etc) 23 | local.properties 24 | 25 | # Proguard folder generated by Eclipse 26 | proguard/ 27 | 28 | # Log Files 29 | *.log 30 | 31 | # Android Studio Navigation editor temp files 32 | .navigation/ 33 | 34 | # Android Studio captures folder 35 | captures/ 36 | 37 | # IntelliJ 38 | *.iml 39 | .idea/ 40 | 41 | # Keystore files 42 | # Uncomment the following line if you do not want to check your keystore files in. 43 | #*.jks 44 | 45 | # External native build folder generated in Android Studio 2.2 and later 46 | .externalNativeBuild 47 | 48 | # Google Services (e.g. APIs or Firebase) 49 | google-services.json 50 | 51 | # Freeline 52 | freeline.py 53 | freeline/ 54 | freeline_project_description.json 55 | 56 | # fastlane 57 | fastlane/report.xml 58 | fastlane/Preview.html 59 | fastlane/screenshots 60 | fastlane/test_output 61 | fastlane/readme.md 62 | -------------------------------------------------------------------------------- /.google/packaging.yaml: -------------------------------------------------------------------------------- 1 | 2 | # GOOGLE SAMPLE PACKAGING DATA 3 | # 4 | # This file is used by Google as part of our samples packaging process. 5 | # End users may safely ignore this file. It has no relevance to other systems. 
6 | --- 7 | status: PUBLISHED 8 | technologies: [Android] 9 | categories: [Media, Camera, Camera2] 10 | languages: [Java] 11 | solutions: [Mobile] 12 | github: android-Camera2Basic 13 | level: INTERMEDIATE 14 | icon: screenshots/icon-web.png 15 | apiRefs: 16 | - android:android.hardware.camera2.CameraManager 17 | - android:android.hardware.camera2.CameraDevice 18 | - android:android.hardware.camera2.CameraCharacteristics 19 | - android:android.hardware.camera2.CameraCaptureSession 20 | - android:android.hardware.camera2.CaptureRequest 21 | - android:android.hardware.camera2.CaptureResult 22 | - android:android.view.TextureView 23 | license: apache2 24 | -------------------------------------------------------------------------------- /.idea/encodings.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /.idea/misc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 9 | -------------------------------------------------------------------------------- /.idea/modules.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /.idea/runConfigurations.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 11 | 12 | -------------------------------------------------------------------------------- /.idea/vcs.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /Application/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'com.android.application' 2 | 3 | android { 4 | compileSdk 34 5 | 6 | 
apply plugin: 'com.android.application'

android {
    compileSdk 34

    defaultConfig {
        applicationId "cc.rome753.yuvtools"
        minSdkVersion 21
        targetSdkVersion 34
        // Flags forwarded to the native (CMake) build of libyuv420.
        externalNativeBuild {
            cmake {
                cppFlags ""
            }
        }
    }

    // Native build script for the YUV conversion helpers.
    externalNativeBuild {
        cmake {
            path "src/main/cpp/CMakeLists.txt"
            version "3.10.2"
        }
    }

    compileOptions {
        sourceCompatibility JavaVersion.VERSION_11
        targetCompatibility JavaVersion.VERSION_11
    }

}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation 'androidx.appcompat:appcompat:1.1.0-rc01'
    implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
    testImplementation 'junit:junit:4.12'
    androidTestImplementation 'androidx.test:runner:1.2.0'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'

    // CameraX; alpha04 was the most recent release when this sample was written.
    def camerax_version = "1.0.0-alpha04"
    implementation "androidx.camera:camera-core:${camerax_version}"
    implementation "androidx.camera:camera-camera2:${camerax_version}"
}
# Native build script for the YUV420 conversion helpers.
# See https://d.android.com/studio/projects/add-native-code.html for how
# Gradle drives this CMake build.

cmake_minimum_required(VERSION 3.4.1)

# libyuv420.so: a single shared library built from yuv420.cpp.
# Gradle packages the resulting .so into the APK automatically.
add_library(yuv420
            SHARED
            yuv420.cpp)

# Locate the NDK's logging library; CMake already searches the NDK system
# paths, so the plain name is enough. The resolved path lands in ${log-lib}.
find_library(log-lib
             log)

# Link the NDK log library into libyuv420.
target_link_libraries(yuv420
                      ${log-lib})
44 | ${log-lib}) -------------------------------------------------------------------------------- /Application/src/main/cpp/yuv420.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | extern "C" JNIEXPORT void JNICALL 5 | Java_cc_rome753_yuvtools_YUVTools_yv12ToNv21cpp 6 | (JNIEnv *env, jclass jcls, jbyteArray src_, jbyteArray dest_, jint w, jint h) { 7 | 8 | jbyte *src = env->GetByteArrayElements(src_, NULL); 9 | jbyte *dest = env->GetByteArrayElements(dest_, NULL); 10 | jsize len = env->GetArrayLength(dest_); 11 | 12 | int pos = w * h; 13 | memcpy(dest,src,pos); 14 | int v = pos; 15 | int u = pos + (pos >> 2); 16 | while(pos < len) { 17 | dest[pos++] = src[v++]; 18 | dest[pos++] = src[u++]; 19 | } 20 | env->ReleaseByteArrayElements(src_, src, 0); 21 | env->ReleaseByteArrayElements(dest_, dest, 0); 22 | } 23 | 24 | extern "C" JNIEXPORT void JNICALL 25 | Java_cc_rome753_yuvtools_YUVTools_i420ToNv21cpp(JNIEnv *env, jclass type, jbyteArray src_, 26 | jbyteArray dest_, jint w, jint h) { 27 | jbyte *src = env->GetByteArrayElements(src_, NULL); 28 | jbyte *dest = env->GetByteArrayElements(dest_, NULL); 29 | jsize len = env->GetArrayLength(dest_); 30 | 31 | int pos = w * h; 32 | int u = pos; 33 | int v = pos + (pos >> 2); 34 | memcpy(dest,src,pos); 35 | while(pos < len) { 36 | dest[pos++] = src[v++]; 37 | dest[pos++] = src[u++]; 38 | } 39 | 40 | env->ReleaseByteArrayElements(src_, src, 0); 41 | env->ReleaseByteArrayElements(dest_, dest, 0); 42 | } 43 | 44 | extern "C" JNIEXPORT void JNICALL 45 | Java_cc_rome753_yuvtools_YUVTools_nv12ToNv21cpp(JNIEnv *env, jclass type, jbyteArray src_, 46 | jbyteArray dest_, jint w, jint h) { 47 | jbyte *src = env->GetByteArrayElements(src_, NULL); 48 | jbyte *dest = env->GetByteArrayElements(dest_, NULL); 49 | jsize len = env->GetArrayLength(dest_); 50 | 51 | int pos = w * h; 52 | int u = pos; 53 | int v = pos + (pos >> 2); 54 | memcpy(dest,src,pos); 55 | for(; pos < len; 
pos += 2) { 56 | dest[pos] = src[pos+1]; 57 | dest[pos+1] = src[pos]; 58 | } 59 | 60 | env->ReleaseByteArrayElements(src_, src, 0); 61 | env->ReleaseByteArrayElements(dest_, dest, 0); 62 | } -------------------------------------------------------------------------------- /Application/src/main/java/cc/rome753/yuvtools/ImageBytes.java: -------------------------------------------------------------------------------- 1 | package cc.rome753.yuvtools; 2 | 3 | public class ImageBytes { 4 | public byte[] bytes; 5 | public int width; 6 | public int height; 7 | 8 | public ImageBytes(byte[] bytes, int width, int height) { 9 | this.bytes = bytes; 10 | this.width = width; 11 | this.height = height; 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /Application/src/main/java/cc/rome753/yuvtools/MainActivity.java: -------------------------------------------------------------------------------- 1 | package cc.rome753.yuvtools; 2 | 3 | import android.content.Intent; 4 | import android.os.Bundle; 5 | import android.view.View; 6 | 7 | import androidx.appcompat.app.AppCompatActivity; 8 | 9 | import com.example.android.camera1basic.Camera1Activity; 10 | import com.example.android.camera2basic.Camera2Activity; 11 | import com.example.android.camera2basic.R; 12 | import com.example.android.camerax.CameraXActivity; 13 | 14 | public class MainActivity extends AppCompatActivity { 15 | 16 | @Override 17 | protected void onCreate(Bundle savedInstanceState) { 18 | super.onCreate(savedInstanceState); 19 | setContentView(R.layout.activity_main); 20 | } 21 | 22 | public void click1(View v) { 23 | startActivity(new Intent(this, Camera1Activity.class)); 24 | } 25 | 26 | public void click2(View v) { 27 | startActivity(new Intent(this, Camera2Activity.class)); 28 | } 29 | 30 | public void click3(View v) { 31 | startActivity(new Intent(this, CameraXActivity.class)); 32 | } 33 | } 34 | 
-------------------------------------------------------------------------------- /Application/src/main/java/cc/rome753/yuvtools/YUVDetectView.java: -------------------------------------------------------------------------------- 1 | package cc.rome753.yuvtools; 2 | 3 | import android.content.Context; 4 | import android.graphics.Bitmap; 5 | import android.media.Image; 6 | import android.media.ImageReader; 7 | import android.util.AttributeSet; 8 | import android.util.Log; 9 | import android.view.View; 10 | import android.widget.CheckBox; 11 | import android.widget.CompoundButton; 12 | import android.widget.FrameLayout; 13 | import android.widget.ImageView; 14 | 15 | import androidx.annotation.NonNull; 16 | import androidx.annotation.Nullable; 17 | 18 | import com.example.android.camera2basic.R; 19 | 20 | public class YUVDetectView extends FrameLayout { 21 | 22 | ImageView[] ivs; 23 | CheckBox cb; 24 | boolean isFlip = false; 25 | boolean isShowing = false; 26 | int rotation = 0; 27 | byte[] buf; 28 | 29 | public YUVDetectView(@NonNull Context context) { 30 | this(context, null); 31 | } 32 | 33 | public YUVDetectView(@NonNull Context context, @Nullable AttributeSet attrs) { 34 | this(context, attrs, 0); 35 | } 36 | 37 | public YUVDetectView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) { 38 | super(context, attrs, defStyleAttr); 39 | inflate(context, R.layout.view_yuv_detect, this); 40 | 41 | ivs = new ImageView[]{ 42 | findViewById(R.id.iv1), // I420 43 | findViewById(R.id.iv2), // YV12 44 | findViewById(R.id.iv3), // NV12 45 | findViewById(R.id.iv4), // NV21 46 | }; 47 | cb = findViewById(R.id.cb); 48 | cb.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { 49 | @Override 50 | public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { 51 | isFlip = isChecked; 52 | } 53 | }); 54 | 55 | View btn = findViewById(R.id.btn); 56 | btn.setOnClickListener(new OnClickListener() { 57 | @Override 58 | public 
void onClick(View v) { 59 | rotation = (rotation + 90) % 360; 60 | } 61 | }); 62 | } 63 | 64 | public void input(final ImageReader imageReader) { 65 | final ImageBytes imageBytes = YUVTools.getBytesFromImageReader(imageReader); 66 | if(imageBytes != null) { 67 | final int w = isFlip ? imageBytes.height : imageBytes.width; 68 | final int h = isFlip ? imageBytes.width : imageBytes.height; 69 | displayImage(imageBytes.bytes, w, h); 70 | } 71 | } 72 | 73 | public void input(final Image image) { 74 | final ImageBytes imageBytes = YUVTools.getBytesFromImage(image); 75 | if(imageBytes != null) { 76 | final int w = isFlip ? imageBytes.height : imageBytes.width; 77 | final int h = isFlip ? imageBytes.width : imageBytes.height; 78 | displayImage(imageBytes.bytes, w, h); 79 | } 80 | } 81 | 82 | public void inputAsync(final byte[] data, int width, int height) { 83 | final int w = isFlip ? height : width; 84 | final int h = isFlip ? width : height; 85 | 86 | if (isShowing) return; 87 | isShowing = true; 88 | new Thread() { 89 | @Override 90 | public void run() { 91 | displayImage(data, w, h); 92 | isShowing = false; 93 | } 94 | }.start(); 95 | } 96 | 97 | private void displayImage(byte[] data, int w, int h) { 98 | long time = System.currentTimeMillis(); 99 | 100 | if(buf == null) { 101 | buf = new byte[data.length]; 102 | } 103 | int rw = rotation % 180 == 0 ? w : h, rh = rotation % 180 == 0 ? 
h : w; // rotated 104 | 105 | YUVTools.rotateP(data, buf, w, h, rotation); 106 | final Bitmap b0 = YUVTools.i420ToBitmap(buf, rw, rh); 107 | 108 | YUVTools.rotateP(data, buf, w, h, rotation); 109 | final Bitmap b1 = YUVTools.yv12ToBitmap(buf, rw, rh); 110 | 111 | YUVTools.rotateSP(data, buf, w, h, rotation); 112 | final Bitmap b2 = YUVTools.nv12ToBitmap(buf, rw, rh); 113 | 114 | YUVTools.rotateSP(data, buf, w, h, rotation); 115 | final Bitmap b3 = YUVTools.nv21ToBitmap(buf, rw, rh); 116 | 117 | time = System.currentTimeMillis() - time; 118 | Log.d("YUVDetectView", "convert time: " + time); 119 | post(new Runnable() { 120 | @Override 121 | public void run() { 122 | if (b0 != null) ivs[0].setImageBitmap(b0); 123 | if (b1 != null) ivs[1].setImageBitmap(b1); 124 | if (b2 != null) ivs[2].setImageBitmap(b2); 125 | if (b3 != null) ivs[3].setImageBitmap(b3); 126 | } 127 | }); 128 | } 129 | 130 | } 131 | -------------------------------------------------------------------------------- /Application/src/main/java/cc/rome753/yuvtools/YUVTools.java: -------------------------------------------------------------------------------- 1 | package cc.rome753.yuvtools; 2 | 3 | import android.graphics.Bitmap; 4 | import android.media.Image; 5 | import android.media.ImageReader; 6 | 7 | import java.nio.ByteBuffer; 8 | 9 | public class YUVTools { 10 | 11 | /******************************* YUV420旋转算法 *******************************/ 12 | 13 | // I420或YV12顺时针旋转 14 | public static void rotateP(byte[] src, byte[] dest, int w, int h, int rotation) { 15 | switch (rotation) { 16 | case 0: 17 | System.arraycopy(src, 0, dest, 0, src.length); 18 | break; 19 | case 90: 20 | rotateP90(src, dest, w, h); 21 | break; 22 | case 180: 23 | rotateP180(src, dest, w, h); 24 | break; 25 | case 270: 26 | rotateP270(src, dest, w, h); 27 | break; 28 | } 29 | } 30 | 31 | // NV21或NV12顺时针旋转 32 | public static void rotateSP(byte[] src, byte[] dest, int w, int h, int rotation) { 33 | switch (rotation) { 34 | case 0: 35 
| System.arraycopy(src, 0, dest, 0, src.length); 36 | break; 37 | case 90: 38 | rotateSP90(src, dest, w, h); 39 | break; 40 | case 180: 41 | rotateSP180(src, dest, w, h); 42 | break; 43 | case 270: 44 | rotateSP270(src, dest, w, h); 45 | break; 46 | } 47 | } 48 | 49 | // NV21或NV12顺时针旋转90度 50 | public static void rotateSP90(byte[] src, byte[] dest, int w, int h) { 51 | int pos = 0; 52 | int k = 0; 53 | for (int i = 0; i <= w - 1; i++) { 54 | for (int j = h - 1; j >= 0; j--) { 55 | dest[k++] = src[j * w + i]; 56 | } 57 | } 58 | 59 | pos = w * h; 60 | for (int i = 0; i <= w - 2; i += 2) { 61 | for (int j = h / 2 - 1; j >= 0; j--) { 62 | dest[k++] = src[pos + j * w + i]; 63 | dest[k++] = src[pos + j * w + i + 1]; 64 | } 65 | } 66 | } 67 | 68 | // NV21或NV12顺时针旋转270度 69 | public static void rotateSP270(byte[] src, byte[] dest, int w, int h) { 70 | int pos = 0; 71 | int k = 0; 72 | for (int i = w - 1; i >= 0; i--) { 73 | for (int j = 0; j <= h - 1; j++) { 74 | dest[k++] = src[j * w + i]; 75 | } 76 | } 77 | 78 | pos = w * h; 79 | for (int i = w - 2; i >= 0; i -= 2) { 80 | for (int j = 0; j <= h / 2 - 1; j++) { 81 | dest[k++] = src[pos + j * w + i]; 82 | dest[k++] = src[pos + j * w + i + 1]; 83 | } 84 | } 85 | } 86 | 87 | // NV21或NV12顺时针旋转180度 88 | public static void rotateSP180(byte[] src, byte[] dest, int w, int h) { 89 | int pos = 0; 90 | int k = w * h - 1; 91 | while (k >= 0) { 92 | dest[pos++] = src[k--]; 93 | } 94 | 95 | k = src.length - 2; 96 | while (pos < dest.length) { 97 | dest[pos++] = src[k]; 98 | dest[pos++] = src[k + 1]; 99 | k -= 2; 100 | } 101 | } 102 | 103 | // I420或YV12顺时针旋转90度 104 | public static void rotateP90(byte[] src, byte[] dest, int w, int h) { 105 | int pos = 0; 106 | //旋转Y 107 | int k = 0; 108 | for (int i = 0; i < w; i++) { 109 | for (int j = h - 1; j >= 0; j--) { 110 | dest[k++] = src[j * w + i]; 111 | } 112 | } 113 | //旋转U 114 | pos = w * h; 115 | for (int i = 0; i < w / 2; i++) { 116 | for (int j = h / 2 - 1; j >= 0; j--) { 117 | dest[k++] = 
src[pos + j * w / 2 + i]; 118 | } 119 | } 120 | 121 | //旋转V 122 | pos = w * h * 5 / 4; 123 | for (int i = 0; i < w / 2; i++) { 124 | for (int j = h / 2 - 1; j >= 0; j--) { 125 | dest[k++] = src[pos + j * w / 2 + i]; 126 | } 127 | } 128 | } 129 | 130 | // I420或YV12顺时针旋转270度 131 | public static void rotateP270(byte[] src, byte[] dest, int w, int h) { 132 | int pos = 0; 133 | //旋转Y 134 | int k = 0; 135 | for (int i = w - 1; i >= 0; i--) { 136 | for (int j = 0; j < h; j++) { 137 | dest[k++] = src[j * w + i]; 138 | } 139 | } 140 | //旋转U 141 | pos = w * h; 142 | for (int i = w / 2 - 1; i >= 0; i--) { 143 | for (int j = 0; j < h / 2; j++) { 144 | dest[k++] = src[pos + j * w / 2 + i]; 145 | } 146 | } 147 | 148 | //旋转V 149 | pos = w * h * 5 / 4; 150 | for (int i = w / 2 - 1; i >= 0; i--) { 151 | for (int j = 0; j < h / 2; j++) { 152 | dest[k++] = src[pos + j * w / 2 + i]; 153 | } 154 | } 155 | } 156 | 157 | // I420或YV12顺时针旋转180度 158 | public static void rotateP180(byte[] src, byte[] dest, int w, int h) { 159 | int pos = 0; 160 | int k = w * h - 1; 161 | while (k >= 0) { 162 | dest[pos++] = src[k--]; 163 | } 164 | 165 | k = w * h * 5 / 4; 166 | while (k >= w * h) { 167 | dest[pos++] = src[k--]; 168 | } 169 | 170 | k = src.length - 1; 171 | while (pos < dest.length) { 172 | dest[pos++] = src[k--]; 173 | } 174 | } 175 | 176 | /******************************* YUV420格式相互转换算法 *******************************/ 177 | 178 | // i420 -> nv12, yv12 -> nv21 179 | public static void pToSP(byte[] src, byte[] dest, int w, int h) { 180 | int pos = w * h; 181 | int u = pos; 182 | int v = pos + (pos >> 2); 183 | System.arraycopy(src, 0, dest, 0, pos); 184 | while (pos < src.length) { 185 | dest[pos++] = src[u++]; 186 | dest[pos++] = src[v++]; 187 | } 188 | } 189 | 190 | // i420 -> nv21, yv12 -> nv12 191 | public static void pToSPx(byte[] src, byte[] dest, int w, int h) { 192 | int pos = w * h; 193 | int u = pos; 194 | int v = pos + (pos >> 2); 195 | System.arraycopy(src, 0, dest, 0, pos); 196 
| while (pos < src.length) { 197 | dest[pos++] = src[v++]; 198 | dest[pos++] = src[u++]; 199 | } 200 | } 201 | 202 | // nv12 -> i420, nv21 -> yv12 203 | public static void spToP(byte[] src, byte[] dest, int w, int h) { 204 | int pos = w * h; 205 | int u = pos; 206 | int v = pos + (pos >> 2); 207 | System.arraycopy(src, 0, dest, 0, pos); 208 | while (pos < src.length) { 209 | dest[u++] = src[pos++]; 210 | dest[v++] = src[pos++]; 211 | } 212 | } 213 | 214 | // nv12 -> yv12, nv21 -> i420 215 | public static void spToPx(byte[] src, byte[] dest, int w, int h) { 216 | int pos = w * h; 217 | int u = pos; 218 | int v = pos + (pos >> 2); 219 | System.arraycopy(src, 0, dest, 0, pos); 220 | while (pos < src.length) { 221 | dest[v++] = src[pos++]; 222 | dest[u++] = src[pos++]; 223 | } 224 | } 225 | 226 | // i420 <-> yv12 227 | public static void pToP(byte[] src, byte[] dest, int w, int h) { 228 | int pos = w * h; 229 | int off = pos >> 2; 230 | System.arraycopy(src, 0, dest, 0, pos); 231 | System.arraycopy(src, pos, dest, pos + off, off); 232 | System.arraycopy(src, pos + off, dest, pos, off); 233 | } 234 | 235 | // nv12 <-> nv21 236 | public static void spToSP(byte[] src, byte[] dest, int w, int h) { 237 | int pos = w * h; 238 | System.arraycopy(src, 0, dest, 0, pos); 239 | for (; pos < src.length; pos += 2) { 240 | dest[pos] = src[pos + 1]; 241 | dest[pos + 1] = src[pos]; 242 | } 243 | } 244 | 245 | 246 | /******************************* YUV420转换Bitmap算法 *******************************/ 247 | 248 | // 此方法虽然是官方的,但是耗时是下面方法的两倍 249 | // public static Bitmap nv21ToBitmap(byte[] data, int w, int h) { 250 | // final YuvImage image = new YuvImage(data, ImageFormat.NV21, w, h, null); 251 | // ByteArrayOutputStream os = new ByteArrayOutputStream(data.length); 252 | // if (image.compressToJpeg(new Rect(0, 0, w, h), 100, os)) { 253 | // byte[] tmp = os.toByteArray(); 254 | // return BitmapFactory.decodeByteArray(tmp, 0, tmp.length); 255 | // } 256 | // return null; 257 | // } 258 | 259 
| public static Bitmap nv12ToBitmap(byte[] data, int w, int h) { 260 | return spToBitmap(data, w, h, 0, 1); 261 | } 262 | 263 | public static Bitmap nv21ToBitmap(byte[] data, int w, int h) { 264 | return spToBitmap(data, w, h, 1, 0); 265 | } 266 | 267 | private static Bitmap spToBitmap(byte[] data, int w, int h, int uOff, int vOff) { 268 | int plane = w * h; 269 | int[] colors = new int[plane]; 270 | int yPos = 0, uvPos = plane; 271 | for(int j = 0; j < h; j++) { 272 | for(int i = 0; i < w; i++) { 273 | // YUV byte to RGB int 274 | final int y1 = data[yPos] & 0xff; 275 | final int u = (data[uvPos + uOff] & 0xff) - 128; 276 | final int v = (data[uvPos + vOff] & 0xff) - 128; 277 | final int y1192 = 1192 * y1; 278 | int r = (y1192 + 1634 * v); 279 | int g = (y1192 - 833 * v - 400 * u); 280 | int b = (y1192 + 2066 * u); 281 | 282 | r = (r < 0) ? 0 : ((r > 262143) ? 262143 : r); 283 | g = (g < 0) ? 0 : ((g > 262143) ? 262143 : g); 284 | b = (b < 0) ? 0 : ((b > 262143) ? 262143 : b); 285 | colors[yPos] = ((r << 6) & 0xff0000) | 286 | ((g >> 2) & 0xff00) | 287 | ((b >> 10) & 0xff); 288 | 289 | if((yPos++ & 1) == 1) uvPos += 2; 290 | } 291 | if((j & 1) == 0) uvPos -= w; 292 | } 293 | return Bitmap.createBitmap(colors, w, h, Bitmap.Config.RGB_565); 294 | } 295 | 296 | public static Bitmap i420ToBitmap(byte[] data, int w, int h) { 297 | return pToBitmap(data, w, h, true); 298 | } 299 | 300 | public static Bitmap yv12ToBitmap(byte[] data, int w, int h) { 301 | return pToBitmap(data, w, h, false); 302 | } 303 | 304 | private static Bitmap pToBitmap(byte[] data, int w, int h, boolean uv) { 305 | int plane = w * h; 306 | int[] colors = new int[plane]; 307 | int off = plane >> 2; 308 | int yPos = 0, uPos = plane + (uv ? 0 : off), vPos = plane + (uv ? 
off : 0); 309 | for(int j = 0; j < h; j++) { 310 | for(int i = 0; i < w; i++) { 311 | // YUV byte to RGB int 312 | final int y1 = data[yPos] & 0xff; 313 | final int u = (data[uPos] & 0xff) - 128; 314 | final int v = (data[vPos] & 0xff) - 128; 315 | final int y1192 = 1192 * y1; 316 | int r = (y1192 + 1634 * v); 317 | int g = (y1192 - 833 * v - 400 * u); 318 | int b = (y1192 + 2066 * u); 319 | 320 | r = (r < 0) ? 0 : ((r > 262143) ? 262143 : r); 321 | g = (g < 0) ? 0 : ((g > 262143) ? 262143 : g); 322 | b = (b < 0) ? 0 : ((b > 262143) ? 262143 : b); 323 | colors[yPos] = ((r << 6) & 0xff0000) | 324 | ((g >> 2) & 0xff00) | 325 | ((b >> 10) & 0xff); 326 | 327 | if((yPos++ & 1) == 1) { 328 | uPos++; 329 | vPos++; 330 | } 331 | } 332 | if((j & 1) == 0) { 333 | uPos -= (w >> 1); 334 | vPos -= (w >> 1); 335 | } 336 | } 337 | return Bitmap.createBitmap(colors, w, h, Bitmap.Config.RGB_565); 338 | } 339 | 340 | public static int[] planesToColors(Image.Plane[] planes, int height) { 341 | ByteBuffer yPlane = planes[0].getBuffer(); 342 | ByteBuffer uPlane = planes[1].getBuffer(); 343 | ByteBuffer vPlane = planes[2].getBuffer(); 344 | 345 | int bufferIndex = 0; 346 | final int total = yPlane.capacity(); 347 | final int uvCapacity = uPlane.capacity(); 348 | final int width = planes[0].getRowStride(); 349 | 350 | int[] rgbBuffer = new int[width * height]; 351 | 352 | int yPos = 0; 353 | for (int i = 0; i < height; i++) { 354 | int uvPos = (i >> 1) * width; 355 | 356 | for (int j = 0; j < width; j++) { 357 | if (uvPos >= uvCapacity - 1) 358 | break; 359 | if (yPos >= total) 360 | break; 361 | 362 | final int y1 = yPlane.get(yPos++) & 0xff; 363 | 364 | /* 365 | The ordering of the u (Cb) and v (Cr) bytes inside the planes is a 366 | bit strange. The _first_ byte of the u-plane and the _second_ byte 367 | of the v-plane build the u/v pair and belong to the first two pixels 368 | (y-bytes), thus usual YUV 420 behavior. 
What the Android devs did 369 | here (IMHO): just copy the interleaved NV21 U/V data to two planes 370 | but keep the offset of the interleaving. 371 | */ 372 | final int u = (uPlane.get(uvPos) & 0xff) - 128; 373 | final int v = (vPlane.get(uvPos) & 0xff) - 128; 374 | if ((j & 1) == 1) { 375 | uvPos += 2; 376 | } 377 | 378 | // This is the integer variant to convert YCbCr to RGB, NTSC values. 379 | // formulae found at 380 | // https://software.intel.com/en-us/android/articles/trusted-tools-in-the-new-android-world-optimization-techniques-from-intel-sse-intrinsics-to 381 | // and on StackOverflow etc. 382 | final int y1192 = 1192 * y1; 383 | int r = (y1192 + 1634 * v); 384 | int g = (y1192 - 833 * v - 400 * u); 385 | int b = (y1192 + 2066 * u); 386 | 387 | r = (r < 0) ? 0 : ((r > 262143) ? 262143 : r); 388 | g = (g < 0) ? 0 : ((g > 262143) ? 262143 : g); 389 | b = (b < 0) ? 0 : ((b > 262143) ? 262143 : b); 390 | 391 | rgbBuffer[bufferIndex++] = ((r << 6) & 0xff0000) | 392 | ((g >> 2) & 0xff00) | 393 | ((b >> 10) & 0xff); 394 | } 395 | } 396 | return rgbBuffer; 397 | } 398 | 399 | /** 400 | * 从ImageReader中获取byte[]数据 401 | */ 402 | public static ImageBytes getBytesFromImageReader(ImageReader imageReader) { 403 | try (Image image = imageReader.acquireNextImage()) { 404 | return getBytesFromImage(image); 405 | } catch (Exception e) { 406 | e.printStackTrace(); 407 | } 408 | return null; 409 | } 410 | 411 | public static ImageBytes getBytesFromImage(Image image) { 412 | final Image.Plane[] planes = image.getPlanes(); 413 | 414 | Image.Plane p0 = planes[0]; 415 | Image.Plane p1 = planes[1]; 416 | Image.Plane p2 = planes[2]; 417 | 418 | ByteBuffer b0 = p0.getBuffer(); 419 | ByteBuffer b1 = p1.getBuffer(); 420 | ByteBuffer b2 = p2.getBuffer(); 421 | 422 | int r0 = b0.remaining(); 423 | int r1 = b1.remaining(); 424 | int r2 = b2.remaining(); 425 | 426 | int w0 = p0.getRowStride(); 427 | int h0 = r0 / w0; 428 | if(r0 % w0 > 0) h0++; 429 | int w1 = p1.getRowStride(); 430 | 
int h1 = r1 / w1; 431 | if(r1 % w1 > 1) h1++; 432 | int w2 = p2.getRowStride(); 433 | int h2 = r2 / w2; 434 | if(r2 % w2 > 2) h2++; 435 | 436 | int y = w0 * h0; 437 | int u = w1 * h1; 438 | int v = w2 * h2; 439 | 440 | byte[] bytes = new byte[y + u + v]; 441 | 442 | b0.get(bytes, 0, r0); 443 | b1.get(bytes, y, r1); // u 444 | b2.get(bytes, y + u, r2); // v 445 | 446 | return new ImageBytes(bytes, w0, h0); 447 | } 448 | 449 | static { 450 | System.loadLibrary("yuv420"); 451 | } 452 | 453 | public static native void i420ToNv21cpp(byte[] src, byte[] dest, int width, int height); 454 | 455 | public static native void yv12ToNv21cpp(byte[] src, byte[] dest, int width, int height); 456 | 457 | public static native void nv12ToNv21cpp(byte[] src, byte[] dest, int width, int height); 458 | } 459 | -------------------------------------------------------------------------------- /Application/src/main/java/com/example/android/camera1basic/Camera1Activity.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2007 The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package com.example.android.camera1basic; 18 | 19 | import android.app.AlertDialog; 20 | import android.hardware.Camera; 21 | import android.hardware.Camera.CameraInfo; 22 | import android.os.Bundle; 23 | 24 | import androidx.appcompat.app.AppCompatActivity; 25 | 26 | import com.example.android.camera2basic.R; 27 | 28 | import cc.rome753.yuvtools.YUVDetectView; 29 | 30 | // Need the following import to get access to the app resources, since this 31 | // class is in a sub-package. 32 | 33 | // ---------------------------------------------------------------------- 34 | 35 | public class Camera1Activity extends AppCompatActivity implements Camera.PreviewCallback { 36 | private Camera1Preview mPreview; 37 | private YUVDetectView ydv; 38 | Camera mCamera; 39 | int numberOfCameras; 40 | int cameraCurrentlyLocked; 41 | 42 | // The first rear facing camera 43 | int defaultCameraId; 44 | 45 | @Override 46 | protected void onCreate(Bundle savedInstanceState) { 47 | super.onCreate(savedInstanceState); 48 | setContentView(R.layout.activity_camera1); 49 | // 50 | // // Hide the window title. 51 | // requestWindowFeature(Window.FEATURE_NO_TITLE); 52 | // getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); 53 | 54 | // Create a RelativeLayout container that will hold a SurfaceView, 55 | // and set it as the content of our activity. 56 | mPreview = findViewById(R.id.surface); 57 | ydv = findViewById(R.id.ydv); 58 | 59 | // Find the total number of cameras available 60 | numberOfCameras = Camera.getNumberOfCameras(); 61 | 62 | // Find the ID of the default camera 63 | CameraInfo cameraInfo = new CameraInfo(); 64 | for (int i = 0; i < numberOfCameras; i++) { 65 | Camera.getCameraInfo(i, cameraInfo); 66 | if (cameraInfo.facing == CameraInfo.CAMERA_FACING_BACK) { 67 | defaultCameraId = i; 68 | } 69 | } 70 | 71 | } 72 | 73 | @Override 74 | protected void onResume() { 75 | super.onResume(); 76 | 77 | // Open the default i.e. the first rear facing camera. 
78 | mCamera = Camera.open(); 79 | mCamera.setPreviewCallback(this); 80 | mCamera.setDisplayOrientation(90); 81 | cameraCurrentlyLocked = defaultCameraId; 82 | mPreview.setCamera(mCamera); 83 | } 84 | 85 | @Override 86 | protected void onPause() { 87 | super.onPause(); 88 | 89 | // Because the Camera object is a shared resource, it's very 90 | // important to release it when the activity is paused. 91 | if (mCamera != null) { 92 | mPreview.setCamera(null); 93 | mCamera.setPreviewCallback(null); 94 | mCamera.release(); 95 | mCamera = null; 96 | } 97 | } 98 | 99 | private void switchCam() { 100 | if (numberOfCameras == 1) { 101 | AlertDialog.Builder builder = new AlertDialog.Builder(this); 102 | builder.setMessage("Device has only one camera!") 103 | .setNeutralButton("Close", null); 104 | AlertDialog alert = builder.create(); 105 | alert.show(); 106 | return; 107 | } 108 | 109 | // OK, we have multiple cameras. 110 | // Release this camera -> cameraCurrentlyLocked 111 | if (mCamera != null) { 112 | mCamera.stopPreview(); 113 | mPreview.setCamera(null); 114 | mCamera.setPreviewCallback(null); 115 | mCamera.release(); 116 | mCamera = null; 117 | } 118 | 119 | // Acquire the next camera and request Preview to reconfigure 120 | // parameters. 
121 | mCamera = Camera 122 | .open((cameraCurrentlyLocked + 1) % numberOfCameras); 123 | cameraCurrentlyLocked = (cameraCurrentlyLocked + 1) 124 | % numberOfCameras; 125 | mPreview.switchCamera(mCamera); 126 | 127 | // Start the preview 128 | mCamera.startPreview(); 129 | } 130 | 131 | @Override 132 | public void onPreviewFrame(byte[] data, Camera camera) { 133 | if(camera == null) return; 134 | Camera.Size size = camera.getParameters().getPreviewSize(); //获取预览大小 135 | final int w = size.width; 136 | final int h = size.height; 137 | ydv.inputAsync(data, w, h); 138 | } 139 | } 140 | 141 | // ---------------------------------------------------------------------- 142 | -------------------------------------------------------------------------------- /Application/src/main/java/com/example/android/camera1basic/Camera1Preview.java: -------------------------------------------------------------------------------- 1 | package com.example.android.camera1basic; 2 | 3 | import android.content.Context; 4 | import android.graphics.Color; 5 | import android.graphics.ImageFormat; 6 | import android.hardware.Camera; 7 | import android.hardware.Camera.Size; 8 | import android.util.AttributeSet; 9 | import android.util.Log; 10 | import android.view.Gravity; 11 | import android.view.SurfaceHolder; 12 | import android.view.SurfaceView; 13 | import android.view.View; 14 | import android.view.ViewGroup; 15 | import android.widget.FrameLayout; 16 | 17 | import androidx.annotation.NonNull; 18 | import androidx.annotation.Nullable; 19 | 20 | import java.io.IOException; 21 | import java.util.List; 22 | 23 | import cc.rome753.yuvtools.YUVDetectView; 24 | 25 | /** 26 | * A simple wrapper around a Camera and a SurfaceView that renders a centered preview of the Camera 27 | * to the surface. We need to center the SurfaceView because not all devices have cameras that 28 | * support preview sizes at the same aspect ratio as the device's display. 
29 | */ 30 | public class Camera1Preview extends FrameLayout implements SurfaceHolder.Callback { 31 | private final String TAG = "Preview"; 32 | 33 | SurfaceView mSurfaceView; 34 | SurfaceHolder mHolder; 35 | Size mPreviewSize; 36 | List mSupportedPreviewSizes; 37 | Camera mCamera; 38 | 39 | public Camera1Preview(Context context) { 40 | this(context, null); 41 | } 42 | 43 | public Camera1Preview(@NonNull Context context, @Nullable AttributeSet attrs) { 44 | this(context, attrs, 0); 45 | } 46 | 47 | public Camera1Preview(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) { 48 | super(context, attrs, defStyleAttr); 49 | setBackgroundColor(Color.BLACK); 50 | mSurfaceView = new SurfaceView(context); 51 | addView(mSurfaceView); 52 | 53 | // Install a SurfaceHolder.Callback so we get notified when the 54 | // underlying surface is created and destroyed. 55 | mHolder = mSurfaceView.getHolder(); 56 | mHolder.addCallback(this); 57 | mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); 58 | } 59 | 60 | public void setCamera(Camera camera) { 61 | mCamera = camera; 62 | if (mCamera != null) { 63 | mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes(); 64 | requestLayout(); 65 | } 66 | } 67 | 68 | public void switchCamera(Camera camera) { 69 | // setCamera(camera); 70 | // try { 71 | // camera.setPreviewDisplay(mHolder); 72 | // } catch (IOException exception) { 73 | // Log.e(TAG, "IOException caused by setPreviewDisplay()", exception); 74 | // } 75 | // Camera.Parameters parameters = camera.getParameters(); 76 | // parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height); 77 | // requestLayout(); 78 | // 79 | // camera.setParameters(parameters); 80 | } 81 | 82 | @Override 83 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { 84 | // We purposely disregard child measurements because act as a 85 | // wrapper to a SurfaceView that centers the camera preview instead 86 | // of stretching it. 
87 | final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec); 88 | final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec); 89 | setMeasuredDimension(width, height); 90 | 91 | if (mSupportedPreviewSizes != null) { 92 | mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height); 93 | mPreviewSize.width = 640; 94 | mPreviewSize.height = 480; 95 | } 96 | } 97 | 98 | @Override 99 | protected void onLayout(boolean changed, int l, int t, int r, int b) { 100 | if (changed && getChildCount() > 0) { 101 | final View child = getChildAt(0); 102 | 103 | final int width = r - l; 104 | final int height = b - t; 105 | 106 | int previewWidth = width; 107 | int previewHeight = height; 108 | if (mPreviewSize != null) { 109 | previewWidth = mPreviewSize.height; 110 | previewHeight = mPreviewSize.width; 111 | } 112 | 113 | // Center the child SurfaceView within the parent. 114 | if (width * previewHeight > height * previewWidth) { 115 | final int scaledChildWidth = previewWidth * height / previewHeight; 116 | child.layout((width - scaledChildWidth) / 2, 0, 117 | (width + scaledChildWidth) / 2, height); 118 | } else { 119 | final int scaledChildHeight = previewHeight * width / previewWidth; 120 | child.layout(0, (height - scaledChildHeight) / 2, 121 | width, (height + scaledChildHeight) / 2); 122 | } 123 | } 124 | } 125 | 126 | public void surfaceCreated(SurfaceHolder holder) { 127 | // The Surface has been created, acquire the camera and tell it where 128 | // to draw. 129 | try { 130 | if (mCamera != null) { 131 | mCamera.setPreviewDisplay(holder); 132 | } 133 | } catch (IOException exception) { 134 | Log.e(TAG, "IOException caused by setPreviewDisplay()", exception); 135 | } 136 | } 137 | 138 | public void surfaceDestroyed(SurfaceHolder holder) { 139 | // Surface will be destroyed when we return, so stop the preview. 
140 | if (mCamera != null) { 141 | mCamera.setPreviewCallback(null); 142 | mCamera.stopPreview(); 143 | } 144 | } 145 | 146 | 147 | private Size getOptimalPreviewSize(List sizes, int w, int h) { 148 | final double ASPECT_TOLERANCE = 0.1; 149 | double targetRatio = (double) w / h; 150 | if (sizes == null) return null; 151 | 152 | Size optimalSize = null; 153 | double minDiff = Double.MAX_VALUE; 154 | 155 | int targetHeight = h; 156 | 157 | // Try to find an size match aspect ratio and size 158 | for (Size size : sizes) { 159 | double ratio = (double) size.width / size.height; 160 | if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue; 161 | if (Math.abs(size.height - targetHeight) < minDiff) { 162 | optimalSize = size; 163 | minDiff = Math.abs(size.height - targetHeight); 164 | } 165 | } 166 | 167 | // Cannot find the one match the aspect ratio, ignore the requirement 168 | if (optimalSize == null) { 169 | minDiff = Double.MAX_VALUE; 170 | for (Size size : sizes) { 171 | if (Math.abs(size.height - targetHeight) < minDiff) { 172 | optimalSize = size; 173 | minDiff = Math.abs(size.height - targetHeight); 174 | } 175 | } 176 | } 177 | return optimalSize; 178 | } 179 | 180 | public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { 181 | // Now that the size is known, set up the camera parameters and begin 182 | // the preview. 
183 | Camera.Parameters parameters = mCamera.getParameters(); 184 | parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height); 185 | List supportedPreviewFormats = parameters.getSupportedPreviewFormats(); 186 | for(int i : supportedPreviewFormats) { 187 | Log.d("chao", "supportedPreviewFormats 0x" + Integer.toHexString(i)); 188 | } 189 | parameters.setPreviewFormat(ImageFormat.YV12); 190 | requestLayout(); 191 | 192 | mCamera.setParameters(parameters); 193 | mCamera.startPreview(); 194 | } 195 | 196 | } 197 | -------------------------------------------------------------------------------- /Application/src/main/java/com/example/android/camera2basic/AutoFitTextureView.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014 The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.example.android.camera2basic; 18 | 19 | import android.content.Context; 20 | import android.util.AttributeSet; 21 | import android.view.TextureView; 22 | 23 | /** 24 | * A {@link TextureView} that can be adjusted to a specified aspect ratio. 
25 | */ 26 | public class AutoFitTextureView extends TextureView { 27 | 28 | private int mRatioWidth = 0; 29 | private int mRatioHeight = 0; 30 | 31 | public AutoFitTextureView(Context context) { 32 | this(context, null); 33 | } 34 | 35 | public AutoFitTextureView(Context context, AttributeSet attrs) { 36 | this(context, attrs, 0); 37 | } 38 | 39 | public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) { 40 | super(context, attrs, defStyle); 41 | } 42 | 43 | /** 44 | * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio 45 | * calculated from the parameters. Note that the actual sizes of parameters don't matter, that 46 | * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result. 47 | * 48 | * @param width Relative horizontal size 49 | * @param height Relative vertical size 50 | */ 51 | public void setAspectRatio(int width, int height) { 52 | if (width < 0 || height < 0) { 53 | throw new IllegalArgumentException("Size cannot be negative."); 54 | } 55 | mRatioWidth = width; 56 | mRatioHeight = height; 57 | requestLayout(); 58 | } 59 | 60 | @Override 61 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { 62 | super.onMeasure(widthMeasureSpec, heightMeasureSpec); 63 | int width = MeasureSpec.getSize(widthMeasureSpec); 64 | int height = MeasureSpec.getSize(heightMeasureSpec); 65 | if (0 == mRatioWidth || 0 == mRatioHeight) { 66 | setMeasuredDimension(width, height); 67 | } else { 68 | if (width < height * mRatioWidth / mRatioHeight) { 69 | setMeasuredDimension(width, width * mRatioHeight / mRatioWidth); 70 | } else { 71 | setMeasuredDimension(height * mRatioWidth / mRatioHeight, height); 72 | } 73 | } 74 | } 75 | 76 | } 77 | -------------------------------------------------------------------------------- /Application/src/main/java/com/example/android/camera2basic/Camera2Activity.java: 
-------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.example.android.camera2basic; 18 | 19 | import android.os.Bundle; 20 | 21 | import androidx.appcompat.app.AppCompatActivity; 22 | 23 | public class Camera2Activity extends AppCompatActivity { 24 | 25 | @Override 26 | protected void onCreate(Bundle savedInstanceState) { 27 | super.onCreate(savedInstanceState); 28 | setContentView(R.layout.activity_camera2); 29 | if (null == savedInstanceState) { 30 | getSupportFragmentManager().beginTransaction() 31 | .replace(R.id.container, Camera2Fragment.newInstance()) 32 | .commit(); 33 | } 34 | } 35 | 36 | } 37 | -------------------------------------------------------------------------------- /Application/src/main/java/com/example/android/camera2basic/Camera2BasicFragment.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.example.android.camera2basic; 18 | 19 | import android.Manifest; 20 | import android.app.Activity; 21 | import android.app.AlertDialog; 22 | import android.app.Dialog; 23 | import android.content.Context; 24 | import android.content.DialogInterface; 25 | import android.content.pm.PackageManager; 26 | import android.content.res.Configuration; 27 | import android.graphics.ImageFormat; 28 | import android.graphics.Matrix; 29 | import android.graphics.Point; 30 | import android.graphics.RectF; 31 | import android.graphics.SurfaceTexture; 32 | import android.hardware.camera2.CameraAccessException; 33 | import android.hardware.camera2.CameraCaptureSession; 34 | import android.hardware.camera2.CameraCharacteristics; 35 | import android.hardware.camera2.CameraDevice; 36 | import android.hardware.camera2.CameraManager; 37 | import android.hardware.camera2.CameraMetadata; 38 | import android.hardware.camera2.CaptureRequest; 39 | import android.hardware.camera2.CaptureResult; 40 | import android.hardware.camera2.TotalCaptureResult; 41 | import android.hardware.camera2.params.StreamConfigurationMap; 42 | import android.media.Image; 43 | import android.media.ImageReader; 44 | import android.os.Bundle; 45 | import android.os.Handler; 46 | import android.os.HandlerThread; 47 | import android.util.Log; 48 | import android.util.Size; 49 | import android.util.SparseIntArray; 50 | import android.view.LayoutInflater; 51 | import android.view.Surface; 52 | import android.view.TextureView; 53 | import 
android.view.View; 54 | import android.view.ViewGroup; 55 | import android.widget.Toast; 56 | 57 | import androidx.annotation.NonNull; 58 | import androidx.core.app.ActivityCompat; 59 | import androidx.core.content.ContextCompat; 60 | import androidx.fragment.app.DialogFragment; 61 | import androidx.fragment.app.Fragment; 62 | 63 | import java.io.File; 64 | import java.io.FileOutputStream; 65 | import java.io.IOException; 66 | import java.nio.ByteBuffer; 67 | import java.util.ArrayList; 68 | import java.util.Arrays; 69 | import java.util.Collections; 70 | import java.util.Comparator; 71 | import java.util.List; 72 | import java.util.concurrent.Semaphore; 73 | import java.util.concurrent.TimeUnit; 74 | 75 | public class Camera2BasicFragment extends Fragment 76 | implements View.OnClickListener, ActivityCompat.OnRequestPermissionsResultCallback { 77 | 78 | /** 79 | * Conversion from screen rotation to JPEG orientation. 80 | */ 81 | private static final SparseIntArray ORIENTATIONS = new SparseIntArray(); 82 | private static final int REQUEST_CAMERA_PERMISSION = 1; 83 | private static final String FRAGMENT_DIALOG = "dialog"; 84 | 85 | static { 86 | ORIENTATIONS.append(Surface.ROTATION_0, 90); 87 | ORIENTATIONS.append(Surface.ROTATION_90, 0); 88 | ORIENTATIONS.append(Surface.ROTATION_180, 270); 89 | ORIENTATIONS.append(Surface.ROTATION_270, 180); 90 | } 91 | 92 | /** 93 | * Tag for the {@link Log}. 94 | */ 95 | private static final String TAG = "Camera2BasicFragment"; 96 | 97 | /** 98 | * Camera state: Showing camera preview. 99 | */ 100 | private static final int STATE_PREVIEW = 0; 101 | 102 | /** 103 | * Camera state: Waiting for the focus to be locked. 104 | */ 105 | private static final int STATE_WAITING_LOCK = 1; 106 | 107 | /** 108 | * Camera state: Waiting for the exposure to be precapture state. 
109 | */ 110 | private static final int STATE_WAITING_PRECAPTURE = 2; 111 | 112 | /** 113 | * Camera state: Waiting for the exposure state to be something other than precapture. 114 | */ 115 | private static final int STATE_WAITING_NON_PRECAPTURE = 3; 116 | 117 | /** 118 | * Camera state: Picture was taken. 119 | */ 120 | private static final int STATE_PICTURE_TAKEN = 4; 121 | 122 | /** 123 | * Max preview width that is guaranteed by Camera2 API 124 | */ 125 | private static final int MAX_PREVIEW_WIDTH = 1920; 126 | 127 | /** 128 | * Max preview height that is guaranteed by Camera2 API 129 | */ 130 | private static final int MAX_PREVIEW_HEIGHT = 1080; 131 | 132 | /** 133 | * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a 134 | * {@link TextureView}. 135 | */ 136 | private final TextureView.SurfaceTextureListener mSurfaceTextureListener 137 | = new TextureView.SurfaceTextureListener() { 138 | 139 | @Override 140 | public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) { 141 | openCamera(width, height); 142 | } 143 | 144 | @Override 145 | public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) { 146 | configureTransform(width, height); 147 | } 148 | 149 | @Override 150 | public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) { 151 | return true; 152 | } 153 | 154 | @Override 155 | public void onSurfaceTextureUpdated(SurfaceTexture texture) { 156 | } 157 | 158 | }; 159 | 160 | /** 161 | * ID of the current {@link CameraDevice}. 162 | */ 163 | private String mCameraId; 164 | 165 | /** 166 | * An {@link AutoFitTextureView} for camera preview. 167 | */ 168 | private AutoFitTextureView mTextureView; 169 | 170 | /** 171 | * A {@link CameraCaptureSession } for camera preview. 172 | */ 173 | private CameraCaptureSession mCaptureSession; 174 | 175 | /** 176 | * A reference to the opened {@link CameraDevice}. 
177 | */ 178 | private CameraDevice mCameraDevice; 179 | 180 | /** 181 | * The {@link android.util.Size} of camera preview. 182 | */ 183 | private Size mPreviewSize; 184 | 185 | /** 186 | * {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state. 187 | */ 188 | private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() { 189 | 190 | @Override 191 | public void onOpened(@NonNull CameraDevice cameraDevice) { 192 | // This method is called when the camera is opened. We start camera preview here. 193 | mCameraOpenCloseLock.release(); 194 | mCameraDevice = cameraDevice; 195 | createCameraPreviewSession(); 196 | } 197 | 198 | @Override 199 | public void onDisconnected(@NonNull CameraDevice cameraDevice) { 200 | mCameraOpenCloseLock.release(); 201 | cameraDevice.close(); 202 | mCameraDevice = null; 203 | } 204 | 205 | @Override 206 | public void onError(@NonNull CameraDevice cameraDevice, int error) { 207 | mCameraOpenCloseLock.release(); 208 | cameraDevice.close(); 209 | mCameraDevice = null; 210 | Activity activity = getActivity(); 211 | if (null != activity) { 212 | activity.finish(); 213 | } 214 | } 215 | 216 | }; 217 | 218 | /** 219 | * An additional thread for running tasks that shouldn't block the UI. 220 | */ 221 | private HandlerThread mBackgroundThread; 222 | 223 | /** 224 | * A {@link Handler} for running tasks in the background. 225 | */ 226 | private Handler mBackgroundHandler; 227 | 228 | /** 229 | * An {@link ImageReader} that handles still image capture. 230 | */ 231 | private ImageReader mImageReader; 232 | 233 | /** 234 | * This is the output file for our picture. 235 | */ 236 | private File mFile; 237 | 238 | /** 239 | * This a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a 240 | * still image is ready to be saved. 
241 | */ 242 | private final ImageReader.OnImageAvailableListener mOnImageAvailableListener 243 | = new ImageReader.OnImageAvailableListener() { 244 | 245 | @Override 246 | public void onImageAvailable(ImageReader reader) { 247 | handleImage(reader); 248 | } 249 | 250 | }; 251 | 252 | protected void handleImage(ImageReader reader) { 253 | mBackgroundHandler.post(new ImageSaver(reader.acquireNextImage(), mFile)); 254 | } 255 | 256 | /** 257 | * {@link CaptureRequest.Builder} for the camera preview 258 | */ 259 | private CaptureRequest.Builder mPreviewRequestBuilder; 260 | 261 | /** 262 | * {@link CaptureRequest} generated by {@link #mPreviewRequestBuilder} 263 | */ 264 | private CaptureRequest mPreviewRequest; 265 | 266 | /** 267 | * The current state of camera state for taking pictures. 268 | * 269 | * @see #mCaptureCallback 270 | */ 271 | private int mState = STATE_PREVIEW; 272 | 273 | /** 274 | * A {@link Semaphore} to prevent the app from exiting before closing the camera. 275 | */ 276 | private Semaphore mCameraOpenCloseLock = new Semaphore(1); 277 | 278 | /** 279 | * Whether the current camera device supports Flash or not. 280 | */ 281 | private boolean mFlashSupported; 282 | 283 | /** 284 | * Orientation of the camera sensor 285 | */ 286 | private int mSensorOrientation; 287 | 288 | /** 289 | * A {@link CameraCaptureSession.CaptureCallback} that handles events related to JPEG capture. 290 | */ 291 | private CameraCaptureSession.CaptureCallback mCaptureCallback 292 | = new CameraCaptureSession.CaptureCallback() { 293 | 294 | private void process(CaptureResult result) { 295 | switch (mState) { 296 | case STATE_PREVIEW: { 297 | // We have nothing to do when the camera preview is working normally. 
298 | break; 299 | } 300 | case STATE_WAITING_LOCK: { 301 | Integer afState = result.get(CaptureResult.CONTROL_AF_STATE); 302 | if (afState == null) { 303 | captureStillPicture(); 304 | } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState || 305 | CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) { 306 | // CONTROL_AE_STATE can be null on some devices 307 | Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); 308 | if (aeState == null || 309 | aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) { 310 | mState = STATE_PICTURE_TAKEN; 311 | captureStillPicture(); 312 | } else { 313 | runPrecaptureSequence(); 314 | } 315 | } 316 | break; 317 | } 318 | case STATE_WAITING_PRECAPTURE: { 319 | // CONTROL_AE_STATE can be null on some devices 320 | Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); 321 | if (aeState == null || 322 | aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE || 323 | aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) { 324 | mState = STATE_WAITING_NON_PRECAPTURE; 325 | } 326 | break; 327 | } 328 | case STATE_WAITING_NON_PRECAPTURE: { 329 | // CONTROL_AE_STATE can be null on some devices 330 | Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); 331 | if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) { 332 | mState = STATE_PICTURE_TAKEN; 333 | captureStillPicture(); 334 | } 335 | break; 336 | } 337 | } 338 | } 339 | 340 | @Override 341 | public void onCaptureProgressed(@NonNull CameraCaptureSession session, 342 | @NonNull CaptureRequest request, 343 | @NonNull CaptureResult partialResult) { 344 | process(partialResult); 345 | } 346 | 347 | @Override 348 | public void onCaptureCompleted(@NonNull CameraCaptureSession session, 349 | @NonNull CaptureRequest request, 350 | @NonNull TotalCaptureResult result) { 351 | process(result); 352 | } 353 | 354 | }; 355 | 356 | /** 357 | * Shows a {@link Toast} on the UI thread. 
358 | * 359 | * @param text The message to show 360 | */ 361 | private void showToast(final String text) { 362 | final Activity activity = getActivity(); 363 | if (activity != null) { 364 | activity.runOnUiThread(new Runnable() { 365 | @Override 366 | public void run() { 367 | Toast.makeText(activity, text, Toast.LENGTH_SHORT).show(); 368 | } 369 | }); 370 | } 371 | } 372 | 373 | /** 374 | * Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that 375 | * is at least as large as the respective texture view size, and that is at most as large as the 376 | * respective max size, and whose aspect ratio matches with the specified value. If such size 377 | * doesn't exist, choose the largest one that is at most as large as the respective max size, 378 | * and whose aspect ratio matches with the specified value. 379 | * 380 | * @param choices The list of sizes that the camera supports for the intended output 381 | * class 382 | * @param textureViewWidth The width of the texture view relative to sensor coordinate 383 | * @param textureViewHeight The height of the texture view relative to sensor coordinate 384 | * @param maxWidth The maximum width that can be chosen 385 | * @param maxHeight The maximum height that can be chosen 386 | * @param aspectRatio The aspect ratio 387 | * @return The optimal {@code Size}, or an arbitrary one if none were big enough 388 | */ 389 | private static Size chooseOptimalSize(Size[] choices, int textureViewWidth, 390 | int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) { 391 | 392 | // Collect the supported resolutions that are at least as big as the preview Surface 393 | List bigEnough = new ArrayList<>(); 394 | // Collect the supported resolutions that are smaller than the preview Surface 395 | List notBigEnough = new ArrayList<>(); 396 | int w = aspectRatio.getWidth(); 397 | int h = aspectRatio.getHeight(); 398 | for (Size option : choices) { 399 | if (option.getWidth() <= maxWidth && 
option.getHeight() <= maxHeight && 400 | option.getHeight() == option.getWidth() * h / w) { 401 | if (option.getWidth() >= textureViewWidth && 402 | option.getHeight() >= textureViewHeight) { 403 | bigEnough.add(option); 404 | } else { 405 | notBigEnough.add(option); 406 | } 407 | } 408 | } 409 | 410 | // Pick the smallest of those big enough. If there is no one big enough, pick the 411 | // largest of those not big enough. 412 | if (bigEnough.size() > 0) { 413 | return Collections.min(bigEnough, new CompareSizesByArea()); 414 | } else if (notBigEnough.size() > 0) { 415 | return Collections.max(notBigEnough, new CompareSizesByArea()); 416 | } else { 417 | Log.e(TAG, "Couldn't find any suitable preview size"); 418 | return choices[0]; 419 | } 420 | } 421 | 422 | public static Camera2BasicFragment newInstance() { 423 | return new Camera2BasicFragment(); 424 | } 425 | 426 | @Override 427 | public View onCreateView(LayoutInflater inflater, ViewGroup container, 428 | Bundle savedInstanceState) { 429 | return inflater.inflate(R.layout.fragment_camera2_basic, container, false); 430 | } 431 | 432 | @Override 433 | public void onViewCreated(final View view, Bundle savedInstanceState) { 434 | view.findViewById(R.id.picture).setOnClickListener(this); 435 | view.findViewById(R.id.info).setOnClickListener(this); 436 | mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture); 437 | } 438 | 439 | @Override 440 | public void onActivityCreated(Bundle savedInstanceState) { 441 | super.onActivityCreated(savedInstanceState); 442 | mFile = new File(getActivity().getExternalFilesDir(null), "pic.jpg"); 443 | } 444 | 445 | @Override 446 | public void onResume() { 447 | super.onResume(); 448 | startBackgroundThread(); 449 | 450 | // When the screen is turned off and turned back on, the SurfaceTexture is already 451 | // available, and "onSurfaceTextureAvailable" will not be called. 
In that case, we can open 452 | // a camera and start preview from here (otherwise, we wait until the surface is ready in 453 | // the SurfaceTextureListener). 454 | if (mTextureView.isAvailable()) { 455 | openCamera(mTextureView.getWidth(), mTextureView.getHeight()); 456 | } else { 457 | mTextureView.setSurfaceTextureListener(mSurfaceTextureListener); 458 | } 459 | } 460 | 461 | @Override 462 | public void onPause() { 463 | closeCamera(); 464 | stopBackgroundThread(); 465 | super.onPause(); 466 | } 467 | 468 | private void requestCameraPermission() { 469 | if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) { 470 | new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG); 471 | } else { 472 | requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION); 473 | } 474 | } 475 | 476 | @Override 477 | public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, 478 | @NonNull int[] grantResults) { 479 | if (requestCode == REQUEST_CAMERA_PERMISSION) { 480 | if (grantResults.length != 1 || grantResults[0] != PackageManager.PERMISSION_GRANTED) { 481 | ErrorDialog.newInstance(getString(R.string.request_permission)) 482 | .show(getChildFragmentManager(), FRAGMENT_DIALOG); 483 | } 484 | } else { 485 | super.onRequestPermissionsResult(requestCode, permissions, grantResults); 486 | } 487 | } 488 | 489 | /** 490 | * Sets up member variables related to camera. 
491 | * 492 | * @param width The width of available size for camera preview 493 | * @param height The height of available size for camera preview 494 | */ 495 | @SuppressWarnings("SuspiciousNameCombination") 496 | private void setUpCameraOutputs(int width, int height) { 497 | Activity activity = getActivity(); 498 | CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); 499 | try { 500 | for (String cameraId : manager.getCameraIdList()) { 501 | CameraCharacteristics characteristics 502 | = manager.getCameraCharacteristics(cameraId); 503 | 504 | // We don't use a front facing camera in this sample. 505 | Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING); 506 | if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) { 507 | continue; 508 | } 509 | 510 | StreamConfigurationMap map = characteristics.get( 511 | CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); 512 | if (map == null) { 513 | continue; 514 | } 515 | 516 | int[] outputFormats = map.getOutputFormats(); 517 | for(int i : outputFormats) { 518 | Log.d("chao", "supportOutputFormats 0x" + Integer.toHexString(i)); 519 | } 520 | 521 | // For still image captures, we use the largest available size. 522 | Size largest = Collections.max( 523 | Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), 524 | new CompareSizesByArea()); 525 | mImageReader = ImageReader.newInstance(640, 480, 526 | ImageFormat.YUV_420_888, /*maxImages*/2); 527 | mImageReader.setOnImageAvailableListener( 528 | mOnImageAvailableListener, mBackgroundHandler); 529 | 530 | // Find out if we need to swap dimension to get the preview size relative to sensor 531 | // coordinate. 
532 | int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation(); 533 | //noinspection ConstantConditions 534 | mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); 535 | boolean swappedDimensions = false; 536 | switch (displayRotation) { 537 | case Surface.ROTATION_0: 538 | case Surface.ROTATION_180: 539 | if (mSensorOrientation == 90 || mSensorOrientation == 270) { 540 | swappedDimensions = true; 541 | } 542 | break; 543 | case Surface.ROTATION_90: 544 | case Surface.ROTATION_270: 545 | if (mSensorOrientation == 0 || mSensorOrientation == 180) { 546 | swappedDimensions = true; 547 | } 548 | break; 549 | default: 550 | Log.e(TAG, "Display rotation is invalid: " + displayRotation); 551 | } 552 | 553 | Point displaySize = new Point(); 554 | activity.getWindowManager().getDefaultDisplay().getSize(displaySize); 555 | int rotatedPreviewWidth = width; 556 | int rotatedPreviewHeight = height; 557 | int maxPreviewWidth = displaySize.x; 558 | int maxPreviewHeight = displaySize.y; 559 | 560 | if (swappedDimensions) { 561 | rotatedPreviewWidth = height; 562 | rotatedPreviewHeight = width; 563 | maxPreviewWidth = displaySize.y; 564 | maxPreviewHeight = displaySize.x; 565 | } 566 | 567 | if (maxPreviewWidth > MAX_PREVIEW_WIDTH) { 568 | maxPreviewWidth = MAX_PREVIEW_WIDTH; 569 | } 570 | 571 | if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) { 572 | maxPreviewHeight = MAX_PREVIEW_HEIGHT; 573 | } 574 | 575 | // Danger, W.R.! Attempting to use too large a preview size could exceed the camera 576 | // bus' bandwidth limitation, resulting in gorgeous previews but the storage of 577 | // garbage capture data. 578 | mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), 579 | rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, 580 | maxPreviewHeight, largest); 581 | 582 | // We fit the aspect ratio of TextureView to the size of preview we picked. 
583 | int orientation = getResources().getConfiguration().orientation; 584 | if (orientation == Configuration.ORIENTATION_LANDSCAPE) { 585 | mTextureView.setAspectRatio( 586 | mPreviewSize.getWidth(), mPreviewSize.getHeight()); 587 | } else { 588 | mTextureView.setAspectRatio( 589 | mPreviewSize.getHeight(), mPreviewSize.getWidth()); 590 | } 591 | 592 | // Check if the flash is supported. 593 | Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); 594 | mFlashSupported = available == null ? false : available; 595 | 596 | mCameraId = cameraId; 597 | return; 598 | } 599 | } catch (CameraAccessException e) { 600 | e.printStackTrace(); 601 | } catch (NullPointerException e) { 602 | // Currently an NPE is thrown when the Camera2API is used but not supported on the 603 | // device this code runs. 604 | ErrorDialog.newInstance(getString(R.string.camera_error)) 605 | .show(getChildFragmentManager(), FRAGMENT_DIALOG); 606 | } 607 | } 608 | 609 | /** 610 | * Opens the camera specified by {@link Camera2BasicFragment#mCameraId}. 
611 | */ 612 | private void openCamera(int width, int height) { 613 | if (ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA) 614 | != PackageManager.PERMISSION_GRANTED) { 615 | requestCameraPermission(); 616 | return; 617 | } 618 | setUpCameraOutputs(width, height); 619 | configureTransform(width, height); 620 | Activity activity = getActivity(); 621 | CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); 622 | try { 623 | if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) { 624 | throw new RuntimeException("Time out waiting to lock camera opening."); 625 | } 626 | manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler); 627 | } catch (CameraAccessException e) { 628 | e.printStackTrace(); 629 | } catch (InterruptedException e) { 630 | throw new RuntimeException("Interrupted while trying to lock camera opening.", e); 631 | } 632 | } 633 | 634 | /** 635 | * Closes the current {@link CameraDevice}. 636 | */ 637 | private void closeCamera() { 638 | try { 639 | mCameraOpenCloseLock.acquire(); 640 | if (null != mCaptureSession) { 641 | mCaptureSession.close(); 642 | mCaptureSession = null; 643 | } 644 | if (null != mCameraDevice) { 645 | mCameraDevice.close(); 646 | mCameraDevice = null; 647 | } 648 | if (null != mImageReader) { 649 | mImageReader.close(); 650 | mImageReader = null; 651 | } 652 | } catch (InterruptedException e) { 653 | throw new RuntimeException("Interrupted while trying to lock camera closing.", e); 654 | } finally { 655 | mCameraOpenCloseLock.release(); 656 | } 657 | } 658 | 659 | /** 660 | * Starts a background thread and its {@link Handler}. 661 | */ 662 | private void startBackgroundThread() { 663 | mBackgroundThread = new HandlerThread("CameraBackground"); 664 | mBackgroundThread.start(); 665 | mBackgroundHandler = new Handler(mBackgroundThread.getLooper()); 666 | } 667 | 668 | /** 669 | * Stops the background thread and its {@link Handler}. 
670 | */ 671 | private void stopBackgroundThread() { 672 | mBackgroundThread.quitSafely(); 673 | try { 674 | mBackgroundThread.join(); 675 | mBackgroundThread = null; 676 | mBackgroundHandler = null; 677 | } catch (InterruptedException e) { 678 | e.printStackTrace(); 679 | } 680 | } 681 | 682 | /** 683 | * Creates a new {@link CameraCaptureSession} for camera preview. 684 | */ 685 | private void createCameraPreviewSession() { 686 | try { 687 | SurfaceTexture texture = mTextureView.getSurfaceTexture(); 688 | assert texture != null; 689 | 690 | // We configure the size of default buffer to be the size of camera preview we want. 691 | texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight()); 692 | 693 | // This is the output Surface we need to start preview. 694 | Surface surface = new Surface(texture); 695 | 696 | // We set up a CaptureRequest.Builder with the output Surface. 697 | mPreviewRequestBuilder 698 | = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 699 | mPreviewRequestBuilder.addTarget(surface); 700 | mPreviewRequestBuilder.addTarget(mImageReader.getSurface()); 701 | 702 | // Here, we create a CameraCaptureSession for camera preview. 703 | mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()), 704 | new CameraCaptureSession.StateCallback() { 705 | 706 | @Override 707 | public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) { 708 | // The camera is already closed 709 | if (null == mCameraDevice) { 710 | return; 711 | } 712 | 713 | // When the session is ready, we start displaying the preview. 714 | mCaptureSession = cameraCaptureSession; 715 | try { 716 | // Auto focus should be continuous for camera preview. 717 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, 718 | CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); 719 | // Flash is automatically enabled when necessary. 
720 | setAutoFlash(mPreviewRequestBuilder); 721 | 722 | // Finally, we start displaying the camera preview. 723 | mPreviewRequest = mPreviewRequestBuilder.build(); 724 | mCaptureSession.setRepeatingRequest(mPreviewRequest, 725 | mCaptureCallback, mBackgroundHandler); 726 | } catch (CameraAccessException e) { 727 | e.printStackTrace(); 728 | } 729 | } 730 | 731 | @Override 732 | public void onConfigureFailed( 733 | @NonNull CameraCaptureSession cameraCaptureSession) { 734 | showToast("Failed"); 735 | } 736 | }, null 737 | ); 738 | } catch (CameraAccessException e) { 739 | e.printStackTrace(); 740 | } 741 | } 742 | 743 | /** 744 | * Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`. 745 | * This method should be called after the camera preview size is determined in 746 | * setUpCameraOutputs and also the size of `mTextureView` is fixed. 747 | * 748 | * @param viewWidth The width of `mTextureView` 749 | * @param viewHeight The height of `mTextureView` 750 | */ 751 | private void configureTransform(int viewWidth, int viewHeight) { 752 | Activity activity = getActivity(); 753 | if (null == mTextureView || null == mPreviewSize || null == activity) { 754 | return; 755 | } 756 | int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); 757 | Matrix matrix = new Matrix(); 758 | RectF viewRect = new RectF(0, 0, viewWidth, viewHeight); 759 | RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth()); 760 | float centerX = viewRect.centerX(); 761 | float centerY = viewRect.centerY(); 762 | if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) { 763 | bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY()); 764 | matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL); 765 | float scale = Math.max( 766 | (float) viewHeight / mPreviewSize.getHeight(), 767 | (float) viewWidth / mPreviewSize.getWidth()); 768 | matrix.postScale(scale, scale, 
centerX, centerY); 769 | matrix.postRotate(90 * (rotation - 2), centerX, centerY); 770 | } else if (Surface.ROTATION_180 == rotation) { 771 | matrix.postRotate(180, centerX, centerY); 772 | } 773 | mTextureView.setTransform(matrix); 774 | } 775 | 776 | /** 777 | * Initiate a still image capture. 778 | */ 779 | private void takePicture() { 780 | lockFocus(); 781 | } 782 | 783 | /** 784 | * Lock the focus as the first step for a still image capture. 785 | */ 786 | private void lockFocus() { 787 | try { 788 | // This is how to tell the camera to lock focus. 789 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, 790 | CameraMetadata.CONTROL_AF_TRIGGER_START); 791 | // Tell #mCaptureCallback to wait for the lock. 792 | mState = STATE_WAITING_LOCK; 793 | mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, 794 | mBackgroundHandler); 795 | } catch (CameraAccessException e) { 796 | e.printStackTrace(); 797 | } 798 | } 799 | 800 | /** 801 | * Run the precapture sequence for capturing a still image. This method should be called when 802 | * we get a response in {@link #mCaptureCallback} from {@link #lockFocus()}. 803 | */ 804 | private void runPrecaptureSequence() { 805 | try { 806 | // This is how to tell the camera to trigger. 807 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, 808 | CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START); 809 | // Tell #mCaptureCallback to wait for the precapture sequence to be set. 810 | mState = STATE_WAITING_PRECAPTURE; 811 | mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, 812 | mBackgroundHandler); 813 | } catch (CameraAccessException e) { 814 | e.printStackTrace(); 815 | } 816 | } 817 | 818 | /** 819 | * Capture a still picture. This method should be called when we get a response in 820 | * {@link #mCaptureCallback} from both {@link #lockFocus()}. 
821 | */ 822 | private void captureStillPicture() { 823 | try { 824 | final Activity activity = getActivity(); 825 | if (null == activity || null == mCameraDevice) { 826 | return; 827 | } 828 | // This is the CaptureRequest.Builder that we use to take a picture. 829 | final CaptureRequest.Builder captureBuilder = 830 | mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); 831 | captureBuilder.addTarget(mImageReader.getSurface()); 832 | 833 | // Use the same AE and AF modes as the preview. 834 | captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, 835 | CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); 836 | setAutoFlash(captureBuilder); 837 | 838 | // Orientation 839 | int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); 840 | captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation)); 841 | 842 | CameraCaptureSession.CaptureCallback CaptureCallback 843 | = new CameraCaptureSession.CaptureCallback() { 844 | 845 | @Override 846 | public void onCaptureCompleted(@NonNull CameraCaptureSession session, 847 | @NonNull CaptureRequest request, 848 | @NonNull TotalCaptureResult result) { 849 | showToast("Saved: " + mFile); 850 | Log.d(TAG, mFile.toString()); 851 | unlockFocus(); 852 | } 853 | }; 854 | 855 | mCaptureSession.stopRepeating(); 856 | mCaptureSession.abortCaptures(); 857 | mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null); 858 | } catch (CameraAccessException e) { 859 | e.printStackTrace(); 860 | } 861 | } 862 | 863 | /** 864 | * Retrieves the JPEG orientation from the specified screen rotation. 865 | * 866 | * @param rotation The screen rotation. 867 | * @return The JPEG orientation (one of 0, 90, 270, and 360) 868 | */ 869 | private int getOrientation(int rotation) { 870 | // Sensor orientation is 90 for most devices, or 270 for some devices (eg. Nexus 5X) 871 | // We have to take that into account and rotate JPEG properly. 
872 | // For devices with orientation of 90, we simply return our mapping from ORIENTATIONS. 873 | // For devices with orientation of 270, we need to rotate the JPEG 180 degrees. 874 | return (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360; 875 | } 876 | 877 | /** 878 | * Unlock the focus. This method should be called when still image capture sequence is 879 | * finished. 880 | */ 881 | private void unlockFocus() { 882 | try { 883 | // Reset the auto-focus trigger 884 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, 885 | CameraMetadata.CONTROL_AF_TRIGGER_CANCEL); 886 | setAutoFlash(mPreviewRequestBuilder); 887 | mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, 888 | mBackgroundHandler); 889 | // After this, the camera will go back to the normal state of preview. 890 | mState = STATE_PREVIEW; 891 | mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, 892 | mBackgroundHandler); 893 | } catch (CameraAccessException e) { 894 | e.printStackTrace(); 895 | } 896 | } 897 | 898 | @Override 899 | public void onClick(View view) { 900 | switch (view.getId()) { 901 | case R.id.picture: { 902 | takePicture(); 903 | break; 904 | } 905 | case R.id.info: { 906 | Activity activity = getActivity(); 907 | if (null != activity) { 908 | new AlertDialog.Builder(activity) 909 | .setMessage(R.string.intro_message) 910 | .setPositiveButton(android.R.string.ok, null) 911 | .show(); 912 | } 913 | break; 914 | } 915 | } 916 | } 917 | 918 | private void setAutoFlash(CaptureRequest.Builder requestBuilder) { 919 | if (mFlashSupported) { 920 | requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, 921 | CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); 922 | } 923 | } 924 | 925 | /** 926 | * Saves a JPEG {@link Image} into the specified {@link File}. 
927 | */ 928 | private static class ImageSaver implements Runnable { 929 | 930 | /** 931 | * The JPEG image 932 | */ 933 | private final Image mImage; 934 | /** 935 | * The file we save the image into. 936 | */ 937 | private final File mFile; 938 | 939 | ImageSaver(Image image, File file) { 940 | mImage = image; 941 | mFile = file; 942 | } 943 | 944 | @Override 945 | public void run() { 946 | ByteBuffer buffer = mImage.getPlanes()[0].getBuffer(); 947 | byte[] bytes = new byte[buffer.remaining()]; 948 | buffer.get(bytes); 949 | FileOutputStream output = null; 950 | try { 951 | output = new FileOutputStream(mFile); 952 | output.write(bytes); 953 | } catch (IOException e) { 954 | e.printStackTrace(); 955 | } finally { 956 | mImage.close(); 957 | if (null != output) { 958 | try { 959 | output.close(); 960 | } catch (IOException e) { 961 | e.printStackTrace(); 962 | } 963 | } 964 | } 965 | } 966 | 967 | } 968 | 969 | /** 970 | * Compares two {@code Size}s based on their areas. 971 | */ 972 | static class CompareSizesByArea implements Comparator { 973 | 974 | @Override 975 | public int compare(Size lhs, Size rhs) { 976 | // We cast here to ensure the multiplications won't overflow 977 | return Long.signum((long) lhs.getWidth() * lhs.getHeight() - 978 | (long) rhs.getWidth() * rhs.getHeight()); 979 | } 980 | 981 | } 982 | 983 | /** 984 | * Shows an error message dialog. 
985 | */ 986 | public static class ErrorDialog extends DialogFragment { 987 | 988 | private static final String ARG_MESSAGE = "message"; 989 | 990 | public static ErrorDialog newInstance(String message) { 991 | ErrorDialog dialog = new ErrorDialog(); 992 | Bundle args = new Bundle(); 993 | args.putString(ARG_MESSAGE, message); 994 | dialog.setArguments(args); 995 | return dialog; 996 | } 997 | 998 | @NonNull 999 | @Override 1000 | public Dialog onCreateDialog(Bundle savedInstanceState) { 1001 | final Activity activity = getActivity(); 1002 | return new AlertDialog.Builder(activity) 1003 | .setMessage(getArguments().getString(ARG_MESSAGE)) 1004 | .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { 1005 | @Override 1006 | public void onClick(DialogInterface dialogInterface, int i) { 1007 | activity.finish(); 1008 | } 1009 | }) 1010 | .create(); 1011 | } 1012 | 1013 | } 1014 | 1015 | /** 1016 | * Shows OK/Cancel confirmation dialog about camera permission. 1017 | */ 1018 | public static class ConfirmationDialog extends DialogFragment { 1019 | 1020 | @NonNull 1021 | @Override 1022 | public Dialog onCreateDialog(Bundle savedInstanceState) { 1023 | final Fragment parent = getParentFragment(); 1024 | return new AlertDialog.Builder(getActivity()) 1025 | .setMessage(R.string.request_permission) 1026 | .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { 1027 | @Override 1028 | public void onClick(DialogInterface dialog, int which) { 1029 | parent.requestPermissions(new String[]{Manifest.permission.CAMERA}, 1030 | REQUEST_CAMERA_PERMISSION); 1031 | } 1032 | }) 1033 | .setNegativeButton(android.R.string.cancel, 1034 | new DialogInterface.OnClickListener() { 1035 | @Override 1036 | public void onClick(DialogInterface dialog, int which) { 1037 | Activity activity = parent.getActivity(); 1038 | if (activity != null) { 1039 | activity.finish(); 1040 | } 1041 | } 1042 | }) 1043 | .create(); 1044 | } 1045 | } 1046 | 1047 | } 
1048 | -------------------------------------------------------------------------------- /Application/src/main/java/com/example/android/camera2basic/Camera2Fragment.java: -------------------------------------------------------------------------------- 1 | package com.example.android.camera2basic; 2 | 3 | import android.graphics.Bitmap; 4 | import android.media.ImageReader; 5 | import android.os.Bundle; 6 | import android.view.View; 7 | import android.widget.ImageView; 8 | 9 | import cc.rome753.yuvtools.YUVDetectView; 10 | import cc.rome753.yuvtools.YUVTools; 11 | 12 | public class Camera2Fragment extends Camera2BasicFragment { 13 | 14 | private YUVDetectView ydv; 15 | 16 | public static Camera2Fragment newInstance() { 17 | Bundle args = new Bundle(); 18 | Camera2Fragment fragment = new Camera2Fragment(); 19 | fragment.setArguments(args); 20 | return fragment; 21 | } 22 | 23 | @Override 24 | public void onViewCreated(final View view, Bundle savedInstanceState) { 25 | super.onViewCreated(view, savedInstanceState); 26 | ydv = view.findViewById(R.id.ydv); 27 | } 28 | 29 | @Override 30 | protected void handleImage(ImageReader reader) { 31 | ydv.input(reader); 32 | } 33 | 34 | 35 | } 36 | -------------------------------------------------------------------------------- /Application/src/main/java/com/example/android/camerax/CameraXActivity.java: -------------------------------------------------------------------------------- 1 | package com.example.android.camerax; 2 | 3 | import android.graphics.Matrix; 4 | import android.media.Image; 5 | import android.os.Bundle; 6 | import android.os.Handler; 7 | import android.os.HandlerThread; 8 | import android.util.Log; 9 | import android.util.Rational; 10 | import android.util.Size; 11 | import android.view.TextureView; 12 | import android.view.View; 13 | import android.view.ViewGroup; 14 | import android.widget.Toast; 15 | 16 | import androidx.annotation.NonNull; 17 | import androidx.annotation.Nullable; 18 | import 
package com.example.android.camerax;

import android.graphics.Matrix;
import android.media.Image;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Rational;
import android.util.Size;
import android.view.Display;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.camera.core.CameraX;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageAnalysisConfig;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureConfig;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.core.PreviewConfig;

import com.example.android.camera2basic.R;

import java.io.File;

import cc.rome753.yuvtools.MainActivity;
import cc.rome753.yuvtools.YUVDetectView;

/**
 * CameraX demo: binds three use cases (preview, still capture on long-press, per-frame
 * analysis fed to a {@link YUVDetectView}) to the activity lifecycle.
 */
public class CameraXActivity extends AppCompatActivity {

    // TextureView that renders the camera preview output.
    private TextureView viewFinder;
    // Detect view that consumes analyzed frames.
    private YUVDetectView imageView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camerax);

        imageView = findViewById(R.id.image_view);
        viewFinder = findViewById(R.id.view_finder);
        viewFinder.addOnLayoutChangeListener(new View.OnLayoutChangeListener() {
            @Override
            public void onLayoutChange(View view, int i, int i1, int i2, int i3, int i4,
                                       int i5, int i6, int i7) {
                updateTransform();
            }
        });

        // Start the camera only once the view has been laid out.
        viewFinder.post(new Runnable() {
            @Override
            public void run() {
                startCamera();
            }
        });
    }

    private void startCamera() {
        // 1. Preview use case.
        PreviewConfig previewConfig = new PreviewConfig.Builder()
                .setTargetAspectRatio(new Rational(1, 1))
                .setTargetResolution(new Size(640, 640))
                .build();

        Preview preview = new Preview(previewConfig);
        preview.setOnPreviewOutputUpdateListener(new Preview.OnPreviewOutputUpdateListener() {
            @Override
            public void onUpdated(Preview.PreviewOutput output) {
                // Re-attach the view so setSurfaceTexture takes effect (CameraX alpha quirk).
                ViewGroup parent = (ViewGroup) viewFinder.getParent();
                parent.removeView(viewFinder);
                parent.addView(viewFinder, 0);

                viewFinder.setSurfaceTexture(output.getSurfaceTexture());
                updateTransform();
            }
        });

        // 2. Still capture use case, triggered by a long press on the view finder.
        ImageCaptureConfig imageCaptureConfig = new ImageCaptureConfig.Builder()
                .setTargetAspectRatio(new Rational(1, 1))
                .setCaptureMode(ImageCapture.CaptureMode.MIN_LATENCY)
                .build();
        final ImageCapture imageCapture = new ImageCapture(imageCaptureConfig);
        viewFinder.setOnLongClickListener(new View.OnLongClickListener() {
            @Override
            public boolean onLongClick(View view) {
                File photo = new File(getExternalCacheDir() + "/"
                        + System.currentTimeMillis() + ".jpg");
                imageCapture.takePicture(photo, new ImageCapture.OnImageSavedListener() {
                    @Override
                    public void onImageSaved(@NonNull File file) {
                        showToast("saved " + file.getAbsolutePath());
                    }

                    @Override
                    public void onError(@NonNull ImageCapture.UseCaseError useCaseError,
                                        @NonNull String message, @Nullable Throwable cause) {
                        showToast("error " + message);
                        // cause is @Nullable: guard so the error path cannot itself NPE.
                        if (cause != null) {
                            cause.printStackTrace();
                        }
                    }
                });
                return true;
            }
        });

        // 3. Frame analysis use case, running on a dedicated handler thread.
        HandlerThread handlerThread = new HandlerThread("Analyze-thread");
        handlerThread.start();

        ImageAnalysisConfig imageAnalysisConfig = new ImageAnalysisConfig.Builder()
                .setCallbackHandler(new Handler(handlerThread.getLooper()))
                .setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE)
                .setTargetAspectRatio(new Rational(1, 1))
                // .setTargetResolution(new Size(600, 600))
                .build();

        ImageAnalysis imageAnalysis = new ImageAnalysis(imageAnalysisConfig);
        imageAnalysis.setAnalyzer(new MyAnalyzer());

        CameraX.bindToLifecycle(this, preview, imageCapture, imageAnalysis);
    }

    public void showToast(String msg) {
        Toast.makeText(CameraXActivity.this, msg, Toast.LENGTH_SHORT).show();
    }

    private void updateTransform() {
        // getDisplay() is null until the view is attached to a window; layout callbacks
        // can fire before that, so bail out instead of crashing.
        Display display = viewFinder.getDisplay();
        if (display == null) {
            return;
        }
        Matrix matrix = new Matrix();
        // Compute the center of the view finder.
        float centerX = viewFinder.getWidth() / 2f;
        float centerY = viewFinder.getHeight() / 2f;

        float[] rotations = {0, 90, 180, 270};
        // Correct preview output to account for display rotation.
        float rotationDegrees = rotations[display.getRotation()];

        matrix.postRotate(-rotationDegrees, centerX, centerY);

        // Finally, apply the transformation to our TextureView.
        viewFinder.setTransform(matrix);
    }

    private class MyAnalyzer implements ImageAnalysis.Analyzer {

        @Override
        public void analyze(ImageProxy imageProxy, int rotationDegrees) {
            final Image image = imageProxy.getImage();
            if (image != null) {
                Log.d("chao", image.getWidth() + "," + image.getHeight());
                imageView.input(image);
            }
        }
    }

    @Override
    protected void onDestroy() {
        // Release all camera use cases bound to this lifecycle.
        CameraX.unbindAll();
        super.onDestroy();
    }
}
/Application/src/main/res/drawable-hdpi/ic_action_info.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-YuvTools/e5d23863e47a8c64d92242cf9308ad31f5c6bbe1/Application/src/main/res/drawable-hdpi/ic_action_info.png -------------------------------------------------------------------------------- /Application/src/main/res/drawable-hdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-YuvTools/e5d23863e47a8c64d92242cf9308ad31f5c6bbe1/Application/src/main/res/drawable-hdpi/ic_launcher.png -------------------------------------------------------------------------------- /Application/src/main/res/drawable-hdpi/tile.9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-YuvTools/e5d23863e47a8c64d92242cf9308ad31f5c6bbe1/Application/src/main/res/drawable-hdpi/tile.9.png -------------------------------------------------------------------------------- /Application/src/main/res/drawable-mdpi/ic_action_info.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-YuvTools/e5d23863e47a8c64d92242cf9308ad31f5c6bbe1/Application/src/main/res/drawable-mdpi/ic_action_info.png -------------------------------------------------------------------------------- /Application/src/main/res/drawable-mdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-YuvTools/e5d23863e47a8c64d92242cf9308ad31f5c6bbe1/Application/src/main/res/drawable-mdpi/ic_launcher.png -------------------------------------------------------------------------------- /Application/src/main/res/drawable-xhdpi/ic_action_info.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-YuvTools/e5d23863e47a8c64d92242cf9308ad31f5c6bbe1/Application/src/main/res/drawable-xhdpi/ic_action_info.png -------------------------------------------------------------------------------- /Application/src/main/res/drawable-xhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-YuvTools/e5d23863e47a8c64d92242cf9308ad31f5c6bbe1/Application/src/main/res/drawable-xhdpi/ic_launcher.png -------------------------------------------------------------------------------- /Application/src/main/res/drawable-xxhdpi/ic_action_info.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-YuvTools/e5d23863e47a8c64d92242cf9308ad31f5c6bbe1/Application/src/main/res/drawable-xxhdpi/ic_action_info.png -------------------------------------------------------------------------------- /Application/src/main/res/drawable-xxhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rome753/android-YuvTools/e5d23863e47a8c64d92242cf9308ad31f5c6bbe1/Application/src/main/res/drawable-xxhdpi/ic_launcher.png -------------------------------------------------------------------------------- /Application/src/main/res/layout/activity_camera1.xml: -------------------------------------------------------------------------------- 1 | 16 | 19 | 20 | 24 | 25 | 29 | 30 | 31 | -------------------------------------------------------------------------------- /Application/src/main/res/layout/activity_camera2.xml: -------------------------------------------------------------------------------- 1 | 16 | 23 | -------------------------------------------------------------------------------- 
/Application/src/main/res/layout/activity_camerax.xml: -------------------------------------------------------------------------------- 1 | 2 | 7 | 8 | 17 | 18 | 23 | 24 | -------------------------------------------------------------------------------- /Application/src/main/res/layout/activity_main.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 |