├── .gitignore
├── LICENSE
├── README.md
├── app
│   ├── .gitignore
│   ├── build.gradle
│   ├── proguard-rules.pro
│   └── src
│       └── main
│           ├── AndroidManifest.xml
│           ├── java
│           │   └── com
│           │       └── afei
│           │           └── camerademo
│           │               ├── ImageUtils.java
│           │               ├── MainActivity.java
│           │               ├── MyApp.java
│           │               ├── camera
│           │               │   ├── Camera2Proxy.java
│           │               │   └── CameraProxy.java
│           │               ├── glsurfaceview
│           │               │   ├── Camera2GLSurfaceView.java
│           │               │   ├── CameraDrawer.java
│           │               │   ├── CameraGLSurfaceView.java
│           │               │   ├── GLSurfaceCamera2Activity.java
│           │               │   ├── GLSurfaceCameraActivity.java
│           │               │   └── OpenGLUtils.java
│           │               ├── surfaceview
│           │               │   ├── Camera2SurfaceView.java
│           │               │   ├── CameraSurfaceView.java
│           │               │   ├── SurfaceCamera2Activity.java
│           │               │   └── SurfaceCameraActivity.java
│           │               └── textureview
│           │                   ├── Camera2TextureView.java
│           │                   ├── CameraTextureView.java
│           │                   ├── TextureCamera2Activity.java
│           │                   └── TextureCameraActivity.java
│           └── res
│               ├── drawable
│               │   └── click_button_selector.xml
│               ├── layout
│               │   ├── activity_glsurface_camera.xml
│               │   ├── activity_glsurface_camera2.xml
│               │   ├── activity_main.xml
│               │   ├── activity_surface_camera.xml
│               │   ├── activity_surface_camera2.xml
│               │   ├── activity_texture_camera.xml
│               │   └── activity_texture_camera2.xml
│               ├── mipmap-xxhdpi
│               │   ├── ic_camera_switch.png
│               │   ├── ic_close.png
│               │   ├── ic_launcher.png
│               │   ├── ic_launcher_round.png
│               │   ├── icon_button_click.png
│               │   └── icon_button_click_down.png
│               ├── mipmap-xxxhdpi
│               │   ├── ic_launcher.png
│               │   └── ic_launcher_round.png
│               └── values
│                   ├── colors.xml
│                   ├── strings.xml
│                   └── styles.xml
├── build.gradle
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | # Built application files
2 | *.apk
3 | *.ap_
4 |
5 | # Files for the ART/Dalvik VM
6 | *.dex
7 |
8 | # Java class files
9 | *.class
10 |
11 | # Generated files
12 | bin/
13 | gen/
14 | out/
15 |
16 | # Gradle files
17 | .gradle/
18 | build/
19 |
20 | # Local configuration file (sdk path, etc)
21 | local.properties
22 |
23 | # Proguard folder generated by Eclipse
24 | proguard/
25 |
26 | # Log Files
27 | *.log
28 |
29 | # Android Studio Navigation editor temp files
30 | .navigation/
31 |
32 | # Android Studio captures folder
33 | captures/
34 |
35 | # IntelliJ
36 | *.iml
37 | .idea/
38 |
39 | # Keystore files
40 | # Uncomment the following line if you do not want to check your keystore files in.
41 | #*.jks
42 |
43 | # External native build folder generated in Android Studio 2.2 and later
44 | .externalNativeBuild
45 |
46 | # Google Services (e.g. APIs or Firebase)
47 | google-services.json
48 |
49 | # Freeline
50 | freeline.py
51 | freeline/
52 | freeline_project_description.json
53 |
54 | # fastlane
55 | fastlane/report.xml
56 | fastlane/Preview.html
57 | fastlane/screenshots
58 | fastlane/test_output
59 | fastlane/readme.md
60 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # CameraDemo
2 | 
3 | An Android camera demo covering both the Camera and Camera2 APIs, with previews rendered to SurfaceView, TextureView, and GLSurfaceView.
4 | 
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | android {
4 | compileSdkVersion 34
5 | defaultConfig {
6 | applicationId "com.afei.camerademo"
7 | minSdkVersion 26
8 | targetSdkVersion 34
9 | versionCode 1
10 | versionName "1.0"
11 | }
12 | buildTypes {
13 | release {
14 | minifyEnabled false
15 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
16 | }
17 | }
18 |
19 | compileOptions {
20 | sourceCompatibility JavaVersion.VERSION_1_8
21 | targetCompatibility JavaVersion.VERSION_1_8
22 | }
23 | }
24 |
25 | dependencies {
26 | implementation fileTree(dir: 'libs', include: ['*.jar'])
27 | implementation 'androidx.appcompat:appcompat:1.6.0'
28 | }
29 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <manifest xmlns:android="http://schemas.android.com/apk/res/android"
3 |     package="com.afei.camerademo">
4 |
5 |     <uses-permission android:name="android.permission.CAMERA" />
6 |     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
7 |
8 |     <application
9 |         android:name=".MyApp"
10 |         android:allowBackup="true"
11 |         android:icon="@mipmap/ic_launcher"
12 |         android:label="@string/app_name"
13 |         android:roundIcon="@mipmap/ic_launcher_round"
14 |         android:supportsRtl="true"
15 |         android:theme="@style/AppTheme">
16 |
17 |         <activity android:name=".MainActivity">
18 |             <intent-filter>
19 |                 <action android:name="android.intent.action.MAIN" />
20 |                 <category android:name="android.intent.category.LAUNCHER" />
21 |             </intent-filter>
22 |         </activity>
23 |
24 |         <activity
25 |             android:name=".surfaceview.SurfaceCameraActivity"
26 |             android:screenOrientation="portrait" />
27 |         <activity
28 |             android:name=".textureview.TextureCameraActivity"
29 |             android:screenOrientation="portrait" />
30 |         <activity
31 |             android:name=".glsurfaceview.GLSurfaceCameraActivity"
32 |             android:screenOrientation="portrait" />
33 |         <activity
34 |             android:name=".surfaceview.SurfaceCamera2Activity"
35 |             android:screenOrientation="portrait" />
36 |         <activity
37 |             android:name=".textureview.TextureCamera2Activity"
38 |             android:screenOrientation="portrait" />
39 |         <activity
40 |             android:name=".glsurfaceview.GLSurfaceCamera2Activity"
41 |             android:screenOrientation="portrait" />
42 |     </application>
43 |
44 | </manifest>
45 |
--------------------------------------------------------------------------------
/app/src/main/java/com/afei/camerademo/ImageUtils.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo;
2 |
3 | import android.content.ContentResolver;
4 | import android.content.ContentValues;
5 | import android.content.Context;
6 | import android.database.Cursor;
7 | import android.graphics.Bitmap;
8 | import android.graphics.Matrix;
9 | import android.os.Environment;
10 | import android.provider.MediaStore;
11 | import android.util.Log;
12 |
13 | import java.io.File;
14 | import java.io.FileOutputStream;
15 | import java.io.IOException;
16 | import java.text.SimpleDateFormat;
17 | import java.util.Date;
18 |
19 | public class ImageUtils {
20 |
21 | private static final String TAG = "ImageUtils";
22 | private static Context sContext = MyApp.getInstance();
23 |
24 | private static final String GALLERY_PATH = Environment.getExternalStoragePublicDirectory(Environment
25 | .DIRECTORY_DCIM) + File.separator + "Camera";
26 |
27 | private static final String[] STORE_IMAGES = {
28 | MediaStore.Images.Thumbnails._ID,
29 | };
30 | private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyyMMdd_HHmmss");
31 |
32 | public static Bitmap rotateBitmap(Bitmap source, int degree, boolean flipHorizontal, boolean recycle) {
33 | if (degree == 0 && !flipHorizontal) {
34 | return source;
35 | }
36 | Matrix matrix = new Matrix();
37 | matrix.postRotate(degree);
38 | if (flipHorizontal) {
39 | matrix.postScale(-1, 1);
40 | }
41 | Log.d(TAG, "source width: " + source.getWidth() + ", height: " + source.getHeight());
42 | Log.d(TAG, "rotateBitmap: degree: " + degree);
43 | Bitmap rotateBitmap = Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(), matrix, false);
44 | Log.d(TAG, "rotate width: " + rotateBitmap.getWidth() + ", height: " + rotateBitmap.getHeight());
45 | if (recycle) {
46 | source.recycle();
47 | }
48 | return rotateBitmap;
49 | }
50 |
51 | public static void saveImage(byte[] jpeg) {
52 | String fileName = DATE_FORMAT.format(new Date(System.currentTimeMillis())) + ".jpg";
53 | File outFile = new File(GALLERY_PATH, fileName);
54 | Log.d(TAG, "saveImage. filepath: " + outFile.getAbsolutePath());
55 | FileOutputStream os = null;
56 | try {
57 | os = new FileOutputStream(outFile);
58 | os.write(jpeg);
59 | os.flush();
60 | os.close();
61 | insertToDB(outFile.getAbsolutePath());
62 | } catch (IOException e) {
63 | e.printStackTrace();
64 | } finally {
65 | if (os != null) {
66 | try {
67 | os.close();
68 | } catch (IOException e) {
69 | e.printStackTrace();
70 | }
71 | }
72 | }
73 | }
74 |
75 | public static void saveBitmap(Bitmap bitmap) {
76 | String fileName = DATE_FORMAT.format(new Date(System.currentTimeMillis())) + ".jpg";
77 | File outFile = new File(GALLERY_PATH, fileName);
78 | Log.d(TAG, "saveImage. filepath: " + outFile.getAbsolutePath());
79 | FileOutputStream os = null;
80 | try {
81 | os = new FileOutputStream(outFile);
82 | boolean success = bitmap.compress(Bitmap.CompressFormat.JPEG, 100, os);
83 | Log.d(TAG, "saveBitmap: " + success);
84 | if (success) {
85 | insertToDB(outFile.getAbsolutePath());
86 | }
87 | } catch (IOException e) {
88 | e.printStackTrace();
89 | } finally {
90 | if (os != null) {
91 | try {
92 | os.close();
93 | } catch (IOException e) {
94 | e.printStackTrace();
95 | }
96 | }
97 | }
98 | }
99 |
100 | public static void insertToDB(String picturePath) {
101 | ContentValues values = new ContentValues();
102 | ContentResolver resolver = sContext.getContentResolver();
103 | values.put(MediaStore.Images.ImageColumns.DATA, picturePath);
104 | values.put(MediaStore.Images.ImageColumns.TITLE, picturePath.substring(picturePath.lastIndexOf("/") + 1));
105 | values.put(MediaStore.Images.ImageColumns.DATE_TAKEN, System.currentTimeMillis());
106 | values.put(MediaStore.Images.ImageColumns.MIME_TYPE, "image/jpeg");
107 | resolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
108 | }
109 |
110 | public static Bitmap getLatestThumbBitmap() {
111 | Bitmap bitmap = null;
112 | // 按照时间顺序降序查询
113 | Cursor cursor = MediaStore.Images.Media.query(sContext.getContentResolver(), MediaStore.Images.Media
114 | .EXTERNAL_CONTENT_URI, STORE_IMAGES, null, null, MediaStore.Files.FileColumns.DATE_MODIFIED + " DESC");
115 | boolean first = cursor.moveToFirst();
116 | if (first) {
117 | long id = cursor.getLong(0);
118 | bitmap = MediaStore.Images.Thumbnails.getThumbnail(sContext.getContentResolver(), id, MediaStore.Images
119 | .Thumbnails.MICRO_KIND, null);
120 | Log.d(TAG, "bitmap width: " + bitmap.getWidth());
121 | Log.d(TAG, "bitmap height: " + bitmap.getHeight());
122 | }
123 | cursor.close();
124 | return bitmap;
125 | }
126 | }
127 |
--------------------------------------------------------------------------------
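
A minimal usage sketch for ImageUtils.saveImage(), assuming it is called from an ImageReader.OnImageAvailableListener running on a background handler (as the Camera2 activities in this project do); the listener below is illustrative only and is not a file in the repo:

    // Illustrative fragment (needs android.media.Image, android.media.ImageReader, java.nio.ByteBuffer):
    ImageReader.OnImageAvailableListener listener = reader -> {
        try (Image image = reader.acquireNextImage()) {
            ByteBuffer buffer = image.getPlanes()[0].getBuffer();
            byte[] jpeg = new byte[buffer.remaining()];
            buffer.get(jpeg);
            ImageUtils.saveImage(jpeg); // writes DCIM/Camera/<timestamp>.jpg and registers it in MediaStore
        }
    };
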
/app/src/main/java/com/afei/camerademo/MainActivity.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo;
2 |
3 | import static android.content.pm.PackageManager.PERMISSION_GRANTED;
4 |
5 | import android.Manifest;
6 | import android.content.Intent;
7 | import android.os.Bundle;
8 | import android.view.View;
9 | import android.widget.Toast;
10 |
11 | import androidx.annotation.NonNull;
12 | import androidx.appcompat.app.AppCompatActivity;
13 | import androidx.core.app.ActivityCompat;
14 | import androidx.core.content.ContextCompat;
15 |
16 | import com.afei.camerademo.glsurfaceview.GLSurfaceCamera2Activity;
17 | import com.afei.camerademo.glsurfaceview.GLSurfaceCameraActivity;
18 | import com.afei.camerademo.surfaceview.SurfaceCamera2Activity;
19 | import com.afei.camerademo.surfaceview.SurfaceCameraActivity;
20 | import com.afei.camerademo.textureview.TextureCamera2Activity;
21 | import com.afei.camerademo.textureview.TextureCameraActivity;
22 |
23 | public class MainActivity extends AppCompatActivity {
24 |
25 | private static final int REQUEST_PERMISSION = 1;
26 | private final String[] PERMISSIONS = new String[] {
27 | Manifest.permission.WRITE_EXTERNAL_STORAGE,
28 | Manifest.permission.CAMERA
29 | };
30 |
31 | @Override
32 | protected void onCreate(Bundle savedInstanceState) {
33 | super.onCreate(savedInstanceState);
34 | setContentView(R.layout.activity_main);
35 | checkPermission();
36 | }
37 |
38 | public void startCameraActivity(View view) {
39 | Intent intent = null;
40 | switch (view.getId()) {
41 | case R.id.camera_btn1:
42 | intent = new Intent(this, SurfaceCameraActivity.class);
43 | break;
44 | case R.id.camera_btn2:
45 | intent = new Intent(this, TextureCameraActivity.class);
46 | break;
47 | case R.id.camera_btn3:
48 | intent = new Intent(this, GLSurfaceCameraActivity.class);
49 | break;
50 | case R.id.camera_btn4:
51 | intent = new Intent(this, SurfaceCamera2Activity.class);
52 | break;
53 | case R.id.camera_btn5:
54 | intent = new Intent(this, TextureCamera2Activity.class);
55 | break;
56 | case R.id.camera_btn6:
57 | intent = new Intent(this, GLSurfaceCamera2Activity.class);
58 | break;
59 | }
60 | startActivity(intent);
61 | }
62 |
63 | private boolean checkPermission() {
64 | for (int i = 0; i < PERMISSIONS.length; i++) {
65 | int state = ContextCompat.checkSelfPermission(this, PERMISSIONS[i]);
66 | if (state != PERMISSION_GRANTED) {
67 | ActivityCompat.requestPermissions(this, PERMISSIONS, REQUEST_PERMISSION);
68 | return false;
69 | }
70 | }
71 | return true;
72 | }
73 |
74 | @Override
75 | public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
76 | super.onRequestPermissionsResult(requestCode, permissions, grantResults);
77 | if (requestCode == REQUEST_PERMISSION) {
78 | for (int i = 0; i < permissions.length; i++) {
79 | if (grantResults[i] != PERMISSION_GRANTED) {
80 | Toast.makeText(this, "请在设置中相关权限", Toast.LENGTH_SHORT).show();
81 | }
82 | }
83 | }
84 | }
85 |
86 | }
87 |
--------------------------------------------------------------------------------
/app/src/main/java/com/afei/camerademo/MyApp.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo;
2 |
3 | import android.app.Application;
4 |
5 | public class MyApp extends Application {
6 |
7 | private static MyApp app;
8 |
9 | @Override
10 | public void onCreate() {
11 | super.onCreate();
12 | app = this;
13 | }
14 |
15 | public static MyApp getInstance() {
16 | return app;
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/app/src/main/java/com/afei/camerademo/camera/Camera2Proxy.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.camera;
2 |
3 | import android.annotation.SuppressLint;
4 | import android.annotation.TargetApi;
5 | import android.app.Activity;
6 | import android.content.Context;
7 | import android.graphics.ImageFormat;
8 | import android.graphics.Matrix;
9 | import android.graphics.Rect;
10 | import android.graphics.RectF;
11 | import android.graphics.SurfaceTexture;
12 | import android.hardware.camera2.CameraAccessException;
13 | import android.hardware.camera2.CameraCaptureSession;
14 | import android.hardware.camera2.CameraCharacteristics;
15 | import android.hardware.camera2.CameraDevice;
16 | import android.hardware.camera2.CameraManager;
17 | import android.hardware.camera2.CameraMetadata;
18 | import android.hardware.camera2.CaptureRequest;
19 | import android.hardware.camera2.CaptureResult;
20 | import android.hardware.camera2.TotalCaptureResult;
21 | import android.hardware.camera2.params.MeteringRectangle;
22 | import android.hardware.camera2.params.StreamConfigurationMap;
23 | import android.media.ImageReader;
24 | import android.os.Build;
25 | import android.os.Handler;
26 | import android.os.HandlerThread;
27 | import android.util.Log;
28 | import android.util.Size;
29 | import android.view.OrientationEventListener;
30 | import android.view.Surface;
31 | import android.view.SurfaceHolder;
32 |
33 | import androidx.annotation.NonNull;
34 |
35 | import java.util.ArrayList;
36 | import java.util.Arrays;
37 | import java.util.Collections;
38 | import java.util.Comparator;
39 | import java.util.List;
40 |
41 | public class Camera2Proxy {
42 |
43 | private static final String TAG = "Camera2Proxy";
44 |
45 | private Activity mActivity;
46 |
47 | private int mCameraId = CameraCharacteristics.LENS_FACING_FRONT; // ID of the camera to open (0, usually the back camera)
48 | private CameraCharacteristics mCameraCharacteristics; // camera characteristics
49 | private CameraManager mCameraManager; // camera manager
50 | private CameraDevice mCameraDevice; // camera device
51 | private CameraCaptureSession mCaptureSession;
52 | private CaptureRequest.Builder mPreviewRequestBuilder; // builder for the preview capture request
53 | private CaptureRequest mPreviewRequest;
54 | private Handler mBackgroundHandler;
55 | private HandlerThread mBackgroundThread;
56 | private ImageReader mPictureImageReader;
57 | private Surface mPreviewSurface;
58 | private OrientationEventListener mOrientationEventListener;
59 |
60 | private Size mPreviewSize; // preview size
61 | private Size mPictureSize; // still-capture (picture) size
62 | private int mDisplayRotation = 0; // rotating the raw sensor image clockwise by this angle makes it upright
63 | private int mDeviceOrientation = 0; // device orientation reported by the OrientationEventListener
64 |
65 | /* zoom related */
66 | private final int MAX_ZOOM = 200; // maximum zoom level, used to compute how much the crop rect changes per zoom step
67 | private int mZoom = 0; // varies between 0 and MAX_ZOOM
68 | private float mStepWidth; // width change per zoom step
69 | private float mStepHeight; // height change per zoom step
70 |
71 | /**
72 | * Callback invoked when the camera device is opened, disconnected, or fails with an error
73 | */
74 | private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
75 | @Override
76 | public void onOpened(@NonNull CameraDevice camera) {
77 | Log.d(TAG, "onOpened");
78 | mCameraDevice = camera;
79 | initPreviewRequest();
80 | createCommonSession();
81 | }
82 |
83 | @Override
84 | public void onDisconnected(@NonNull CameraDevice camera) {
85 | Log.d(TAG, "onDisconnected");
86 | releaseCamera();
87 | }
88 |
89 | @Override
90 | public void onError(@NonNull CameraDevice camera, int error) {
91 | Log.e(TAG, "Camera Open failed, error: " + error);
92 | releaseCamera();
93 | }
94 | };
95 |
96 | @TargetApi(Build.VERSION_CODES.M)
97 | public Camera2Proxy(Activity activity) {
98 | mActivity = activity;
99 | mOrientationEventListener = new OrientationEventListener(mActivity) {
100 | @Override
101 | public void onOrientationChanged(int orientation) {
102 | mDeviceOrientation = orientation;
103 | }
104 | };
105 | }
106 |
107 | public void setUpCameraOutputs(int width, int height) {
108 | mCameraManager = (CameraManager) mActivity.getSystemService(Context.CAMERA_SERVICE);
109 | try {
110 | mCameraCharacteristics = mCameraManager.getCameraCharacteristics(Integer.toString(mCameraId));
111 | StreamConfigurationMap map = mCameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
112 | Size[] supportPictureSizes = map.getOutputSizes(ImageFormat.JPEG);
113 | Size pictureSize = Collections.max(Arrays.asList(supportPictureSizes), new CompareSizesByArea());
114 | float aspectRatio = pictureSize.getHeight() * 1.0f / pictureSize.getWidth();
115 | Size[] supportPreviewSizes = map.getOutputSizes(SurfaceTexture.class);
116 | // camera screens are usually locked to portrait, so the view width is the short edge; use it to compute the preview size
117 | Size previewSize = chooseOptimalSize(supportPreviewSizes, width, aspectRatio);
118 | Log.d(TAG, "pictureSize: " + pictureSize);
119 | Log.d(TAG, "previewSize: " + previewSize);
120 | mPictureSize = pictureSize;
121 | mPreviewSize = previewSize;
122 | } catch (CameraAccessException e) {
123 | e.printStackTrace();
124 | }
125 | }
126 |
127 | @SuppressLint("MissingPermission")
128 | public void openCamera() {
129 | Log.v(TAG, "openCamera");
130 | startBackgroundThread(); // paired with stopBackgroundThread() in releaseCamera()
131 | mOrientationEventListener.enable();
132 | try {
133 | mCameraCharacteristics = mCameraManager.getCameraCharacteristics(Integer.toString(mCameraId));
134 | // these only need to be recomputed when the camera is switched; the results are cached in member fields
135 | initDisplayRotation();
136 | initZoomParameter();
137 | // open the camera
138 | mCameraManager.openCamera(Integer.toString(mCameraId), mStateCallback, mBackgroundHandler);
139 | } catch (CameraAccessException e) {
140 | e.printStackTrace();
141 | }
142 | }
143 |
144 | private void initDisplayRotation() {
145 | int displayRotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
146 | switch (displayRotation) {
147 | case Surface.ROTATION_0:
148 | displayRotation = 90;
149 | break;
150 | case Surface.ROTATION_90:
151 | displayRotation = 0;
152 | break;
153 | case Surface.ROTATION_180:
154 | displayRotation = 270;
155 | break;
156 | case Surface.ROTATION_270:
157 | displayRotation = 180;
158 | break;
159 | }
160 | int sensorOrientation = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
161 | mDisplayRotation = (displayRotation + sensorOrientation + 270) % 360;
162 | Log.d(TAG, "mDisplayRotation: " + mDisplayRotation);
163 | }
164 |
165 | private void initZoomParameter() {
166 | Rect rect = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
167 | Log.d(TAG, "sensor_info_active_array_size: " + rect);
168 | // max_digital_zoom is the maximum ratio between the active array rect and the crop rect
169 | float max_digital_zoom = mCameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
170 | Log.d(TAG, "max_digital_zoom: " + max_digital_zoom);
171 | // minimum width and height of the crop rect
172 | float minWidth = rect.width() / max_digital_zoom;
173 | float minHeight = rect.height() / max_digital_zoom;
174 | // both sides of the crop rect change when zooming, so divide by 2
175 | mStepWidth = (rect.width() - minWidth) / MAX_ZOOM / 2;
176 | mStepHeight = (rect.height() - minHeight) / MAX_ZOOM / 2;
177 | }
178 |
179 | public void releaseCamera() {
180 | Log.v(TAG, "releaseCamera");
181 | if (null != mCaptureSession) {
182 | mCaptureSession.close();
183 | mCaptureSession = null;
184 | }
185 | if (mCameraDevice != null) {
186 | mCameraDevice.close();
187 | mCameraDevice = null;
188 | }
189 | if (mPictureImageReader != null) {
190 | mPictureImageReader.close();
191 | mPictureImageReader = null;
192 | }
193 | mOrientationEventListener.disable();
194 | stopBackgroundThread(); // paired with startBackgroundThread() in openCamera()
195 | }
196 |
197 | public void setPreviewSurface(SurfaceHolder holder) {
198 | mPreviewSurface = holder.getSurface();
199 | }
200 |
201 | public void setPreviewSurface(SurfaceTexture surfaceTexture) {
202 | surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
203 | mPreviewSurface = new Surface(surfaceTexture);
204 | }
205 |
206 | private void createCommonSession() {
207 | List<Surface> outputs = new ArrayList<>();
208 | // preview output
209 | if (mPreviewSurface != null) {
210 | Log.d(TAG, "createCommonSession add target mPreviewSurface");
211 | outputs.add(mPreviewSurface);
212 | }
213 | // picture output
214 | Size pictureSize = mPictureSize;
215 | if (pictureSize != null) {
216 | Log.d(TAG, "createCommonSession add target mPictureImageReader");
217 | mPictureImageReader = ImageReader.newInstance(pictureSize.getWidth(), pictureSize.getHeight(), ImageFormat.JPEG, 1);
218 | outputs.add(mPictureImageReader.getSurface());
219 | }
220 | try {
221 | // every target added to a CaptureRequest in this session must be one of the session's outputs, so add them all when creating the session
222 | mCameraDevice.createCaptureSession(outputs, new CameraCaptureSession.StateCallback() {
223 |
224 | @Override
225 | public void onConfigured(@NonNull CameraCaptureSession session) {
226 | mCaptureSession = session;
227 | startPreview();
228 | }
229 |
230 | @Override
231 | public void onConfigureFailed(@NonNull CameraCaptureSession session) {
232 | Log.e(TAG, "ConfigureFailed. session: " + session);
233 | }
234 | }, mBackgroundHandler); // passing null as the handler would use the current thread's Looper
235 | } catch (CameraAccessException e) {
236 | e.printStackTrace();
237 | }
238 | }
239 |
240 | private void initPreviewRequest() {
241 | if (mPreviewSurface == null) {
242 | Log.e(TAG, "initPreviewRequest failed, mPreviewSurface is null");
243 | return;
244 | }
245 | try {
246 | mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
247 | // set the preview output Surface
248 | mPreviewRequestBuilder.addTarget(mPreviewSurface);
249 | // continuous auto focus
250 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
251 | // auto exposure (with auto flash)
252 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
253 | // auto white balance
254 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO);
255 | } catch (CameraAccessException e) {
256 | e.printStackTrace();
257 | }
258 | }
259 |
260 | public void startPreview() {
261 | Log.v(TAG, "startPreview");
262 | if (mCaptureSession == null || mPreviewRequestBuilder == null) {
263 | Log.w(TAG, "startPreview: mCaptureSession or mPreviewRequestBuilder is null");
264 | return;
265 | }
266 | try {
267 | // start the preview by repeatedly submitting the preview request
268 | CaptureRequest captureRequest = mPreviewRequestBuilder.build();
269 | mCaptureSession.setRepeatingRequest(captureRequest, null, mBackgroundHandler);
270 | } catch (CameraAccessException e) {
271 | e.printStackTrace();
272 | }
273 | }
274 |
275 | public void stopPreview() {
276 | Log.v(TAG, "stopPreview");
277 | if (mCaptureSession == null) {
278 | Log.w(TAG, "stopPreview: mCaptureSession is null");
279 | return;
280 | }
281 | try {
282 | mCaptureSession.stopRepeating();
283 | } catch (CameraAccessException e) {
284 | e.printStackTrace();
285 | }
286 | }
287 |
288 | public void captureStillPicture(ImageReader.OnImageAvailableListener onImageAvailableListener) {
289 | if (mPictureImageReader == null) {
290 | Log.w(TAG, "captureStillPicture failed! mPictureImageReader is null");
291 | return;
292 | }
293 | mPictureImageReader.setOnImageAvailableListener(onImageAvailableListener, mBackgroundHandler);
294 | try {
295 | // create a request for still capture
296 | CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
297 | captureBuilder.addTarget(mPictureImageReader.getSurface());
298 | captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
299 | captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getJpegOrientation(mDeviceOrientation));
300 | // if the preview is zoomed in, apply the same crop region to the still capture
301 | Rect zoomRect = mPreviewRequestBuilder.get(CaptureRequest.SCALER_CROP_REGION);
302 | if (zoomRect != null) {
303 | captureBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoomRect);
304 | }
305 | stopPreview();
306 | mCaptureSession.abortCaptures();
307 | final long time = System.currentTimeMillis();
308 | mCaptureSession.capture(captureBuilder.build(), new CameraCaptureSession.CaptureCallback() {
309 | @Override
310 | public void onCaptureCompleted(@NonNull CameraCaptureSession session,
311 | @NonNull CaptureRequest request,
312 | @NonNull TotalCaptureResult result) {
313 | Log.w(TAG, "onCaptureCompleted, time: " + (System.currentTimeMillis() - time));
314 | try {
315 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
316 | mCaptureSession.capture(mPreviewRequestBuilder.build(), null, mBackgroundHandler);
317 | } catch (CameraAccessException e) {
318 | e.printStackTrace();
319 | }
320 | startPreview();
321 | }
322 | }, mBackgroundHandler);
323 | } catch (CameraAccessException e) {
324 | e.printStackTrace();
325 | }
326 | }
327 |
328 | private int getJpegOrientation(int deviceOrientation) {
329 | if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
330 | int sensorOrientation = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
331 | // Round device orientation to a multiple of 90
332 | deviceOrientation = (deviceOrientation + 45) / 90 * 90;
333 | // Reverse device orientation for front-facing cameras
334 | boolean facingFront = mCameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics
335 | .LENS_FACING_FRONT;
336 | if (facingFront) deviceOrientation = -deviceOrientation;
337 | // Calculate desired JPEG orientation relative to camera orientation to make
338 | // the image upright relative to the device orientation
339 | int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
340 | Log.d(TAG, "jpegOrientation: " + jpegOrientation);
341 | return jpegOrientation;
342 | }
343 |
344 | public boolean isFrontCamera() {
345 | return mCameraId == CameraCharacteristics.LENS_FACING_BACK; // LENS_FACING_BACK == 1, which is the front camera's ID on most devices
346 | }
347 |
348 | public Size getPreviewSize() {
349 | return mPreviewSize;
350 | }
351 |
352 | public void setPreviewSize(Size previewSize) {
353 | mPreviewSize = previewSize;
354 | }
355 |
356 | public Size getPictureSize() {
357 | return mPictureSize;
358 | }
359 |
360 | public void setPictureSize(Size pictureSize) {
361 | mPictureSize = pictureSize;
362 | }
363 |
364 | public void switchCamera() {
365 | mCameraId ^= 1;
366 | Log.d(TAG, "switchCamera: mCameraId: " + mCameraId);
367 | releaseCamera();
368 | openCamera();
369 | }
370 |
371 | public void handleZoom(boolean isZoomIn) {
372 | if (mCameraDevice == null || mCameraCharacteristics == null || mPreviewRequestBuilder == null) {
373 | return;
374 | }
375 | if (isZoomIn && mZoom < MAX_ZOOM) { // zoom in
376 | mZoom++;
377 | } else if (mZoom > 0) { // zoom out
378 | mZoom--;
379 | }
380 | Log.v(TAG, "handleZoom: mZoom: " + mZoom);
381 | Rect rect = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
382 | int cropW = (int) (mStepWidth * mZoom);
383 | int cropH = (int) (mStepHeight * mZoom);
384 | Rect zoomRect = new Rect(rect.left + cropW, rect.top + cropH, rect.right - cropW, rect.bottom - cropH);
385 | Log.d(TAG, "zoomRect: " + zoomRect);
386 | mPreviewRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoomRect);
387 | startPreview(); // the new crop region only takes effect after restarting the repeating preview request
388 | }
389 |
390 | public void triggerFocusAtPoint(float x, float y, int width, int height) {
391 | Log.d(TAG, "triggerFocusAtPoint (" + x + ", " + y + ")");
392 | Rect cropRegion = mPreviewRequestBuilder.get(CaptureRequest.SCALER_CROP_REGION);
393 | MeteringRectangle afRegion = getAFAERegion(x, y, width, height, 1f, cropRegion);
394 | // use a slightly larger region for AE than for AF; it tends to give better focus results
395 | MeteringRectangle aeRegion = getAFAERegion(x, y, width, height, 1.5f, cropRegion);
396 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[]{afRegion});
397 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[]{aeRegion});
398 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
399 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
400 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
401 | try {
402 | mCaptureSession.capture(mPreviewRequestBuilder.build(), mAfCaptureCallback, mBackgroundHandler);
403 | } catch (CameraAccessException e) {
404 | e.printStackTrace();
405 | }
406 | }
407 |
408 | private MeteringRectangle getAFAERegion(float x, float y, int viewWidth, int viewHeight, float multiple, Rect cropRegion) {
409 | Log.v(TAG, "getAFAERegion enter");
410 | Log.d(TAG, "point: [" + x + ", " + y + "], viewWidth: " + viewWidth + ", viewHeight: " + viewHeight);
411 | Log.d(TAG, "multiple: " + multiple);
412 | // do rotate and mirror
413 | RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
414 | Matrix matrix1 = new Matrix();
415 | matrix1.setRotate(mDisplayRotation);
416 | matrix1.postScale(isFrontCamera() ? -1 : 1, 1);
417 | matrix1.invert(matrix1);
418 | matrix1.mapRect(viewRect);
419 | // get scale and translate matrix
420 | Matrix matrix2 = new Matrix();
421 | RectF cropRect = new RectF(cropRegion);
422 | matrix2.setRectToRect(viewRect, cropRect, Matrix.ScaleToFit.CENTER);
423 | Log.d(TAG, "viewRect: " + viewRect);
424 | Log.d(TAG, "cropRect: " + cropRect);
425 | // get out region
426 | int side = (int) (Math.max(viewWidth, viewHeight) / 8 * multiple);
427 | RectF outRect = new RectF(x - side / 2, y - side / 2, x + side / 2, y + side / 2);
428 | Log.d(TAG, "outRect before: " + outRect);
429 | matrix1.mapRect(outRect);
430 | matrix2.mapRect(outRect);
431 | Log.d(TAG, "outRect after: " + outRect);
432 | // clamp: the metering region must not exceed the crop region
433 | Rect meteringRect = new Rect((int) outRect.left, (int) outRect.top, (int) outRect.right, (int) outRect.bottom);
434 | meteringRect.left = clamp(meteringRect.left, cropRegion.left, cropRegion.right);
435 | meteringRect.top = clamp(meteringRect.top, cropRegion.top, cropRegion.bottom);
436 | meteringRect.right = clamp(meteringRect.right, cropRegion.left, cropRegion.right);
437 | meteringRect.bottom = clamp(meteringRect.bottom, cropRegion.top, cropRegion.bottom);
438 | Log.d(TAG, "meteringRegion: " + meteringRect);
439 | return new MeteringRectangle(meteringRect, 1000);
440 | }
441 |
442 | private final CameraCaptureSession.CaptureCallback mAfCaptureCallback = new CameraCaptureSession.CaptureCallback() {
443 |
444 | private void process(CaptureResult result) {
445 | Integer state = result.get(CaptureResult.CONTROL_AF_STATE);
446 | Log.d(TAG, "CONTROL_AF_STATE: " + state);
447 | if (state == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || state == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
448 | Log.d(TAG, "process: start normal preview");
449 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
450 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
451 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); // restore auto exposure
452 | startPreview();
453 | }
454 | }
455 |
456 | @Override
457 | public void onCaptureProgressed(@NonNull CameraCaptureSession session,
458 | @NonNull CaptureRequest request,
459 | @NonNull CaptureResult partialResult) {
460 | process(partialResult);
461 | }
462 |
463 | @Override
464 | public void onCaptureCompleted(@NonNull CameraCaptureSession session,
465 | @NonNull CaptureRequest request,
466 | @NonNull TotalCaptureResult result) {
467 | process(result);
468 | }
469 | };
470 |
471 |
472 | private void startBackgroundThread() {
473 | if (mBackgroundThread == null || mBackgroundHandler == null) {
474 | Log.v(TAG, "startBackgroundThread");
475 | mBackgroundThread = new HandlerThread("CameraBackground");
476 | mBackgroundThread.start();
477 | mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
478 | }
479 | }
480 |
481 | private void stopBackgroundThread() {
482 | Log.v(TAG, "stopBackgroundThread");
483 | if (mBackgroundThread != null) {
484 | mBackgroundThread.quitSafely();
485 | try {
486 | mBackgroundThread.join();
487 | mBackgroundThread = null;
488 | mBackgroundHandler = null;
489 | } catch (InterruptedException e) {
490 | e.printStackTrace();
491 | }
492 | }
493 | }
494 |
495 | public Size chooseOptimalSize(Size[] sizes, int dstSize, float aspectRatio) {
496 | if (sizes == null || sizes.length <= 0) {
497 | Log.e(TAG, "chooseOptimalSize failed, input sizes is empty");
498 | return null;
499 | }
500 | int minDelta = Integer.MAX_VALUE; // smallest difference so far; start large so it is always overwritten
501 | int index = 0; // index of the size with the smallest difference
502 | for (int i = 0; i < sizes.length; i++) {
503 | Size size = sizes[i];
504 | // first check whether the aspect ratio matches
505 | if (size.getWidth() * aspectRatio == size.getHeight()) {
506 | int delta = Math.abs(dstSize - size.getHeight());
507 | if (delta == 0) {
508 | return size;
509 | }
510 | if (minDelta > delta) {
511 | minDelta = delta;
512 | index = i;
513 | }
514 | }
515 | }
516 | return sizes[index];
517 | }
518 |
519 | private int clamp(int x, int min, int max) {
520 | if (x > max) return max;
521 | if (x < min) return min;
522 | return x;
523 | }
524 |
525 | static class CompareSizesByArea implements Comparator<Size> {
526 |
527 | @Override
528 | public int compare(Size lhs, Size rhs) {
529 | // cast to long so that the multiplication cannot overflow
530 | return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
531 | (long) rhs.getWidth() * rhs.getHeight());
532 | }
533 |
534 | }
535 |
536 | }
537 |
--------------------------------------------------------------------------------
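
The typical call order for Camera2Proxy, mirroring what Camera2GLSurfaceView.onSurfaceCreated() below does (a sketch for orientation, not an additional file in the repo):

    Camera2Proxy proxy = new Camera2Proxy(activity);
    proxy.setUpCameraOutputs(viewWidth, viewHeight); // choose picture/preview sizes from the view width
    proxy.setPreviewSurface(surfaceTexture);         // or setPreviewSurface(surfaceHolder)
    proxy.openCamera();                              // session creation and preview start happen in the callbacks
    // ... later, e.g. from surfaceDestroyed():
    proxy.releaseCamera();

To make the zoom math in initZoomParameter()/handleZoom() concrete, with purely illustrative numbers: an active array of 4000x3000 and SCALER_AVAILABLE_MAX_DIGITAL_ZOOM of 4.0 give minWidth = 1000, so mStepWidth = (4000 - 1000) / 200 / 2 = 7.5 px per step; at mZoom = 100 the crop rect is inset by 750 px on each side horizontally and 562 px vertically.
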
/app/src/main/java/com/afei/camerademo/camera/CameraProxy.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.camera;
2 |
3 | import android.app.Activity;
4 | import android.graphics.ImageFormat;
5 | import android.graphics.Rect;
6 | import android.graphics.SurfaceTexture;
7 | import android.hardware.Camera;
8 | import android.hardware.Camera.CameraInfo;
9 | import android.hardware.Camera.Parameters;
10 | import android.hardware.Camera.PreviewCallback;
11 | import android.hardware.Camera.Size;
12 | import android.util.Log;
13 | import android.view.OrientationEventListener;
14 | import android.view.Surface;
15 | import android.view.SurfaceHolder;
16 |
17 | import java.io.IOException;
18 | import java.util.ArrayList;
19 | import java.util.List;
20 |
21 | @SuppressWarnings("deprecation")
22 | public class CameraProxy implements Camera.AutoFocusCallback {
23 |
24 | private static final String TAG = "CameraProxy";
25 |
26 | private Activity mActivity;
27 | private Camera mCamera;
28 | private Parameters mParameters;
29 | private CameraInfo mCameraInfo = new CameraInfo();
30 | private int mCameraId = CameraInfo.CAMERA_FACING_BACK;
31 | private int mPreviewWidth = 1440; // default 1440
32 | private int mPreviewHeight = 1080; // default 1080
33 | private float mPreviewScale = mPreviewHeight * 1f / mPreviewWidth;
34 | private PreviewCallback mPreviewCallback; // callback that receives camera preview frames
35 | private OrientationEventListener mOrientationEventListener;
36 | private int mLatestRotation = 0;
37 |
38 | private byte[] mPreviewBuffer;
39 |
40 | public CameraProxy(Activity activity) {
41 | mActivity = activity;
42 | mOrientationEventListener = new OrientationEventListener(mActivity) {
43 | @Override
44 | public void onOrientationChanged(int orientation) {
45 | setPictureRotate(orientation);
46 | }
47 | };
48 | }
49 |
50 | public void openCamera() {
51 | Log.d(TAG, "openCamera cameraId: " + mCameraId);
52 | mCamera = Camera.open(mCameraId);
53 | Camera.getCameraInfo(mCameraId, mCameraInfo);
54 | initConfig();
55 | setDisplayOrientation();
56 | Log.d(TAG, "openCamera enable mOrientationEventListener");
57 | mOrientationEventListener.enable();
58 | }
59 |
60 | public void releaseCamera() {
61 | if (mCamera != null) {
62 | Log.v(TAG, "releaseCamera");
63 | mCamera.setPreviewCallback(null);
64 | mCamera.stopPreview();
65 | mCamera.release();
66 | mCamera = null;
67 | }
68 | mOrientationEventListener.disable();
69 | }
70 |
71 | public void startPreview(SurfaceHolder holder) {
72 | if (mCamera != null) {
73 | Log.v(TAG, "startPreview");
74 | try {
75 | mCamera.setPreviewDisplay(holder);
76 | } catch (IOException e) {
77 | e.printStackTrace();
78 | }
79 | mCamera.startPreview();
80 | }
81 | }
82 |
83 | public void startPreview(SurfaceTexture surface) {
84 | if (mCamera != null) {
85 | Log.v(TAG, "startPreview");
86 | try {
87 | mCamera.setPreviewTexture(surface);
88 | } catch (IOException e) {
89 | e.printStackTrace();
90 | }
91 | mCamera.startPreview();
92 | }
93 | }
94 |
95 | public void stopPreview() {
96 | if (mCamera != null) {
97 | Log.v(TAG, "stopPreview");
98 | mCamera.stopPreview();
99 | }
100 | }
101 |
102 | public boolean isFrontCamera() {
103 | return mCameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT;
104 | }
105 |
106 | private void initConfig() {
107 | Log.v(TAG, "initConfig");
108 | try {
109 | mParameters = mCamera.getParameters();
110 | // setting parameters the camera does not support will throw, so always check support before setting them
111 | List<String> supportedFlashModes = mParameters.getSupportedFlashModes();
112 | if (supportedFlashModes != null && supportedFlashModes.contains(Parameters.FLASH_MODE_OFF)) {
113 | mParameters.setFlashMode(Parameters.FLASH_MODE_OFF); // flash mode
114 | }
115 | List<String> supportedFocusModes = mParameters.getSupportedFocusModes();
116 | if (supportedFocusModes != null && supportedFocusModes.contains(Parameters.FOCUS_MODE_AUTO)) {
117 | mParameters.setFocusMode(Parameters.FOCUS_MODE_AUTO); // focus mode
118 | }
119 | mParameters.setPreviewFormat(ImageFormat.NV21); // preview frame format
120 | mParameters.setPictureFormat(ImageFormat.JPEG); // picture format
121 | mParameters.setExposureCompensation(0); // exposure compensation
122 | Size previewSize = getSuitableSize(mParameters.getSupportedPreviewSizes());
123 | mPreviewWidth = previewSize.width;
124 | mPreviewHeight = previewSize.height;
125 | mParameters.setPreviewSize(mPreviewWidth, mPreviewHeight); // preview size
126 | Log.d(TAG, "previewWidth: " + mPreviewWidth + ", previewHeight: " + mPreviewHeight);
127 | Size pictureSize = getSuitableSize(mParameters.getSupportedPictureSizes());
128 | mParameters.setPictureSize(pictureSize.width, pictureSize.height);
129 | Log.d(TAG, "pictureWidth: " + pictureSize.width + ", pictureHeight: " + pictureSize.height);
130 | mCamera.setParameters(mParameters); // apply the configured parameters to the camera
131 | } catch (Exception e) {
132 | e.printStackTrace();
133 | }
134 | }
135 |
136 | private Size getSuitableSize(List<Size> sizes) {
137 | int minDelta = Integer.MAX_VALUE; // smallest difference so far; start large so it is always overwritten
138 | int index = 0; // index of the size with the smallest difference
139 | for (int i = 0; i < sizes.size(); i++) {
140 | Size size = sizes.get(i);
141 | Log.v(TAG, "SupportedSize, width: " + size.width + ", height: " + size.height);
142 | // first check whether the aspect ratio matches
143 | if (size.width * mPreviewScale == size.height) {
144 | int delta = Math.abs(mPreviewWidth - size.width);
145 | if (delta == 0) {
146 | return size;
147 | }
148 | if (minDelta > delta) {
149 | minDelta = delta;
150 | index = i;
151 | }
152 | }
153 | }
154 | return sizes.get(index);
155 | }
156 |
157 | /**
158 | * Sets the camera display orientation. This must be set, otherwise the preview is shown with the wrong rotation.
159 | */
160 | private void setDisplayOrientation() {
161 | int rotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
162 | int degrees = 0;
163 | switch (rotation) {
164 | case Surface.ROTATION_0:
165 | degrees = 0;
166 | break;
167 | case Surface.ROTATION_90:
168 | degrees = 90;
169 | break;
170 | case Surface.ROTATION_180:
171 | degrees = 180;
172 | break;
173 | case Surface.ROTATION_270:
174 | degrees = 270;
175 | break;
176 | }
177 | int result;
178 | if (mCameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
179 | result = (mCameraInfo.orientation + degrees) % 360;
180 | result = (360 - result) % 360; // compensate the mirror
181 | } else { // back-facing
182 | result = (mCameraInfo.orientation - degrees + 360) % 360;
183 | }
184 | mCamera.setDisplayOrientation(result);
185 | }
186 |
187 | private void setPictureRotate(int orientation) {
188 | if (orientation == OrientationEventListener.ORIENTATION_UNKNOWN) return;
189 | orientation = (orientation + 45) / 90 * 90;
190 | int rotation;
191 | if (mCameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT) {
192 | rotation = (mCameraInfo.orientation - orientation + 360) % 360;
193 | } else { // back-facing camera
194 | rotation = (mCameraInfo.orientation + orientation) % 360;
195 | }
196 | mLatestRotation = rotation;
197 | }
198 |
199 | public int getLatestRotation() {
200 | return mLatestRotation;
201 | }
202 |
203 | public void setPreviewCallback(PreviewCallback previewCallback) {
204 | mPreviewCallback = previewCallback;
205 | if (mPreviewBuffer == null) {
206 | mPreviewBuffer = new byte[mPreviewWidth * mPreviewHeight * 3 / 2];
207 | }
208 | mCamera.addCallbackBuffer(mPreviewBuffer);
209 | mCamera.setPreviewCallbackWithBuffer(mPreviewCallback); // set the buffer-based preview callback
210 | }
211 |
212 | public void takePicture(Camera.PictureCallback pictureCallback) {
213 | mCamera.takePicture(null, null, pictureCallback);
214 | }
215 |
216 | public void switchCamera() {
217 | mCameraId ^= 1; // toggle between the front and back camera IDs first
218 | releaseCamera();
219 | openCamera();
220 | }
221 |
222 | public void focusOnPoint(int x, int y, int width, int height) {
223 | Log.v(TAG, "touch point (" + x + ", " + y + ")");
224 | if (mCamera == null) {
225 | return;
226 | }
227 | Parameters parameters = mCamera.getParameters();
228 | // 1. Check whether setting focus areas is supported
229 | if (parameters.getMaxNumFocusAreas() > 0) {
230 | // 2. Center the focus area on the touch point; its default side length is 1/4 of the view's short edge
231 | int length = Math.min(width, height) >> 3; // 1/8 of the short edge, i.e. half the side length
232 | int left = x - length;
233 | int top = y - length;
234 | int right = x + length;
235 | int bottom = y + length;
236 | // 3. Map into the camera focus coordinate space, which spans (-1000, -1000) to (1000, 1000)
237 | left = left * 2000 / width - 1000;
238 | top = top * 2000 / height - 1000;
239 | right = right * 2000 / width - 1000;
240 | bottom = bottom * 2000 / height - 1000;
241 | // 4. Clamp the rectangle to the valid range if it exceeds the bounds
242 | left = left < -1000 ? -1000 : left;
243 | top = top < -1000 ? -1000 : top;
244 | right = right > 1000 ? 1000 : right;
245 | bottom = bottom > 1000 ? 1000 : bottom;
246 | Log.d(TAG, "focus area (" + left + ", " + top + ", " + right + ", " + bottom + ")");
247 | ArrayList<Camera.Area> areas = new ArrayList<>();
248 | areas.add(new Camera.Area(new Rect(left, top, right, bottom), 600));
249 | parameters.setFocusAreas(areas);
250 | }
251 | try {
252 | mCamera.cancelAutoFocus(); // cancel any in-progress autofocus first
253 | mCamera.setParameters(parameters);
254 | mCamera.autoFocus(this); // start autofocus; the result is delivered to onAutoFocus()
255 | } catch (Exception e) {
256 | e.printStackTrace();
257 | }
258 | }
259 |
260 | public void handleZoom(boolean isZoomIn) {
261 | if (mParameters.isZoomSupported()) {
262 | int maxZoom = mParameters.getMaxZoom();
263 | int zoom = mParameters.getZoom();
264 | if (isZoomIn && zoom < maxZoom) {
265 | zoom++;
266 | } else if (zoom > 0) {
267 | zoom--;
268 | }
269 | Log.d(TAG, "handleZoom: zoom: " + zoom);
270 | mParameters.setZoom(zoom);
271 | mCamera.setParameters(mParameters);
272 | } else {
273 | Log.i(TAG, "zoom not supported");
274 | }
275 | }
276 |
277 | public Camera getCamera() {
278 | return mCamera;
279 | }
280 |
281 | public int getPreviewWidth() {
282 | return mPreviewWidth;
283 | }
284 |
285 | public int getPreviewHeight() {
286 | return mPreviewHeight;
287 | }
288 |
289 | @Override
290 | public void onAutoFocus(boolean success, Camera camera) {
291 | Log.d(TAG, "onAutoFocus: " + success);
292 | }
293 |
294 | }
295 |
--------------------------------------------------------------------------------
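
Note on the focusOnPoint mapping above: the legacy Camera API expects focus areas in a fixed coordinate space that runs from (-1000, -1000) at the top-left of the preview to (1000, 1000) at the bottom-right. A minimal standalone sketch of the same arithmetic, with illustrative names that are not part of the demo:

import android.graphics.Rect;

public class FocusAreaMath {

    // Map a touch point (x, y) on a width x height view to a focus Rect in the
    // camera's (-1000,-1000)..(1000,1000) space, clamped at the edges.
    public static Rect toFocusRect(int x, int y, int width, int height) {
        int half = Math.min(width, height) >> 3; // half side length, 1/8 of the shorter edge
        int left = clamp((x - half) * 2000 / width - 1000);
        int top = clamp((y - half) * 2000 / height - 1000);
        int right = clamp((x + half) * 2000 / width - 1000);
        int bottom = clamp((y + half) * 2000 / height - 1000);
        return new Rect(left, top, right, bottom);
    }

    private static int clamp(int v) {
        return Math.max(-1000, Math.min(1000, v));
    }
}

For a 1080 x 1920 view and a tap at the center (540, 960) this yields Rect(-250, -141, 250, 140), which focusOnPoint would then wrap in a Camera.Area with weight 600.
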
/app/src/main/java/com/afei/camerademo/glsurfaceview/Camera2GLSurfaceView.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.glsurfaceview;
2 |
3 | import android.app.Activity;
4 | import android.content.Context;
5 | import android.graphics.SurfaceTexture;
6 | import android.opengl.GLES20;
7 | import android.opengl.GLSurfaceView;
8 | import android.util.AttributeSet;
9 | import android.util.Log;
10 | import android.view.MotionEvent;
11 | import android.view.SurfaceHolder;
12 |
13 | import com.afei.camerademo.camera.Camera2Proxy;
14 |
15 | import javax.microedition.khronos.egl.EGLConfig;
16 | import javax.microedition.khronos.opengles.GL10;
17 |
18 | public class Camera2GLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
19 |
20 | private static final String TAG = "Camera2GLSurfaceView";
21 | private Camera2Proxy mCameraProxy;
22 | private SurfaceTexture mSurfaceTexture;
23 | private CameraDrawer mDrawer;
24 | private int mRatioWidth = 0;
25 | private int mRatioHeight = 0;
26 | private float mOldDistance;
27 | private int mTextureId = -1;
28 |
29 | public Camera2GLSurfaceView(Context context) {
30 | this(context, null);
31 | }
32 |
33 | public Camera2GLSurfaceView(Context context, AttributeSet attrs) {
34 | super(context, attrs);
35 | init(context);
36 | }
37 |
38 | private void init(Context context) {
39 | mCameraProxy = new Camera2Proxy((Activity) context);
40 | setEGLContextClientVersion(2);
41 | setRenderer(this);
42 | setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
43 | }
44 |
45 | @Override
46 | public void onSurfaceCreated(GL10 gl, EGLConfig config) {
47 | Log.d(TAG, "onSurfaceCreated. width: " + getWidth() + ", height: " + getHeight());
48 | mDrawer = new CameraDrawer();
49 | mTextureId = OpenGLUtils.getExternalOESTextureID();
50 | mSurfaceTexture = new SurfaceTexture(mTextureId);
51 | mSurfaceTexture.setOnFrameAvailableListener(this);
52 | mCameraProxy.setUpCameraOutputs(getWidth(), getHeight());
53 | mCameraProxy.setPreviewSurface(mSurfaceTexture);
54 | mCameraProxy.openCamera();
55 | }
56 |
57 | @Override
58 | public void onSurfaceChanged(GL10 gl, int width, int height) {
59 | Log.d(TAG, "onSurfaceChanged. width: " + width + ", height: " + height);
60 | int previewWidth = mCameraProxy.getPreviewSize().getWidth();
61 | int previewHeight = mCameraProxy.getPreviewSize().getHeight();
62 | if (width > height) {
63 | setAspectRatio(previewWidth, previewHeight);
64 | } else {
65 | setAspectRatio(previewHeight, previewWidth);
66 | }
67 | GLES20.glViewport(0, 0, width, height);
68 | }
69 |
70 | @Override
71 | public void surfaceDestroyed(SurfaceHolder holder) {
72 | Log.d(TAG, "surfaceDestroyed");
73 | super.surfaceDestroyed(holder);
74 | mCameraProxy.releaseCamera();
75 | }
76 |
77 | @Override
78 | public void onDrawFrame(GL10 gl) {
79 | GLES20.glClearColor(0, 0, 0, 0);
80 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
81 | mSurfaceTexture.updateTexImage();
82 | mDrawer.draw(mTextureId, mCameraProxy.isFrontCamera());
83 | }
84 |
85 | @Override
86 | public void onFrameAvailable(SurfaceTexture surfaceTexture) {
87 | requestRender();
88 | }
89 |
90 | private void setAspectRatio(int width, int height) {
91 | if (width < 0 || height < 0) {
92 | throw new IllegalArgumentException("Size cannot be negative.");
93 | }
94 | mRatioWidth = width;
95 | mRatioHeight = height;
96 | post(new Runnable() {
97 | @Override
98 | public void run() {
99 | requestLayout(); // must run in UI thread
100 | }
101 | });
102 | }
103 |
104 | public Camera2Proxy getCameraProxy() {
105 | return mCameraProxy;
106 | }
107 |
108 | public SurfaceTexture getSurfaceTexture() {
109 | return mSurfaceTexture;
110 | }
111 |
112 | @Override
113 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
114 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
115 | int width = MeasureSpec.getSize(widthMeasureSpec);
116 | int height = MeasureSpec.getSize(heightMeasureSpec);
117 | if (0 == mRatioWidth || 0 == mRatioHeight) {
118 | setMeasuredDimension(width, height);
119 | } else {
120 | if (width < height * mRatioWidth / mRatioHeight) {
121 | setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
122 | } else {
123 | setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
124 | }
125 | }
126 | }
127 |
128 | @Override
129 | public boolean onTouchEvent(MotionEvent event) {
130 | if (event.getPointerCount() == 1) {
131 |             // tap to focus
132 | mCameraProxy.triggerFocusAtPoint((int) event.getX(), (int) event.getY(), getWidth(), getHeight());
133 | return true;
134 | }
135 | switch (event.getAction() & MotionEvent.ACTION_MASK) {
136 | case MotionEvent.ACTION_POINTER_DOWN:
137 | mOldDistance = getFingerSpacing(event);
138 | break;
139 | case MotionEvent.ACTION_MOVE:
140 | float newDistance = getFingerSpacing(event);
141 | if (newDistance > mOldDistance) {
142 | mCameraProxy.handleZoom(true);
143 | } else if (newDistance < mOldDistance) {
144 | mCameraProxy.handleZoom(false);
145 | }
146 | mOldDistance = newDistance;
147 | break;
148 | default:
149 | break;
150 | }
151 | return super.onTouchEvent(event);
152 | }
153 |
154 | private static float getFingerSpacing(MotionEvent event) {
155 | float x = event.getX(0) - event.getX(1);
156 | float y = event.getY(0) - event.getY(1);
157 | return (float) Math.sqrt(x * x + y * y);
158 | }
159 |
160 | }
161 |
--------------------------------------------------------------------------------
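
The onMeasure override above (repeated in all of the preview views) resizes the view to the preview's aspect ratio so the image is letterboxed rather than stretched. A minimal sketch of that fitting step, with an illustrative class and method name:

final class RatioMath {
    // Pick the size with ratio ratioW : ratioH that fits into the measured width x height.
    static int[] fitToRatio(int width, int height, int ratioW, int ratioH) {
        if (ratioW == 0 || ratioH == 0) {
            return new int[]{width, height}; // no ratio set yet: keep the measured size
        }
        if (width < height * ratioW / ratioH) {
            return new int[]{width, width * ratioH / ratioW}; // limited by the width
        } else {
            return new int[]{height * ratioW / ratioH, height}; // limited by the height
        }
    }
}

For a 4:3 preview shown in portrait (setAspectRatio(1080, 1440)) inside a 1080 x 1920 view this returns 1080 x 1440.
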
/app/src/main/java/com/afei/camerademo/glsurfaceview/CameraDrawer.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.glsurfaceview;
2 |
3 | import android.opengl.GLES11Ext;
4 | import android.opengl.GLES20;
5 |
6 | import java.nio.ByteBuffer;
7 | import java.nio.ByteOrder;
8 | import java.nio.FloatBuffer;
9 |
10 | public class CameraDrawer {
11 |
12 | private final String VERTEX_SHADER = "" +
13 | "attribute vec4 vPosition;" +
14 | "attribute vec2 inputTextureCoordinate;" +
15 | "varying vec2 textureCoordinate;" +
16 | "void main()" +
17 | "{"+
18 | "gl_Position = vPosition;"+
19 | "textureCoordinate = inputTextureCoordinate;" +
20 | "}";
21 | private final String FRAGMENT_SHADER = "" +
22 | "#extension GL_OES_EGL_image_external : require\n"+
23 | "precision mediump float;" +
24 | "varying vec2 textureCoordinate;\n" +
25 | "uniform samplerExternalOES s_texture;\n" +
26 | "void main() {" +
27 | " gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
28 | "}";
29 |
30 | private FloatBuffer mVertexBuffer;
31 | private FloatBuffer mBackTextureBuffer;
32 | private FloatBuffer mFrontTextureBuffer;
33 | private ByteBuffer mDrawListBuffer;
34 | private int mProgram;
35 | private int mPositionHandle;
36 | private int mTextureHandle;
37 |
38 | private static final float VERTEXES[] = {
39 | -1.0f, 1.0f,
40 | -1.0f,-1.0f,
41 | 1.0f, -1.0f,
42 | 1.0f, 1.0f,
43 | };
44 |
45 |     // texture coordinates used for the back camera
46 | private static final float TEXTURE_BACK[] = {
47 | 0.0f, 1.0f,
48 | 1.0f, 1.0f,
49 | 1.0f, 0.0f,
50 | 0.0f, 0.0f,
51 | };
52 |
53 |     // texture coordinates used for the front camera (horizontally mirrored)
54 | private static final float TEXTURE_FRONT[] = {
55 | 1.0f, 1.0f,
56 | 0.0f, 1.0f,
57 | 0.0f, 0.0f,
58 | 1.0f, 0.0f,
59 | };
60 |
61 | private static final byte VERTEX_ORDER[] = { 0, 1, 2, 3 }; // order to draw vertices
62 |
63 | private final int VERTEX_SIZE = 2;
64 | private final int VERTEX_STRIDE = VERTEX_SIZE * 4;
65 |
66 | public CameraDrawer() {
67 | // init float buffer for vertex coordinates
68 | mVertexBuffer = ByteBuffer.allocateDirect(VERTEXES.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
69 | mVertexBuffer.put(VERTEXES).position(0);
70 |
71 | // init float buffer for texture coordinates
72 | mBackTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_BACK.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
73 | mBackTextureBuffer.put(TEXTURE_BACK).position(0);
74 | mFrontTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_FRONT.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
75 | mFrontTextureBuffer.put(TEXTURE_FRONT).position(0);
76 |
77 | // init byte buffer for draw list
78 | mDrawListBuffer = ByteBuffer.allocateDirect(VERTEX_ORDER.length).order(ByteOrder.nativeOrder());
79 | mDrawListBuffer.put(VERTEX_ORDER).position(0);
80 |
81 | mProgram = OpenGLUtils.createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
82 | mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
83 | mTextureHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
84 | }
85 |
86 | public void draw(int texture, boolean isFrontCamera) {
87 |         GLES20.glUseProgram(mProgram); // select the shader program to use
88 |         GLES20.glEnable(GLES20.GL_CULL_FACE); // enable face culling
89 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
90 |         GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture); // bind the external OES texture
91 | GLES20.glEnableVertexAttribArray(mPositionHandle);
92 | GLES20.glVertexAttribPointer(mPositionHandle, VERTEX_SIZE, GLES20.GL_FLOAT, false, VERTEX_STRIDE, mVertexBuffer);
93 |
94 | GLES20.glEnableVertexAttribArray(mTextureHandle);
95 | if (isFrontCamera) {
96 | GLES20.glVertexAttribPointer(mTextureHandle, VERTEX_SIZE, GLES20.GL_FLOAT, false, VERTEX_STRIDE, mFrontTextureBuffer);
97 | } else {
98 | GLES20.glVertexAttribPointer(mTextureHandle, VERTEX_SIZE, GLES20.GL_FLOAT, false, VERTEX_STRIDE, mBackTextureBuffer);
99 | }
100 |         // the actual draw call:
101 |         // in GL_TRIANGLE_FAN mode this draws the two triangles (0, 1, 2) and (0, 2, 3)
102 |         // glDrawElements draws by index; index 0 is the first point of VERTEXES, (-1, 1), and so on
103 | GLES20.glDrawElements(GLES20.GL_TRIANGLE_FAN, VERTEX_ORDER.length, GLES20.GL_UNSIGNED_BYTE, mDrawListBuffer);
104 |
105 | GLES20.glDisableVertexAttribArray(mPositionHandle);
106 | GLES20.glDisableVertexAttribArray(mTextureHandle);
107 | }
108 | }
109 |
--------------------------------------------------------------------------------
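
The draw call in CameraDrawer uses GL_TRIANGLE_FAN with the index order {0, 1, 2, 3}, so the full-screen quad is built from the triangles (0, 1, 2) and (0, 2, 3). A tiny illustrative sketch of how a fan index list expands into triangles (not part of the demo):

final class FanExample {
    // Every triangle of a GL_TRIANGLE_FAN shares the first index with its predecessor.
    static int[][] fanToTriangles(byte[] order) {
        int[][] triangles = new int[order.length - 2][];
        for (int i = 0; i < triangles.length; i++) {
            triangles[i] = new int[]{order[0], order[i + 1], order[i + 2]};
        }
        return triangles;
    }
    // fanToTriangles(new byte[]{0, 1, 2, 3}) returns {{0, 1, 2}, {0, 2, 3}}
}
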
/app/src/main/java/com/afei/camerademo/glsurfaceview/CameraGLSurfaceView.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.glsurfaceview;
2 |
3 | import android.app.Activity;
4 | import android.content.Context;
5 | import android.graphics.SurfaceTexture;
6 | import android.opengl.GLES20;
7 | import android.opengl.GLSurfaceView;
8 | import android.util.AttributeSet;
9 | import android.util.Log;
10 | import android.view.MotionEvent;
11 |
12 | import com.afei.camerademo.camera.CameraProxy;
13 |
14 | import javax.microedition.khronos.egl.EGLConfig;
15 | import javax.microedition.khronos.opengles.GL10;
16 |
17 | public class CameraGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
18 |
19 | private static final String TAG = "CameraGLSurfaceView";
20 | private CameraProxy mCameraProxy;
21 | private SurfaceTexture mSurfaceTexture;
22 | private CameraDrawer mDrawer;
23 | private int mRatioWidth = 0;
24 | private int mRatioHeight = 0;
25 | private float mOldDistance;
26 | private int mTextureId = -1;
27 |
28 | public CameraGLSurfaceView(Context context) {
29 | this(context, null);
30 | }
31 |
32 | public CameraGLSurfaceView(Context context, AttributeSet attrs) {
33 | super(context, attrs);
34 | init(context);
35 | }
36 |
37 | private void init(Context context) {
38 | mCameraProxy = new CameraProxy((Activity) context);
39 | setEGLContextClientVersion(2);
40 | setRenderer(this);
41 | setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
42 | }
43 |
44 | @Override
45 | public void onSurfaceCreated(GL10 gl, EGLConfig config) {
46 | mTextureId = OpenGLUtils.getExternalOESTextureID();
47 | mSurfaceTexture = new SurfaceTexture(mTextureId);
48 | mSurfaceTexture.setOnFrameAvailableListener(this);
49 | mCameraProxy.openCamera();
50 | mDrawer = new CameraDrawer();
51 | }
52 |
53 | @Override
54 | public void onSurfaceChanged(GL10 gl, int width, int height) {
55 | Log.d(TAG, "onSurfaceChanged. thread: " + Thread.currentThread().getName());
56 | Log.d(TAG, "onSurfaceChanged. width: " + width + ", height: " + height);
57 | int previewWidth = mCameraProxy.getPreviewWidth();
58 | int previewHeight = mCameraProxy.getPreviewHeight();
59 | if (width > height) {
60 | setAspectRatio(previewWidth, previewHeight);
61 | } else {
62 | setAspectRatio(previewHeight, previewWidth);
63 | }
64 | GLES20.glViewport(0, 0, width, height);
65 | mCameraProxy.startPreview(mSurfaceTexture);
66 | }
67 |
68 | @Override
69 | public void onDrawFrame(GL10 gl) {
70 | GLES20.glClearColor(0, 0, 0, 0);
71 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
72 | mSurfaceTexture.updateTexImage();
73 | mDrawer.draw(mTextureId, mCameraProxy.isFrontCamera());
74 | }
75 |
76 | @Override
77 | public void onFrameAvailable(SurfaceTexture surfaceTexture) {
78 | requestRender();
79 | }
80 |
81 | private void setAspectRatio(int width, int height) {
82 | if (width < 0 || height < 0) {
83 | throw new IllegalArgumentException("Size cannot be negative.");
84 | }
85 | mRatioWidth = width;
86 | mRatioHeight = height;
87 | post(new Runnable() {
88 | @Override
89 | public void run() {
90 | requestLayout(); // must run in UI thread
91 | }
92 | });
93 | }
94 |
95 | public CameraProxy getCameraProxy() {
96 | return mCameraProxy;
97 | }
98 |
99 | public SurfaceTexture getSurfaceTexture() {
100 | return mSurfaceTexture;
101 | }
102 |
103 | @Override
104 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
105 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
106 | int width = MeasureSpec.getSize(widthMeasureSpec);
107 | int height = MeasureSpec.getSize(heightMeasureSpec);
108 | if (0 == mRatioWidth || 0 == mRatioHeight) {
109 | setMeasuredDimension(width, height);
110 | } else {
111 | if (width < height * mRatioWidth / mRatioHeight) {
112 | setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
113 | } else {
114 | setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
115 | }
116 | }
117 | }
118 |
119 | @Override
120 | public boolean onTouchEvent(MotionEvent event) {
121 | if (event.getPointerCount() == 1) {
122 |             // tap to focus
123 | mCameraProxy.focusOnPoint((int) event.getX(), (int) event.getY(), getWidth(), getHeight());
124 | return true;
125 | }
126 | switch (event.getAction() & MotionEvent.ACTION_MASK) {
127 | case MotionEvent.ACTION_POINTER_DOWN:
128 | mOldDistance = getFingerSpacing(event);
129 | break;
130 | case MotionEvent.ACTION_MOVE:
131 | float newDistance = getFingerSpacing(event);
132 | if (newDistance > mOldDistance) {
133 | mCameraProxy.handleZoom(true);
134 | } else if (newDistance < mOldDistance) {
135 | mCameraProxy.handleZoom(false);
136 | }
137 | mOldDistance = newDistance;
138 | break;
139 | default:
140 | break;
141 | }
142 | return super.onTouchEvent(event);
143 | }
144 |
145 | private static float getFingerSpacing(MotionEvent event) {
146 | float x = event.getX(0) - event.getX(1);
147 | float y = event.getY(0) - event.getY(1);
148 | return (float) Math.sqrt(x * x + y * y);
149 | }
150 |
151 | }
152 |
--------------------------------------------------------------------------------
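
Pinch-to-zoom works the same way in every preview view above: the spacing between the first two pointers is recomputed on each ACTION_MOVE and the zoom level is stepped by one in the direction of the change. A compact sketch of that decision, with illustrative names:

import android.view.MotionEvent;

final class ZoomGesture {
    // Returns +1 to zoom in, -1 to zoom out and 0 for no change, based on how the
    // distance between the first two pointers changed since the previous event.
    static int zoomStep(MotionEvent event, float oldDistance) {
        float dx = event.getX(0) - event.getX(1);
        float dy = event.getY(0) - event.getY(1);
        float newDistance = (float) Math.sqrt(dx * dx + dy * dy);
        if (newDistance > oldDistance) {
            return 1;
        } else if (newDistance < oldDistance) {
            return -1;
        }
        return 0;
    }
}
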
/app/src/main/java/com/afei/camerademo/glsurfaceview/GLSurfaceCamera2Activity.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.glsurfaceview;
2 |
3 | import android.content.Intent;
4 | import android.graphics.Bitmap;
5 | import android.graphics.BitmapFactory;
6 | import android.media.Image;
7 | import android.os.AsyncTask;
8 | import android.os.Bundle;
9 | import android.provider.MediaStore;
10 | import android.view.View;
11 | import android.widget.ImageView;
12 |
13 | import androidx.appcompat.app.AppCompatActivity;
14 |
15 | import com.afei.camerademo.ImageUtils;
16 | import com.afei.camerademo.R;
17 | import com.afei.camerademo.camera.Camera2Proxy;
18 |
19 | import java.nio.ByteBuffer;
20 |
21 | public class GLSurfaceCamera2Activity extends AppCompatActivity implements View.OnClickListener {
22 |
23 | private static final String TAG = "GLSurfaceCamera2Act";
24 |
25 | private ImageView mCloseIv;
26 | private ImageView mSwitchCameraIv;
27 | private ImageView mTakePictureIv;
28 | private ImageView mPictureIv;
29 | private Camera2GLSurfaceView mCameraView;
30 |
31 | private Camera2Proxy mCameraProxy;
32 |
33 | @Override
34 | protected void onCreate(Bundle savedInstanceState) {
35 | super.onCreate(savedInstanceState);
36 | setContentView(R.layout.activity_glsurface_camera2);
37 | initView();
38 | }
39 |
40 | private void initView() {
41 | mCloseIv = findViewById(R.id.toolbar_close_iv);
42 | mCloseIv.setOnClickListener(this);
43 | mSwitchCameraIv = findViewById(R.id.toolbar_switch_iv);
44 | mSwitchCameraIv.setOnClickListener(this);
45 | mTakePictureIv = findViewById(R.id.take_picture_iv);
46 | mTakePictureIv.setOnClickListener(this);
47 | mPictureIv = findViewById(R.id.picture_iv);
48 | mPictureIv.setOnClickListener(this);
49 | mPictureIv.setImageBitmap(ImageUtils.getLatestThumbBitmap());
50 | mCameraView = findViewById(R.id.camera_view);
51 | mCameraProxy = mCameraView.getCameraProxy();
52 | }
53 |
54 | @Override
55 | protected void onResume() {
56 | super.onResume();
57 | mCameraView.onResume();
58 | }
59 |
60 | @Override
61 | protected void onPause() {
62 | super.onPause();
63 | mCameraView.onPause();
64 | }
65 |
66 | @Override
67 | public void onClick(View v) {
68 | switch (v.getId()) {
69 | case R.id.toolbar_close_iv:
70 | finish();
71 | break;
72 | case R.id.toolbar_switch_iv:
73 | mCameraProxy.switchCamera();
74 | break;
75 | case R.id.take_picture_iv:
76 |                 mCameraProxy.captureStillPicture(reader -> new ImageSaveTask().execute(reader.acquireNextImage())); // take a picture
77 | break;
78 | case R.id.picture_iv:
79 | Intent intent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
80 | startActivity(intent);
81 | break;
82 | }
83 | }
84 |
85 |     private class ImageSaveTask extends AsyncTask<Image, Void, Bitmap> {
86 |
87 | @Override
88 | protected Bitmap doInBackground(Image... images) {
89 | ByteBuffer buffer = images[0].getPlanes()[0].getBuffer();
90 | byte[] bytes = new byte[buffer.remaining()];
91 | buffer.get(bytes);
92 | if (mCameraProxy.isFrontCamera()) {
93 | Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
94 |                 // the front camera image needs to be mirrored horizontally
95 | Bitmap rotateBitmap = ImageUtils.rotateBitmap(bitmap, 0, true, true);
96 | ImageUtils.saveBitmap(rotateBitmap);
97 | rotateBitmap.recycle();
98 | } else {
99 | ImageUtils.saveImage(bytes);
100 | }
101 | images[0].close();
102 | return ImageUtils.getLatestThumbBitmap();
103 | }
104 |
105 | @Override
106 | protected void onPostExecute(Bitmap bitmap) {
107 | mPictureIv.setImageBitmap(bitmap);
108 | }
109 | }
110 |
111 | }
112 |
--------------------------------------------------------------------------------
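
The ImageSaveTask above copies the encoded JPEG out of the android.media.Image delivered by captureStillPicture; a JPEG image has a single plane whose buffer holds the complete file bytes. A minimal sketch of that extraction step (assumes an ImageFormat.JPEG image, as configured by Camera2Proxy):

import android.media.Image;
import java.nio.ByteBuffer;

final class JpegBytes {
    // Copy the encoded JPEG bytes out of a JPEG-format Image.
    // The caller is still responsible for closing the Image afterwards.
    static byte[] fromImage(Image image) {
        ByteBuffer buffer = image.getPlanes()[0].getBuffer();
        byte[] bytes = new byte[buffer.remaining()];
        buffer.get(bytes);
        return bytes;
    }
}
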
/app/src/main/java/com/afei/camerademo/glsurfaceview/GLSurfaceCameraActivity.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.glsurfaceview;
2 |
3 | import android.content.Intent;
4 | import android.graphics.Bitmap;
5 | import android.graphics.BitmapFactory;
6 | import android.hardware.Camera;
7 | import android.os.AsyncTask;
8 | import android.os.Bundle;
9 | import android.provider.MediaStore;
10 | import android.util.Log;
11 | import android.view.View;
12 | import android.widget.ImageView;
13 |
14 | import androidx.appcompat.app.AppCompatActivity;
15 |
16 | import com.afei.camerademo.ImageUtils;
17 | import com.afei.camerademo.R;
18 | import com.afei.camerademo.camera.CameraProxy;
19 |
20 | public class GLSurfaceCameraActivity extends AppCompatActivity implements View.OnClickListener {
21 |
22 |     private static final String TAG = "GLSurfaceCameraActivity";
23 |
24 | private ImageView mCloseIv;
25 | private ImageView mSwitchCameraIv;
26 | private ImageView mTakePictureIv;
27 | private ImageView mPictureIv;
28 | private CameraGLSurfaceView mCameraView;
29 |
30 | private CameraProxy mCameraProxy;
31 |
32 | @Override
33 | protected void onCreate(Bundle savedInstanceState) {
34 | super.onCreate(savedInstanceState);
35 | setContentView(R.layout.activity_glsurface_camera);
36 | initView();
37 | }
38 |
39 | private void initView() {
40 | mCloseIv = findViewById(R.id.toolbar_close_iv);
41 | mCloseIv.setOnClickListener(this);
42 | mSwitchCameraIv = findViewById(R.id.toolbar_switch_iv);
43 | mSwitchCameraIv.setOnClickListener(this);
44 | mTakePictureIv = findViewById(R.id.take_picture_iv);
45 | mTakePictureIv.setOnClickListener(this);
46 | mPictureIv = findViewById(R.id.picture_iv);
47 | mPictureIv.setOnClickListener(this);
48 | mPictureIv.setImageBitmap(ImageUtils.getLatestThumbBitmap());
49 | mCameraView = findViewById(R.id.camera_view);
50 | mCameraProxy = mCameraView.getCameraProxy();
51 | }
52 |
53 | @Override
54 | public void onClick(View v) {
55 | switch (v.getId()) {
56 | case R.id.toolbar_close_iv:
57 | finish();
58 | break;
59 | case R.id.toolbar_switch_iv:
60 | mCameraProxy.switchCamera();
61 | mCameraProxy.startPreview(mCameraView.getSurfaceTexture());
62 | break;
63 | case R.id.take_picture_iv:
64 | mCameraProxy.takePicture(mPictureCallback);
65 | break;
66 | case R.id.picture_iv:
67 | Intent intent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
68 | startActivity(intent);
69 | break;
70 | }
71 | }
72 |
73 | private final Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
74 | @Override
75 | public void onPictureTaken(byte[] data, Camera camera) {
76 | Log.d(TAG, "onPictureTaken: callback");
77 |             mCameraProxy.startPreview(mCameraView.getSurfaceTexture()); // resume preview after the picture is taken
78 |             new ImageSaveTask().execute(data); // save the picture
79 | }
80 | };
81 |
82 |
83 |     private class ImageSaveTask extends AsyncTask<byte[], Void, Bitmap> {
84 |
85 | @Override
86 | protected Bitmap doInBackground(byte[]... bytes) {
87 | long time = System.currentTimeMillis();
88 | Bitmap bitmap = BitmapFactory.decodeByteArray(bytes[0], 0, bytes[0].length);
89 | Log.d(TAG, "BitmapFactory.decodeByteArray time: " + (System.currentTimeMillis() - time));
90 | int rotation = mCameraProxy.getLatestRotation();
91 | time = System.currentTimeMillis();
92 | Bitmap rotateBitmap = ImageUtils.rotateBitmap(bitmap, rotation, mCameraProxy.isFrontCamera(), true);
93 | Log.d(TAG, "rotateBitmap time: " + (System.currentTimeMillis() - time));
94 | time = System.currentTimeMillis();
95 | ImageUtils.saveBitmap(rotateBitmap);
96 | Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
97 | return ImageUtils.getLatestThumbBitmap();
98 | }
99 |
100 | @Override
101 | protected void onPostExecute(Bitmap bitmap) {
102 | mPictureIv.setImageBitmap(bitmap);
103 | }
104 | }
105 | }
106 |
--------------------------------------------------------------------------------
/app/src/main/java/com/afei/camerademo/glsurfaceview/OpenGLUtils.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.glsurfaceview;
2 |
3 | import android.content.res.Resources;
4 | import android.opengl.GLES11Ext;
5 | import android.opengl.GLES20;
6 | import android.util.Log;
7 |
8 | import java.io.IOException;
9 | import java.io.InputStream;
10 |
11 | import javax.microedition.khronos.opengles.GL10;
12 |
13 | public class OpenGLUtils {
14 |
15 | private static final String TAG = "OpenGLUtils";
16 |
17 | public static int getExternalOESTextureID() {
18 | int[] texture = new int[1];
19 | GLES20.glGenTextures(1, texture, 0);
20 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
21 | GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
22 | GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
23 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
24 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
25 | return texture[0];
26 | }
27 |
28 | public static int loadShader(int type, String source) {
29 | // 1. create shader
30 | int shader = GLES20.glCreateShader(type);
31 | if (shader == GLES20.GL_NONE) {
32 | Log.e(TAG, "create shared failed! type: " + type);
33 | return GLES20.GL_NONE;
34 | }
35 | // 2. load shader source
36 | GLES20.glShaderSource(shader, source);
37 | // 3. compile shared source
38 | GLES20.glCompileShader(shader);
39 | // 4. check compile status
40 | int[] compiled = new int[1];
41 | GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
42 | if (compiled[0] == GLES20.GL_FALSE) { // compile failed
43 | Log.e(TAG, "Error compiling shader. type: " + type + ":");
44 | Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
45 | GLES20.glDeleteShader(shader); // delete shader
46 | shader = GLES20.GL_NONE;
47 | }
48 | return shader;
49 | }
50 |
51 | public static int createProgram(String vertexSource, String fragmentSource) {
52 | // 1. load shader
53 | int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
54 | if (vertexShader == GLES20.GL_NONE) {
55 | Log.e(TAG, "load vertex shader failed! ");
56 | return GLES20.GL_NONE;
57 | }
58 | int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
59 | if (fragmentShader == GLES20.GL_NONE) {
60 | Log.e(TAG, "load fragment shader failed! ");
61 | return GLES20.GL_NONE;
62 | }
63 | // 2. create gl program
64 | int program = GLES20.glCreateProgram();
65 | if (program == GLES20.GL_NONE) {
66 | Log.e(TAG, "create program failed! ");
67 | return GLES20.GL_NONE;
68 | }
69 | // 3. attach shader
70 | GLES20.glAttachShader(program, vertexShader);
71 | GLES20.glAttachShader(program, fragmentShader);
72 | // we can delete shader after attach
73 | GLES20.glDeleteShader(vertexShader);
74 | GLES20.glDeleteShader(fragmentShader);
75 | // 4. link program
76 | GLES20.glLinkProgram(program);
77 | // 5. check link status
78 | int[] linkStatus = new int[1];
79 | GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
80 | if (linkStatus[0] == GLES20.GL_FALSE) { // link failed
81 | Log.e(TAG, "Error link program: ");
82 | Log.e(TAG, GLES20.glGetProgramInfoLog(program));
83 | GLES20.glDeleteProgram(program); // delete program
84 | return GLES20.GL_NONE;
85 | }
86 | return program;
87 | }
88 |
89 | public static String loadFromAssets(String fileName, Resources resources) {
90 | String result = null;
91 | try {
92 | InputStream is = resources.getAssets().open(fileName);
93 | int length = is.available();
94 | byte[] data = new byte[length];
95 | is.read(data);
96 | is.close();
97 | result = new String(data, "UTF-8");
98 |             result = result.replace("\r\n", "\n"); // normalize CRLF line endings
99 | } catch (IOException e) {
100 | e.printStackTrace();
101 | }
102 | return result;
103 | }
104 | }
105 |
--------------------------------------------------------------------------------
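
OpenGLUtils.createProgram compiles both shaders, links them and returns GL_NONE on any failure, so callers only need a single check. A minimal usage sketch; it must run on the GL thread with a current context (for example inside GLSurfaceView.Renderer.onSurfaceCreated), and it assumes it lives in the same package as OpenGLUtils. The class name is illustrative:

import android.opengl.GLES20;
import android.util.Log;

final class ProgramSetup {
    // Build and activate a shader program; returns GL_NONE if compiling or linking failed.
    static int setup(String vertexShader, String fragmentShader) {
        int program = OpenGLUtils.createProgram(vertexShader, fragmentShader);
        if (program == GLES20.GL_NONE) {
            Log.e("ProgramSetup", "createProgram failed");
            return GLES20.GL_NONE;
        }
        GLES20.glUseProgram(program);
        return program;
    }
}
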
/app/src/main/java/com/afei/camerademo/surfaceview/Camera2SurfaceView.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.surfaceview;
2 |
3 | import android.app.Activity;
4 | import android.content.Context;
5 | import android.graphics.Rect;
6 | import android.util.AttributeSet;
7 | import android.util.Log;
8 | import android.util.Size;
9 | import android.view.GestureDetector;
10 | import android.view.MotionEvent;
11 | import android.view.SurfaceHolder;
12 | import android.view.SurfaceView;
13 |
14 | import com.afei.camerademo.camera.Camera2Proxy;
15 |
16 | public class Camera2SurfaceView extends SurfaceView {
17 |
18 | private static final String TAG = "Camera2SurfaceView";
19 |
20 | private Camera2Proxy mCameraProxy;
21 | private Size mPreviewSize;
22 |
23 | private GestureDetector mGestureDetector;
24 | private int mRatioWidth = 0;
25 | private int mRatioHeight = 0;
26 | private float mOldDistance;
27 |
28 | public Camera2SurfaceView(Context context) {
29 | this(context, null);
30 | }
31 |
32 | public Camera2SurfaceView(Context context, AttributeSet attrs) {
33 | this(context, attrs, 0);
34 | }
35 |
36 | public Camera2SurfaceView(Context context, AttributeSet attrs, int defStyleAttr) {
37 | this(context, attrs, defStyleAttr, 0);
38 | }
39 |
40 | public Camera2SurfaceView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
41 | super(context, attrs, defStyleAttr, defStyleRes);
42 | init(context);
43 | }
44 |
45 | private void init(Context context) {
46 | getHolder().addCallback(mSurfaceHolderCallback);
47 | mCameraProxy = new Camera2Proxy((Activity) context);
48 | mGestureDetector = new GestureDetector(context, new GestureDetector.SimpleOnGestureListener() {
49 | @Override
50 |             public boolean onSingleTapUp(MotionEvent e) { // single tap to focus
51 | mCameraProxy.triggerFocusAtPoint(e.getX(), e.getY(), getWidth(), getHeight());
52 | return true;
53 | }
54 | });
55 |         setKeepScreenOn(true); // keep the screen on
56 | }
57 |
58 | private final SurfaceHolder.Callback mSurfaceHolderCallback = new SurfaceHolder.Callback() {
59 | @Override
60 | public void surfaceCreated(SurfaceHolder holder) {
61 | Rect rect = holder.getSurfaceFrame();
62 | mCameraProxy.setUpCameraOutputs(rect.width(), rect.height());
63 | mPreviewSize = mCameraProxy.getPreviewSize();
64 | holder.setFixedSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
65 | }
66 |
67 | @Override
68 | public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
69 | Log.d(TAG, "surfaceChanged: width: " + width + ", height: " + height);
70 | float ratio;
71 | if (width > height) {
72 | ratio = height * 1.0f / width;
73 | } else {
74 | ratio = width * 1.0f / height;
75 | }
76 |             setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth()); // fixed portrait display
77 | if (ratio == mPreviewSize.getHeight() * 1f / mPreviewSize.getWidth()) {
78 |                 mCameraProxy.setPreviewSurface(holder); // set the surface only after the view size is fixed
79 | mCameraProxy.openCamera();
80 | }
81 | }
82 |
83 | @Override
84 | public void surfaceDestroyed(SurfaceHolder holder) {
85 | mCameraProxy.releaseCamera();
86 | }
87 | };
88 |
89 | private void setAspectRatio(int width, int height) {
90 | if (width < 0 || height < 0) {
91 | throw new IllegalArgumentException("Size cannot be negative.");
92 | }
93 | mRatioWidth = width;
94 | mRatioHeight = height;
95 | requestLayout();
96 | }
97 |
98 | public Camera2Proxy getCameraProxy() {
99 | return mCameraProxy;
100 | }
101 |
102 | @Override
103 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
104 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
105 | int width = MeasureSpec.getSize(widthMeasureSpec);
106 | int height = MeasureSpec.getSize(heightMeasureSpec);
107 | if (0 == mRatioWidth || 0 == mRatioHeight) {
108 | setMeasuredDimension(width, height);
109 | } else {
110 | if (width < height * mRatioWidth / mRatioHeight) {
111 | setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
112 | } else {
113 | setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
114 | }
115 | }
116 | }
117 |
118 | @Override
119 | public boolean onTouchEvent(MotionEvent event) {
120 | if (event.getPointerCount() == 1) {
121 | mCameraProxy.triggerFocusAtPoint(event.getX(), event.getY(), getWidth(), getHeight());
122 | return true;
123 | }
124 | switch (event.getAction() & MotionEvent.ACTION_MASK) {
125 | case MotionEvent.ACTION_POINTER_DOWN:
126 | mOldDistance = getFingerSpacing(event);
127 | break;
128 | case MotionEvent.ACTION_MOVE:
129 | float newDistance = getFingerSpacing(event);
130 | if (newDistance > mOldDistance) {
131 | mCameraProxy.handleZoom(true);
132 | } else if (newDistance < mOldDistance) {
133 | mCameraProxy.handleZoom(false);
134 | }
135 | mOldDistance = newDistance;
136 | break;
137 | default:
138 | break;
139 | }
140 | return super.onTouchEvent(event);
141 | }
142 |
143 | private static float getFingerSpacing(MotionEvent event) {
144 | float x = event.getX(0) - event.getX(1);
145 | float y = event.getY(0) - event.getY(1);
146 | return (float) Math.sqrt(x * x + y * y);
147 | }
148 |
149 | }
150 |
--------------------------------------------------------------------------------
/app/src/main/java/com/afei/camerademo/surfaceview/CameraSurfaceView.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.surfaceview;
2 |
3 | import android.app.Activity;
4 | import android.content.Context;
5 | import android.util.AttributeSet;
6 | import android.view.MotionEvent;
7 | import android.view.SurfaceHolder;
8 | import android.view.SurfaceView;
9 |
10 | import com.afei.camerademo.camera.CameraProxy;
11 |
12 | public class CameraSurfaceView extends SurfaceView {
13 |
14 | private CameraProxy mCameraProxy;
15 | private int mRatioWidth = 0;
16 | private int mRatioHeight = 0;
17 | private float mOldDistance;
18 |
19 | public CameraSurfaceView(Context context) {
20 | this(context, null);
21 | }
22 |
23 | public CameraSurfaceView(Context context, AttributeSet attrs) {
24 | this(context, attrs, 0);
25 | }
26 |
27 | public CameraSurfaceView(Context context, AttributeSet attrs, int defStyleAttr) {
28 | this(context, attrs, defStyleAttr, 0);
29 | }
30 |
31 | public CameraSurfaceView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
32 | super(context, attrs, defStyleAttr, defStyleRes);
33 | init(context);
34 | }
35 |
36 | private void init(Context context) {
37 | getHolder().addCallback(mSurfaceHolderCallback);
38 | mCameraProxy = new CameraProxy((Activity) context);
39 | }
40 |
41 | private final SurfaceHolder.Callback mSurfaceHolderCallback = new SurfaceHolder.Callback() {
42 | @Override
43 | public void surfaceCreated(SurfaceHolder holder) {
44 | mCameraProxy.openCamera();
45 | }
46 |
47 | @Override
48 | public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
49 | int previewWidth = mCameraProxy.getPreviewWidth();
50 | int previewHeight = mCameraProxy.getPreviewHeight();
51 | if (width > height) {
52 | setAspectRatio(previewWidth, previewHeight);
53 | } else {
54 | setAspectRatio(previewHeight, previewWidth);
55 | }
56 | mCameraProxy.startPreview(holder);
57 | }
58 |
59 | @Override
60 | public void surfaceDestroyed(SurfaceHolder holder) {
61 | mCameraProxy.releaseCamera();
62 | }
63 | };
64 |
65 | private void setAspectRatio(int width, int height) {
66 | if (width < 0 || height < 0) {
67 | throw new IllegalArgumentException("Size cannot be negative.");
68 | }
69 | mRatioWidth = width;
70 | mRatioHeight = height;
71 | requestLayout();
72 | }
73 |
74 | public CameraProxy getCameraProxy() {
75 | return mCameraProxy;
76 | }
77 |
78 | @Override
79 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
80 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
81 | int width = MeasureSpec.getSize(widthMeasureSpec);
82 | int height = MeasureSpec.getSize(heightMeasureSpec);
83 | if (0 == mRatioWidth || 0 == mRatioHeight) {
84 | setMeasuredDimension(width, height);
85 | } else {
86 | if (width < height * mRatioWidth / mRatioHeight) {
87 | setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
88 | } else {
89 | setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
90 | }
91 | }
92 | }
93 |
94 | @Override
95 | public boolean onTouchEvent(MotionEvent event) {
96 | if (event.getPointerCount() == 1) {
97 |             // tap to focus
98 | mCameraProxy.focusOnPoint((int) event.getX(), (int) event.getY(), getWidth(), getHeight());
99 | return true;
100 | }
101 | switch (event.getAction() & MotionEvent.ACTION_MASK) {
102 | case MotionEvent.ACTION_POINTER_DOWN:
103 | mOldDistance = getFingerSpacing(event);
104 | break;
105 | case MotionEvent.ACTION_MOVE:
106 | float newDistance = getFingerSpacing(event);
107 | if (newDistance > mOldDistance) {
108 | mCameraProxy.handleZoom(true);
109 | } else if (newDistance < mOldDistance) {
110 | mCameraProxy.handleZoom(false);
111 | }
112 | mOldDistance = newDistance;
113 | break;
114 | default:
115 | break;
116 | }
117 | return super.onTouchEvent(event);
118 | }
119 |
120 | private static float getFingerSpacing(MotionEvent event) {
121 | float x = event.getX(0) - event.getX(1);
122 | float y = event.getY(0) - event.getY(1);
123 | return (float) Math.sqrt(x * x + y * y);
124 | }
125 |
126 | }
127 |
--------------------------------------------------------------------------------
/app/src/main/java/com/afei/camerademo/surfaceview/SurfaceCamera2Activity.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.surfaceview;
2 |
3 | import android.content.Intent;
4 | import android.graphics.Bitmap;
5 | import android.graphics.BitmapFactory;
6 | import android.media.Image;
7 | import android.os.AsyncTask;
8 | import android.os.Bundle;
9 | import android.provider.MediaStore;
10 | import android.view.View;
11 | import android.widget.ImageView;
12 |
13 | import androidx.appcompat.app.AppCompatActivity;
14 |
15 | import com.afei.camerademo.ImageUtils;
16 | import com.afei.camerademo.R;
17 | import com.afei.camerademo.camera.Camera2Proxy;
18 |
19 | import java.nio.ByteBuffer;
20 |
21 | public class SurfaceCamera2Activity extends AppCompatActivity implements View.OnClickListener {
22 |
23 | private static final String TAG = "SurfaceCamera2Activity";
24 |
25 | private ImageView mCloseIv;
26 | private ImageView mSwitchCameraIv;
27 | private ImageView mTakePictureIv;
28 | private ImageView mPictureIv;
29 | private Camera2SurfaceView mCameraView;
30 |
31 | private Camera2Proxy mCameraProxy;
32 |
33 | @Override
34 | protected void onCreate(Bundle savedInstanceState) {
35 | super.onCreate(savedInstanceState);
36 | setContentView(R.layout.activity_surface_camera2);
37 | initView();
38 | }
39 |
40 | private void initView() {
41 | mCloseIv = findViewById(R.id.toolbar_close_iv);
42 | mCloseIv.setOnClickListener(this);
43 | mSwitchCameraIv = findViewById(R.id.toolbar_switch_iv);
44 | mSwitchCameraIv.setOnClickListener(this);
45 | mTakePictureIv = findViewById(R.id.take_picture_iv);
46 | mTakePictureIv.setOnClickListener(this);
47 | mPictureIv = findViewById(R.id.picture_iv);
48 | mPictureIv.setOnClickListener(this);
49 | mPictureIv.setImageBitmap(ImageUtils.getLatestThumbBitmap());
50 | mCameraView = findViewById(R.id.camera_view);
51 | mCameraProxy = mCameraView.getCameraProxy();
52 | }
53 |
54 | @Override
55 | public void onClick(View v) {
56 | switch (v.getId()) {
57 | case R.id.toolbar_close_iv:
58 | finish();
59 | break;
60 | case R.id.toolbar_switch_iv:
61 | mCameraProxy.switchCamera();
62 | break;
63 | case R.id.take_picture_iv:
64 | mCameraProxy.captureStillPicture(reader -> new ImageSaveTask().execute(reader.acquireNextImage()));
65 | break;
66 | case R.id.picture_iv:
67 | Intent intent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
68 | startActivity(intent);
69 | break;
70 | }
71 | }
72 |
73 |     private class ImageSaveTask extends AsyncTask<Image, Void, Bitmap> {
74 |
75 | @Override
76 |         protected Bitmap doInBackground(Image... images) {
77 | ByteBuffer buffer = images[0].getPlanes()[0].getBuffer();
78 | byte[] bytes = new byte[buffer.remaining()];
79 | buffer.get(bytes);
80 | if (mCameraProxy.isFrontCamera()) {
81 | Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
82 |                 // the front camera image needs to be mirrored horizontally
83 | Bitmap rotateBitmap = ImageUtils.rotateBitmap(bitmap, 0, true, true);
84 | ImageUtils.saveBitmap(rotateBitmap);
85 | rotateBitmap.recycle();
86 | } else {
87 | ImageUtils.saveImage(bytes);
88 | }
89 | images[0].close();
90 | return ImageUtils.getLatestThumbBitmap();
91 | }
92 |
93 | @Override
94 | protected void onPostExecute(Bitmap bitmap) {
95 | mPictureIv.setImageBitmap(bitmap);
96 | }
97 | }
98 | }
99 |
--------------------------------------------------------------------------------
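
For the front camera the activities above mirror the decoded JPEG before saving it (the rotateBitmap(..., true, true) call). A generic Matrix-based sketch of such a rotate-and-mirror step; it makes no assumption about how the project's ImageUtils actually implements it, and the class name is illustrative:

import android.graphics.Bitmap;
import android.graphics.Matrix;

final class MirrorExample {
    // Rotate a bitmap by the given degrees and optionally mirror it horizontally.
    static Bitmap rotateAndMirror(Bitmap src, int degrees, boolean mirrorX) {
        Matrix matrix = new Matrix();
        if (mirrorX) {
            matrix.postScale(-1f, 1f, src.getWidth() / 2f, src.getHeight() / 2f);
        }
        matrix.postRotate(degrees);
        return Bitmap.createBitmap(src, 0, 0, src.getWidth(), src.getHeight(), matrix, true);
    }
}
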
/app/src/main/java/com/afei/camerademo/surfaceview/SurfaceCameraActivity.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.surfaceview;
2 |
3 | import android.content.Intent;
4 | import android.graphics.Bitmap;
5 | import android.graphics.BitmapFactory;
6 | import android.hardware.Camera;
7 | import android.os.AsyncTask;
8 | import android.os.Bundle;
9 | import android.provider.MediaStore;
10 | import android.util.Log;
11 | import android.view.View;
12 | import android.widget.ImageView;
13 |
14 | import androidx.appcompat.app.AppCompatActivity;
15 |
16 | import com.afei.camerademo.ImageUtils;
17 | import com.afei.camerademo.R;
18 | import com.afei.camerademo.camera.CameraProxy;
19 |
20 | public class SurfaceCameraActivity extends AppCompatActivity implements View.OnClickListener {
21 |
22 | private static final String TAG = "SurfaceCameraActivity";
23 |
24 | private ImageView mCloseIv;
25 | private ImageView mSwitchCameraIv;
26 | private ImageView mTakePictureIv;
27 | private ImageView mPictureIv;
28 | private CameraSurfaceView mCameraView;
29 |
30 | private CameraProxy mCameraProxy;
31 |
32 | @Override
33 | protected void onCreate(Bundle savedInstanceState) {
34 | super.onCreate(savedInstanceState);
35 | setContentView(R.layout.activity_surface_camera);
36 | initView();
37 | }
38 |
39 | private void initView() {
40 | mCloseIv = findViewById(R.id.toolbar_close_iv);
41 | mCloseIv.setOnClickListener(this);
42 | mSwitchCameraIv = findViewById(R.id.toolbar_switch_iv);
43 | mSwitchCameraIv.setOnClickListener(this);
44 | mTakePictureIv = findViewById(R.id.take_picture_iv);
45 | mTakePictureIv.setOnClickListener(this);
46 | mPictureIv = findViewById(R.id.picture_iv);
47 | mPictureIv.setOnClickListener(this);
48 | mPictureIv.setImageBitmap(ImageUtils.getLatestThumbBitmap());
49 | mCameraView = findViewById(R.id.camera_view);
50 | mCameraProxy = mCameraView.getCameraProxy();
51 | }
52 |
53 | @Override
54 | public void onClick(View v) {
55 | switch (v.getId()) {
56 | case R.id.toolbar_close_iv:
57 | finish();
58 | break;
59 | case R.id.toolbar_switch_iv:
60 | mCameraProxy.switchCamera();
61 | mCameraProxy.startPreview(mCameraView.getHolder());
62 | break;
63 | case R.id.take_picture_iv:
64 | mCameraProxy.takePicture(mPictureCallback);
65 | break;
66 | case R.id.picture_iv:
67 | Intent intent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
68 | startActivity(intent);
69 | break;
70 | }
71 | }
72 |
73 | private final Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
74 | @Override
75 | public void onPictureTaken(byte[] data, Camera camera) {
76 |             mCameraProxy.startPreview(mCameraView.getHolder()); // resume preview after the picture is taken
77 |             new ImageSaveTask().execute(data); // save the picture
78 | }
79 | };
80 |
81 |     private class ImageSaveTask extends AsyncTask<byte[], Void, Bitmap> {
82 |
83 | @Override
84 | protected Bitmap doInBackground(byte[]... bytes) {
85 | long time = System.currentTimeMillis();
86 | Bitmap bitmap = BitmapFactory.decodeByteArray(bytes[0], 0, bytes[0].length);
87 | Log.d(TAG, "BitmapFactory.decodeByteArray time: " + (System.currentTimeMillis() - time));
88 | int rotation = mCameraProxy.getLatestRotation();
89 | time = System.currentTimeMillis();
90 | Bitmap rotateBitmap = ImageUtils.rotateBitmap(bitmap, rotation, mCameraProxy.isFrontCamera(), true);
91 | Log.d(TAG, "rotateBitmap time: " + (System.currentTimeMillis() - time));
92 | time = System.currentTimeMillis();
93 | ImageUtils.saveBitmap(rotateBitmap);
94 | Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
95 | rotateBitmap.recycle();
96 | return ImageUtils.getLatestThumbBitmap();
97 | }
98 |
99 | @Override
100 | protected void onPostExecute(Bitmap bitmap) {
101 | mPictureIv.setImageBitmap(bitmap);
102 | }
103 | }
104 | }
105 |
--------------------------------------------------------------------------------
/app/src/main/java/com/afei/camerademo/textureview/Camera2TextureView.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.textureview;
2 |
3 | import android.app.Activity;
4 | import android.content.Context;
5 | import android.graphics.SurfaceTexture;
6 | import android.util.AttributeSet;
7 | import android.util.Log;
8 | import android.view.MotionEvent;
9 | import android.view.TextureView;
10 |
11 | import com.afei.camerademo.camera.Camera2Proxy;
12 |
13 | public class Camera2TextureView extends TextureView {
14 |
15 |     private static final String TAG = "Camera2TextureView";
16 | private Camera2Proxy mCameraProxy;
17 | private int mRatioWidth = 0;
18 | private int mRatioHeight = 0;
19 | private float mOldDistance;
20 |
21 | public Camera2TextureView(Context context) {
22 | this(context, null);
23 | }
24 |
25 | public Camera2TextureView(Context context, AttributeSet attrs) {
26 | this(context, attrs, 0);
27 | }
28 |
29 | public Camera2TextureView(Context context, AttributeSet attrs, int defStyleAttr) {
30 | this(context, attrs, defStyleAttr, 0);
31 | }
32 |
33 | public Camera2TextureView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
34 | super(context, attrs, defStyleAttr, defStyleRes);
35 | init(context);
36 | }
37 |
38 | private void init(Context context) {
39 | setSurfaceTextureListener(mSurfaceTextureListener);
40 | mCameraProxy = new Camera2Proxy((Activity) context);
41 | }
42 |
43 | private SurfaceTextureListener mSurfaceTextureListener = new SurfaceTextureListener() {
44 | @Override
45 | public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
46 | Log.v(TAG, "onSurfaceTextureAvailable. width: " + width + ", height: " + height);
47 | mCameraProxy.setUpCameraOutputs(width, height);
48 | mCameraProxy.openCamera();
49 | mCameraProxy.setPreviewSurface(surface);
50 | // resize TextureView
51 | int previewWidth = mCameraProxy.getPreviewSize().getWidth();
52 | int previewHeight = mCameraProxy.getPreviewSize().getHeight();
53 | if (width > height) {
54 | setAspectRatio(previewWidth, previewHeight);
55 | } else {
56 | setAspectRatio(previewHeight, previewWidth);
57 | }
58 | }
59 |
60 | @Override
61 | public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
62 | Log.v(TAG, "onSurfaceTextureSizeChanged. width: " + width + ", height: " + height);
63 | }
64 |
65 | @Override
66 | public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
67 | Log.v(TAG, "onSurfaceTextureDestroyed");
68 | mCameraProxy.releaseCamera();
69 | return false;
70 | }
71 |
72 | @Override
73 | public void onSurfaceTextureUpdated(SurfaceTexture surface) {
74 | }
75 | };
76 |
77 | private void setAspectRatio(int width, int height) {
78 | if (width < 0 || height < 0) {
79 | throw new IllegalArgumentException("Size cannot be negative.");
80 | }
81 | mRatioWidth = width;
82 | mRatioHeight = height;
83 | requestLayout();
84 | }
85 |
86 | public Camera2Proxy getCameraProxy() {
87 | return mCameraProxy;
88 | }
89 |
90 | @Override
91 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
92 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
93 | int width = MeasureSpec.getSize(widthMeasureSpec);
94 | int height = MeasureSpec.getSize(heightMeasureSpec);
95 | if (0 == mRatioWidth || 0 == mRatioHeight) {
96 | setMeasuredDimension(width, height);
97 | } else {
98 | if (width < height * mRatioWidth / mRatioHeight) {
99 | setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
100 | } else {
101 | setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
102 | }
103 | }
104 | }
105 |
106 | @Override
107 | public boolean onTouchEvent(MotionEvent event) {
108 | if (event.getPointerCount() == 1) {
109 | mCameraProxy.triggerFocusAtPoint((int) event.getX(), (int) event.getY(), getWidth(), getHeight());
110 | return true;
111 | }
112 | switch (event.getAction() & MotionEvent.ACTION_MASK) {
113 | case MotionEvent.ACTION_POINTER_DOWN:
114 | mOldDistance = getFingerSpacing(event);
115 | break;
116 | case MotionEvent.ACTION_MOVE:
117 | float newDistance = getFingerSpacing(event);
118 | if (newDistance > mOldDistance) {
119 | mCameraProxy.handleZoom(true);
120 | } else if (newDistance < mOldDistance) {
121 | mCameraProxy.handleZoom(false);
122 | }
123 | mOldDistance = newDistance;
124 | break;
125 | default:
126 | break;
127 | }
128 | return super.onTouchEvent(event);
129 | }
130 |
131 | private static float getFingerSpacing(MotionEvent event) {
132 | float x = event.getX(0) - event.getX(1);
133 | float y = event.getY(0) - event.getY(1);
134 | return (float) Math.sqrt(x * x + y * y);
135 | }
136 |
137 | }
138 |
--------------------------------------------------------------------------------
/app/src/main/java/com/afei/camerademo/textureview/CameraTextureView.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.textureview;
2 |
3 | import android.app.Activity;
4 | import android.content.Context;
5 | import android.graphics.SurfaceTexture;
6 | import android.util.AttributeSet;
7 | import android.util.Log;
8 | import android.view.MotionEvent;
9 | import android.view.TextureView;
10 |
11 | import com.afei.camerademo.camera.CameraProxy;
12 |
13 | public class CameraTextureView extends TextureView {
14 |
15 | private static final String TAG = "CameraTextureView";
16 | private CameraProxy mCameraProxy;
17 | private int mRatioWidth = 0;
18 | private int mRatioHeight = 0;
19 | private float mOldDistance;
20 |
21 | public CameraTextureView(Context context) {
22 | this(context, null);
23 | }
24 |
25 | public CameraTextureView(Context context, AttributeSet attrs) {
26 | this(context, attrs, 0);
27 | }
28 |
29 | public CameraTextureView(Context context, AttributeSet attrs, int defStyleAttr) {
30 | this(context, attrs, defStyleAttr, 0);
31 | }
32 |
33 | public CameraTextureView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
34 | super(context, attrs, defStyleAttr, defStyleRes);
35 | init(context);
36 | }
37 |
38 | private void init(Context context) {
39 | setSurfaceTextureListener(mSurfaceTextureListener);
40 | mCameraProxy = new CameraProxy((Activity) context);
41 | }
42 |
43 | private SurfaceTextureListener mSurfaceTextureListener = new SurfaceTextureListener() {
44 | @Override
45 | public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
46 | Log.v(TAG, "onSurfaceTextureAvailable. width: " + width + ", height: " + height);
47 | mCameraProxy.openCamera();
48 | int previewWidth = mCameraProxy.getPreviewWidth();
49 | int previewHeight = mCameraProxy.getPreviewHeight();
50 | if (width > height) {
51 | setAspectRatio(previewWidth, previewHeight);
52 | } else {
53 | setAspectRatio(previewHeight, previewWidth);
54 | }
55 | mCameraProxy.startPreview(surface);
56 | }
57 |
58 | @Override
59 | public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
60 | Log.v(TAG, "onSurfaceTextureSizeChanged. width: " + width + ", height: " + height);
61 | }
62 |
63 | @Override
64 | public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
65 | Log.v(TAG, "onSurfaceTextureDestroyed");
66 | mCameraProxy.releaseCamera();
67 | return false;
68 | }
69 |
70 | @Override
71 | public void onSurfaceTextureUpdated(SurfaceTexture surface) {
72 | }
73 | };
74 |
75 | private void setAspectRatio(int width, int height) {
76 | if (width < 0 || height < 0) {
77 | throw new IllegalArgumentException("Size cannot be negative.");
78 | }
79 | mRatioWidth = width;
80 | mRatioHeight = height;
81 | requestLayout();
82 | }
83 |
84 | public CameraProxy getCameraProxy() {
85 | return mCameraProxy;
86 | }
87 |
88 | @Override
89 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
90 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
91 | int width = MeasureSpec.getSize(widthMeasureSpec);
92 | int height = MeasureSpec.getSize(heightMeasureSpec);
93 | if (0 == mRatioWidth || 0 == mRatioHeight) {
94 | setMeasuredDimension(width, height);
95 | } else {
96 | if (width < height * mRatioWidth / mRatioHeight) {
97 | setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
98 | } else {
99 | setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
100 | }
101 | }
102 | }
103 |
104 | @Override
105 | public boolean onTouchEvent(MotionEvent event) {
106 | if (event.getPointerCount() == 1) {
107 |             // tap to focus
108 | mCameraProxy.focusOnPoint((int) event.getX(), (int) event.getY(), getWidth(), getHeight());
109 | return true;
110 | }
111 | switch (event.getAction() & MotionEvent.ACTION_MASK) {
112 | case MotionEvent.ACTION_POINTER_DOWN:
113 | mOldDistance = getFingerSpacing(event);
114 | break;
115 | case MotionEvent.ACTION_MOVE:
116 | float newDistance = getFingerSpacing(event);
117 | if (newDistance > mOldDistance) {
118 | mCameraProxy.handleZoom(true);
119 | } else if (newDistance < mOldDistance) {
120 | mCameraProxy.handleZoom(false);
121 | }
122 | mOldDistance = newDistance;
123 | break;
124 | default:
125 | break;
126 | }
127 | return super.onTouchEvent(event);
128 | }
129 |
130 | private static float getFingerSpacing(MotionEvent event) {
131 | float x = event.getX(0) - event.getX(1);
132 | float y = event.getY(0) - event.getY(1);
133 | return (float) Math.sqrt(x * x + y * y);
134 | }
135 |
136 | }
137 |
--------------------------------------------------------------------------------
/app/src/main/java/com/afei/camerademo/textureview/TextureCamera2Activity.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.textureview;
2 |
3 | import android.content.Intent;
4 | import android.graphics.Bitmap;
5 | import android.graphics.BitmapFactory;
6 | import android.media.Image;
7 | import android.os.AsyncTask;
8 | import android.os.Bundle;
9 | import android.provider.MediaStore;
10 | import android.view.View;
11 | import android.widget.ImageView;
12 |
13 | import androidx.appcompat.app.AppCompatActivity;
14 |
15 | import com.afei.camerademo.ImageUtils;
16 | import com.afei.camerademo.R;
17 | import com.afei.camerademo.camera.Camera2Proxy;
18 |
19 | import java.nio.ByteBuffer;
20 |
21 | public class TextureCamera2Activity extends AppCompatActivity implements View.OnClickListener {
22 |
23 |     private static final String TAG = "TextureCamera2Activity";
24 |
25 | private ImageView mCloseIv;
26 | private ImageView mSwitchCameraIv;
27 | private ImageView mTakePictureIv;
28 | private ImageView mPictureIv;
29 | private Camera2TextureView mCameraView;
30 |
31 | private Camera2Proxy mCameraProxy;
32 |
33 | @Override
34 | protected void onCreate(Bundle savedInstanceState) {
35 | super.onCreate(savedInstanceState);
36 | setContentView(R.layout.activity_texture_camera2);
37 | initView();
38 | }
39 |
40 | private void initView() {
41 | mCloseIv = findViewById(R.id.toolbar_close_iv);
42 | mCloseIv.setOnClickListener(this);
43 | mSwitchCameraIv = findViewById(R.id.toolbar_switch_iv);
44 | mSwitchCameraIv.setOnClickListener(this);
45 | mTakePictureIv = findViewById(R.id.take_picture_iv);
46 | mTakePictureIv.setOnClickListener(this);
47 | mPictureIv = findViewById(R.id.picture_iv);
48 | mPictureIv.setOnClickListener(this);
49 | mPictureIv.setImageBitmap(ImageUtils.getLatestThumbBitmap());
50 | mCameraView = findViewById(R.id.camera_view);
51 | mCameraProxy = mCameraView.getCameraProxy();
52 | }
53 |
54 | @Override
55 | public void onClick(View v) {
56 | switch (v.getId()) {
57 | case R.id.toolbar_close_iv:
58 | finish();
59 | break;
60 | case R.id.toolbar_switch_iv:
61 | mCameraProxy.switchCamera();
62 | break;
63 | case R.id.take_picture_iv:
64 |                 mCameraProxy.captureStillPicture(reader -> new ImageSaveTask().execute(reader.acquireNextImage())); // take a picture
65 | break;
66 | case R.id.picture_iv:
67 | Intent intent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
68 | startActivity(intent);
69 | break;
70 | }
71 | }
72 |
73 |     private class ImageSaveTask extends AsyncTask<Image, Void, Bitmap> {
74 |
75 | @Override
76 |         protected Bitmap doInBackground(Image... images) {
77 | ByteBuffer buffer = images[0].getPlanes()[0].getBuffer();
78 | byte[] bytes = new byte[buffer.remaining()];
79 | buffer.get(bytes);
80 | if (mCameraProxy.isFrontCamera()) {
81 | Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
82 |                 // the front camera image needs to be mirrored horizontally
83 | Bitmap rotateBitmap = ImageUtils.rotateBitmap(bitmap, 0, true, true);
84 | ImageUtils.saveBitmap(rotateBitmap);
85 | rotateBitmap.recycle();
86 | } else {
87 | ImageUtils.saveImage(bytes);
88 | }
89 | images[0].close();
90 | return ImageUtils.getLatestThumbBitmap();
91 | }
92 |
93 | @Override
94 | protected void onPostExecute(Bitmap bitmap) {
95 | mPictureIv.setImageBitmap(bitmap);
96 | }
97 | }
98 | }
99 |
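
Note on the capture path above: the lambda handed to Camera2Proxy.captureStillPicture() is assumed to be installed as an ImageReader.OnImageAvailableListener on a JPEG-configured ImageReader (Camera2Proxy itself is defined elsewhere in this repo). A minimal, reference-only sketch of that callback shape, using only standard android.media APIs; the class and field names here are illustrative, not the project's:

    import android.media.Image;
    import android.media.ImageReader;
    import java.nio.ByteBuffer;

    // Illustrative helper, not part of the project: shows the usual way to pull
    // the compressed bytes out of an Image delivered by a JPEG ImageReader.
    class JpegCaptureSketch {
        static final ImageReader.OnImageAvailableListener LISTENER = reader -> {
            Image image = reader.acquireNextImage();                  // latest captured still
            try {
                ByteBuffer buffer = image.getPlanes()[0].getBuffer(); // JPEG data lives in plane 0
                byte[] jpegBytes = new byte[buffer.remaining()];
                buffer.get(jpegBytes);
                // hand jpegBytes to a background task for decoding / saving
            } finally {
                image.close();                                        // release the buffer back to the reader
            }
        };
    }

Closing the Image promptly matters because the ImageReader owns a fixed number of buffers; the activity's ImageSaveTask does the same after the bytes are consumed.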
--------------------------------------------------------------------------------
/app/src/main/java/com/afei/camerademo/textureview/TextureCameraActivity.java:
--------------------------------------------------------------------------------
1 | package com.afei.camerademo.textureview;
2 |
3 | import android.content.Intent;
4 | import android.graphics.Bitmap;
5 | import android.graphics.BitmapFactory;
6 | import android.hardware.Camera;
7 | import android.os.AsyncTask;
8 | import android.os.Bundle;
9 | import android.provider.MediaStore;
10 | import android.util.Log;
11 | import android.view.View;
12 | import android.widget.ImageView;
13 |
14 | import androidx.appcompat.app.AppCompatActivity;
15 |
16 | import com.afei.camerademo.ImageUtils;
17 | import com.afei.camerademo.R;
18 | import com.afei.camerademo.camera.CameraProxy;
19 |
20 | public class TextureCameraActivity extends AppCompatActivity implements View.OnClickListener {
21 |
22 | private static final String TAG = "TextureCameraActivity";
23 |
24 | private ImageView mCloseIv;
25 | private ImageView mSwitchCameraIv;
26 | private ImageView mTakePictureIv;
27 | private ImageView mPictureIv;
28 | private CameraTextureView mCameraView;
29 |
30 | private CameraProxy mCameraProxy;
31 |
32 | @Override
33 | protected void onCreate(Bundle savedInstanceState) {
34 | super.onCreate(savedInstanceState);
35 | setContentView(R.layout.activity_texture_camera);
36 | initView();
37 | }
38 |
39 | private void initView() {
40 | mCloseIv = findViewById(R.id.toolbar_close_iv);
41 | mCloseIv.setOnClickListener(this);
42 | mSwitchCameraIv = findViewById(R.id.toolbar_switch_iv);
43 | mSwitchCameraIv.setOnClickListener(this);
44 | mTakePictureIv = findViewById(R.id.take_picture_iv);
45 | mTakePictureIv.setOnClickListener(this);
46 | mPictureIv = findViewById(R.id.picture_iv);
47 | mPictureIv.setOnClickListener(this);
48 | mPictureIv.setImageBitmap(ImageUtils.getLatestThumbBitmap());
49 | mCameraView = findViewById(R.id.camera_view);
50 | mCameraProxy = mCameraView.getCameraProxy();
51 | }
52 |
53 | @Override
54 | public void onClick(View v) {
55 | switch (v.getId()) {
56 | case R.id.toolbar_close_iv:
57 | finish();
58 | break;
59 | case R.id.toolbar_switch_iv:
60 | mCameraProxy.switchCamera();
61 | mCameraProxy.startPreview(mCameraView.getSurfaceTexture());
62 | break;
63 | case R.id.take_picture_iv:
64 | mCameraProxy.takePicture(mPictureCallback);
65 | break;
66 | case R.id.picture_iv:
67 | Intent intent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
68 | startActivity(intent);
69 | break;
70 | }
71 | }
72 |
73 | private final Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
74 | @Override
75 | public void onPictureTaken(byte[] data, Camera camera) {
76 | mCameraProxy.startPreview(mCameraView.getSurfaceTexture()); // resume preview after the shot
77 | new ImageSaveTask().execute(data); // save the picture
78 | }
79 | };
80 |
81 | private class ImageSaveTask extends AsyncTask<byte[], Void, Bitmap> {
82 |
83 | @Override
84 | protected Bitmap doInBackground(byte[]... bytes) {
85 | long time = System.currentTimeMillis();
86 | Bitmap bitmap = BitmapFactory.decodeByteArray(bytes[0], 0, bytes[0].length);
87 | Log.d(TAG, "BitmapFactory.decodeByteArray time: " + (System.currentTimeMillis() - time));
88 | int rotation = mCameraProxy.getLatestRotation();
89 | time = System.currentTimeMillis();
90 | Bitmap rotateBitmap = ImageUtils.rotateBitmap(bitmap, rotation, mCameraProxy.isFrontCamera(), true);
91 | Log.d(TAG, "rotateBitmap time: " + (System.currentTimeMillis() - time));
92 | time = System.currentTimeMillis();
93 | ImageUtils.saveBitmap(rotateBitmap);
94 | Log.d(TAG, "saveBitmap time: " + (System.currentTimeMillis() - time));
95 | return ImageUtils.getLatestThumbBitmap();
96 | }
97 |
98 | @Override
99 | protected void onPostExecute(Bitmap bitmap) {
100 | mPictureIv.setImageBitmap(bitmap);
101 | }
102 | }
103 | }
104 |
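
The rotation applied in ImageSaveTask comes from CameraProxy.getLatestRotation(), whose implementation lives elsewhere in this repo and is not shown here. For reference only, the framework documentation's standard way of combining the sensor orientation with the current display rotation for the legacy android.hardware.Camera API looks like the sketch below (class and method names are illustrative, not the project's):

    import android.app.Activity;
    import android.hardware.Camera;
    import android.view.Surface;

    // Reference-only helper following the pattern documented for
    // Camera.setDisplayOrientation(); it is not this project's CameraProxy code.
    class CameraOrientationSketch {
        static int getDisplayOrientation(Activity activity, int cameraId) {
            Camera.CameraInfo info = new Camera.CameraInfo();
            Camera.getCameraInfo(cameraId, info);            // sensor orientation and facing

            int degrees = 0;                                  // current display rotation in degrees
            switch (activity.getWindowManager().getDefaultDisplay().getRotation()) {
                case Surface.ROTATION_0:   degrees = 0;   break;
                case Surface.ROTATION_90:  degrees = 90;  break;
                case Surface.ROTATION_180: degrees = 180; break;
                case Surface.ROTATION_270: degrees = 270; break;
            }

            int result;
            if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                result = (info.orientation + degrees) % 360;
                result = (360 - result) % 360;                // compensate the front-camera mirror
            } else {                                          // back-facing camera
                result = (info.orientation - degrees + 360) % 360;
            }
            return result;
        }
    }

The same facing check is why the activity mirrors front-camera output when rotating the decoded bitmap.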
--------------------------------------------------------------------------------
/app/src/main/res/drawable/click_button_selector.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_glsurface_camera.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
14 |
15 |
20 |
21 |
28 |
29 |
33 |
34 |
41 |
42 |
43 |
44 |
49 |
50 |
51 |
59 |
60 |
68 |
69 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_glsurface_camera2.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
14 |
15 |
20 |
21 |
28 |
29 |
33 |
34 |
41 |
42 |
43 |
44 |
49 |
50 |
51 |
59 |
60 |
68 |
69 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
15 |
16 |
17 |
24 |
25 |
32 |
33 |
40 |
41 |
48 |
49 |
56 |
57 |
64 |
65 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_surface_camera.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
14 |
15 |
20 |
21 |
28 |
29 |
33 |
34 |
41 |
42 |
43 |
44 |
49 |
50 |
51 |
59 |
60 |
68 |
69 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_surface_camera2.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
14 |
15 |
20 |
21 |
28 |
29 |
33 |
34 |
41 |
42 |
43 |
44 |
49 |
50 |
51 |
59 |
60 |
68 |
69 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_texture_camera.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
14 |
15 |
20 |
21 |
28 |
29 |
33 |
34 |
41 |
42 |
43 |
44 |
49 |
50 |
51 |
59 |
60 |
68 |
69 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_texture_camera2.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
14 |
15 |
20 |
21 |
28 |
29 |
33 |
34 |
41 |
42 |
43 |
44 |
49 |
50 |
51 |
59 |
60 |
68 |
69 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_camera_switch.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afei-cn/CameraDemo/02fdaeae8e75ae9bcd1f9361056cb9a3268eb79b/app/src/main/res/mipmap-xxhdpi/ic_camera_switch.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_close.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afei-cn/CameraDemo/02fdaeae8e75ae9bcd1f9361056cb9a3268eb79b/app/src/main/res/mipmap-xxhdpi/ic_close.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afei-cn/CameraDemo/02fdaeae8e75ae9bcd1f9361056cb9a3268eb79b/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afei-cn/CameraDemo/02fdaeae8e75ae9bcd1f9361056cb9a3268eb79b/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/icon_button_click.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afei-cn/CameraDemo/02fdaeae8e75ae9bcd1f9361056cb9a3268eb79b/app/src/main/res/mipmap-xxhdpi/icon_button_click.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/icon_button_click_down.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afei-cn/CameraDemo/02fdaeae8e75ae9bcd1f9361056cb9a3268eb79b/app/src/main/res/mipmap-xxhdpi/icon_button_click_down.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afei-cn/CameraDemo/02fdaeae8e75ae9bcd1f9361056cb9a3268eb79b/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afei-cn/CameraDemo/02fdaeae8e75ae9bcd1f9361056cb9a3268eb79b/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 |     <color name="colorPrimary">#000000</color>
4 |     <color name="colorPrimaryDark">#000000</color>
5 |     <color name="colorAccent">#FF4081</color>
6 | </resources>
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <string name="app_name">CameraDemo</string>
3 | </resources>
4 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 | plugins {
3 | id 'com.android.application' version '7.4.1' apply false
4 | id 'com.android.library' version '7.4.1' apply false
5 | }
6 |
7 | task clean(type: Delete) {
8 | delete rootProject.buildDir
9 | }
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | # AndroidX package structure to make it clearer which packages are bundled with the
15 | # Android operating system, and which are packaged with your app's APK
16 | # https://developer.android.com/topic/libraries/support-library/androidx-rn
17 | android.useAndroidX=true
18 | # Enables namespacing of each library's R class so that its R class includes only the
19 | # resources declared in the library itself and none from the library's dependencies,
20 | # thereby reducing the size of the R class for that library
21 | android.nonTransitiveRClass=true
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afei-cn/CameraDemo/02fdaeae8e75ae9bcd1f9361056cb9a3268eb79b/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Mon Jul 03 17:36:08 CST 2023
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.5-bin.zip
5 | zipStoreBase=GRADLE_USER_HOME
6 | zipStorePath=wrapper/dists
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | #
4 | # Copyright 2015 the original author or authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # https://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 |
19 | ##############################################################################
20 | ##
21 | ## Gradle start up script for UN*X
22 | ##
23 | ##############################################################################
24 |
25 | # Attempt to set APP_HOME
26 | # Resolve links: $0 may be a link
27 | PRG="$0"
28 | # Need this for relative symlinks.
29 | while [ -h "$PRG" ] ; do
30 | ls=`ls -ld "$PRG"`
31 | link=`expr "$ls" : '.*-> \(.*\)$'`
32 | if expr "$link" : '/.*' > /dev/null; then
33 | PRG="$link"
34 | else
35 | PRG=`dirname "$PRG"`"/$link"
36 | fi
37 | done
38 | SAVED="`pwd`"
39 | cd "`dirname \"$PRG\"`/" >/dev/null
40 | APP_HOME="`pwd -P`"
41 | cd "$SAVED" >/dev/null
42 |
43 | APP_NAME="Gradle"
44 | APP_BASE_NAME=`basename "$0"`
45 |
46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
48 |
49 | # Use the maximum available, or set MAX_FD != -1 to use that value.
50 | MAX_FD="maximum"
51 |
52 | warn () {
53 | echo "$*"
54 | }
55 |
56 | die () {
57 | echo
58 | echo "$*"
59 | echo
60 | exit 1
61 | }
62 |
63 | # OS specific support (must be 'true' or 'false').
64 | cygwin=false
65 | msys=false
66 | darwin=false
67 | nonstop=false
68 | case "`uname`" in
69 | CYGWIN* )
70 | cygwin=true
71 | ;;
72 | Darwin* )
73 | darwin=true
74 | ;;
75 | MINGW* )
76 | msys=true
77 | ;;
78 | NONSTOP* )
79 | nonstop=true
80 | ;;
81 | esac
82 |
83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
84 |
85 |
86 | # Determine the Java command to use to start the JVM.
87 | if [ -n "$JAVA_HOME" ] ; then
88 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
89 | # IBM's JDK on AIX uses strange locations for the executables
90 | JAVACMD="$JAVA_HOME/jre/sh/java"
91 | else
92 | JAVACMD="$JAVA_HOME/bin/java"
93 | fi
94 | if [ ! -x "$JAVACMD" ] ; then
95 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
96 |
97 | Please set the JAVA_HOME variable in your environment to match the
98 | location of your Java installation."
99 | fi
100 | else
101 | JAVACMD="java"
102 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
103 |
104 | Please set the JAVA_HOME variable in your environment to match the
105 | location of your Java installation."
106 | fi
107 |
108 | # Increase the maximum file descriptors if we can.
109 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
110 | MAX_FD_LIMIT=`ulimit -H -n`
111 | if [ $? -eq 0 ] ; then
112 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
113 | MAX_FD="$MAX_FD_LIMIT"
114 | fi
115 | ulimit -n $MAX_FD
116 | if [ $? -ne 0 ] ; then
117 | warn "Could not set maximum file descriptor limit: $MAX_FD"
118 | fi
119 | else
120 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
121 | fi
122 | fi
123 |
124 | # For Darwin, add options to specify how the application appears in the dock
125 | if $darwin; then
126 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
127 | fi
128 |
129 | # For Cygwin or MSYS, switch paths to Windows format before running java
130 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
131 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
132 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
133 |
134 | JAVACMD=`cygpath --unix "$JAVACMD"`
135 |
136 | # We build the pattern for arguments to be converted via cygpath
137 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
138 | SEP=""
139 | for dir in $ROOTDIRSRAW ; do
140 | ROOTDIRS="$ROOTDIRS$SEP$dir"
141 | SEP="|"
142 | done
143 | OURCYGPATTERN="(^($ROOTDIRS))"
144 | # Add a user-defined pattern to the cygpath arguments
145 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
146 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
147 | fi
148 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
149 | i=0
150 | for arg in "$@" ; do
151 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
152 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
153 |
154 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
155 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
156 | else
157 | eval `echo args$i`="\"$arg\""
158 | fi
159 | i=`expr $i + 1`
160 | done
161 | case $i in
162 | 0) set -- ;;
163 | 1) set -- "$args0" ;;
164 | 2) set -- "$args0" "$args1" ;;
165 | 3) set -- "$args0" "$args1" "$args2" ;;
166 | 4) set -- "$args0" "$args1" "$args2" "$args3" ;;
167 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
168 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
169 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
170 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
171 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
172 | esac
173 | fi
174 |
175 | # Escape application args
176 | save () {
177 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
178 | echo " "
179 | }
180 | APP_ARGS=`save "$@"`
181 |
182 | # Collect all arguments for the java command, following the shell quoting and substitution rules
183 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
184 |
185 | exec "$JAVACMD" "$@"
186 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @rem
2 | @rem Copyright 2015 the original author or authors.
3 | @rem
4 | @rem Licensed under the Apache License, Version 2.0 (the "License");
5 | @rem you may not use this file except in compliance with the License.
6 | @rem You may obtain a copy of the License at
7 | @rem
8 | @rem https://www.apache.org/licenses/LICENSE-2.0
9 | @rem
10 | @rem Unless required by applicable law or agreed to in writing, software
11 | @rem distributed under the License is distributed on an "AS IS" BASIS,
12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | @rem See the License for the specific language governing permissions and
14 | @rem limitations under the License.
15 | @rem
16 |
17 | @if "%DEBUG%" == "" @echo off
18 | @rem ##########################################################################
19 | @rem
20 | @rem Gradle startup script for Windows
21 | @rem
22 | @rem ##########################################################################
23 |
24 | @rem Set local scope for the variables with windows NT shell
25 | if "%OS%"=="Windows_NT" setlocal
26 |
27 | set DIRNAME=%~dp0
28 | if "%DIRNAME%" == "" set DIRNAME=.
29 | set APP_BASE_NAME=%~n0
30 | set APP_HOME=%DIRNAME%
31 |
32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter.
33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
34 |
35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
37 |
38 | @rem Find java.exe
39 | if defined JAVA_HOME goto findJavaFromJavaHome
40 |
41 | set JAVA_EXE=java.exe
42 | %JAVA_EXE% -version >NUL 2>&1
43 | if "%ERRORLEVEL%" == "0" goto execute
44 |
45 | echo.
46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
47 | echo.
48 | echo Please set the JAVA_HOME variable in your environment to match the
49 | echo location of your Java installation.
50 |
51 | goto fail
52 |
53 | :findJavaFromJavaHome
54 | set JAVA_HOME=%JAVA_HOME:"=%
55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
56 |
57 | if exist "%JAVA_EXE%" goto execute
58 |
59 | echo.
60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
61 | echo.
62 | echo Please set the JAVA_HOME variable in your environment to match the
63 | echo location of your Java installation.
64 |
65 | goto fail
66 |
67 | :execute
68 | @rem Setup the command line
69 |
70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
71 |
72 |
73 | @rem Execute Gradle
74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
75 |
76 | :end
77 | @rem End local scope for the variables with windows NT shell
78 | if "%ERRORLEVEL%"=="0" goto mainEnd
79 |
80 | :fail
81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
82 | rem the _cmd.exe /c_ return code!
83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
84 | exit /b 1
85 |
86 | :mainEnd
87 | if "%OS%"=="Windows_NT" endlocal
88 |
89 | :omega
90 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | pluginManagement {
2 | repositories {
3 | gradlePluginPortal()
4 | google()
5 | mavenCentral()
6 | }
7 | }
8 | dependencyResolutionManagement {
9 | repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
10 | repositories {
11 | google()
12 | mavenCentral()
13 | }
14 | }
15 | rootProject.name = "CameraDemo"
16 |
17 | include ':app'
18 |
--------------------------------------------------------------------------------