├── lib-camera
├── .gitignore
├── consumer-rules.pro
├── libs
│ ├── arm64-v8a
│ │ └── libyuv.so
│ └── armeabi-v7a
│ │ └── libyuv.so
├── src
│ └── main
│ │ ├── java
│ │ └── com
│ │ │ └── android
│ │ │ └── xz
│ │ │ ├── camera
│ │ │ ├── YUVFormat.java
│ │ │ ├── callback
│ │ │ │ ├── PictureBufferCallback.java
│ │ │ │ ├── PreviewBufferCallback.java
│ │ │ │ └── CameraCallback.java
│ │ │ ├── view
│ │ │ │ ├── base
│ │ │ │ │ ├── BaseCameraView.java
│ │ │ │ │ ├── BaseSurfaceView.java
│ │ │ │ │ ├── RenderHandler.java
│ │ │ │ │ └── BaseTextureView.java
│ │ │ │ ├── CameraGLSurfaceView.java
│ │ │ │ ├── Camera2GLSurfaceView.java
│ │ │ │ ├── CameraGLESSurfaceView.java
│ │ │ │ ├── Camera2GLESSurfaceView.java
│ │ │ │ ├── CameraTextureView.java
│ │ │ │ ├── Camera2TextureView.java
│ │ │ │ ├── CameraGLTextureView.java
│ │ │ │ ├── Camera2GLTextureView.java
│ │ │ │ ├── CameraSurfaceView.java
│ │ │ │ └── Camera2SurfaceView.java
│ │ │ └── ICameraManager.java
│ │ │ ├── encoder
│ │ │ ├── MediaRecordListener.java
│ │ │ ├── IAudioEncoder.java
│ │ │ ├── IVideoEncoder.java
│ │ │ ├── TextureEncoder.java
│ │ │ ├── BufferMovieEncoder.java
│ │ │ ├── MediaMuxerWrapper.java
│ │ │ └── MediaSurfaceEncoder.java
│ │ │ ├── gles
│ │ │ ├── filiter
│ │ │ │ ├── AFilter.java
│ │ │ │ └── Texture2DFilter.java
│ │ │ ├── WindowSurface.java
│ │ │ ├── MatrixUtils.java
│ │ │ └── EglSurfaceBase.java
│ │ │ ├── permission
│ │ │ └── IPermissionsResult.java
│ │ │ └── util
│ │ │ └── YUVUtils.java
│ │ └── AndroidManifest.xml
├── proguard-rules.pro
└── build.gradle
├── app
├── .gitignore
├── src
│ └── main
│ │ ├── res
│ │ ├── values
│ │ │ ├── strings.xml
│ │ │ ├── colors.xml
│ │ │ └── themes.xml
│ │ ├── mipmap-hdpi
│ │ │ ├── ic_launcher.webp
│ │ │ └── ic_launcher_round.webp
│ │ ├── mipmap-mdpi
│ │ │ ├── ic_launcher.webp
│ │ │ └── ic_launcher_round.webp
│ │ ├── mipmap-xhdpi
│ │ │ ├── ic_launcher.webp
│ │ │ └── ic_launcher_round.webp
│ │ ├── mipmap-xxhdpi
│ │ │ ├── ic_launcher.webp
│ │ │ └── ic_launcher_round.webp
│ │ ├── mipmap-xxxhdpi
│ │ │ ├── ic_launcher.webp
│ │ │ └── ic_launcher_round.webp
│ │ ├── drawable-xhdpi
│ │ │ └── ic_switch_camera.png
│ │ ├── drawable-xxhdpi
│ │ │ └── ic_switch_camera.png
│ │ ├── mipmap-anydpi-v26
│ │ │ ├── ic_launcher.xml
│ │ │ └── ic_launcher_round.xml
│ │ ├── layout
│ │ │ ├── activity_camera.xml
│ │ │ ├── activity_display_media.xml
│ │ │ ├── activity_texture_camera.xml
│ │ │ ├── activity_glsurface_camera.xml
│ │ │ ├── activity_glsurface_camera2.xml
│ │ │ ├── activity_gltexture_camera.xml
│ │ │ ├── activity_gltexture_camera2.xml
│ │ │ ├── activity_surface_camera2.xml
│ │ │ ├── activity_texture_camera2.xml
│ │ │ ├── activity_glessurface_camera.xml
│ │ │ ├── activity_glessurface_camera2.xml
│ │ │ ├── activity_surface_camera.xml
│ │ │ ├── activity_media_codec_surface.xml
│ │ │ ├── activity_media_codec_buffer.xml
│ │ │ └── activity_main.xml
│ │ ├── drawable
│ │ │ ├── img_switch_normal.xml
│ │ │ ├── tv_timer_bg.xml
│ │ │ ├── img_switch_pressed.xml
│ │ │ ├── btn_capture_normal.xml
│ │ │ ├── btn_capture_pressed.xml
│ │ │ ├── img_switch_bg.xml
│ │ │ ├── btn_capture_bg.xml
│ │ │ └── ic_launcher_background.xml
│ │ ├── xml
│ │ │ ├── backup_rules.xml
│ │ │ └── data_extraction_rules.xml
│ │ ├── values-night
│ │ │ └── themes.xml
│ │ └── drawable-v24
│ │ │ └── ic_launcher_foreground.xml
│ │ ├── java
│ │ └── com
│ │ │ └── android
│ │ │ └── xz
│ │ │ └── camerademo
│ │ │ ├── CameraActivity.java
│ │ │ ├── MediaDisplayActivity.java
│ │ │ ├── base
│ │ │ └── BaseCameraActivity.java
│ │ │ ├── util
│ │ │ └── ScreenTools.java
│ │ │ ├── MainActivity.java
│ │ │ ├── mediacodec_activity
│ │ │ ├── MediaCodecBufferActivity.java
│ │ │ └── MediaCodecSurfaceActivity.java
│ │ │ └── view
│ │ │ └── CaptureButton.java
│ │ └── AndroidManifest.xml
├── proguard-rules.pro
└── build.gradle
├── img
├── camera.jpg
└── index.jpg
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── .gitignore
├── settings.gradle
├── gradle.properties
├── gradlew.bat
├── README.md
└── gradlew
/lib-camera/.gitignore:
--------------------------------------------------------------------------------
1 | /build
--------------------------------------------------------------------------------
/lib-camera/consumer-rules.pro:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 | /release
3 |
--------------------------------------------------------------------------------
/img/camera.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xiaozhi003/AndroidCamera/HEAD/img/camera.jpg
--------------------------------------------------------------------------------
/img/index.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xiaozhi003/AndroidCamera/HEAD/img/index.jpg
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
 31 |  * Object is immutable, which means we can safely pass it between threads without
 32 |  * explicit synchronization (and don't need to worry about it getting tweaked out from
 33 |  * under us).
 34 |  * <p>
35 | * TODO: make frame rate and iframe interval configurable? Maybe use builder pattern
36 | * with reasonable defaults for those and bit rate.
37 | */
38 | public static class EncoderConfig {
39 | final File mOutputFile;
40 | final int mWidth;
41 | final int mHeight;
42 | final int mBitRate;
43 | final EGLContext mEglContext;
44 |
45 | public EncoderConfig(File outputFile, int width, int height, int bitRate,
46 | EGLContext sharedEglContext) {
47 | mOutputFile = outputFile;
48 | mWidth = width;
49 | mHeight = height;
50 | mBitRate = bitRate;
51 | mEglContext = sharedEglContext;
52 | }
53 |
54 | @Override
55 | public String toString() {
56 | return "EncoderConfig: " + mWidth + "x" + mHeight + " @" + mBitRate +
57 | " to '" + mOutputFile.toString() + "' ctxt=" + mEglContext;
58 | }
59 | }
60 |
    /** Registers a listener notified of record start/stop events (may be null to clear). */
    public abstract void setRecordListener(MediaRecordListener recordListener);

    /** Starts recording with the given configuration (output file, size, bit rate, EGL context). */
    public abstract void startRecord(EncoderConfig config);

    /** Stops an in-progress recording. */
    public abstract void stopRecord();

    /** @return true while a recording is in progress. */
    public abstract boolean isRecording();

    /** Replaces the shared EGL context, e.g. after the renderer's context was recreated. */
    public abstract void updateSharedContext(EGLContext sharedContext);

    /**
     * Notifies the encoder that a new frame is available on the SurfaceTexture.
     * NOTE(review): presumably called from the renderer's frame callback — confirm with caller.
     */
    public abstract void frameAvailable(SurfaceTexture st);

    /** Sets the OpenGL texture id the encoder should read frames from. */
    public abstract void setTextureId(int id);
}
75 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem      https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@rem NOTE: standard Gradle wrapper launcher (generated). Prefer regenerating via the
@rem "wrapper" task over editing this file by hand.

@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto execute

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar


@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if  not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega
--------------------------------------------------------------------------------
/app/src/main/java/com/android/xz/camerademo/MediaDisplayActivity.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.camerademo;
2 |
3 | import androidx.appcompat.app.AppCompatActivity;
4 |
5 | import android.graphics.Bitmap;
6 | import android.graphics.BitmapFactory;
7 | import android.media.MediaPlayer;
8 | import android.net.Uri;
9 | import android.os.Bundle;
10 | import android.util.Size;
11 | import android.view.View;
12 | import android.view.ViewTreeObserver;
13 | import android.view.Window;
14 | import android.view.WindowManager;
15 | import android.widget.ImageView;
16 | import android.widget.MediaController;
17 | import android.widget.VideoView;
18 |
19 | import com.android.xz.camerademo.util.ScreenTools;
20 | import com.android.xz.util.ImageUtils;
21 | import com.android.xz.util.Logs;
22 |
23 | import java.io.File;
24 | import java.io.FileNotFoundException;
25 | import java.io.InputStream;
26 |
27 | public class MediaDisplayActivity extends AppCompatActivity {
28 |
29 | public static final String EXTRA_MEDIA_PATH = "com.android.xz.media_path";
30 |
31 | private static final String TAG = MediaDisplayActivity.class.getSimpleName();
32 |
33 | private String mMediaPath;
34 | private VideoView mVideoView;
35 | private ImageView mImageView;
36 |
37 | @Override
38 | protected void onCreate(Bundle savedInstanceState) {
39 | super.onCreate(savedInstanceState);
40 | ScreenTools.setTransparentStatusBar(this);
41 | setContentView(R.layout.activity_display_media);
42 |
43 | mMediaPath = getIntent().getStringExtra(EXTRA_MEDIA_PATH);
44 | mVideoView = findViewById(R.id.videoView);
45 | mImageView = findViewById(R.id.imageView);
46 |
47 | if (mMediaPath.endsWith("mp4")) {
48 | displayVideo();
49 | } else {
50 | displayImage();
51 | }
52 | }
53 |
54 | private void displayVideo() {
55 | mVideoView.setVisibility(View.VISIBLE);
56 | mImageView.setVisibility(View.GONE);
57 | // 设置path会报java.io.FileNotFoundException: No content provider 警告
58 | // mVideoView.setVideoPath(mVideoPath);
59 | mVideoView.setVideoURI(Uri.fromFile(new File(mMediaPath)));
60 |
61 | // 创建媒体控制器(MediaController)
62 | MediaController mediaController = new MediaController(this);
63 | mediaController.setAnchorView(mVideoView);
64 |
65 | // 关联媒体控制器
66 | mVideoView.setMediaController(mediaController);
67 |
68 | // 开始播放视频
69 | mVideoView.setOnCompletionListener(mp -> Logs.i(TAG, "onCompletion..."));
70 | mVideoView.setOnPreparedListener(mp -> {
71 | Logs.i(TAG, "onPrepared...");
72 | mp.start();
73 | });
74 | }
75 |
76 | private void displayImage() {
77 | mVideoView.setVisibility(View.GONE);
78 | mImageView.setVisibility(View.VISIBLE);
79 | mImageView.getViewTreeObserver().addOnGlobalLayoutListener(() -> new Thread(() -> {
80 | Bitmap bitmap = ImageUtils.getCorrectOrientationBitmap(mMediaPath, new Size(mImageView.getMeasuredWidth(), mImageView.getMeasuredHeight()));
81 | mImageView.post(() -> mImageView.setImageBitmap(bitmap));
82 | }).start());
83 | }
84 | }
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_media_codec_surface.xml:
--------------------------------------------------------------------------------
1 |
25 | * It's good practice to explicitly release() the surface, preferably from a "finally" block.
26 | */
27 | public class WindowSurface extends EglSurfaceBase {
28 | private Surface mSurface;
29 | private boolean mReleaseSurface;
30 |
31 | /**
32 | * Associates an EGL surface with the native window surface.
33 | *
34 | * Set releaseSurface to true if you want the Surface to be released when release() is
35 | * called. This is convenient, but can interfere with framework classes that expect to
36 | * manage the Surface themselves (e.g. if you release a SurfaceView's Surface, the
37 | * surfaceDestroyed() callback won't fire).
38 | */
39 | public WindowSurface(EglCore eglCore, Surface surface, boolean releaseSurface) {
40 | super(eglCore);
41 | createWindowSurface(surface);
42 | mSurface = surface;
43 | mReleaseSurface = releaseSurface;
44 | }
45 |
46 | /**
47 | * Associates an EGL surface with the SurfaceTexture.
48 | */
49 | public WindowSurface(EglCore eglCore, SurfaceTexture surfaceTexture) {
50 | super(eglCore);
51 | createWindowSurface(surfaceTexture);
52 | }
53 |
54 | /**
55 | * Releases any resources associated with the EGL surface (and, if configured to do so,
56 | * with the Surface as well).
57 | *
58 | * Does not require that the surface's EGL context be current.
59 | */
60 | public void release() {
61 | releaseEglSurface();
62 | if (mSurface != null) {
63 | if (mReleaseSurface) {
64 | mSurface.release();
65 | }
66 | mSurface = null;
67 | }
68 | }
69 |
70 | /**
71 | * Recreate the EGLSurface, using the new EglBase. The caller should have already
72 | * freed the old EGLSurface with releaseEglSurface().
73 | *
74 | * This is useful when we want to update the EGLSurface associated with a Surface.
75 | * For example, if we want to share with a different EGLContext, which can only
76 | * be done by tearing down and recreating the context. (That's handled by the caller;
77 | * this just creates a new EGLSurface for the Surface we were handed earlier.)
78 | *
79 | * If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a
80 | * context somewhere, the create call will fail with complaints from the Surface
81 | * about already being connected.
82 | */
83 | public void recreate(EglCore newEglCore) {
84 | if (mSurface == null) {
85 | throw new RuntimeException("not yet implemented for SurfaceTexture");
86 | }
87 | mEglCore = newEglCore; // switch to new context
88 | createWindowSurface(mSurface); // create new surface
89 | }
90 | }
91 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 | *
18 | *
19 | * 可在Activity的onCreat()中调用
20 | *
21 | *
22 | *
23 | * 注意:需在顶部控件布局中加入以下属性让内容出现在状态栏之下:
24 | *
25 | * android:clipToPadding="true" // true 会贴近上层布局 ; false 与上层布局有一定间隙
26 | *
27 | * android:fitsSystemWindows="true" //true 会保留actionBar,title,虚拟键的空间 ; false 不保留
28 | *
 29 |  * @param activity activity
30 | */
31 | public static void setTransparentStatusBar(Activity activity) {
32 | // 5.0及以上
33 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
34 | View decorView = activity.getWindow().getDecorView();
35 | int option = View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
36 |
37 | | View.SYSTEM_UI_FLAG_LAYOUT_STABLE;
38 | decorView.setSystemUiVisibility(option);
39 | activity.getWindow().setStatusBarColor(Color.TRANSPARENT);
40 | // 4.4到5.0
41 | } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
42 | WindowManager.LayoutParams localLayoutParams = activity.getWindow().getAttributes();
43 | localLayoutParams.flags = (WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS | localLayoutParams.flags);
44 | }
45 | }
46 |
47 | /**
48 | * 修改状态栏颜色,支持4.4以上版本
49 | *
50 | * @param activity
51 | * @param colorId
52 | */
    public static void setStatusBarColor(Activity activity, int colorId) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            // Clear the translucent flag first; FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS is
            // required before setStatusBarColor() takes effect.
            Window window = activity.getWindow();
            window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
            window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
            window.setStatusBarColor(activity.getResources().getColor(colorId));
        } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
            // On 4.4 a tinted status bar would need the SystemBarTint library (after first
            // making the bar transparent); intentionally left unimplemented here.
            // transparencyBar(activity);
            // SystemBarTintManager tintManager = new SystemBarTintManager(activity);
            // tintManager.setStatusBarTintEnabled(true);
            // tintManager.setStatusBarTintResource(colorId);
        }
    }
67 |
68 | /**
69 | * Google原生修改状态栏文字颜色
70 | *
71 | * @param activity
72 | * @param dark
73 | */
74 | public static void setAndroidNativeLightStatusBar(Activity activity, boolean dark) {
75 | View decor = activity.getWindow().getDecorView();
76 | if (dark) {
77 | decor.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR);
78 | } else {
79 | decor.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_STABLE);
80 | }
81 | }
82 |
83 | /**
 84 |  * 获得屏幕宽度
85 | *
86 | * @param context
87 | * @return
88 | */
89 | public static int getScreenWidth(Context context) {
90 | WindowManager wm = (WindowManager) context
91 | .getSystemService(Context.WINDOW_SERVICE);
92 | DisplayMetrics outMetrics = new DisplayMetrics();
93 | wm.getDefaultDisplay().getMetrics(outMetrics);
94 | return outMetrics.widthPixels;
95 | }
96 |
97 | /**
 98 |  * 获得屏幕高度
99 | *
100 | * @param context
101 | * @return
102 | */
103 | public static int getScreenHeight(Context context) {
104 | WindowManager wm = (WindowManager) context
105 | .getSystemService(Context.WINDOW_SERVICE);
106 | DisplayMetrics outMetrics = new DisplayMetrics();
107 | wm.getDefaultDisplay().getMetrics(outMetrics);
108 | return outMetrics.heightPixels;
109 | }
110 | }
111 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/camera/view/base/BaseSurfaceView.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.camera.view.base;
2 |
3 | import android.content.Context;
4 | import android.graphics.PixelFormat;
5 | import android.util.AttributeSet;
6 | import android.util.Size;
7 | import android.view.SurfaceHolder;
8 | import android.view.SurfaceView;
9 |
10 | import com.android.xz.camera.ICameraManager;
11 | import com.android.xz.camera.callback.CameraCallback;
12 | import com.android.xz.util.Logs;
13 |
14 | /**
15 | * 摄像头预览SurfaceView
16 | *
17 | * @author xiaozhi
18 | * @since 2024/8/22
19 | */
/**
 * Base SurfaceView for camera preview.
 * <p>
 * Wires a {@link SurfaceHolder.Callback} to a camera manager supplied by the subclass
 * via {@link #createCameraManager(Context)}: the camera is opened when the surface
 * appears (or in {@link #onResume()} if the surface already exists) and released when
 * the surface is destroyed or the host pauses.  The view measures itself to the
 * preview's aspect ratio once {@link #onPreview(int, int)} reports a size.
 *
 * @author xiaozhi
 * @since 2024/8/22
 */
public abstract class BaseSurfaceView extends SurfaceView implements SurfaceHolder.Callback, CameraCallback, BaseCameraView {

    protected static final String TAG = BaseSurfaceView.class.getSimpleName();
    private SurfaceHolder mSurfaceHolder;
    protected Context mContext;
    private boolean hasSurface; // whether the camera display surface currently exists
    private ICameraManager mCameraManager;
    private int mRatioWidth = 0;  // target aspect ratio width; 0 until a preview size arrives
    private int mRatioHeight = 0; // target aspect ratio height; 0 until a preview size arrives
    protected int mSurfaceWidth;
    protected int mSurfaceHeight;
    protected Size mPreviewSize = new Size(0, 0);

    public BaseSurfaceView(Context context) {
        super(context);
        init(context);
    }

    public BaseSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    public BaseSurfaceView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init(context);
    }

    // Shared constructor body: hook up the holder callback and create the camera manager.
    protected void init(Context context) {
        mContext = context;
        mSurfaceHolder = getHolder();
        // SURFACE_TYPE_PUSH_BUFFERS is a deprecated no-op on modern Android but is kept
        // for compatibility with very old devices.
        mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        mSurfaceHolder.addCallback(this);
        mCameraManager = createCameraManager(context);
        mCameraManager.setCameraCallback(this);
    }

    /** Factory for the concrete camera manager (Camera1/Camera2 etc.), chosen by the subclass. */
    public abstract ICameraManager createCameraManager(Context context);

    public ICameraManager getCameraManager() {
        return mCameraManager;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        Logs.i(TAG, "surfaceCreated..." + hasSurface);
        // Open the camera only on the first creation of this surface.
        if (!hasSurface && holder != null) {
            hasSurface = true;
            openCamera();
        }
    }

    @Override
    public abstract void surfaceChanged(SurfaceHolder holder, int format, int width, int height);

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        Logs.v(TAG, "surfaceDestroyed.");
        closeCamera();
        hasSurface = false;
    }

    public SurfaceHolder getSurfaceHolder() {
        return mSurfaceHolder;
    }

    @Override
    public void onResume() {
        if (hasSurface) {
            // When the activity was paused but not stopped, the surface still exists,
            // so surfaceCreated() will not be called again — reopen the camera here.
            openCamera();
        }
    }

    @Override
    public void onPause() {
        closeCamera();
    }

    @Override
    public void onDestroy() {
    }

    /**
     * Opens the camera via the manager, unless the surface is missing or it is
     * already open.
     */
    private void openCamera() {
        if (mSurfaceHolder == null) {
            Logs.e(TAG, "SurfaceHolder is null.");
            return;
        }
        if (mCameraManager.isOpen()) {
            Logs.w(TAG, "Camera is opened!");
            return;
        }
        mCameraManager.openCamera();
    }

    /**
     * Releases the camera.
     */
    private void closeCamera() {
        mCameraManager.releaseCamera();
    }

    // Records the desired aspect ratio and triggers a re-measure.
    private void setAspectRatio(int width, int height) {
        if (width < 0 || height < 0) {
            return;
        }
        mRatioWidth = width;
        mRatioHeight = height;
        requestLayout();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        int width = MeasureSpec.getSize(widthMeasureSpec);
        int height = MeasureSpec.getSize(heightMeasureSpec);
        if (0 == mRatioWidth || 0 == mRatioHeight) {
            // No preview size yet: assume a portrait 3:4 view (width : width*4/3).
            setMeasuredDimension(width, width * 4 / 3);
        } else {
            // Fit the ratio inside the available space, pinning whichever edge binds.
            if (width < height * mRatioWidth / mRatioHeight) {
                setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
            } else {
                setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
            }
        }
    }

    @Override
    public abstract void onOpen();

    @Override
    public void onOpenError(int error, String msg) {
    }

    @Override
    public void onPreview(int previewWidth, int previewHeight) {
        // Width/height are swapped here — NOTE(review): presumably because the camera
        // reports landscape sizes while the view is portrait; confirm with the managers.
        setAspectRatio(previewHeight, previewWidth);
    }

    @Override
    public void onPreviewError(int error, String msg) {
    }

    @Override
    public void onClose() {
    }
}
171 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # AndroidCamera
2 |
3 | 本项目主要涉及Android开发中Camera的相关操作、预览方式、视频录制等。项目结构简单代码耦合性低,适合学习和使用
4 |
5 |
6 |
 47 |  * Sends the "surface available" message, forwarding the surface object to the
 48 |  * render thread.
49 | *
50 | * Call from UI thread.
51 | */
52 | public void sendSurfaceAvailable(Object surface) {
53 | sendMessage(obtainMessage(MSG_SURFACE_AVAILABLE, 0, 0, surface));
54 | }
55 |
56 | /**
57 | * Sends the "surface changed" message, forwarding what we got from the SurfaceHolder.
58 | *
59 | * Call from UI thread.
60 | */
61 | public void sendSurfaceChanged(int format, int width,
62 | int height) {
63 | // ignore format
64 | sendMessage(obtainMessage(MSG_SURFACE_CHANGED, width, height));
65 | }
66 |
67 | /**
 68 |  * Sends the "surface destroyed" message, which tells the render thread to release its surface.
69 | *
70 | * Call from UI thread.
71 | */
72 | public void sendSurfaceDestroyed() {
73 | sendMessage(obtainMessage(MSG_SURFACE_DESTROYED));
74 | }
75 |
76 | /**
77 | * Sends the "shutdown" message, which tells the render thread to halt.
78 | *
79 | * Call from UI thread.
80 | */
81 | public void sendShutdown() {
82 | sendMessage(obtainMessage(MSG_SHUTDOWN));
83 | }
84 |
85 | /**
86 | * Sends the "frame available" message.
87 | *
88 | * Call from UI thread.
89 | */
90 | public void sendFrameAvailable() {
91 | sendMessage(obtainMessage(MSG_FRAME_AVAILABLE));
92 | }
93 |
94 | /**
95 | * Sends the "rotation" message.
96 | *
97 | * Call from UI thread.
98 | */
99 | public void sendRotate(int rotation, int cameraId) {
100 | sendMessage(obtainMessage(MSG_ROTATE_VALUE, rotation, cameraId));
101 | }
102 |
103 | /**
104 | * Sends the "preview size" message.
105 | *
106 | * Call from UI thread.
107 | */
108 | public void sendPreviewSize(int width, int height) {
109 | sendMessage(obtainMessage(MSG_SIZE_VALUE, width, height));
110 | }
111 |
112 | public void sendRecordState(boolean state) {
113 | sendMessage(obtainMessage(MSG_RECORD_STATE, state));
114 | }
115 |
116 | @Override // runs on RenderThread
117 | public void handleMessage(Message msg) {
118 | int what = msg.what;
119 | //Log.d(TAG, "RenderHandler [" + this + "]: what=" + what);
120 |
121 | RenderThread renderThread = mWeakRenderThread.get();
122 | if (renderThread == null) {
123 | Log.w(TAG, "RenderHandler.handleMessage: weak ref is null");
124 | return;
125 | }
126 |
127 | switch (what) {
128 | case MSG_SURFACE_AVAILABLE:
129 | renderThread.surfaceAvailable(msg.obj);
130 | break;
131 | case MSG_SURFACE_CHANGED:
132 | renderThread.surfaceChanged(msg.arg1, msg.arg2);
133 | break;
134 | case MSG_SURFACE_DESTROYED:
135 | renderThread.surfaceDestroyed();
136 | break;
137 | case MSG_SHUTDOWN:
138 | renderThread.shutdown();
139 | break;
140 | case MSG_FRAME_AVAILABLE:
141 | renderThread.frameAvailable();
142 | break;
143 | case MSG_SIZE_VALUE:
144 | renderThread.setCameraPreviewSize(msg.arg1, msg.arg2);
145 | break;
146 | case MSG_ROTATE_VALUE:
147 | // renderThread.setRotate(msg.arg1, msg.arg2);
148 | break;
149 | case MSG_RECORD_STATE:
150 | renderThread.changeRecordingState((boolean) msg.obj);
151 | break;
152 | default:
153 | throw new RuntimeException("unknown message " + what);
154 | }
155 | }
156 | }
157 |
--------------------------------------------------------------------------------
/app/src/main/java/com/android/xz/camerademo/MainActivity.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.camerademo;
2 |
3 | import android.Manifest;
4 | import android.content.Context;
5 | import android.content.Intent;
6 | import android.os.Bundle;
7 | import android.view.View;
8 | import android.widget.Toast;
9 |
10 | import androidx.annotation.NonNull;
11 | import androidx.annotation.Nullable;
12 | import androidx.appcompat.app.AppCompatActivity;
13 |
14 | import com.android.xz.camerademo.mediacodec_activity.MediaCodecBufferActivity;
15 | import com.android.xz.camerademo.mediacodec_activity.MediaCodecSurfaceActivity;
16 | import com.android.xz.permission.IPermissionsResult;
17 | import com.android.xz.permission.PermissionUtils;
18 | import com.android.xz.util.ImageUtils;
19 |
/**
 * Entry screen: one button per camera-preview / recording demo.  Every click first
 * requests the runtime permissions (camera, storage, microphone) and only launches
 * the corresponding activity once they are granted.
 */
public class MainActivity extends AppCompatActivity implements View.OnClickListener {

    private Context mContext;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mContext = this;
        // Initialize the image utility with an application context (avoids leaking this activity).
        ImageUtils.init(getApplicationContext());

        // All demo buttons share this activity as their click listener.
        findViewById(R.id.surfaceCameraBtn).setOnClickListener(this);
        findViewById(R.id.textureCameraBtn).setOnClickListener(this);
        findViewById(R.id.glTextureCameraBtn).setOnClickListener(this);
        findViewById(R.id.glSurfaceCameraBtn).setOnClickListener(this);
        findViewById(R.id.surfaceCamera2Btn).setOnClickListener(this);
        findViewById(R.id.textureCamera2Btn).setOnClickListener(this);
        findViewById(R.id.glSurfaceCamera2Btn).setOnClickListener(this);
        findViewById(R.id.glTextureCamera2Btn).setOnClickListener(this);
        findViewById(R.id.glSurfaceHolderCameraBtn).setOnClickListener(this);
        findViewById(R.id.glSurfaceHolderCamera2Btn).setOnClickListener(this);
        findViewById(R.id.mediaCodecBufferBtn).setOnClickListener(this);
        findViewById(R.id.mediaCodecSurfaceBtn).setOnClickListener(this);
    }

    @Override
    public void onClick(View v) {
        // Request permissions on every click; the dispatch below runs only after approval.
        PermissionUtils.getInstance().requestPermission(this, new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.RECORD_AUDIO}, new IPermissionsResult() {
            @Override
            public void passPermissions() {
                // Map the clicked button to the layout the shared CameraActivity should inflate,
                // except the MediaCodec demos, which are standalone activities.
                switch (v.getId()) {
                    case R.id.surfaceCameraBtn:
                        startCameraActivity(R.layout.activity_surface_camera);
                        break;
                    case R.id.textureCameraBtn:
                        startCameraActivity(R.layout.activity_texture_camera);
                        break;
                    case R.id.glTextureCameraBtn:
                        startCameraActivity(R.layout.activity_gltexture_camera);
                        break;
                    case R.id.glSurfaceCameraBtn:
                        startCameraActivity(R.layout.activity_glsurface_camera);
                        break;
                    case R.id.glSurfaceHolderCameraBtn:
                        startCameraActivity(R.layout.activity_glessurface_camera);
                        break;
                    case R.id.surfaceCamera2Btn:
                        startCameraActivity(R.layout.activity_surface_camera2);
                        break;
                    case R.id.textureCamera2Btn:
                        startCameraActivity(R.layout.activity_texture_camera2);
                        break;
                    case R.id.glSurfaceCamera2Btn:
                        startCameraActivity(R.layout.activity_glsurface_camera2);
                        break;
                    case R.id.glTextureCamera2Btn:
                        startCameraActivity(R.layout.activity_gltexture_camera2);
                        break;
                    case R.id.glSurfaceHolderCamera2Btn:
                        startCameraActivity(R.layout.activity_glessurface_camera2);
                        break;
                    case R.id.mediaCodecBufferBtn:
                        startActivity(new Intent(mContext, MediaCodecBufferActivity.class));
                        break;
                    case R.id.mediaCodecSurfaceBtn:
                        startActivity(new Intent(mContext, MediaCodecSurfaceActivity.class));
                        break;
                }
            }

            @Override
            public void forbidPermissions() {
                // User declined one of the permissions; stay on this screen.
                Toast.makeText(mContext, "用户拒绝Camera授权", Toast.LENGTH_SHORT).show();
            }
        });
    }

    /** Launches the shared CameraActivity, telling it which demo layout to inflate. */
    private void startCameraActivity(int layoutId) {
        Intent intent = new Intent(this, CameraActivity.class);
        intent.putExtra(CameraActivity.EXTRA_LAYOUT_ID, layoutId);
        startActivity(intent);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // Forward to the permission helper (handles returning from the app-settings screen).
        PermissionUtils.getInstance().onActivityResult(requestCode, resultCode, data);
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        // Forward the grant results so the helper can invoke pass/forbid callbacks.
        PermissionUtils.getInstance().onRequestPermissionsResult(this, requestCode, permissions, grantResults);
    }
}
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
22 | * 您至少需要一个顶点着色器来绘制形状,以及一个 片段着色器来为该形状着色。
23 | * 这些着色器必须经过编译,然后添加到 OpenGL ES 程序中,该程序随后用于绘制 形状。
24 | */
25 |
26 | // 顶点着色器代码
27 | private final String vertexShaderCode =
28 | // This matrix member variable provides a hook to manipulate
29 | // the coordinates of the objects that use this vertex shader
30 | "attribute vec4 vPosition;\n" +
31 | "attribute vec2 vTexCoordinate;\n" +
32 | "varying vec2 aTexCoordinate;\n" +
33 | "void main() {\n" +
34 | // the matrix must be included as a modifier of gl_Position
35 | // Note that the uMVPMatrix factor *must be first* in order
36 | // for the matrix multiplication product to be correct.
37 | " gl_Position = vPosition;\n" +
38 | " aTexCoordinate = vTexCoordinate;\n" +
39 | "}";
40 |
41 | // 片段着色器代码
42 | private final String fragmentShaderCode =
43 | "precision mediump float;\n" +
44 | "uniform sampler2D vTexture;\n" +
45 | "varying vec2 aTexCoordinate;\n" +
46 | "void main() {\n" +
47 | " gl_FragColor = texture2D(vTexture, aTexCoordinate);\n" +
48 | "}\n";
49 |
50 | /**
51 | * OpenGL程序句柄
52 | */
53 | private int mProgram;
54 |
55 | /**
56 | * 顶点坐标缓冲区
57 | */
58 | private FloatBuffer mVertexBuffer;
59 | /**
60 | * 纹理坐标缓冲区
61 | */
62 | private FloatBuffer mTextureBuffer;
63 |
64 | /**
65 | * 此数组中每个顶点的坐标数
66 | */
67 | static final int COORDS_PER_VERTEX = 2;
68 |
69 | /**
70 | * 顶点坐标数组
71 | * 顶点坐标系中原点(0,0)在画布中心
 * 向右为x轴正方向
73 | * 向上为y轴正方向
74 | * 画布四个角坐标如下:
75 | * (-1, 1),(1, 1)
76 | * (-1,-1),(1,-1)
77 | */
78 | private float vertexCoords[] = {
79 | -1.0f, 1.0f, // 左上
80 | -1.0f, -1.0f, // 左下
81 | 1.0f, 1.0f, // 右上
82 | 1.0f, -1.0f // 右下
83 | };
84 |
85 | /**
86 | * 纹理坐标数组
 * 这里我们需要注意纹理坐标系,原点(0,0)在画布左下角
 * 向右为x轴正方向
89 | * 向上为y轴正方向
90 | * 画布四个角坐标如下:
91 | * (0,1),(1,1)
92 | * (0,0),(1,0)
93 | */
94 | private float textureCoords[] = {
95 | 0.0f, 1.0f, // 左上
96 | 0.0f, 0.0f, // 左下
97 | 1.0f, 1.0f, // 右上
98 | 1.0f, 0.0f, // 右下
99 | };
100 |
101 | /**
102 | * 顶点坐标句柄
103 | */
104 | private int mPositionHandle;
105 | /**
106 | * 纹理坐标句柄
107 | */
108 | private int mTexCoordinateHandle;
109 | /**
110 | * 纹理贴图句柄
111 | */
112 | private int mTexHandle;
113 |
114 | private final int vertexCount = vertexCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 8 bytes per vertex (2 coords x 4 bytes per float)
116 |
117 | public Texture2DFilter() {
118 | // 初始化形状坐标的顶点字节缓冲区
119 | mVertexBuffer = ByteBuffer.allocateDirect(vertexCoords.length * 4)
120 | .order(ByteOrder.nativeOrder())
121 | .asFloatBuffer()
122 | .put(vertexCoords);
123 | mVertexBuffer.position(0);
124 |
125 | // 初始化纹理坐标顶点字节缓冲区
126 | mTextureBuffer = ByteBuffer.allocateDirect(textureCoords.length * 4)
127 | .order(ByteOrder.nativeOrder())
128 | .asFloatBuffer()
129 | .put(textureCoords);
130 | mTextureBuffer.position(0);
131 | }
132 |
    @Override
    public void surfaceCreated() {
        // Compile both shaders and link them into an OpenGL ES program.
        mProgram = GLESUtils.createProgram(vertexShaderCode, fragmentShaderCode);

        // Handle of the vertex shader's vPosition attribute.
        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        // Handle of the texture-coordinate attribute.
        mTexCoordinateHandle = GLES20.glGetAttribLocation(mProgram, "vTexCoordinate");
        // Handle of the sampler2D uniform.
        mTexHandle = GLES20.glGetUniformLocation(mProgram, "vTexture");
    }
145 |
    @Override
    public void surfaceChanged(int width, int height) {
        // Map normalized device coordinates onto the full surface size.
        GLES20.glViewport(0, 0, width, height);
    }
150 |
    @Override
    public int draw(int textureId, float[] matrix) {
        // Select our shader program for the following calls.
        GLES20.glUseProgram(mProgram);
        GLESUtils.checkGlError("glUseProgram");

        // Enable the position attribute for the quad's vertices.
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLESUtils.checkGlError("glEnableVertexAttribArray");
        // Feed vertex coordinate data.
        GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, mVertexBuffer);
        GLESUtils.checkGlError("glVertexAttribPointer");

        // Enable the texture-coordinate attribute.
        GLES20.glEnableVertexAttribArray(mTexCoordinateHandle);
        GLESUtils.checkGlError("glEnableVertexAttribArray");
        // Feed texture coordinate data.
        GLES20.glVertexAttribPointer(mTexCoordinateHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, mTextureBuffer);
        GLESUtils.checkGlError("glVertexAttribPointer");

        // Activate texture unit 0.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        // Bind the input texture.
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        // The sampler value must match the unit selected in glActiveTexture (0).
        GLES20.glUniform1i(mTexHandle, 0);

        // Draw the quad as a 4-vertex triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLESUtils.checkGlError("glDrawArrays");

        // Unbind the texture.
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);

        // Disable the vertex arrays to leave GL state clean for other filters.
        GLES20.glDisableVertexAttribArray(mPositionHandle);
        GLES20.glDisableVertexAttribArray(mTexCoordinateHandle);

        // NOTE(review): the matrix parameter is never used here; the quad is
        // drawn untransformed. Confirm callers do not expect it to be applied.
        return textureId;
    }
191 |
    @Override
    public void release() {
        // Free the GL program (requires a current EGL context on this thread).
        GLES20.glDeleteProgram(mProgram);
        // Invalidate the handle so a stale program id is never reused.
        mProgram = -1;
    }
197 | }
198 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/camera/view/base/BaseTextureView.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.camera.view.base;
2 |
3 | import android.content.Context;
4 | import android.graphics.Matrix;
5 | import android.graphics.SurfaceTexture;
6 | import android.util.AttributeSet;
7 | import android.view.TextureView;
8 |
9 | import com.android.xz.camera.ICameraManager;
10 | import com.android.xz.camera.callback.CameraCallback;
11 | import com.android.xz.util.Logs;
12 |
13 | /**
14 | * 摄像头预览TextureView
15 | *
16 | * @author xiaozhi
17 | * @since 2024/8/22
18 | */
public abstract class BaseTextureView extends TextureView implements TextureView.SurfaceTextureListener, CameraCallback, BaseCameraView {
    private static final String TAG = BaseTextureView.class.getSimpleName();

    private Context mContext;
    // SurfaceTexture delivered by onSurfaceTextureAvailable; preview target.
    private SurfaceTexture mSurfaceTexture;
    // Whether the preview is mirrored horizontally (typically for the front camera).
    private boolean isMirror;
    private boolean hasSurface; // whether a camera display surface currently exists
    private ICameraManager mCameraManager;

    // Desired preview aspect ratio; 0 means "not reported by the camera yet".
    private int mRatioWidth = 0;
    private int mRatioHeight = 0;

    // Size reported in onSurfaceTextureAvailable; used to detect view orientation.
    private int mTextureWidth;
    private int mTextureHeight;

    public BaseTextureView(Context context) {
        super(context);
        init(context);
    }

    public BaseTextureView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    public BaseTextureView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init(context);
    }

    // Shared constructor logic: build the camera manager and register for
    // SurfaceTexture lifecycle callbacks.
    private void init(Context context) {
        mContext = context;
        mCameraManager = createCameraManager(context);
        mCameraManager.setCameraCallback(this);
        setSurfaceTextureListener(this);
    }

    /**
     * Subclasses supply the concrete camera backend (Camera1/Camera2).
     */
    public abstract ICameraManager createCameraManager(Context context);

    /**
     * Returns the camera manager driving this view.
     *
     * @return the {@link ICameraManager} created in {@link #createCameraManager}
     */
    public ICameraManager getCameraManager() {
        return mCameraManager;
    }

    /**
     * Whether the preview is mirrored.
     *
     * @return true if horizontal mirroring is enabled
     */
    public boolean isMirror() {
        return isMirror;
    }

    /**
     * Enables/disables horizontal mirroring and triggers a re-layout so the
     * transform in {@link #onMeasure} is reapplied.
     *
     * @param mirror true to mirror the preview
     */
    public void setMirror(boolean mirror) {
        isMirror = mirror;
        requestLayout();
    }

    // Records the preview aspect ratio and re-measures the view to match it.
    private void setAspectRatio(int width, int height) {
        if (width < 0 || height < 0) {
            throw new IllegalArgumentException("Size cannot be negative.");
        }
        mRatioWidth = width;
        mRatioHeight = height;
        requestLayout();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        int width = MeasureSpec.getSize(widthMeasureSpec);
        int height = MeasureSpec.getSize(heightMeasureSpec);
        if (0 == mRatioWidth || 0 == mRatioHeight) {
            // No preview size known yet: default to a 4:3 portrait box.
            setMeasuredDimension(width, width * 4 / 3);
        } else {
            // Fit the ratio inside the available space, letterboxing one axis.
            if (width < height * mRatioWidth / mRatioHeight) {
                setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
            } else {
                setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
            }
        }

        if (isMirror) {
            // Flip horizontally around the view's vertical center line.
            Matrix transform = new Matrix();
            transform.setScale(-1, 1, getMeasuredWidth() / 2, 0);
            setTransform(transform);
        } else {
            setTransform(null);
        }
    }

    /**
     * Returns the SurfaceTexture backing this preview.
     *
     * @return the texture delivered by the system, or null before the surface exists
     */
    @Override
    public SurfaceTexture getSurfaceTexture() {
        return mSurfaceTexture;
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
        Logs.i(TAG, "onSurfaceTextureAvailable.");
        mTextureWidth = width;
        mTextureHeight = height;
        mSurfaceTexture = surfaceTexture;
        hasSurface = true;
        openCamera();
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {
        Logs.i(TAG, "onSurfaceTextureSizeChanged.");
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
        Logs.v(TAG, "onSurfaceTextureDestroyed.");
        closeCamera();
        hasSurface = false;
        // Returning true lets the system release the SurfaceTexture.
        return true;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
    }

    /**
     * Opens the camera and starts preview when returning to the foreground.
     */
    @Override
    public void onResume() {
        if (hasSurface) {
            // When the activity was paused but not stopped, the surface still
            // exists, so onSurfaceTextureAvailable() will not fire again;
            // the camera must be re-opened here.
            openCamera();
        }
    }

    /**
     * Stops preview and releases the camera.
     */
    @Override
    public void onPause() {
        closeCamera();
    }

    @Override
    public void onDestroy() {
    }

    /**
     * Opens the camera if a surface is ready and it is not already open.
     */
    private void openCamera() {
        if (mSurfaceTexture == null) {
            Logs.e(TAG, "mSurfaceTexture is null.");
            return;
        }
        if (mCameraManager.isOpen()) {
            Logs.w(TAG, "Camera is opened!");
            return;
        }
        mCameraManager.openCamera();
    }

    private void closeCamera() {
        mCameraManager.releaseCamera();
    }

    @Override
    public void onOpen() {
        // Camera opened: start streaming frames into our SurfaceTexture.
        mCameraManager.startPreview(mSurfaceTexture);
    }

    @Override
    public void onOpenError(int error, String msg) {

    }

    @Override
    public void onPreview(int previewWidth, int previewHeight) {
        // Camera sizes are landscape; swap them when the view is portrait so
        // the measured aspect ratio matches what is displayed.
        if (mTextureWidth > mTextureHeight) {
            setAspectRatio(previewWidth, previewHeight);
        } else {
            setAspectRatio(previewHeight, previewWidth);
        }
    }

    @Override
    public void onPreviewError(int error, String msg) {

    }

    @Override
    public void onClose() {

    }
}
228 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/encoder/BufferMovieEncoder.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.encoder;
2 |
3 | import android.content.Context;
4 | import android.media.MediaCodecInfo;
5 | import android.os.Handler;
6 | import android.util.Log;
7 | import android.util.Size;
8 |
9 | import com.android.xz.camera.YUVFormat;
10 | import com.android.xz.util.ImageUtils;
11 | import com.android.xz.util.Logs;
12 | import com.android.xz.util.YUVUtils;
13 |
14 | import java.io.File;
15 | import java.nio.ByteBuffer;
16 | import java.util.Date;
17 |
18 | /**
19 | * 对Camera预览NV21数据编码
20 | *
21 | * @author xiaozhi
22 | * @since 2024/8/30
23 | */
24 | public class BufferMovieEncoder {
25 | private static final String TAG = BufferMovieEncoder.class.getSimpleName();
26 | private MediaVideoBufferEncoder mEncoder;
27 | private MediaMuxerWrapper mMuxerWrapper;
28 | private MediaRecordListener mRecordListener;
29 | private Context mContext;
30 | private Handler mUIHandler;
31 | private byte[] mTempData;
32 | private Size mSize;
33 | private int mOrientation;
34 |
35 | public BufferMovieEncoder(Context context) {
36 | mContext = context;
37 | mUIHandler = new Handler(mContext.getMainLooper());
38 | }
39 |
40 | public void setRecordListener(MediaRecordListener recordListener) {
41 | mRecordListener = recordListener;
42 | }
43 |
44 | /**
45 | * 开始录制
46 | *
47 | * @param orientation 编码数据方向
48 | * @param size 编码视频预览尺寸,通Camera的预览尺寸
49 | */
50 | public void startRecord(int orientation, Size size) {
51 | mOrientation = orientation;
52 | mSize = size;
53 | try {
54 | if (mMuxerWrapper != null) return;
55 | if (mTempData == null) {
56 | mTempData = new byte[mSize.getWidth() * mSize.getHeight() * 3 / 2];
57 | }
58 | String name = "VID_" + ImageUtils.DATE_FORMAT.format(new Date(System.currentTimeMillis())) + ".mp4";
59 | File outputFile = new File(ImageUtils.getVideoPath(), name);
60 |
61 | final MediaMuxerWrapper muxerWrapper = new MediaMuxerWrapper(".mp4", outputFile);
62 | muxerWrapper.setOrientationHint(mOrientation);
63 | new MediaVideoBufferEncoder(muxerWrapper, mSize.getWidth(), mSize.getHeight(), new MediaEncoder.MediaEncoderListener() {
64 | String path;
65 | @Override
66 | public void onPrepared(MediaEncoder encoder) {
67 | Logs.i(TAG, "onPrepared.");
68 | mEncoder = (MediaVideoBufferEncoder) encoder;
69 | path = mEncoder.getOutputPath();
70 | if (mRecordListener != null) {
71 | mRecordListener.onStart();
72 | }
73 | }
74 |
75 | @Override
76 | public void onStopped(MediaEncoder encoder) {
77 | Logs.i(TAG, "onStopped");
78 | mUIHandler.post(() -> {
79 | if (mRecordListener != null) {
80 | mRecordListener.onStopped(path);
81 | }
82 | });
83 | }
84 | });
85 | // for audio capturing
86 | new MediaAudioEncoder(mContext, muxerWrapper, new MediaEncoder.MediaEncoderListener() {
87 | @Override
88 | public void onPrepared(MediaEncoder encoder) {
89 |
90 | }
91 |
92 | @Override
93 | public void onStopped(MediaEncoder encoder) {
94 |
95 | }
96 | });
97 |
98 | muxerWrapper.prepare();
99 | muxerWrapper.startRecording();
100 | mMuxerWrapper = muxerWrapper;
101 | } catch (Exception e) {
102 | e.printStackTrace();
103 | }
104 | }
105 |
106 | /**
107 | * 停止录制
108 | */
109 | public void stopRecord() {
110 | final MediaMuxerWrapper muxerWrapper = mMuxerWrapper;
111 | mMuxerWrapper = null;
112 | mEncoder = null;
113 | if (muxerWrapper != null) {
114 | muxerWrapper.stopRecording();
115 | }
116 | }
117 |
118 | /**
119 | * 编码数据
120 | *
121 | * @param data nv21
122 | */
123 | public void encode(byte[] data) {
124 | encode(data, YUVFormat.NV21);
125 | }
126 |
127 | /**
128 | * 编码数据
129 | *
130 | * @param data YUV420
131 | */
132 | public void encode(byte[] data, YUVFormat yuvFormat) {
133 | if (mEncoder != null) {
134 | int mColorFormat = mEncoder.getColorFormat();
135 | byte[] encodeData = null;
136 | long start = System.currentTimeMillis();
137 | if (mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar
138 | || mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar) { // 19, 20:I420
139 | if (yuvFormat == YUVFormat.NV21) {
140 | YUVUtils.nativeNV21ToI420(data, mSize.getWidth(), mSize.getHeight(), mTempData);
141 | encodeData = mTempData;
142 | } else {
143 | encodeData = data;
144 | }
145 | } else if (mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar
146 | || mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar
147 | || mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar) { // 21, 39:NV12
148 | // 使用C层转换最快
149 | if (yuvFormat == YUVFormat.NV21) {
150 | YUVUtils.nativeNV21ToNV12(data, mSize.getWidth(), mSize.getHeight(), mTempData);
151 | } else {
152 | YUVUtils.nativeI420ToNV12(data, mSize.getWidth(), mSize.getHeight(), mTempData);
153 | }
154 | encodeData = mTempData;
155 | } else if (mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar) {// 2141391872:NV21
156 | if (yuvFormat == YUVFormat.NV21) {
157 | encodeData = data;
158 | } else {
159 | YUVUtils.nativeI420ToNV21(data, mSize.getWidth(), mSize.getHeight(), mTempData);
160 | encodeData = mTempData;
161 | }
162 | }
163 | // Log.i(TAG, "耗时:" + (System.currentTimeMillis() - start) + "ms");
164 | mEncoder.frameAvailableSoon();
165 | ByteBuffer buffer = ByteBuffer.wrap(encodeData);
166 | mEncoder.encode(buffer);
167 | }
168 | }
169 | }
170 |
--------------------------------------------------------------------------------
/app/src/main/java/com/android/xz/camerademo/mediacodec_activity/MediaCodecSurfaceActivity.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.camerademo.mediacodec_activity;
2 |
3 | import android.content.Intent;
4 | import android.graphics.Bitmap;
5 | import android.media.MediaMetadataRetriever;
6 | import android.os.AsyncTask;
7 | import android.os.Bundle;
8 | import android.util.Size;
9 | import android.view.View;
10 | import android.widget.ImageView;
11 | import android.widget.TextView;
12 |
13 | import androidx.appcompat.app.AppCompatActivity;
14 |
15 | import com.android.xz.camera.CameraManager;
16 | import com.android.xz.camera.view.CameraGLSurfaceView;
17 | import com.android.xz.camerademo.MediaDisplayActivity;
18 | import com.android.xz.camerademo.R;
19 | import com.android.xz.camerademo.view.CaptureButton;
20 | import com.android.xz.encoder.MediaRecordListener;
21 | import com.android.xz.util.ImageUtils;
22 |
23 | import java.util.Timer;
24 | import java.util.TimerTask;
25 | import java.util.concurrent.Executors;
26 |
27 | public class MediaCodecSurfaceActivity extends AppCompatActivity {
28 |
29 | private static final String TAG = MediaCodecSurfaceActivity.class.getSimpleName();
30 | private CameraGLSurfaceView mCameraGLSurfaceView;
31 | private CameraManager mCameraManager;
32 | private ImageView mPictureIv;
33 | private CaptureButton mCaptureBtn;
34 | private TextView mTimeTv;
35 | private Timer mTimer = new Timer();
36 | private TimerTask mTimerTask;
37 |
    /**
     * Wires up the GL preview view, record listener, capture button,
     * camera switcher, and the thumbnail click-through to
     * {@link MediaDisplayActivity}.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_media_codec_surface);

        mCameraGLSurfaceView = findViewById(R.id.cameraView);
        mCameraGLSurfaceView.setRecordListener(mRecordListener);
        mCameraManager = (CameraManager) mCameraGLSurfaceView.getCameraManager();
        mCaptureBtn = findViewById(R.id.captureBtn);
        mTimeTv = findViewById(R.id.timeTv);
        mCaptureBtn.setClickListener(mClickListener);
        findViewById(R.id.switchCameraBtn).setOnClickListener(v -> mCameraManager.switchCamera());
        mPictureIv = findViewById(R.id.pictureIv);
        mPictureIv.setOnClickListener(v -> {
            // The tag holds the path of the last recorded video (set in onStopped).
            String path = (String) v.getTag();
            Intent intent = new Intent(this, MediaDisplayActivity.class);
            intent.putExtra(MediaDisplayActivity.EXTRA_MEDIA_PATH, path);
            startActivity(intent);
        });
    }
58 |
    /** Re-opens the camera and resumes GL rendering on foreground. */
    @Override
    protected void onResume() {
        super.onResume();
        mCameraGLSurfaceView.onResume();
    }
64 |
    /** Releases the camera and pauses GL rendering on background. */
    @Override
    protected void onPause() {
        super.onPause();
        mCameraGLSurfaceView.onPause();
    }
70 |
71 | @Override
72 | protected void onDestroy() {
73 | super.onDestroy();
74 | mCameraGLSurfaceView.onDestroy();
75 | }
76 |
77 | private void capture() {
78 | mCameraManager.takePicture(data -> {
79 | new ImageSaveTask().executeOnExecutor(Executors.newSingleThreadExecutor(), data); // 保存图片
80 | });
81 | }
82 |
    /** Starts MediaCodec surface recording; completion is reported via mRecordListener. */
    private void startRecord() {
        mCameraGLSurfaceView.startRecord();
    }
86 |
    /** Stops recording; mRecordListener.onStopped() fires when the file is finalized. */
    private void stopRecord() {
        mCameraGLSurfaceView.stopRecord();
    }
90 |
    // Maps capture-button gestures to the photo/video actions above.
    private final CaptureButton.ClickListener mClickListener = new CaptureButton.ClickListener() {
        @Override
        public void onCapture() {
            capture();
        }

        @Override
        public void onStartRecord() {
            startRecord();
        }

        @Override
        public void onStopRecord() {
            stopRecord();
        }
    };
107 |
108 | private final MediaRecordListener mRecordListener = new MediaRecordListener() {
109 | @Override
110 | public void onStart() {
111 | mTimeTv.setVisibility(View.VISIBLE);
112 | mTimer.scheduleAtFixedRate(mTimerTask = new RecordTimerTask(), 0, 1000);
113 | }
114 |
115 | @Override
116 | public void onStopped(String videoPath) {
117 | new VideoTask().executeOnExecutor(Executors.newSingleThreadExecutor(), videoPath);
118 | mPictureIv.setTag(videoPath);
119 | mCaptureBtn.stopRecord();
120 | mTimeTv.setVisibility(View.GONE);
121 | if (mTimerTask != null) {
122 | mTimerTask.cancel();
123 | mTimerTask = null;
124 | }
125 | }
126 | };
127 |
128 | private class VideoTask extends AsyncTask
35 | * There can be multiple surfaces associated with a single context.
36 | */
public class EglSurfaceBase {
    protected static final String TAG = EglSurfaceBase.class.getSimpleName();

    // EglCore object we're associated with. It may be associated with multiple surfaces.
    protected EglCore mEglCore;

    private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
    // Cached size; -1 means "unknown, query EGL on demand". Window surfaces
    // deliberately stay uncached (see createWindowSurface).
    private int mWidth = -1;
    private int mHeight = -1;

    protected EglSurfaceBase(EglCore eglCore) {
        mEglCore = eglCore;
    }

    /**
     * Creates a window surface.
     *
     * @param surface May be a Surface or SurfaceTexture.
     * @throws IllegalStateException if a surface was already created on this object
     */
    public void createWindowSurface(Object surface) {
        if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
            throw new IllegalStateException("surface already created");
        }
        mEGLSurface = mEglCore.createWindowSurface(surface);

        // Don't cache width/height here, because the size of the underlying surface can change
        // out from under us (see e.g. HardwareScalerActivity).
        //mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
        //mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
    }

    /**
     * Creates an off-screen surface.
     *
     * @throws IllegalStateException if a surface was already created on this object
     */
    public void createOffscreenSurface(int width, int height) {
        if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
            throw new IllegalStateException("surface already created");
        }
        mEGLSurface = mEglCore.createOffscreenSurface(width, height);
        // Offscreen surfaces have a fixed size, so caching is safe here.
        mWidth = width;
        mHeight = height;
    }

    /**
     * Returns the surface's width, in pixels.
     *
     * If this is called on a window surface, and the underlying surface is in the process
     * of changing size, we may not see the new size right away (e.g. in the "surfaceChanged"
     * callback). The size should match after the next buffer swap.
     */
    public int getWidth() {
        if (mWidth < 0) {
            return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
        } else {
            return mWidth;
        }
    }

    /**
     * Returns the surface's height, in pixels.
     */
    public int getHeight() {
        if (mHeight < 0) {
            return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
        } else {
            return mHeight;
        }
    }

    /**
     * Release the EGL surface. The shared EglCore is NOT released here;
     * it may still back other surfaces.
     */
    public void releaseEglSurface() {
        mEglCore.releaseSurface(mEGLSurface);
        mEGLSurface = EGL14.EGL_NO_SURFACE;
        mWidth = mHeight = -1;
    }

    /**
     * Makes our EGL context and surface current.
     */
    public void makeCurrent() {
        mEglCore.makeCurrent(mEGLSurface);
    }

    /**
     * Makes our EGL context and surface current for drawing, using the supplied surface
     * for reading.
     */
    public void makeCurrentReadFrom(EglSurfaceBase readSurface) {
        mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface);
    }

    /**
     * Calls eglSwapBuffers. Use this to "publish" the current frame.
     *
     * @return false on failure
     */
    public boolean swapBuffers() {
        boolean result = mEglCore.swapBuffers(mEGLSurface);
        if (!result) {
            Log.d(TAG, "WARNING: swapBuffers() failed");
        }
        return result;
    }

    /**
     * Sends the presentation time stamp to EGL.
     *
     * @param nsecs Timestamp, in nanoseconds.
     */
    public void setPresentationTime(long nsecs) {
        mEglCore.setPresentationTime(mEGLSurface, nsecs);
    }

    /**
     * Saves the EGL surface to a file.
     *
     * Expects that this object's EGL surface is current.
     */
    public void saveFrame(File file) throws IOException {
        if (!mEglCore.isCurrent(mEGLSurface)) {
            throw new RuntimeException("Expected EGL context/surface is not current");
        }

        // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
        // data (i.e. a byte of red, followed by a byte of green...). While the Bitmap
        // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
        // Bitmap "copy pixels" method wants the same format GL provides.
        //
        // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
        // here often.
        //
        // Making this even more interesting is the upside-down nature of GL, which means
        // our output will look upside down relative to what appears on screen if the
        // typical GL conventions are used.

        String filename = file.toString();

        int width = getWidth();
        int height = getHeight();
        // 4 bytes per pixel (RGBA8888).
        ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
        buf.order(ByteOrder.LITTLE_ENDIAN);
        GLES20.glReadPixels(0, 0, width, height,
                GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
        GLESUtils.checkGlError("glReadPixels");
        buf.rewind();

        BufferedOutputStream bos = null;
        try {
            bos = new BufferedOutputStream(new FileOutputStream(filename));
            Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
            bmp.copyPixelsFromBuffer(buf);
            // NOTE: the quality argument is ignored for lossless PNG.
            bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
            bmp.recycle();
        } finally {
            if (bos != null) bos.close();
        }
        Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
    }
}
198 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/encoder/MediaMuxerWrapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * UVCCamera
3 | * library and sample to access to UVC web camera on non-rooted Android device
4 | *
5 | * Copyright (c) 2014-2017 saki t_saki@serenegiant.com
6 | *
7 | * Licensed under the Apache License, Version 2.0 (the "License");
8 | * you may not use this file except in compliance with the License.
9 | * You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | *
19 | * All files in the folder are under this Apache License, Version 2.0.
20 | * Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
21 | * may have a different license, see the respective files.
22 | */
23 |
24 | package com.android.xz.encoder;
25 |
26 | import android.media.MediaCodec;
27 | import android.media.MediaFormat;
28 | import android.media.MediaMuxer;
29 | import android.text.TextUtils;
30 | import android.util.Log;
31 |
32 | import com.android.xz.util.Logs;
33 |
34 | import java.io.File;
35 | import java.io.IOException;
36 | import java.nio.ByteBuffer;
37 |
38 | public class MediaMuxerWrapper {
39 | private static final boolean DEBUG = Logs.issIsLogEnabled(); // TODO set false on release
40 | private static final String TAG = MediaMuxerWrapper.class.getSimpleName();
41 |
42 | private String mOutputPath;
43 | private final MediaMuxer mMediaMuxer; // API >= 18
44 | private int mEncoderCount, mStartedCount;
45 | private boolean mIsStarted;
46 | private MediaEncoder mVideoEncoder, mAudioEncoder;
47 |
48 | /**
49 | * Constructor
50 | *
51 | * @param ext extension of output file
52 | * @throws IOException
53 | */
54 | public MediaMuxerWrapper(String ext, File outputFile) throws IOException {
55 | if (TextUtils.isEmpty(ext)) ext = ".mp4";
56 | try {
57 | mOutputPath = outputFile.toString();
58 | } catch (final NullPointerException e) {
59 | throw new RuntimeException("This app has no permission of writing external storage");
60 | }
61 | mMediaMuxer = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
62 | mEncoderCount = mStartedCount = 0;
63 | mIsStarted = false;
64 | }
65 |
66 | public void setOrientationHint(int orientationHint) {
67 | if (mMediaMuxer != null) {
68 | mMediaMuxer.setOrientationHint(orientationHint);
69 | }
70 | }
71 |
72 | public String getOutputPath() {
73 | return mOutputPath;
74 | }
75 |
76 | public void prepare() throws IOException {
77 | if (mVideoEncoder != null)
78 | mVideoEncoder.prepare();
79 | if (mAudioEncoder != null)
80 | mAudioEncoder.prepare();
81 | }
82 |
83 | public void startRecording() {
84 | if (mVideoEncoder != null)
85 | mVideoEncoder.startRecording();
86 | if (mAudioEncoder != null)
87 | mAudioEncoder.startRecording();
88 | }
89 |
90 | public void stopRecording() {
91 | if (mVideoEncoder != null)
92 | mVideoEncoder.stopRecording();
93 | mVideoEncoder = null;
94 | if (mAudioEncoder != null)
95 | mAudioEncoder.stopRecording();
96 | mAudioEncoder = null;
97 | }
98 |
99 | public synchronized boolean isStarted() {
100 | return mIsStarted;
101 | }
102 |
103 | //**********************************************************************
104 | //**********************************************************************
105 |
106 | /**
107 | * assign encoder to this calss. this is called from encoder.
108 | *
109 | * @param encoder instance of MediaVideoEncoder or MediaAudioEncoder
110 | */
111 | /*package*/ void addEncoder(final MediaEncoder encoder) {
112 | if (encoder instanceof MediaVideoEncoder) {
113 | if (mVideoEncoder != null)
114 | throw new IllegalArgumentException("Video encoder already added.");
115 | mVideoEncoder = encoder;
116 | } else if (encoder instanceof MediaSurfaceEncoder) {
117 | if (mVideoEncoder != null)
118 | throw new IllegalArgumentException("Video encoder already added.");
119 | mVideoEncoder = encoder;
120 | } else if (encoder instanceof MediaVideoBufferEncoder) {
121 | if (mVideoEncoder != null)
122 | throw new IllegalArgumentException("Video encoder already added.");
123 | mVideoEncoder = encoder;
124 | } else if (encoder instanceof MediaAudioEncoder) {
125 | if (mAudioEncoder != null)
126 | throw new IllegalArgumentException("Video encoder already added.");
127 | mAudioEncoder = encoder;
128 | } else
129 | throw new IllegalArgumentException("unsupported encoder");
130 | mEncoderCount = (mVideoEncoder != null ? 1 : 0) + (mAudioEncoder != null ? 1 : 0);
131 | }
132 |
133 | /**
134 | * request start recording from encoder
135 | *
136 | * @return true when muxer is ready to write
137 | */
138 | /*package*/
139 | synchronized boolean start() {
140 | if (DEBUG) Log.v(TAG, "start:");
141 | mStartedCount++;
142 | if ((mEncoderCount > 0) && (mStartedCount == mEncoderCount)) {
143 | mMediaMuxer.start();
144 | mIsStarted = true;
145 | notifyAll();
146 | if (DEBUG) Log.v(TAG, "MediaMuxer started:");
147 | }
148 | return mIsStarted;
149 | }
150 |
151 | /**
152 | * request stop recording from encoder when encoder received EOS
153 | */
154 | /*package*/
155 | synchronized void stop() {
156 | if (DEBUG) Log.v(TAG, "stop:mStatredCount=" + mStartedCount);
157 | mStartedCount--;
158 | if ((mEncoderCount > 0) && (mStartedCount <= 0)) {
159 | try {
160 | mMediaMuxer.stop();
161 | } catch (final Exception e) {
162 | Log.w(TAG, e);
163 | }
164 | mIsStarted = false;
165 | if (DEBUG) Log.v(TAG, "MediaMuxer stopped:");
166 | }
167 | }
168 |
169 | /**
170 | * assign encoder to muxer
171 | *
172 | * @param format
173 | * @return minus value indicate error
174 | */
175 | /*package*/
176 | synchronized int addTrack(final MediaFormat format) {
177 | if (mIsStarted)
178 | throw new IllegalStateException("muxer already started");
179 | final int trackIx = mMediaMuxer.addTrack(format);
180 | if (DEBUG)
181 | Log.i(TAG, "addTrack:trackNum=" + mEncoderCount + ",trackIx=" + trackIx + ",format=" + format);
182 | return trackIx;
183 | }
184 |
185 | /**
186 | * write encoded data to muxer
187 | *
188 | * @param trackIndex
189 | * @param byteBuf
190 | * @param bufferInfo
191 | */
192 | /*package*/
193 | synchronized void writeSampleData(final int trackIndex, final ByteBuffer byteBuf, final MediaCodec.BufferInfo bufferInfo) {
194 | if (mStartedCount > 0)
195 | mMediaMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
196 | }
197 | }
198 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/encoder/MediaSurfaceEncoder.java:
--------------------------------------------------------------------------------
1 | /*
2 | * UVCCamera
3 | * library and sample to access to UVC web camera on non-rooted Android device
4 | *
5 | * Copyright (c) 2014-2017 saki t_saki@serenegiant.com
6 | *
7 | * Licensed under the Apache License, Version 2.0 (the "License");
8 | * you may not use this file except in compliance with the License.
9 | * You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | *
19 | * All files in the folder are under this Apache License, Version 2.0.
20 | * Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
21 | * may have a different license, see the respective files.
22 | */
23 |
24 | package com.android.xz.encoder;
25 |
26 | import android.media.MediaCodec;
27 | import android.media.MediaCodecInfo;
28 | import android.media.MediaCodecList;
29 | import android.media.MediaFormat;
30 | import android.util.Log;
31 | import android.view.Surface;
32 |
33 | import com.android.xz.util.Logs;
34 |
35 | import java.io.IOException;
36 |
/**
 * H.264 (AVC) video encoder whose input is a {@link Surface} obtained from
 * {@link MediaCodec#createInputSurface()}: the caller renders frames onto
 * {@link #getInputSurface()} (e.g. with OpenGL ES) and the codec encodes them.
 * Muxing of the encoded output is handled by the {@code MediaEncoder} base
 * class together with the {@code MediaMuxerWrapper} passed at construction.
 */
public class MediaSurfaceEncoder extends MediaEncoder implements IVideoEncoder {
    private static final boolean DEBUG = Logs.issIsLogEnabled(); // TODO set false on release
    private static final String TAG = "MediaSurfaceEncoder";

    // MIME type for H.264/AVC
    private static final String MIME_TYPE = "video/avc";
    // parameters for recording
    private final int mWidth, mHeight;
    private static final int FRAME_RATE = 30;
    // bits-per-pixel factor fed into calcBitRate() to estimate the target bitrate
    private static final float BPP = 0.5f;

    // encoder input Surface; created in prepare(), released in release()
    private Surface mSurface;

    /**
     * @param muxer    muxer wrapper that will receive the encoded stream
     * @param width    video width in pixels
     * @param height   video height in pixels
     * @param listener lifecycle callbacks (onPrepared is invoked from prepare())
     */
    public MediaSurfaceEncoder(final MediaMuxerWrapper muxer, final int width, final int height, final MediaEncoderListener listener) {
        super(muxer, listener);
        // NOTE(review): log text says "MediaVideoEncoder" — looks like copy/paste
        // from a sibling class; harmless but worth confirming/cleaning upstream.
        if (DEBUG) Log.i(TAG, "MediaVideoEncoder: ");
        mWidth = width;
        mHeight = height;
    }

    /** @return configured video width in pixels. */
    public int getWidth() {
        return mWidth;
    }

    /** @return configured video height in pixels. */
    public int getHeight() {
        return mHeight;
    }

    /**
     * Returns the encoder's input surface.
     * Only non-null after {@link #prepare()} has run (or after setInputSurface).
     */
    public Surface getInputSurface() {
        return mSurface;
    }

    // NOTE(review): this overwrites the Surface created by the codec in
    // prepare(); callers presumably use it to inject an external surface —
    // confirm intended usage before relying on it.
    public void setInputSurface(Surface surface) {
        mSurface = surface;
    }

    /**
     * Configures the codec and creates its input Surface.
     * Order matters: createInputSurface() is only valid between
     * MediaCodec#configure and MediaCodec#start.
     *
     * @throws IOException if the codec cannot be created
     */
    @Override
    protected void prepare() throws IOException {
        if (DEBUG) Log.i(TAG, "prepare: ");
        mTrackIndex = -1;
        mMuxerStarted = mIsEOS = false;

        final MediaCodecInfo videoCodecInfo = selectVideoCodec(MIME_TYPE);
        if (videoCodecInfo == null) {
            // No suitable encoder: bail out silently (mSurface stays null)
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        if (DEBUG) Log.i(TAG, "selected codec: " + videoCodecInfo.getName());

        final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); // API >= 18
        format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5); // keyframe every 5 seconds
        if (DEBUG) Log.i(TAG, "format: " + format);

        mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
        mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // get Surface for encoder input
        // this method only can call between #configure and #start
        mSurface = mMediaCodec.createInputSurface(); // API >= 18
        mMediaCodec.start();
        if (DEBUG) Log.i(TAG, "prepare finishing");
        if (mListener != null) {
            mListener.onPrepared(this);
        }
    }

    /** Releases the input Surface before delegating codec teardown to the base class. */
    @Override
    protected void release() {
        if (DEBUG) Log.i(TAG, "release:");
        if (mSurface != null) {
            mSurface.release();
            mSurface = null;
        }
        super.release();
    }

    // Estimates target bitrate from BPP * FRAME_RATE * pixels / 2.
    // NOTE(review): the log label says [Mbps] but divides by 1024*1024
    // (mebi, i.e. Mibit) — minor labeling inaccuracy; verify intent upstream.
    private int calcBitRate() {
        final int bitrate = (int) (BPP * FRAME_RATE * mWidth * mHeight / 2);
        Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
        return bitrate;
    }

    /**
     * select the first codec that match a specific MIME type
     *
     * @param mimeType MIME type to look for, e.g. "video/avc"
     * @return null if no codec matched
     */
    protected static final MediaCodecInfo selectVideoCodec(final String mimeType) {
        if (DEBUG) Log.v(TAG, "selectVideoCodec:");

        // get the list of available codecs
        final int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);

            if (!codecInfo.isEncoder()) { // skip decoders
                continue;
            }
            // select first codec that match a specific MIME type and color format
            final String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType)) {
                    if (DEBUG) Log.i(TAG, "codec:" + codecInfo.getName() + ",MIME=" + types[j]);
                    final int format = selectColorFormat(codecInfo, mimeType);
                    if (format > 0) {
                        return codecInfo;
                    }
                }
            }
        }
        return null;
    }

    /**
     * select color format available on specific codec and we can use.
     *
     * @return 0 if no colorFormat is matched
     */
    protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
        if (DEBUG) Log.i(TAG, "selectColorFormat: ");
        int result = 0;
        final MediaCodecInfo.CodecCapabilities caps;
        try {
            // NOTE(review): priority boost around getCapabilitiesForType is
            // inherited from the upstream UVCCamera code; presumably a
            // workaround for this call being slow — confirm it is still needed.
            Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
            caps = codecInfo.getCapabilitiesForType(mimeType);
        } finally {
            Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
        }
        int colorFormat;
        // return the first color format that appears in recognizedFormats
        for (int i = 0; i < caps.colorFormats.length; i++) {
            colorFormat = caps.colorFormats[i];
            if (isRecognizedVideoFormat(colorFormat)) {
                if (result == 0)
                    result = colorFormat;
                break;
            }
        }
        if (result == 0)
            Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
        return result;
    }

    /**
     * color formats that we can use in this class
     * (Surface input only, hence the single entry)
     */
    protected static int[] recognizedFormats;

    static {
        recognizedFormats = new int[]{
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
        };
    }

    // linear scan over recognizedFormats; null array is treated as empty
    private static final boolean isRecognizedVideoFormat(final int colorFormat) {
        if (DEBUG) Log.i(TAG, "isRecognizedVideoFormat:colorFormat=" + colorFormat);
        final int n = recognizedFormats != null ? recognizedFormats.length : 0;
        for (int i = 0; i < n; i++) {
            if (recognizedFormats[i] == colorFormat) {
                return true;
            }
        }
        return false;
    }

}
207 |
--------------------------------------------------------------------------------
7 |
8 |
28 |
29 |
75 |
76 |
77 |
78 | ## FAQ
79 |
80 | ### 1.如何切换预览尺寸
81 |
82 | `ICameraManager`提供了`setPreviewSize(Size size)`接口可以在openCamera之前设置想要的预览尺寸
83 |
84 | ### 2.如何获取预览帧数据
85 |
86 | `ICameraManager`提供了`addPreviewBufferCallback(PreviewBufferCallback previewBufferCallback)`接口可以在回调中获取Camera预览数据,格式为**NV21**
87 |
88 | > 注意:获取的byte[]是可复用的,需要您自行arrayCopy一份使用
89 |
90 | ### 3.如何拍照
91 |
92 | `ICameraManager`提供了`takePicture(PictureBufferCallback pictureCallback)`接口可以在回调中获取拍照数据,格式为**JPG**
93 |
94 |
95 |
96 | ## Blog
97 |
98 | Camera系列
99 |
100 | [Android Camera系列(一):SurfaceView+Camera](https://blog.csdn.net/xiaozhiwz/article/details/141472537)
101 |
102 | [Android Camera系列(二):TextureView+Camera](https://blog.csdn.net/xiaozhiwz/article/details/141855031)
103 |
104 | [Android Camera系列(三):GLSurfaceView+Camera](https://blog.csdn.net/xiaozhiwz/article/details/141860162)
105 |
106 | [Android Camera系列(四):TextureView+OpenGL ES+Camera](https://blog.csdn.net/xiaozhiwz/article/details/142781497)
107 |
108 | [Android Camera系列(五):Camera2](https://blog.csdn.net/xiaozhiwz/article/details/142555345)
109 |
110 | [Android Camera系列(六):MediaCodec视频编码上-编码YUV](https://blog.csdn.net/xiaozhiwz/article/details/143114530)
111 |
112 | [Android Camera系列(七):MediaCodec视频编码中-OpenGL ES多线程渲染](https://blog.csdn.net/xiaozhiwz/article/details/143144103)
113 |
114 | [Android Camera系列(八):MediaCodec视频编码下-OpenGL ES离屏渲染](https://blog.csdn.net/xiaozhiwz/article/details/144508534)
115 |
116 |
117 |
118 | OpenGL ES系列
119 |
120 | [Android OpenGLES开发:EGL环境搭建](https://blog.csdn.net/xiaozhiwz/article/details/141868444)
121 |
122 | [Android OpenGLES2.0开发(一):艰难的开始](https://blog.csdn.net/xiaozhiwz/article/details/142354149)
123 |
124 | [Android OpenGLES2.0开发(二):环境搭建](https://blog.csdn.net/xiaozhiwz/article/details/142366766)
125 |
126 | [Android OpenGLES2.0开发(三):绘制一个三角形](https://blog.csdn.net/xiaozhiwz/article/details/142453506)
127 |
128 | [Android OpenGLES2.0开发(四):矩阵变换和相机投影](https://blog.csdn.net/xiaozhiwz/article/details/142488394)
129 |
130 | [Android OpenGLES2.0开发(五):绘制正方形和圆形](https://blog.csdn.net/xiaozhiwz/article/details/142530158)
131 |
132 | [Android OpenGLES2.0开发(六):着色器语言GLSL](https://blog.csdn.net/xiaozhiwz/article/details/142790866)
133 |
134 | [Android OpenGLES2.0开发(七):纹理贴图之显示图片](https://blog.csdn.net/xiaozhiwz/article/details/142871148)
135 |
136 | [Android OpenGLES2.0开发(八):Camera预览](https://blog.csdn.net/xiaozhiwz/article/details/143239446)
137 |
138 | [Android OpenGLES2.0开发(九):图片滤镜](https://blog.csdn.net/xiaozhiwz/article/details/143847341)
139 |
140 | [Android OpenGLES2.0开发(十):FBO离屏渲染](https://blog.csdn.net/xiaozhiwz/article/details/144554451)
141 |
142 | [Android OpenGLES2.0开发(十一):渲染YUV](https://blog.csdn.net/xiaozhiwz/article/details/142589796)
143 |
144 |
145 |
146 |
147 |
148 | 参考:
149 |
150 | 1. [https://github.com/afei-cn/CameraDemo](https://github.com/afei-cn/CameraDemo)
151 | 2. [https://github.com/saki4510t/UVCCamera](https://github.com/saki4510t/UVCCamera)
152 | 3. [https://github.com/google/grafika](https://github.com/google/grafika)
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/camera/view/base/RenderHandler.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.camera.view.base;
2 |
3 | import android.os.Handler;
4 | import android.os.Message;
5 | import android.util.Log;
6 |
7 | import java.lang.ref.WeakReference;
8 |
9 | /**
10 | * OpenGL ES渲染Camera预览数据线程的通信Handler
11 | *
12 | * @author xiaozhi
13 | * @since 2024/8/22
14 | */
15 | public class RenderHandler extends Handler {
16 |
17 | private static final String TAG = RenderHandler.class.getSimpleName();
18 | private static final int MSG_SURFACE_AVAILABLE = 0;
19 | private static final int MSG_SURFACE_CHANGED = 1;
20 | private static final int MSG_SURFACE_DESTROYED = 2;
21 | private static final int MSG_SHUTDOWN = 3;
22 | private static final int MSG_FRAME_AVAILABLE = 4;
23 | private static final int MSG_ZOOM_VALUE = 5;
24 | private static final int MSG_SIZE_VALUE = 6;
25 | private static final int MSG_ROTATE_VALUE = 7;
26 | private static final int MSG_POSITION = 8;
27 | private static final int MSG_REDRAW = 9;
28 | private static final int MSG_RECORD_STATE = 10;
29 |
30 | // This shouldn't need to be a weak ref, since we'll go away when the Looper quits,
31 | // but no real harm in it.
32 | private WeakReference类名
30 | 功能说明
31 |
32 |
33 |
34 |
37 | CameraSurfaceView
35 | 1.SurfaceView+Camera
36 |
2.包含MediaCodec+Buffer录制视频
38 |
41 | CameraTextureView
39 | TextureView+Camera
40 |
42 |
45 | CameraGLSurfaceView
43 | 1.GLSurfaceView+Camera
44 |
2.MediaCodec+Surface录制视频,多线程共享EGL方式
46 |
49 | CameraGLESSurfaceView
47 | 1.SurfaceView+OpenGL ES+Camera
48 |
2.MediaCodec+Surface录制视频,三种渲染方式:
50 |
53 | CameraGLTextureView
51 | 1.TextureView+OpenGL ES+Camera
52 |
2.MediaCodec+Surface录制视频,三种渲染方式:
54 |
57 | Camera2SurfaceView
55 | SurfaceView+Camera2
56 |
58 |
61 | Camera2TextureView
59 | TextureView+Camera2
60 |
62 |
65 | Camera2GLSurfaceView
63 | 1.GLSurfaceView+Camera2
64 |
2.MediaCodec+Surface录制视频,多线程共享EGL方式
66 |
69 | Camera2GLESSurfaceView
67 | 1.SurfaceView+OpenGL ES+Camera2
68 |
2.MediaCodec+Surface录制视频,三种渲染方式:
70 |
73 |
74 | Camera2GLTextureView
71 | 1.TextureView+OpenGL ES+Camera2
72 |
2.MediaCodec+Surface录制视频,三种渲染方式: