├── .gitignore ├── LICENSE ├── README.md ├── app ├── .gitignore ├── build.gradle ├── proguard-rules.pro └── src │ └── main │ ├── AndroidManifest.xml │ ├── java │ └── com │ │ └── android │ │ └── xz │ │ └── camerademo │ │ ├── CameraActivity.java │ │ ├── MainActivity.java │ │ ├── MediaDisplayActivity.java │ │ ├── base │ │ └── BaseCameraActivity.java │ │ ├── mediacodec_activity │ │ ├── MediaCodecBufferActivity.java │ │ └── MediaCodecSurfaceActivity.java │ │ ├── util │ │ └── ScreenTools.java │ │ └── view │ │ └── CaptureButton.java │ └── res │ ├── drawable-v24 │ └── ic_launcher_foreground.xml │ ├── drawable-xhdpi │ └── ic_switch_camera.png │ ├── drawable-xxhdpi │ └── ic_switch_camera.png │ ├── drawable │ ├── btn_capture_bg.xml │ ├── btn_capture_normal.xml │ ├── btn_capture_pressed.xml │ ├── ic_launcher_background.xml │ ├── img_switch_bg.xml │ ├── img_switch_normal.xml │ ├── img_switch_pressed.xml │ └── tv_timer_bg.xml │ ├── layout │ ├── activity_camera.xml │ ├── activity_display_media.xml │ ├── activity_glessurface_camera.xml │ ├── activity_glessurface_camera2.xml │ ├── activity_glsurface_camera.xml │ ├── activity_glsurface_camera2.xml │ ├── activity_gltexture_camera.xml │ ├── activity_gltexture_camera2.xml │ ├── activity_main.xml │ ├── activity_media_codec_buffer.xml │ ├── activity_media_codec_surface.xml │ ├── activity_surface_camera.xml │ ├── activity_surface_camera2.xml │ ├── activity_texture_camera.xml │ └── activity_texture_camera2.xml │ ├── mipmap-anydpi-v26 │ ├── ic_launcher.xml │ └── ic_launcher_round.xml │ ├── mipmap-hdpi │ ├── ic_launcher.webp │ └── ic_launcher_round.webp │ ├── mipmap-mdpi │ ├── ic_launcher.webp │ └── ic_launcher_round.webp │ ├── mipmap-xhdpi │ ├── ic_launcher.webp │ └── ic_launcher_round.webp │ ├── mipmap-xxhdpi │ ├── ic_launcher.webp │ └── ic_launcher_round.webp │ ├── mipmap-xxxhdpi │ ├── ic_launcher.webp │ └── ic_launcher_round.webp │ ├── values-night │ └── themes.xml │ ├── values │ ├── colors.xml │ ├── strings.xml │ └── themes.xml │ └── 
xml │ ├── backup_rules.xml │ └── data_extraction_rules.xml ├── build.gradle ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── img ├── camera.jpg └── index.jpg ├── lib-camera ├── .gitignore ├── build.gradle ├── consumer-rules.pro ├── libs │ ├── arm64-v8a │ │ └── libyuv.so │ └── armeabi-v7a │ │ └── libyuv.so ├── proguard-rules.pro └── src │ └── main │ ├── AndroidManifest.xml │ └── java │ └── com │ └── android │ └── xz │ ├── camera │ ├── Camera2Manager.java │ ├── CameraManager.java │ ├── ICameraManager.java │ ├── YUVFormat.java │ ├── callback │ │ ├── CameraCallback.java │ │ ├── PictureBufferCallback.java │ │ └── PreviewBufferCallback.java │ └── view │ │ ├── Camera2GLESSurfaceView.java │ │ ├── Camera2GLSurfaceView.java │ │ ├── Camera2GLTextureView.java │ │ ├── Camera2SurfaceView.java │ │ ├── Camera2TextureView.java │ │ ├── CameraGLESSurfaceView.java │ │ ├── CameraGLSurfaceView.java │ │ ├── CameraGLTextureView.java │ │ ├── CameraSurfaceView.java │ │ ├── CameraTextureView.java │ │ └── base │ │ ├── BaseCameraView.java │ │ ├── BaseGLESSurfaceView.java │ │ ├── BaseGLSurfaceView.java │ │ ├── BaseGLTextureView.java │ │ ├── BaseSurfaceView.java │ │ ├── BaseTextureView.java │ │ ├── RenderHandler.java │ │ └── RenderThread.java │ ├── encoder │ ├── BufferMovieEncoder.java │ ├── IAudioEncoder.java │ ├── IVideoEncoder.java │ ├── MediaAudioEncoder.java │ ├── MediaEncoder.java │ ├── MediaMuxerWrapper.java │ ├── MediaRecordListener.java │ ├── MediaSurfaceEncoder.java │ ├── MediaVideoBufferEncoder.java │ ├── MediaVideoEncoder.java │ ├── TextureEncoder.java │ ├── TextureMovieEncoder.java │ ├── TextureMovieEncoder1.java │ ├── TextureMovieEncoder2.java │ └── VideoEncoderCore.java │ ├── gles │ ├── EglCore.java │ ├── EglSurfaceBase.java │ ├── GLESUtils.java │ ├── MatrixUtils.java │ ├── WindowSurface.java │ └── filiter │ │ ├── AFilter.java │ │ ├── CameraFilter.java │ │ └── Texture2DFilter.java │ ├── permission 
│ ├── IPermissionsResult.java │ └── PermissionUtils.java │ └── util │ ├── FileUtils.java │ ├── ImageUtils.java │ ├── Logs.java │ └── YUVUtils.java └── settings.gradle /.gitignore: -------------------------------------------------------------------------------- 1 | *.iml 2 | .gradle 3 | /local.properties 4 | /.idea/caches 5 | /.idea/libraries 6 | /.idea/modules.xml 7 | /.idea/workspace.xml 8 | /.idea/navEditor.xml 9 | /.idea/assetWizardSettings.xml 10 | .DS_Store 11 | /build 12 | /captures 13 | .externalNativeBuild 14 | .cxx 15 | local.properties 16 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AndroidCamera 2 | 3 | 本项目主要涉及Android开发中Camera的相关操作、预览方式、视频录制等。项目结构简单代码耦合性低,适合学习和使用 4 | 5 |
6 |
7 |
8 |
类名 | 30 |功能说明 | 31 | 32 | 33 |
---|---|
CameraSurfaceView | 35 |1.SurfaceView+Camera 2.包含MediaCodec+Buffer录制视频 |
36 |
CameraTextureView | 39 |TextureView+Camera | 40 |
CameraGLSurfaceView | 43 |1.GLSurfaceView+Camera 2.MediaCodec+Surface录制视频,多线程共享EGL方式 |
44 |
CameraGLESSurfaceView | 47 |1.SurfaceView+OpenGL ES+Camera 2.MediaCodec+Surface录制视频,三种渲染方式:
|
48 |
CameraGLTextureView | 51 |1.TextureView+OpenGL ES+Camera 2.MediaCodec+Surface录制视频,三种渲染方式:
|
52 |
Camera2SurfaceView | 55 |SurfaceView+Camera2 | 56 |
Camera2TextureView | 59 |TextureView+Camera2 | 60 |
Camera2GLSurfaceView | 63 |1.GLSurfaceView+Camera2 2.MediaCodec+Surface录制视频,多线程共享EGL方式 |
64 |
Camera2GLESSurfaceView | 67 |1.SurfaceView+OpenGL ES+Camera2 2.MediaCodec+Surface录制视频,三种渲染方式:
|
68 |
Camera2GLTextureView | 71 |1.TextureView+OpenGL ES+Camera2 2.MediaCodec+Surface录制视频,三种渲染方式:
|
72 |
17 | *
18 | *
19 | * 可在Activity的onCreat()中调用 20 | *
21 | *
22 | *
23 | * 注意:需在顶部控件布局中加入以下属性让内容出现在状态栏之下: 24 | *
25 | * android:clipToPadding="true" // true 会贴近上层布局 ; false 与上层布局有一定间隙 26 | *
27 | * android:fitsSystemWindows="true" //true 会保留actionBar,title,虚拟键的空间 ; false 不保留
28 | *
29 | * @paramactivity activity
30 | */
31 | public static void setTransparentStatusBar(Activity activity) {
32 | // 5.0及以上
33 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
34 | View decorView = activity.getWindow().getDecorView();
35 | int option = View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
36 |
37 | | View.SYSTEM_UI_FLAG_LAYOUT_STABLE;
38 | decorView.setSystemUiVisibility(option);
39 | activity.getWindow().setStatusBarColor(Color.TRANSPARENT);
40 | // 4.4到5.0
41 | } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
42 | WindowManager.LayoutParams localLayoutParams = activity.getWindow().getAttributes();
43 | localLayoutParams.flags = (WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS | localLayoutParams.flags);
44 | }
45 | }
46 |
47 | /**
48 | * 修改状态栏颜色,支持4.4以上版本
49 | *
50 | * @param activity
51 | * @param colorId
52 | */
53 | public static void setStatusBarColor(Activity activity, int colorId) {
54 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
55 | Window window = activity.getWindow();
56 | window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
57 | window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
58 | window.setStatusBarColor(activity.getResources().getColor(colorId));
59 | } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
60 | //使用SystemBarTint库使4.4版本状态栏变色,需要先将状态栏设置为透明
61 | // transparencyBar(activity);
62 | // SystemBarTintManager tintManager = new SystemBarTintManager(activity);
63 | // tintManager.setStatusBarTintEnabled(true);
64 | // tintManager.setStatusBarTintResource(colorId);
65 | }
66 | }
67 |
68 | /**
69 | * Google原生修改状态栏文字颜色
70 | *
71 | * @param activity
72 | * @param dark
73 | */
74 | public static void setAndroidNativeLightStatusBar(Activity activity, boolean dark) {
75 | View decor = activity.getWindow().getDecorView();
76 | if (dark) {
77 | decor.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR);
78 | } else {
79 | decor.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_STABLE);
80 | }
81 | }
82 |
    /**
     * Gets the screen width in pixels.
     * (The original comment said "screen height", but the method returns
     * {@code widthPixels} — doc corrected to match the code.)
     *
     * @param context any context, used to obtain the WindowManager
     * @return screen width in pixels
     */
    public static int getScreenWidth(Context context) {
        WindowManager wm = (WindowManager) context
                .getSystemService(Context.WINDOW_SERVICE);
        DisplayMetrics outMetrics = new DisplayMetrics();
        wm.getDefaultDisplay().getMetrics(outMetrics);
        return outMetrics.widthPixels;
    }
96 |
    /**
     * Gets the screen height in pixels.
     * (The original comment said "screen width", but the method returns
     * {@code heightPixels} — doc corrected to match the code.)
     *
     * @param context any context, used to obtain the WindowManager
     * @return screen height in pixels
     */
    public static int getScreenHeight(Context context) {
        WindowManager wm = (WindowManager) context
                .getSystemService(Context.WINDOW_SERVICE);
        DisplayMetrics outMetrics = new DisplayMetrics();
        wm.getDefaultDisplay().getMetrics(outMetrics);
        return outMetrics.heightPixels;
    }
110 | }
111 |
--------------------------------------------------------------------------------
/app/src/main/java/com/android/xz/camerademo/view/CaptureButton.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.camerademo.view;
2 |
3 | import android.animation.AnimatorSet;
4 | import android.animation.ObjectAnimator;
5 | import android.animation.ValueAnimator;
6 | import android.content.Context;
7 | import android.graphics.Canvas;
8 | import android.graphics.Color;
9 | import android.graphics.Paint;
10 | import android.graphics.RectF;
11 | import android.util.AttributeSet;
12 | import android.view.View;
13 | import android.view.animation.DecelerateInterpolator;
14 | import android.view.animation.LinearInterpolator;
15 |
16 | import androidx.annotation.NonNull;
17 | import androidx.annotation.Nullable;
18 |
19 | import com.android.xz.util.Logs;
20 |
21 | public class CaptureButton extends View {
22 |
23 | private static final String TAG = CaptureButton.class.getSimpleName();
24 | private Context mContext;
25 | private int mWidth;
26 | private int mHeight;
27 | private int mBgColor = Color.parseColor("#CCCCCC");
28 | private int mRecordColor = Color.parseColor("#FF0000");
29 | private float mHalfLength;
30 | private float mCircleRadius;
31 | private Paint mBgPaint;
32 | private boolean mRecording = false;
33 | private ObjectAnimator mCaptureAnimator;
34 | private ObjectAnimator mRecordAnimator;
35 | private AnimatorSet mRecordAnimatorSet;
36 |
37 | public CaptureButton(Context context) {
38 | super(context);
39 | init(context);
40 | }
41 |
42 | public CaptureButton(Context context, @Nullable AttributeSet attrs) {
43 | super(context, attrs);
44 | init(context);
45 | }
46 |
47 | public CaptureButton(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
48 | super(context, attrs, defStyleAttr);
49 | init(context);
50 | }
51 |
52 | public CaptureButton(Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) {
53 | super(context, attrs, defStyleAttr, defStyleRes);
54 | init(context);
55 | }
56 |
57 | private void init(Context context) {
58 | mContext = context;
59 | mBgPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
60 | mBgPaint.setColor(mBgColor);
61 | mBgPaint.setStrokeWidth(6);
62 |
63 | setOnClickListener(onClickListener);
64 | setLongClickable(true);
65 | setOnLongClickListener(onLongClickListener);
66 | }
67 |
68 | public void setHalfLength(float halfLength) {
69 | mHalfLength = halfLength;
70 | invalidate();
71 | }
72 |
73 | public void setCircleRadius(float circleRadius) {
74 | mCircleRadius = circleRadius;
75 | invalidate();
76 | }
77 |
78 | @Override
79 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
80 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
81 | int width = MeasureSpec.getSize(widthMeasureSpec);
82 | int height = MeasureSpec.getSize(heightMeasureSpec);
83 |
84 | if (width > height) {
85 | setMeasuredDimension(height, height);
86 | mWidth = height;
87 | mHeight = height;
88 | } else {
89 | setMeasuredDimension(width, width);
90 | mWidth = width;
91 | mHeight = width;
92 | }
93 | Logs.i(TAG, "width:" + width);
94 | setCircleRadius(mWidth * 9 / 10 / 2);
95 | }
96 |
97 | public void stopRecord() {
98 | Logs.i(TAG, "stopRecord...");
99 | if (mRecording) {
100 | mRecording = false;
101 | setCircleRadius(mWidth * 9 / 10 / 2);
102 | invalidate();
103 | }
104 | }
105 |
106 | @Override
107 | protected void onDraw(@NonNull Canvas canvas) {
108 | super.onDraw(canvas);
109 | mBgPaint.setColor(mBgColor);
110 | int centerX = mWidth / 2;
111 | int centerY = mHeight / 2;
112 | int radius = mWidth / 2;
113 | canvas.drawCircle(centerX, centerY, radius, mBgPaint);
114 |
115 | mBgPaint.setColor(mRecordColor);
116 | canvas.drawRoundRect(new RectF(centerX - mHalfLength, centerY - mHalfLength, centerX + mHalfLength, centerY + mHalfLength), 6, 6, mBgPaint);
117 | mBgPaint.setColor(Color.WHITE);
118 | canvas.drawCircle(centerX, centerY, mCircleRadius, mBgPaint);
119 | }
120 |
121 | private View.OnClickListener onClickListener = new View.OnClickListener() {
122 | @Override
123 | public void onClick(View v) {
124 | if (mRecording) {
125 | stopRecord();
126 | if (mClickListener != null) {
127 | mClickListener.onStopRecord();
128 | }
129 | } else {
130 | if (mClickListener != null) {
131 | mClickListener.onCapture();
132 | }
133 | }
134 | }
135 | };
136 |
137 | private View.OnLongClickListener onLongClickListener = new OnLongClickListener() {
138 | @Override
139 | public boolean onLongClick(View v) {
140 | if (!mRecording) {
141 | mRecording = true;
142 | if (mCaptureAnimator == null) {
143 | ObjectAnimator animator = ObjectAnimator.ofFloat(CaptureButton.this, "circleRadius", mWidth * 9 / 10 / 2, 0);
144 | animator.setInterpolator(new DecelerateInterpolator());
145 | animator.setDuration(100);
146 | ((ValueAnimator) animator).addUpdateListener(animation -> setCircleRadius((float) animation.getAnimatedValue()));
147 | mCaptureAnimator = animator;
148 | }
149 | if (mRecordAnimator == null) {
150 | ObjectAnimator animator = ObjectAnimator.ofFloat(CaptureButton.this, "halfLength", 0, mWidth / 6);
151 | animator.setInterpolator(new DecelerateInterpolator());
152 | animator.setDuration(100);
153 | ((ValueAnimator) animator).addUpdateListener(animation -> setHalfLength((float) animation.getAnimatedValue()));
154 | mRecordAnimator = animator;
155 | }
156 | if (mRecordAnimatorSet == null) {
157 | mRecordAnimatorSet = new AnimatorSet();
158 | }
159 | mRecordAnimatorSet.playSequentially(mCaptureAnimator, mRecordAnimator);
160 | mRecordAnimatorSet.start();
161 | if (mClickListener != null) {
162 | mClickListener.onStartRecord();
163 | }
164 | }
165 | return true;
166 | }
167 | };
168 |
169 | public void setClickListener(ClickListener clickListener) {
170 | mClickListener = clickListener;
171 | }
172 |
173 | private ClickListener mClickListener;
174 |
175 | public interface ClickListener {
176 | void onCapture();
177 |
178 | void onStartRecord();
179 |
180 | void onStopRecord();
181 | }
182 | }
183 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |
47 | * The flag tells the caller whether or not it can expect a surfaceChanged() to
48 | * arrive very soon.
49 | *
50 | * Call from UI thread.
51 | */
52 | public void sendSurfaceAvailable(Object surface) {
53 | sendMessage(obtainMessage(MSG_SURFACE_AVAILABLE, 0, 0, surface));
54 | }
55 |
56 | /**
57 | * Sends the "surface changed" message, forwarding what we got from the SurfaceHolder.
58 | *
59 | * Call from UI thread.
60 | */
61 | public void sendSurfaceChanged(int format, int width,
62 | int height) {
63 | // ignore format
64 | sendMessage(obtainMessage(MSG_SURFACE_CHANGED, width, height));
65 | }
66 |
67 | /**
68 | * Sends the "shutdown" message, which tells the render thread to halt.
69 | *
70 | * Call from UI thread.
71 | */
72 | public void sendSurfaceDestroyed() {
73 | sendMessage(obtainMessage(MSG_SURFACE_DESTROYED));
74 | }
75 |
76 | /**
77 | * Sends the "shutdown" message, which tells the render thread to halt.
78 | *
79 | * Call from UI thread.
80 | */
81 | public void sendShutdown() {
82 | sendMessage(obtainMessage(MSG_SHUTDOWN));
83 | }
84 |
85 | /**
86 | * Sends the "frame available" message.
87 | *
88 | * Call from UI thread.
89 | */
90 | public void sendFrameAvailable() {
91 | sendMessage(obtainMessage(MSG_FRAME_AVAILABLE));
92 | }
93 |
94 | /**
95 | * Sends the "rotation" message.
96 | *
97 | * Call from UI thread.
98 | */
99 | public void sendRotate(int rotation, int cameraId) {
100 | sendMessage(obtainMessage(MSG_ROTATE_VALUE, rotation, cameraId));
101 | }
102 |
103 | /**
104 | * Sends the "preview size" message.
105 | *
106 | * Call from UI thread.
107 | */
108 | public void sendPreviewSize(int width, int height) {
109 | sendMessage(obtainMessage(MSG_SIZE_VALUE, width, height));
110 | }
111 |
112 | public void sendRecordState(boolean state) {
113 | sendMessage(obtainMessage(MSG_RECORD_STATE, state));
114 | }
115 |
116 | @Override // runs on RenderThread
117 | public void handleMessage(Message msg) {
118 | int what = msg.what;
119 | //Log.d(TAG, "RenderHandler [" + this + "]: what=" + what);
120 |
121 | RenderThread renderThread = mWeakRenderThread.get();
122 | if (renderThread == null) {
123 | Log.w(TAG, "RenderHandler.handleMessage: weak ref is null");
124 | return;
125 | }
126 |
127 | switch (what) {
128 | case MSG_SURFACE_AVAILABLE:
129 | renderThread.surfaceAvailable(msg.obj);
130 | break;
131 | case MSG_SURFACE_CHANGED:
132 | renderThread.surfaceChanged(msg.arg1, msg.arg2);
133 | break;
134 | case MSG_SURFACE_DESTROYED:
135 | renderThread.surfaceDestroyed();
136 | break;
137 | case MSG_SHUTDOWN:
138 | renderThread.shutdown();
139 | break;
140 | case MSG_FRAME_AVAILABLE:
141 | renderThread.frameAvailable();
142 | break;
143 | case MSG_SIZE_VALUE:
144 | renderThread.setCameraPreviewSize(msg.arg1, msg.arg2);
145 | break;
146 | case MSG_ROTATE_VALUE:
147 | // renderThread.setRotate(msg.arg1, msg.arg2);
148 | break;
149 | case MSG_RECORD_STATE:
150 | renderThread.changeRecordingState((boolean) msg.obj);
151 | break;
152 | default:
153 | throw new RuntimeException("unknown message " + what);
154 | }
155 | }
156 | }
157 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/encoder/BufferMovieEncoder.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.encoder;
2 |
3 | import android.content.Context;
4 | import android.media.MediaCodecInfo;
5 | import android.os.Handler;
6 | import android.util.Log;
7 | import android.util.Size;
8 |
9 | import com.android.xz.camera.YUVFormat;
10 | import com.android.xz.util.ImageUtils;
11 | import com.android.xz.util.Logs;
12 | import com.android.xz.util.YUVUtils;
13 |
14 | import java.io.File;
15 | import java.nio.ByteBuffer;
16 | import java.util.Date;
17 |
18 | /**
19 | * 对Camera预览NV21数据编码
20 | *
21 | * @author xiaozhi
22 | * @since 2024/8/30
23 | */
24 | public class BufferMovieEncoder {
25 | private static final String TAG = BufferMovieEncoder.class.getSimpleName();
26 | private MediaVideoBufferEncoder mEncoder;
27 | private MediaMuxerWrapper mMuxerWrapper;
28 | private MediaRecordListener mRecordListener;
29 | private Context mContext;
30 | private Handler mUIHandler;
31 | private byte[] mTempData;
32 | private Size mSize;
33 | private int mOrientation;
34 |
35 | public BufferMovieEncoder(Context context) {
36 | mContext = context;
37 | mUIHandler = new Handler(mContext.getMainLooper());
38 | }
39 |
40 | public void setRecordListener(MediaRecordListener recordListener) {
41 | mRecordListener = recordListener;
42 | }
43 |
44 | /**
45 | * 开始录制
46 | *
47 | * @param orientation 编码数据方向
48 | * @param size 编码视频预览尺寸,通Camera的预览尺寸
49 | */
50 | public void startRecord(int orientation, Size size) {
51 | mOrientation = orientation;
52 | mSize = size;
53 | try {
54 | if (mMuxerWrapper != null) return;
55 | if (mTempData == null) {
56 | mTempData = new byte[mSize.getWidth() * mSize.getHeight() * 3 / 2];
57 | }
58 | String name = "VID_" + ImageUtils.DATE_FORMAT.format(new Date(System.currentTimeMillis())) + ".mp4";
59 | File outputFile = new File(ImageUtils.getVideoPath(), name);
60 |
61 | final MediaMuxerWrapper muxerWrapper = new MediaMuxerWrapper(".mp4", outputFile);
62 | muxerWrapper.setOrientationHint(mOrientation);
63 | new MediaVideoBufferEncoder(muxerWrapper, mSize.getWidth(), mSize.getHeight(), new MediaEncoder.MediaEncoderListener() {
64 | String path;
65 | @Override
66 | public void onPrepared(MediaEncoder encoder) {
67 | Logs.i(TAG, "onPrepared.");
68 | mEncoder = (MediaVideoBufferEncoder) encoder;
69 | path = mEncoder.getOutputPath();
70 | if (mRecordListener != null) {
71 | mRecordListener.onStart();
72 | }
73 | }
74 |
75 | @Override
76 | public void onStopped(MediaEncoder encoder) {
77 | Logs.i(TAG, "onStopped");
78 | mUIHandler.post(() -> {
79 | if (mRecordListener != null) {
80 | mRecordListener.onStopped(path);
81 | }
82 | });
83 | }
84 | });
85 | // for audio capturing
86 | new MediaAudioEncoder(mContext, muxerWrapper, new MediaEncoder.MediaEncoderListener() {
87 | @Override
88 | public void onPrepared(MediaEncoder encoder) {
89 |
90 | }
91 |
92 | @Override
93 | public void onStopped(MediaEncoder encoder) {
94 |
95 | }
96 | });
97 |
98 | muxerWrapper.prepare();
99 | muxerWrapper.startRecording();
100 | mMuxerWrapper = muxerWrapper;
101 | } catch (Exception e) {
102 | e.printStackTrace();
103 | }
104 | }
105 |
106 | /**
107 | * 停止录制
108 | */
109 | public void stopRecord() {
110 | final MediaMuxerWrapper muxerWrapper = mMuxerWrapper;
111 | mMuxerWrapper = null;
112 | mEncoder = null;
113 | if (muxerWrapper != null) {
114 | muxerWrapper.stopRecording();
115 | }
116 | }
117 |
118 | /**
119 | * 编码数据
120 | *
121 | * @param data nv21
122 | */
123 | public void encode(byte[] data) {
124 | encode(data, YUVFormat.NV21);
125 | }
126 |
127 | /**
128 | * 编码数据
129 | *
130 | * @param data YUV420
131 | */
132 | public void encode(byte[] data, YUVFormat yuvFormat) {
133 | if (mEncoder != null) {
134 | int mColorFormat = mEncoder.getColorFormat();
135 | byte[] encodeData = null;
136 | long start = System.currentTimeMillis();
137 | if (mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar
138 | || mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar) { // 19, 20:I420
139 | if (yuvFormat == YUVFormat.NV21) {
140 | YUVUtils.nativeNV21ToI420(data, mSize.getWidth(), mSize.getHeight(), mTempData);
141 | encodeData = mTempData;
142 | } else {
143 | encodeData = data;
144 | }
145 | } else if (mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar
146 | || mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar
147 | || mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar) { // 21, 39:NV12
148 | // 使用C层转换最快
149 | if (yuvFormat == YUVFormat.NV21) {
150 | YUVUtils.nativeNV21ToNV12(data, mSize.getWidth(), mSize.getHeight(), mTempData);
151 | } else {
152 | YUVUtils.nativeI420ToNV12(data, mSize.getWidth(), mSize.getHeight(), mTempData);
153 | }
154 | encodeData = mTempData;
155 | } else if (mColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar) {// 2141391872:NV21
156 | if (yuvFormat == YUVFormat.NV21) {
157 | encodeData = data;
158 | } else {
159 | YUVUtils.nativeI420ToNV21(data, mSize.getWidth(), mSize.getHeight(), mTempData);
160 | encodeData = mTempData;
161 | }
162 | }
163 | // Log.i(TAG, "耗时:" + (System.currentTimeMillis() - start) + "ms");
164 | mEncoder.frameAvailableSoon();
165 | ByteBuffer buffer = ByteBuffer.wrap(encodeData);
166 | mEncoder.encode(buffer);
167 | }
168 | }
169 | }
170 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/encoder/IAudioEncoder.java:
--------------------------------------------------------------------------------
1 | /*
2 | * UVCCamera
3 | * library and sample to access to UVC web camera on non-rooted Android device
4 | *
5 | * Copyright (c) 2014-2017 saki t_saki@serenegiant.com
6 | *
7 | * Licensed under the Apache License, Version 2.0 (the "License");
8 | * you may not use this file except in compliance with the License.
9 | * You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | *
19 | * All files in the folder are under this Apache License, Version 2.0.
20 | * Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
21 | * may have a different license, see the respective files.
22 | */
23 |
24 | package com.android.xz.encoder;
25 |
/**
 * Marker interface identifying audio encoders; it intentionally declares
 * no methods.
 */
public interface IAudioEncoder {
}
28 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/encoder/IVideoEncoder.java:
--------------------------------------------------------------------------------
1 | /*
2 | * UVCCamera
3 | * library and sample to access to UVC web camera on non-rooted Android device
4 | *
5 | * Copyright (c) 2014-2017 saki t_saki@serenegiant.com
6 | *
7 | * Licensed under the Apache License, Version 2.0 (the "License");
8 | * you may not use this file except in compliance with the License.
9 | * You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | *
19 | * All files in the folder are under this Apache License, Version 2.0.
20 | * Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
21 | * may have a different license, see the respective files.
22 | */
23 |
24 | package com.android.xz.encoder;
25 |
/**
 * Minimal contract implemented by video encoders.
 */
public interface IVideoEncoder {
    /**
     * Notifies the encoder that a new frame will arrive shortly so it can
     * drain its codec output.
     * (Removed the redundant {@code public} modifier: interface methods are
     * implicitly public in Java.)
     *
     * @return implementation-defined status — presumably whether the encoder
     *         accepted the notification; TODO confirm against MediaEncoder
     */
    boolean frameAvailableSoon();
}
29 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/encoder/MediaMuxerWrapper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * UVCCamera
3 | * library and sample to access to UVC web camera on non-rooted Android device
4 | *
5 | * Copyright (c) 2014-2017 saki t_saki@serenegiant.com
6 | *
7 | * Licensed under the Apache License, Version 2.0 (the "License");
8 | * you may not use this file except in compliance with the License.
9 | * You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | *
19 | * All files in the folder are under this Apache License, Version 2.0.
20 | * Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
21 | * may have a different license, see the respective files.
22 | */
23 |
24 | package com.android.xz.encoder;
25 |
26 | import android.media.MediaCodec;
27 | import android.media.MediaFormat;
28 | import android.media.MediaMuxer;
29 | import android.text.TextUtils;
30 | import android.util.Log;
31 |
32 | import com.android.xz.util.Logs;
33 |
34 | import java.io.File;
35 | import java.io.IOException;
36 | import java.nio.ByteBuffer;
37 |
38 | public class MediaMuxerWrapper {
39 | private static final boolean DEBUG = Logs.issIsLogEnabled(); // TODO set false on release
40 | private static final String TAG = MediaMuxerWrapper.class.getSimpleName();
41 |
42 | private String mOutputPath;
43 | private final MediaMuxer mMediaMuxer; // API >= 18
44 | private int mEncoderCount, mStartedCount;
45 | private boolean mIsStarted;
46 | private MediaEncoder mVideoEncoder, mAudioEncoder;
47 |
48 | /**
49 | * Constructor
50 | *
51 | * @param ext extension of output file
52 | * @throws IOException
53 | */
54 | public MediaMuxerWrapper(String ext, File outputFile) throws IOException {
55 | if (TextUtils.isEmpty(ext)) ext = ".mp4";
56 | try {
57 | mOutputPath = outputFile.toString();
58 | } catch (final NullPointerException e) {
59 | throw new RuntimeException("This app has no permission of writing external storage");
60 | }
61 | mMediaMuxer = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
62 | mEncoderCount = mStartedCount = 0;
63 | mIsStarted = false;
64 | }
65 |
66 | public void setOrientationHint(int orientationHint) {
67 | if (mMediaMuxer != null) {
68 | mMediaMuxer.setOrientationHint(orientationHint);
69 | }
70 | }
71 |
72 | public String getOutputPath() {
73 | return mOutputPath;
74 | }
75 |
76 | public void prepare() throws IOException {
77 | if (mVideoEncoder != null)
78 | mVideoEncoder.prepare();
79 | if (mAudioEncoder != null)
80 | mAudioEncoder.prepare();
81 | }
82 |
83 | public void startRecording() {
84 | if (mVideoEncoder != null)
85 | mVideoEncoder.startRecording();
86 | if (mAudioEncoder != null)
87 | mAudioEncoder.startRecording();
88 | }
89 |
90 | public void stopRecording() {
91 | if (mVideoEncoder != null)
92 | mVideoEncoder.stopRecording();
93 | mVideoEncoder = null;
94 | if (mAudioEncoder != null)
95 | mAudioEncoder.stopRecording();
96 | mAudioEncoder = null;
97 | }
98 |
99 | public synchronized boolean isStarted() {
100 | return mIsStarted;
101 | }
102 |
103 | //**********************************************************************
104 | //**********************************************************************
105 |
106 | /**
107 | * assign encoder to this calss. this is called from encoder.
108 | *
109 | * @param encoder instance of MediaVideoEncoder or MediaAudioEncoder
110 | */
111 | /*package*/ void addEncoder(final MediaEncoder encoder) {
112 | if (encoder instanceof MediaVideoEncoder) {
113 | if (mVideoEncoder != null)
114 | throw new IllegalArgumentException("Video encoder already added.");
115 | mVideoEncoder = encoder;
116 | } else if (encoder instanceof MediaSurfaceEncoder) {
117 | if (mVideoEncoder != null)
118 | throw new IllegalArgumentException("Video encoder already added.");
119 | mVideoEncoder = encoder;
120 | } else if (encoder instanceof MediaVideoBufferEncoder) {
121 | if (mVideoEncoder != null)
122 | throw new IllegalArgumentException("Video encoder already added.");
123 | mVideoEncoder = encoder;
124 | } else if (encoder instanceof MediaAudioEncoder) {
125 | if (mAudioEncoder != null)
126 | throw new IllegalArgumentException("Video encoder already added.");
127 | mAudioEncoder = encoder;
128 | } else
129 | throw new IllegalArgumentException("unsupported encoder");
130 | mEncoderCount = (mVideoEncoder != null ? 1 : 0) + (mAudioEncoder != null ? 1 : 0);
131 | }
132 |
133 | /**
134 | * request start recording from encoder
135 | *
136 | * @return true when muxer is ready to write
137 | */
138 | /*package*/
139 | synchronized boolean start() {
140 | if (DEBUG) Log.v(TAG, "start:");
141 | mStartedCount++;
142 | if ((mEncoderCount > 0) && (mStartedCount == mEncoderCount)) {
143 | mMediaMuxer.start();
144 | mIsStarted = true;
145 | notifyAll();
146 | if (DEBUG) Log.v(TAG, "MediaMuxer started:");
147 | }
148 | return mIsStarted;
149 | }
150 |
/**
 * Request stop of recording; called from an encoder when it received EOS.
 * The muxer is actually stopped only once every started encoder has called
 * this (the started-count has dropped to zero).
 */
/*package*/
synchronized void stop() {
if (DEBUG) Log.v(TAG, "stop:mStatredCount=" + mStartedCount);
mStartedCount--;
if ((mEncoderCount > 0) && (mStartedCount <= 0)) {
try {
// MediaMuxer.stop() can throw (e.g. IllegalStateException when no
// valid samples were written); log instead of crashing the teardown.
mMediaMuxer.stop();
} catch (final Exception e) {
Log.w(TAG, e);
}
mIsStarted = false;
if (DEBUG) Log.v(TAG, "MediaMuxer stopped:");
}
}
168 |
/**
 * Assign an encoder's output format to the muxer as a new track.
 * Must be called before the muxer is started.
 *
 * @param format output format produced by the encoder's MediaCodec
 * @return index of the newly added track; negative values indicate an error
 * @throws IllegalStateException if the muxer has already been started
 */
/*package*/
synchronized int addTrack(final MediaFormat format) {
if (mIsStarted)
throw new IllegalStateException("muxer already started");
final int trackIx = mMediaMuxer.addTrack(format);
if (DEBUG)
Log.i(TAG, "addTrack:trackNum=" + mEncoderCount + ",trackIx=" + trackIx + ",format=" + format);
return trackIx;
}
184 |
/**
 * Write an encoded sample to the muxer.
 * Samples arriving before any encoder has started are silently dropped,
 * because MediaMuxer.writeSampleData must not be called before start().
 *
 * @param trackIndex track index returned by addTrack
 * @param byteBuf    encoded sample data
 * @param bufferInfo sample metadata (offset/size/presentation time/flags)
 */
/*package*/
synchronized void writeSampleData(final int trackIndex, final ByteBuffer byteBuf, final MediaCodec.BufferInfo bufferInfo) {
if (mStartedCount > 0)
mMediaMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
}
197 | }
198 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/encoder/MediaRecordListener.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.encoder;
2 |
/**
 * Callback interface for observing the lifecycle of a media recording session.
 */
public interface MediaRecordListener {
    /** Called when recording has started. */
    void onStart();

    /**
     * Called when recording has stopped.
     *
     * @param videoPath path of the recorded video file
     */
    void onStopped(String videoPath);
}
8 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/encoder/MediaSurfaceEncoder.java:
--------------------------------------------------------------------------------
1 | /*
2 | * UVCCamera
3 | * library and sample to access to UVC web camera on non-rooted Android device
4 | *
5 | * Copyright (c) 2014-2017 saki t_saki@serenegiant.com
6 | *
7 | * Licensed under the Apache License, Version 2.0 (the "License");
8 | * you may not use this file except in compliance with the License.
9 | * You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | *
19 | * All files in the folder are under this Apache License, Version 2.0.
20 | * Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
21 | * may have a different license, see the respective files.
22 | */
23 |
24 | package com.android.xz.encoder;
25 |
26 | import android.media.MediaCodec;
27 | import android.media.MediaCodecInfo;
28 | import android.media.MediaCodecList;
29 | import android.media.MediaFormat;
30 | import android.util.Log;
31 | import android.view.Surface;
32 |
33 | import com.android.xz.util.Logs;
34 |
35 | import java.io.IOException;
36 |
37 | public class MediaSurfaceEncoder extends MediaEncoder implements IVideoEncoder {
38 | private static final boolean DEBUG = Logs.issIsLogEnabled(); // TODO set false on release
39 | private static final String TAG = "MediaSurfaceEncoder";
40 |
41 | private static final String MIME_TYPE = "video/avc";
42 | // parameters for recording
43 | private final int mWidth, mHeight;
44 | private static final int FRAME_RATE = 30;
45 | private static final float BPP = 0.5f;
46 |
47 | private Surface mSurface;
48 |
49 | public MediaSurfaceEncoder(final MediaMuxerWrapper muxer, final int width, final int height, final MediaEncoderListener listener) {
50 | super(muxer, listener);
51 | if (DEBUG) Log.i(TAG, "MediaVideoEncoder: ");
52 | mWidth = width;
53 | mHeight = height;
54 | }
55 |
56 | public int getWidth() {
57 | return mWidth;
58 | }
59 |
60 | public int getHeight() {
61 | return mHeight;
62 | }
63 |
64 | /**
65 | * Returns the encoder's input surface.
66 | */
67 | public Surface getInputSurface() {
68 | return mSurface;
69 | }
70 |
71 | public void setInputSurface(Surface surface) {
72 | mSurface = surface;
73 | }
74 |
75 | @Override
76 | protected void prepare() throws IOException {
77 | if (DEBUG) Log.i(TAG, "prepare: ");
78 | mTrackIndex = -1;
79 | mMuxerStarted = mIsEOS = false;
80 |
81 | final MediaCodecInfo videoCodecInfo = selectVideoCodec(MIME_TYPE);
82 | if (videoCodecInfo == null) {
83 | Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
84 | return;
85 | }
86 | if (DEBUG) Log.i(TAG, "selected codec: " + videoCodecInfo.getName());
87 |
88 | final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
89 | format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); // API >= 18
90 | format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
91 | format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
92 | format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
93 | if (DEBUG) Log.i(TAG, "format: " + format);
94 |
95 | mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
96 | mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
97 | // get Surface for encoder input
98 | // this method only can call between #configure and #start
99 | mSurface = mMediaCodec.createInputSurface(); // API >= 18
100 | mMediaCodec.start();
101 | if (DEBUG) Log.i(TAG, "prepare finishing");
102 | if (mListener != null) {
103 | mListener.onPrepared(this);
104 | }
105 | }
106 |
107 | @Override
108 | protected void release() {
109 | if (DEBUG) Log.i(TAG, "release:");
110 | if (mSurface != null) {
111 | mSurface.release();
112 | mSurface = null;
113 | }
114 | super.release();
115 | }
116 |
117 | private int calcBitRate() {
118 | final int bitrate = (int) (BPP * FRAME_RATE * mWidth * mHeight / 2);
119 | Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
120 | return bitrate;
121 | }
122 |
123 | /**
124 | * select the first codec that match a specific MIME type
125 | *
126 | * @param mimeType
127 | * @return null if no codec matched
128 | */
129 | protected static final MediaCodecInfo selectVideoCodec(final String mimeType) {
130 | if (DEBUG) Log.v(TAG, "selectVideoCodec:");
131 |
132 | // get the list of available codecs
133 | final int numCodecs = MediaCodecList.getCodecCount();
134 | for (int i = 0; i < numCodecs; i++) {
135 | final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
136 |
137 | if (!codecInfo.isEncoder()) { // skipp decoder
138 | continue;
139 | }
140 | // select first codec that match a specific MIME type and color format
141 | final String[] types = codecInfo.getSupportedTypes();
142 | for (int j = 0; j < types.length; j++) {
143 | if (types[j].equalsIgnoreCase(mimeType)) {
144 | if (DEBUG) Log.i(TAG, "codec:" + codecInfo.getName() + ",MIME=" + types[j]);
145 | final int format = selectColorFormat(codecInfo, mimeType);
146 | if (format > 0) {
147 | return codecInfo;
148 | }
149 | }
150 | }
151 | }
152 | return null;
153 | }
154 |
155 | /**
156 | * select color format available on specific codec and we can use.
157 | *
158 | * @return 0 if no colorFormat is matched
159 | */
160 | protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
161 | if (DEBUG) Log.i(TAG, "selectColorFormat: ");
162 | int result = 0;
163 | final MediaCodecInfo.CodecCapabilities caps;
164 | try {
165 | Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
166 | caps = codecInfo.getCapabilitiesForType(mimeType);
167 | } finally {
168 | Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
169 | }
170 | int colorFormat;
171 | for (int i = 0; i < caps.colorFormats.length; i++) {
172 | colorFormat = caps.colorFormats[i];
173 | if (isRecognizedVideoFormat(colorFormat)) {
174 | if (result == 0)
175 | result = colorFormat;
176 | break;
177 | }
178 | }
179 | if (result == 0)
180 | Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
181 | return result;
182 | }
183 |
184 | /**
185 | * color formats that we can use in this class
186 | */
187 | protected static int[] recognizedFormats;
188 |
189 | static {
190 | recognizedFormats = new int[]{
191 | MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
192 | };
193 | }
194 |
195 | private static final boolean isRecognizedVideoFormat(final int colorFormat) {
196 | if (DEBUG) Log.i(TAG, "isRecognizedVideoFormat:colorFormat=" + colorFormat);
197 | final int n = recognizedFormats != null ? recognizedFormats.length : 0;
198 | for (int i = 0; i < n; i++) {
199 | if (recognizedFormats[i] == colorFormat) {
200 | return true;
201 | }
202 | }
203 | return false;
204 | }
205 |
206 | }
207 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/encoder/TextureEncoder.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.encoder;
2 |
3 | import android.content.Context;
4 | import android.graphics.SurfaceTexture;
5 | import android.opengl.EGLContext;
6 | import android.os.Handler;
7 |
8 | import java.io.File;
9 |
10 | public abstract class TextureEncoder {
11 |
12 | protected static final int MSG_START_RECORDING = 0;
13 | protected static final int MSG_STOP_RECORDING = 1;
14 | protected static final int MSG_FRAME_AVAILABLE = 2;
15 | protected static final int MSG_SET_TEXTURE_ID = 3;
16 | protected static final int MSG_UPDATE_SHARED_CONTEXT = 4;
17 | protected static final int MSG_QUIT = 5;
18 |
19 | protected Context mContext;
20 | protected Handler mUIHandler;
21 | protected MediaRecordListener mRecordListener;
22 |
23 | public TextureEncoder(Context context) {
24 | mContext = context;
25 | mUIHandler = new Handler(mContext.getMainLooper());
26 | }
27 |
28 | /**
29 | * Encoder configuration.
30 | *
31 | * Object is immutable, which means we can safely pass it between threads without
32 | * explicit synchronization (and don't need to worry about it getting tweaked out from
33 | * under us).
34 | *
35 | * TODO: make frame rate and iframe interval configurable? Maybe use builder pattern
36 | * with reasonable defaults for those and bit rate.
37 | */
38 | public static class EncoderConfig {
39 | final File mOutputFile;
40 | final int mWidth;
41 | final int mHeight;
42 | final int mBitRate;
43 | final EGLContext mEglContext;
44 |
45 | public EncoderConfig(File outputFile, int width, int height, int bitRate,
46 | EGLContext sharedEglContext) {
47 | mOutputFile = outputFile;
48 | mWidth = width;
49 | mHeight = height;
50 | mBitRate = bitRate;
51 | mEglContext = sharedEglContext;
52 | }
53 |
54 | @Override
55 | public String toString() {
56 | return "EncoderConfig: " + mWidth + "x" + mHeight + " @" + mBitRate +
57 | " to '" + mOutputFile.toString() + "' ctxt=" + mEglContext;
58 | }
59 | }
60 |
61 | public abstract void setRecordListener(MediaRecordListener recordListener);
62 |
63 | public abstract void startRecord(EncoderConfig config);
64 |
65 | public abstract void stopRecord();
66 |
67 | public abstract boolean isRecording();
68 |
69 | public abstract void updateSharedContext(EGLContext sharedContext);
70 |
71 | public abstract void frameAvailable(SurfaceTexture st);
72 |
73 | public abstract void setTextureId(int id);
74 | }
75 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/gles/EglSurfaceBase.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2013 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.android.xz.gles;
18 |
19 | import android.graphics.Bitmap;
20 | import android.opengl.EGL14;
21 | import android.opengl.EGLSurface;
22 | import android.opengl.GLES20;
23 | import android.util.Log;
24 |
25 | import java.io.BufferedOutputStream;
26 | import java.io.File;
27 | import java.io.FileOutputStream;
28 | import java.io.IOException;
29 | import java.nio.ByteBuffer;
30 | import java.nio.ByteOrder;
31 |
32 | /**
33 | * Common base class for EGL surfaces.
34 | *
35 | * There can be multiple surfaces associated with a single context.
36 | */
37 | public class EglSurfaceBase {
38 | protected static final String TAG = EglSurfaceBase.class.getSimpleName();
39 |
40 | // EglCore object we're associated with. It may be associated with multiple surfaces.
41 | protected EglCore mEglCore;
42 |
43 | private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
44 | private int mWidth = -1;
45 | private int mHeight = -1;
46 |
47 | protected EglSurfaceBase(EglCore eglCore) {
48 | mEglCore = eglCore;
49 | }
50 |
51 | /**
52 | * Creates a window surface.
53 | *
54 | * @param surface May be a Surface or SurfaceTexture.
55 | */
56 | public void createWindowSurface(Object surface) {
57 | if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
58 | throw new IllegalStateException("surface already created");
59 | }
60 | mEGLSurface = mEglCore.createWindowSurface(surface);
61 |
62 | // Don't cache width/height here, because the size of the underlying surface can change
63 | // out from under us (see e.g. HardwareScalerActivity).
64 | //mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
65 | //mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
66 | }
67 |
68 | /**
69 | * Creates an off-screen surface.
70 | */
71 | public void createOffscreenSurface(int width, int height) {
72 | if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
73 | throw new IllegalStateException("surface already created");
74 | }
75 | mEGLSurface = mEglCore.createOffscreenSurface(width, height);
76 | mWidth = width;
77 | mHeight = height;
78 | }
79 |
80 | /**
81 | * Returns the surface's width, in pixels.
82 | *
83 | * If this is called on a window surface, and the underlying surface is in the process
84 | * of changing size, we may not see the new size right away (e.g. in the "surfaceChanged"
85 | * callback). The size should match after the next buffer swap.
86 | */
87 | public int getWidth() {
88 | if (mWidth < 0) {
89 | return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
90 | } else {
91 | return mWidth;
92 | }
93 | }
94 |
95 | /**
96 | * Returns the surface's height, in pixels.
97 | */
98 | public int getHeight() {
99 | if (mHeight < 0) {
100 | return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
101 | } else {
102 | return mHeight;
103 | }
104 | }
105 |
106 | /**
107 | * Release the EGL surface.
108 | */
109 | public void releaseEglSurface() {
110 | mEglCore.releaseSurface(mEGLSurface);
111 | mEGLSurface = EGL14.EGL_NO_SURFACE;
112 | mWidth = mHeight = -1;
113 | }
114 |
115 | /**
116 | * Makes our EGL context and surface current.
117 | */
118 | public void makeCurrent() {
119 | mEglCore.makeCurrent(mEGLSurface);
120 | }
121 |
122 | /**
123 | * Makes our EGL context and surface current for drawing, using the supplied surface
124 | * for reading.
125 | */
126 | public void makeCurrentReadFrom(EglSurfaceBase readSurface) {
127 | mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface);
128 | }
129 |
130 | /**
131 | * Calls eglSwapBuffers. Use this to "publish" the current frame.
132 | *
133 | * @return false on failure
134 | */
135 | public boolean swapBuffers() {
136 | boolean result = mEglCore.swapBuffers(mEGLSurface);
137 | if (!result) {
138 | Log.d(TAG, "WARNING: swapBuffers() failed");
139 | }
140 | return result;
141 | }
142 |
143 | /**
144 | * Sends the presentation time stamp to EGL.
145 | *
146 | * @param nsecs Timestamp, in nanoseconds.
147 | */
148 | public void setPresentationTime(long nsecs) {
149 | mEglCore.setPresentationTime(mEGLSurface, nsecs);
150 | }
151 |
152 | /**
153 | * Saves the EGL surface to a file.
154 | *
155 | * Expects that this object's EGL surface is current.
156 | */
157 | public void saveFrame(File file) throws IOException {
158 | if (!mEglCore.isCurrent(mEGLSurface)) {
159 | throw new RuntimeException("Expected EGL context/surface is not current");
160 | }
161 |
162 | // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
163 | // data (i.e. a byte of red, followed by a byte of green...). While the Bitmap
164 | // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
165 | // Bitmap "copy pixels" method wants the same format GL provides.
166 | //
167 | // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
168 | // here often.
169 | //
170 | // Making this even more interesting is the upside-down nature of GL, which means
171 | // our output will look upside down relative to what appears on screen if the
172 | // typical GL conventions are used.
173 |
174 | String filename = file.toString();
175 |
176 | int width = getWidth();
177 | int height = getHeight();
178 | ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
179 | buf.order(ByteOrder.LITTLE_ENDIAN);
180 | GLES20.glReadPixels(0, 0, width, height,
181 | GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
182 | GLESUtils.checkGlError("glReadPixels");
183 | buf.rewind();
184 |
185 | BufferedOutputStream bos = null;
186 | try {
187 | bos = new BufferedOutputStream(new FileOutputStream(filename));
188 | Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
189 | bmp.copyPixelsFromBuffer(buf);
190 | bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
191 | bmp.recycle();
192 | } finally {
193 | if (bos != null) bos.close();
194 | }
195 | Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
196 | }
197 | }
198 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/gles/MatrixUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | *
3 | * FastDrawerHelper.java
4 | *
5 | * Created by Wuwang on 2016/11/17
6 | * Copyright © 2016年 深圳哎吖科技. All rights reserved.
7 | */
8 | package com.android.xz.gles;
9 |
10 | import android.graphics.Bitmap;
11 | import android.opengl.Matrix;
12 |
13 | /**
14 | * Description:
15 | */
16 | public class MatrixUtils {
17 |
18 | public static final int TYPE_FITXY = 0;
19 | public static final int TYPE_CENTERCROP = 1;
20 | public static final int TYPE_CENTERINSIDE = 2;
21 | public static final int TYPE_FITSTART = 3;
22 | public static final int TYPE_FITEND = 4;
23 |
24 | MatrixUtils() {
25 |
26 | }
27 |
28 | /**
29 | * use {@link #getMatrix} instead
30 | */
31 | @Deprecated
32 | public static void getShowMatrix(float[] matrix, int imgWidth, int imgHeight, int viewWidth, int
33 | viewHeight) {
34 | if (imgHeight > 0 && imgWidth > 0 && viewWidth > 0 && viewHeight > 0) {
35 | float sWhView = (float) viewWidth / viewHeight;
36 | float sWhImg = (float) imgWidth / imgHeight;
37 | float[] projection = new float[16];
38 | float[] camera = new float[16];
39 | if (sWhImg > sWhView) {
40 | Matrix.orthoM(projection, 0, -sWhView / sWhImg, sWhView / sWhImg, -1, 1, 1, 3);
41 | } else {
42 | Matrix.orthoM(projection, 0, -1, 1, -sWhImg / sWhView, sWhImg / sWhView, 1, 3);
43 | }
44 | Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
45 | Matrix.multiplyMM(matrix, 0, projection, 0, camera, 0);
46 | }
47 | }
48 |
49 | public static void getMatrix(float[] matrix, int type, int imgWidth, int imgHeight, int viewWidth,
50 | int viewHeight) {
51 | if (imgHeight > 0 && imgWidth > 0 && viewWidth > 0 && viewHeight > 0) {
52 | float[] projection = new float[16];
53 | float[] camera = new float[16];
54 | if (type == TYPE_FITXY) {
55 | Matrix.orthoM(projection, 0, -1, 1, -1, 1, 1, 3);
56 | Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
57 | Matrix.multiplyMM(matrix, 0, projection, 0, camera, 0);
58 | }
59 | float sWhView = (float) viewWidth / viewHeight;
60 | float sWhImg = (float) imgWidth / imgHeight;
61 | if (sWhImg > sWhView) {
62 | switch (type) {
63 | case TYPE_CENTERCROP:
64 | Matrix.orthoM(projection, 0, -sWhView / sWhImg, sWhView / sWhImg, -1, 1, 1, 3);
65 | break;
66 | case TYPE_CENTERINSIDE:
67 | Matrix.orthoM(projection, 0, -1, 1, -sWhImg / sWhView, sWhImg / sWhView, 1, 3);
68 | break;
69 | case TYPE_FITSTART:
70 | Matrix.orthoM(projection, 0, -1, 1, 1 - 2 * sWhImg / sWhView, 1, 1, 3);
71 | break;
72 | case TYPE_FITEND:
73 | Matrix.orthoM(projection, 0, -1, 1, -1, 2 * sWhImg / sWhView - 1, 1, 3);
74 | break;
75 | }
76 | } else {
77 | switch (type) {
78 | case TYPE_CENTERCROP:
79 | Matrix.orthoM(projection, 0, -1, 1, -sWhImg / sWhView, sWhImg / sWhView, 1, 3);
80 | break;
81 | case TYPE_CENTERINSIDE:
82 | Matrix.orthoM(projection, 0, -sWhView / sWhImg, sWhView / sWhImg, -1, 1, 1, 3);
83 | break;
84 | case TYPE_FITSTART:
85 | Matrix.orthoM(projection, 0, -1, 2 * sWhView / sWhImg - 1, -1, 1, 1, 3);
86 | break;
87 | case TYPE_FITEND:
88 | Matrix.orthoM(projection, 0, 1 - 2 * sWhView / sWhImg, 1, -1, 1, 1, 3);
89 | break;
90 | }
91 | }
92 | Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
93 | Matrix.multiplyMM(matrix, 0, projection, 0, camera, 0);
94 | }
95 | }
96 |
97 | public static void getCenterInsideMatrix(float[] matrix, int imgWidth, int imgHeight, int viewWidth, int
98 | viewHeight) {
99 | if (imgHeight > 0 && imgWidth > 0 && viewWidth > 0 && viewHeight > 0) {
100 | float sWhView = (float) viewWidth / viewHeight;
101 | float sWhImg = (float) imgWidth / imgHeight;
102 | float[] projection = new float[16];
103 | float[] camera = new float[16];
104 | if (sWhImg > sWhView) {
105 | Matrix.orthoM(projection, 0, -1, 1, -sWhImg / sWhView, sWhImg / sWhView, 1, 3);
106 | } else {
107 | Matrix.orthoM(projection, 0, -sWhView / sWhImg, sWhView / sWhImg, -1, 1, 1, 3);
108 | }
109 | Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
110 | Matrix.multiplyMM(matrix, 0, projection, 0, camera, 0);
111 | }
112 | }
113 |
114 | public static float[] rotate(float[] m, float angle) {
115 | Matrix.rotateM(m, 0, angle, 0, 0, 1);
116 | return m;
117 | }
118 |
119 | public static float[] flip(float[] m, boolean x, boolean y) {
120 | if (x || y) {
121 | Matrix.scaleM(m, 0, x ? -1 : 1, y ? -1 : 1, 1);
122 | }
123 | return m;
124 | }
125 |
126 | public static float[] scale(float[] m, float x, float y) {
127 | Matrix.scaleM(m, 0, x, y, 1);
128 | return m;
129 | }
130 |
131 | public static float[] getOriginalMatrix() {
132 | float[] identityMatrix = new float[16];
133 | Matrix.setIdentityM(identityMatrix, 0);
134 | return identityMatrix;
135 | }
136 |
137 | public static float[] calculateMatrixForBitmap(Bitmap bitmap, int width, int height) {
138 | Bitmap mBitmap = bitmap;
139 | int w = mBitmap.getWidth();
140 | int h = mBitmap.getHeight();
141 | float sWH = w / (float) h;
142 | float sWidthHeight = width / (float) height;
143 | // uXY=sWidthHeight;
144 | float[] mViewMatrix = new float[16];
145 | float[] mProjectMatrix = new float[16];
146 | float[] mModelMatrix = new float[16];
147 | if (width > height) {
148 | if (sWH > sWidthHeight) {
149 | Matrix.orthoM(mProjectMatrix, 0, -sWidthHeight * sWH, sWidthHeight * sWH, -1, 1, 3, 5);
150 | } else {
151 | Matrix.orthoM(mProjectMatrix, 0, -sWidthHeight / sWH, sWidthHeight / sWH, -1, 1, 3, 5);
152 | }
153 | } else {
154 | if (sWH > sWidthHeight) {
155 | Matrix.orthoM(mProjectMatrix, 0, -1, 1, -1 / sWidthHeight * sWH, 1 / sWidthHeight * sWH, 3, 5);
156 | } else {
157 | Matrix.orthoM(mProjectMatrix, 0, -1, 1, -sWH / sWidthHeight, sWH / sWidthHeight, 3, 5);
158 | }
159 | }
160 | //设置相机位置
161 | Matrix.setLookAtM(mViewMatrix, 0, 0, 0, 5.0f, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
162 | //计算变换矩阵
163 | Matrix.multiplyMM(mModelMatrix, 0, mProjectMatrix, 0, mViewMatrix, 0);
164 | return mModelMatrix;
165 | }
166 | }
167 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/gles/WindowSurface.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2013 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.android.xz.gles;
18 |
19 | import android.graphics.SurfaceTexture;
20 | import android.view.Surface;
21 |
22 | /**
23 | * Recordable EGL window surface.
24 | *
25 | * It's good practice to explicitly release() the surface, preferably from a "finally" block.
26 | */
27 | public class WindowSurface extends EglSurfaceBase {
28 | private Surface mSurface;
29 | private boolean mReleaseSurface;
30 |
31 | /**
32 | * Associates an EGL surface with the native window surface.
33 | *
34 | * Set releaseSurface to true if you want the Surface to be released when release() is
35 | * called. This is convenient, but can interfere with framework classes that expect to
36 | * manage the Surface themselves (e.g. if you release a SurfaceView's Surface, the
37 | * surfaceDestroyed() callback won't fire).
38 | */
39 | public WindowSurface(EglCore eglCore, Surface surface, boolean releaseSurface) {
40 | super(eglCore);
41 | createWindowSurface(surface);
42 | mSurface = surface;
43 | mReleaseSurface = releaseSurface;
44 | }
45 |
46 | /**
47 | * Associates an EGL surface with the SurfaceTexture.
48 | */
49 | public WindowSurface(EglCore eglCore, SurfaceTexture surfaceTexture) {
50 | super(eglCore);
51 | createWindowSurface(surfaceTexture);
52 | }
53 |
54 | /**
55 | * Releases any resources associated with the EGL surface (and, if configured to do so,
56 | * with the Surface as well).
57 | *
58 | * Does not require that the surface's EGL context be current.
59 | */
60 | public void release() {
61 | releaseEglSurface();
62 | if (mSurface != null) {
63 | if (mReleaseSurface) {
64 | mSurface.release();
65 | }
66 | mSurface = null;
67 | }
68 | }
69 |
70 | /**
71 | * Recreate the EGLSurface, using the new EglBase. The caller should have already
72 | * freed the old EGLSurface with releaseEglSurface().
73 | *
74 | * This is useful when we want to update the EGLSurface associated with a Surface.
75 | * For example, if we want to share with a different EGLContext, which can only
76 | * be done by tearing down and recreating the context. (That's handled by the caller;
77 | * this just creates a new EGLSurface for the Surface we were handed earlier.)
78 | *
79 | * If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a
80 | * context somewhere, the create call will fail with complaints from the Surface
81 | * about already being connected.
82 | */
83 | public void recreate(EglCore newEglCore) {
84 | if (mSurface == null) {
85 | throw new RuntimeException("not yet implemented for SurfaceTexture");
86 | }
87 | mEglCore = newEglCore; // switch to new context
88 | createWindowSurface(mSurface); // create new surface
89 | }
90 | }
91 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/gles/filiter/AFilter.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.gles.filiter;
2 |
/**
 * Minimal contract for an OpenGL ES drawing filter.
 */
public interface AFilter {

    /** Called once when the GL surface is created; compile shaders and allocate GL resources here. */
    void surfaceCreated();

    /**
     * Called when the surface size changes.
     *
     * @param width  new surface width in pixels
     * @param height new surface height in pixels
     */
    void surfaceChanged(int width, int height);

    /**
     * Draws the given texture.
     *
     * @param textureId GL texture name to draw
     * @param matrix    4x4 transform matrix (usage is implementation-defined)
     * @return a texture id — meaning depends on the implementation
     */
    int draw(int textureId, float[] matrix);

    /** Frees GL resources held by this filter. */
    void release();
}
13 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/gles/filiter/Texture2DFilter.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.gles.filiter;
2 |
3 | import android.opengl.GLES20;
4 |
5 | import com.android.xz.gles.GLESUtils;
6 |
7 | import java.nio.ByteBuffer;
8 | import java.nio.ByteOrder;
9 | import java.nio.FloatBuffer;
10 |
11 | /**
12 | * 将离屏渲染的数据绘制到屏幕中
13 | */
14 | public class Texture2DFilter implements AFilter {
15 |
16 | /**
17 | * 绘制的流程
18 | * 1.顶点着色程序 - 用于渲染形状的顶点的 OpenGL ES 图形代码
19 | * 2.片段着色器 - 用于渲染具有特定颜色或形状的 OpenGL ES 代码 纹理。
20 | * 3.程序 - 包含您想要用于绘制的着色器的 OpenGL ES 对象 一个或多个形状
21 | *
22 | * 您至少需要一个顶点着色器来绘制形状,以及一个 片段着色器来为该形状着色。
23 | * 这些着色器必须经过编译,然后添加到 OpenGL ES 程序中,该程序随后用于绘制 形状。
24 | */
25 |
26 | // 顶点着色器代码
27 | private final String vertexShaderCode =
28 | // This matrix member variable provides a hook to manipulate
29 | // the coordinates of the objects that use this vertex shader
30 | "attribute vec4 vPosition;\n" +
31 | "attribute vec2 vTexCoordinate;\n" +
32 | "varying vec2 aTexCoordinate;\n" +
33 | "void main() {\n" +
34 | // the matrix must be included as a modifier of gl_Position
35 | // Note that the uMVPMatrix factor *must be first* in order
36 | // for the matrix multiplication product to be correct.
37 | " gl_Position = vPosition;\n" +
38 | " aTexCoordinate = vTexCoordinate;\n" +
39 | "}";
40 |
41 | // 片段着色器代码
42 | private final String fragmentShaderCode =
43 | "precision mediump float;\n" +
44 | "uniform sampler2D vTexture;\n" +
45 | "varying vec2 aTexCoordinate;\n" +
46 | "void main() {\n" +
47 | " gl_FragColor = texture2D(vTexture, aTexCoordinate);\n" +
48 | "}\n";
49 |
50 | /**
51 | * OpenGL程序句柄
52 | */
53 | private int mProgram;
54 |
55 | /**
56 | * 顶点坐标缓冲区
57 | */
58 | private FloatBuffer mVertexBuffer;
59 | /**
60 | * 纹理坐标缓冲区
61 | */
62 | private FloatBuffer mTextureBuffer;
63 |
64 | /**
65 | * 此数组中每个顶点的坐标数
66 | */
67 | static final int COORDS_PER_VERTEX = 2;
68 |
69 | /**
70 | * 顶点坐标数组
71 | * 顶点坐标系中原点(0,0)在画布中心
72 | * 向左为x轴正方向
73 | * 向上为y轴正方向
74 | * 画布四个角坐标如下:
75 | * (-1, 1),(1, 1)
76 | * (-1,-1),(1,-1)
77 | */
78 | private float vertexCoords[] = {
79 | -1.0f, 1.0f, // 左上
80 | -1.0f, -1.0f, // 左下
81 | 1.0f, 1.0f, // 右上
82 | 1.0f, -1.0f // 右下
83 | };
84 |
85 | /**
86 | * 纹理坐标数组
87 | * 这里我们需要注意纹理坐标系,原点(0,0s)在画布左下角
88 | * 向左为x轴正方向
89 | * 向上为y轴正方向
90 | * 画布四个角坐标如下:
91 | * (0,1),(1,1)
92 | * (0,0),(1,0)
93 | */
94 | private float textureCoords[] = {
95 | 0.0f, 1.0f, // 左上
96 | 0.0f, 0.0f, // 左下
97 | 1.0f, 1.0f, // 右上
98 | 1.0f, 0.0f, // 右下
99 | };
100 |
101 | /**
102 | * 顶点坐标句柄
103 | */
104 | private int mPositionHandle;
105 | /**
106 | * 纹理坐标句柄
107 | */
108 | private int mTexCoordinateHandle;
109 | /**
110 | * 纹理贴图句柄
111 | */
112 | private int mTexHandle;
113 |
114 | private final int vertexCount = vertexCoords.length / COORDS_PER_VERTEX;
115 | private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
116 |
public Texture2DFilter() {
    // Allocate a direct, native-order float buffer for the vertex coordinates
    mVertexBuffer = ByteBuffer.allocateDirect(vertexCoords.length * 4)
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer()
            .put(vertexCoords);
    mVertexBuffer.position(0);

    // Allocate a direct, native-order float buffer for the texture coordinates
    mTextureBuffer = ByteBuffer.allocateDirect(textureCoords.length * 4)
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer()
            .put(textureCoords);
    mTextureBuffer.position(0);
}
132 |
133 | @Override
134 | public void surfaceCreated() {
135 | // 创建OpenGLES程序
136 | mProgram = GLESUtils.createProgram(vertexShaderCode, fragmentShaderCode);
137 |
138 | // 获取顶点着色器vPosition成员的句柄
139 | mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
140 | // 获取顶点着色器中纹理坐标的句柄
141 | mTexCoordinateHandle = GLES20.glGetAttribLocation(mProgram, "vTexCoordinate");
142 | // 获取Texture句柄
143 | mTexHandle = GLES20.glGetUniformLocation(mProgram, "vTexture");
144 | }
145 |
    @Override
    public void surfaceChanged(int width, int height) {
        // Match the GL viewport to the new surface size so the full-screen
        // quad in NDC covers the entire surface.
        GLES20.glViewport(0, 0, width, height);
    }
150 |
    /**
     * Draws the given 2D texture as a full-viewport quad.
     * <p>
     * NOTE(review): the {@code matrix} parameter is not used by this
     * implementation; the quad is rendered with the shader's pass-through
     * transform. Must be called on the GL thread with a current context.
     *
     * @param textureId GL_TEXTURE_2D texture name to render
     * @param matrix    transform matrix (unused here)
     * @return the same {@code textureId} that was passed in
     */
    @Override
    public int draw(int textureId, float[] matrix) {
        // Install the program into the current GL context.
        GLES20.glUseProgram(mProgram);
        GLESUtils.checkGlError("glUseProgram");

        // Enable and feed the vertex position attribute.
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLESUtils.checkGlError("glEnableVertexAttribArray");
        // Point it at the client-side vertex buffer.
        GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, mVertexBuffer);
        GLESUtils.checkGlError("glVertexAttribPointer");

        // Enable and feed the texture coordinate attribute.
        GLES20.glEnableVertexAttribArray(mTexCoordinateHandle);
        GLESUtils.checkGlError("glEnableVertexAttribArray");
        // Point it at the client-side texture coordinate buffer.
        GLES20.glVertexAttribPointer(mTexCoordinateHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, mTextureBuffer);
        GLESUtils.checkGlError("glVertexAttribPointer");

        // Select texture unit 0 ...
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        // ... bind the input texture to it ...
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        // ... and tell the sampler to read from that same unit number.
        GLES20.glUniform1i(mTexHandle, 0);

        // Draw the quad as a 4-vertex triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLESUtils.checkGlError("glDrawArrays");

        // Unbind the texture to leave a clean GL state.
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);

        // Disable the vertex attribute arrays.
        GLES20.glDisableVertexAttribArray(mPositionHandle);
        GLES20.glDisableVertexAttribArray(mTexCoordinateHandle);

        return textureId;
    }
191 |
192 | @Override
193 | public void release() {
194 | GLES20.glDeleteProgram(mProgram);
195 | mProgram = -1;
196 | }
197 | }
198 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/permission/IPermissionsResult.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.permission;
2 |
/**
 * Callback for the result of a runtime-permission request.
 *
 * @author xiaozhi
 * @since 2023/5/16
 */
public interface IPermissionsResult {

    /**
     * Invoked when the permission request succeeded (permissions granted).
     */
    void passPermissions();

    /**
     * Invoked when the permission request was denied.
     */
    void forbidPermissions();
}
21 |
--------------------------------------------------------------------------------
/lib-camera/src/main/java/com/android/xz/util/YUVUtils.java:
--------------------------------------------------------------------------------
1 | package com.android.xz.util;
2 |
/**
 * YUV pixel-format conversion helpers.
 * <p>
 * Plane layouts handled here (for a width*height frame, chroma subsampled 4:2:0):
 * NV21 = Y plane + interleaved VU; NV12 = Y plane + interleaved UV;
 * I420 (yuv420p) = Y + U plane + V plane; YV12 = Y + V plane + U plane.
 * All buffers are expected to be at least width*height*3/2 bytes and
 * width/height are assumed even — TODO confirm with callers.
 */
public class YUVUtils {

    static {
        // Native conversions are backed by libyuv (libs/<abi>/libyuv.so).
        System.loadLibrary("yuv");
    }

    public static native int nativeNV21ToRGB24(byte[] yuvBytes, byte[] rgb24Bytes, int[] hw, int orientation);

    public static native int nativeNV21ToBGR24(byte[] yuvBytes, byte[] rgb24Bytes, int[] hw, int orientation);

    public native static void nativeNV21ToI420(byte[] src, int width, int height, byte[] dst);

    public native static void nativeNV21ScaleToI420(byte[] src, int width, int height, byte[] dst, int dstW, int dstH);

    public native static void nativeI420ToNV12(byte[] src, int width, int height, byte[] dst);

    public native static void nativeI420ToNV21(byte[] src, int width, int height, byte[] dst);

    public native static void nativeNV21ToNV12(byte[] src, int width, int height, byte[] dst);

    /**
     * Converts NV21 to YUV420 planar (I420): YYYYYYYY VUVU -> YYYYYYYY UU VV.
     *
     * @param src    NV21 source frame
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @return newly allocated I420 buffer
     */
    public static byte[] nv21ToYuv420p(byte[] src, int width, int height) {
        int yLength = width * height;
        int uLength = yLength / 4;
        int vLength = yLength / 4;
        byte[] yuv420p = new byte[yLength + uLength + vLength];
        // Y plane is identical in both layouts.
        System.arraycopy(src, 0, yuv420p, 0, yLength);
        for (int i = 0; i < uLength; i++) {
            // NV21 interleaves V first, then U.
            yuv420p[yLength + i] = src[yLength + 2 * i + 1];       // U plane
            yuv420p[yLength + uLength + i] = src[yLength + 2 * i]; // V plane
        }
        return yuv420p;
    }

    /**
     * Converts NV21 to YV12 (YUV420 planar, V plane before U plane):
     * YYYYYYYY VUVU -> YYYYYYYY VV UU.
     * (The previous javadoc claimed the output was I420; the plane order
     * produced here is YV12.)
     *
     * @param src    NV21 source frame
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @return newly allocated YV12 buffer
     */
    public static byte[] nv21ToYV12(byte[] src, int width, int height) {
        int yLength = width * height;
        int uLength = yLength / 4;
        int vLength = yLength / 4;
        byte[] yv12 = new byte[yLength + uLength + vLength];
        // Y plane is identical in both layouts.
        System.arraycopy(src, 0, yv12, 0, yLength);
        for (int i = 0; i < uLength; i++) {
            yv12[yLength + i] = src[yLength + 2 * i];               // V plane
            yv12[yLength + vLength + i] = src[yLength + 2 * i + 1]; // U plane
        }
        return yv12;
    }

    /**
     * Converts YUV420 planar (I420) to NV21: YYYYYYYY UU VV -> YYYYYYYY VUVU.
     *
     * @param src    I420 source frame
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @return newly allocated NV21 buffer
     */
    public static byte[] yuv420pToNV21(byte[] src, int width, int height) {
        int yLength = width * height;
        int uLength = yLength / 4;
        int vLength = yLength / 4;
        byte[] nv21 = new byte[yLength + uLength + vLength];
        System.arraycopy(src, 0, nv21, 0, yLength); // Y plane
        for (int i = 0; i < uLength; i++) {
            nv21[yLength + 2 * i + 1] = src[yLength + i];       // U component
            nv21[yLength + 2 * i] = src[yLength + uLength + i]; // V component
        }
        return nv21;
    }

    /**
     * Converts YUV420 planar (I420) to NV12: YYYYYYYY UU VV -> YYYYYYYY UVUV.
     *
     * @param src    I420 source frame
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @return newly allocated NV12 buffer
     */
    public static byte[] yuv420pToNV12(byte[] src, int width, int height) {
        int yLength = width * height;
        int uLength = yLength / 4;
        int vLength = yLength / 4;
        byte[] nv12 = new byte[yLength + uLength + vLength];
        System.arraycopy(src, 0, nv12, 0, yLength); // Y plane
        for (int i = 0; i < uLength; i++) {
            nv12[yLength + 2 * i] = src[yLength + i];               // U component
            nv12[yLength + 2 * i + 1] = src[yLength + uLength + i]; // V component
        }
        return nv12;
    }

    /**
     * Converts NV21 to YUV420 semi-planar with swapped chroma order (NV12),
     * writing into {@code dst}: YYYYYYYY VUVU -> YYYYYYYY UVUV.
     * (The previous javadoc carried a {@code @return} tag on this void method;
     * the unused locals {@code vLength}/{@code frameSize} were also removed.)
     *
     * @param src    NV21 source frame
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @param dst    destination buffer, at least width*height*3/2 bytes
     */
    public static void _nv21ToYuv420sp(byte[] src, int width, int height, byte[] dst) {
        int yLength = width * height;
        int uvPairs = yLength >> 2;
        // Y plane is identical in both layouts.
        System.arraycopy(src, 0, dst, 0, yLength);
        for (int i = 0; i < uvPairs; i++) {
            // Swap each VU byte pair to UV.
            dst[yLength + 2 * i] = src[yLength + 2 * i + 1];     // U component
            dst[yLength + 2 * i + 1] = src[yLength + 2 * i];     // V component
        }
    }

    /**
     * Converts YUV420 semi-planar (NV12) to NV21:
     * YYYYYYYY UVUV (yuv420sp) -> YYYYYYYY VUVU (nv21).
     *
     * @param src    NV12 source frame
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @return newly allocated NV21 buffer
     */
    public static byte[] yuv420spToNV21(byte[] src, int width, int height) {
        int yLength = width * height;
        int uLength = yLength / 4;
        int vLength = yLength / 4;
        byte[] nv21 = new byte[yLength + uLength + vLength];
        // Y plane is identical in both layouts.
        System.arraycopy(src, 0, nv21, 0, yLength);
        for (int i = 0; i < uLength; i++) {
            // Swap each UV byte pair to VU.
            nv21[yLength + 2 * i + 1] = src[yLength + 2 * i]; // U component
            nv21[yLength + 2 * i] = src[yLength + 2 * i + 1]; // V component
        }
        return nv21;
    }

    /**
     * Converts YV12 to NV21: YYYYYYYY VV UU -> YYYYYYYY VUVU.
     *
     * @param src    YV12 source frame
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @return newly allocated NV21 buffer
     */
    public static byte[] yv12ToNV21(byte[] src, int width, int height) {
        int yLength = width * height;
        int uLength = yLength / 4;
        int vLength = yLength / 4;
        byte[] nv21 = new byte[yLength + uLength + vLength];
        System.arraycopy(src, 0, nv21, 0, yLength); // Y plane
        for (int i = 0; i < uLength; i++) {
            nv21[yLength + 2 * i + 1] = src[yLength + vLength + i]; // U component
            nv21[yLength + 2 * i] = src[yLength + i];               // V component
        }
        return nv21;
    }
}
195 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
// Repositories used to resolve Gradle plugins themselves.
pluginManagement {
    repositories {
        google()
        mavenCentral()
        gradlePluginPortal()
    }
}
// Centralized dependency repositories for all modules; FAIL_ON_PROJECT_REPOS
// makes the build fail if a module declares its own repositories block.
dependencyResolutionManagement {
    repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
    repositories {
        google()
        mavenCentral()
    }
}
rootProject.name = "AndroidCamera"
// Modules included in this build.
include ':app'
include ':lib-camera'
18 |
--------------------------------------------------------------------------------