├── .DS_Store
├── .gitignore
├── README.MD
├── app
│   ├── .DS_Store
│   ├── .gitignore
│   ├── build.gradle
│   ├── proguard-rules.pro
│   └── src
│       └── main
│           ├── AndroidManifest.xml
│           ├── java
│           │   └── com
│           │       └── demo
│           │           └── mediacodec
│           │               ├── AspectRatioFrameLayout.java
│           │               ├── BaseActivity.java
│           │               ├── DecodersInfoActivity.java
│           │               ├── EncodersInfoActivity.java
│           │               ├── MainActivity.java
│           │               ├── MediaCodecUtils.java
│           │               ├── decode
│           │               │   └── DecodePlayActivity.java
│           │               └── transcode
│           │                   ├── GLUtils.java
│           │                   ├── InputSurface.java
│           │                   ├── NoSupportMediaCodecException.java
│           │                   ├── OutputSurface.java
│           │                   ├── TextureRender.java
│           │                   ├── TranscodeActivity.java
│           │                   ├── TranscodeConfig.java
│           │                   ├── TranscodeRunner.java
│           │                   └── VideoOutputConfig.java
│           └── res
│               ├── drawable-v24
│               │   └── ic_launcher_foreground.xml
│               ├── drawable
│               │   └── ic_launcher_background.xml
│               ├── layout
│               │   ├── activity_decode_play.xml
│               │   ├── activity_decoders_info.xml
│               │   ├── activity_encoders_info.xml
│               │   ├── activity_main.xml
│               │   └── activity_transcode.xml
│               ├── mipmap-anydpi-v26
│               │   ├── ic_launcher.xml
│               │   └── ic_launcher_round.xml
│               ├── mipmap-hdpi
│               │   ├── ic_launcher.webp
│               │   └── ic_launcher_round.webp
│               ├── mipmap-mdpi
│               │   ├── ic_launcher.webp
│               │   └── ic_launcher_round.webp
│               ├── mipmap-xhdpi
│               │   ├── ic_launcher.webp
│               │   └── ic_launcher_round.webp
│               ├── mipmap-xxhdpi
│               │   ├── ic_launcher.webp
│               │   └── ic_launcher_round.webp
│               ├── mipmap-xxxhdpi
│               │   ├── ic_launcher.webp
│               │   └── ic_launcher_round.webp
│               ├── values-night
│               │   └── themes.xml
│               ├── values
│               │   ├── colors.xml
│               │   ├── strings.xml
│               │   └── themes.xml
│               └── xml
│                   ├── backup_rules.xml
│                   ├── data_extraction_rules.xml
│                   └── file_paths_public.xml
├── build.gradle
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/joechan-cq/MediaCodecExample/3dd6485e17bc29ffc7c01c47f842ca61e4c12b78/.DS_Store
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Built application files
2 | *.apk
3 | *.aar
4 | *.ap_
5 | *.aab
6 |
7 | # Files for the ART/Dalvik VM
8 | *.dex
9 |
10 | # Java class files
11 | *.class
12 |
13 | .idea
14 | # Generated files
15 | bin/
16 | gen/
17 | out/
18 | # Uncomment the following line in case you need and you don't have the release build type files in your app
19 | # release/
20 |
21 | # Gradle files
22 | .gradle/
23 | build/
24 |
25 | # Local configuration file (sdk path, etc)
26 | local.properties
27 |
28 | # Proguard folder generated by Eclipse
29 | proguard/
30 |
31 | # Log Files
32 | *.log
33 |
34 | # Android Studio Navigation editor temp files
35 | .navigation/
36 |
37 | # Android Studio captures folder
38 | captures/
39 |
40 | # IntelliJ
41 | *.iml
42 | .idea/workspace.xml
43 | .idea/tasks.xml
44 | .idea/gradle.xml
45 | .idea/assetWizardSettings.xml
46 | .idea/dictionaries
47 | .idea/libraries
48 | # Android Studio 3 in .gitignore file.
49 | .idea/caches
50 | .idea/modules.xml
51 | # Comment next line if keeping position of elements in Navigation Editor is relevant for you
52 | .idea/navEditor.xml
53 |
54 | # Keystore files
55 | # Uncomment the following lines if you do not want to check your keystore files in.
56 | #*.jks
57 | #*.keystore
58 |
59 | # External native build folder generated in Android Studio 2.2 and later
60 | .externalNativeBuild
61 | .cxx/
62 |
63 | # Google Services (e.g. APIs or Firebase)
64 | # google-services.json
65 |
66 | # Freeline
67 | freeline.py
68 | freeline/
69 | freeline_project_description.json
70 |
71 | # fastlane
72 | fastlane/report.xml
73 | fastlane/Preview.html
74 | fastlane/screenshots
75 | fastlane/test_output
76 | fastlane/readme.md
77 |
78 | # Version control
79 | vcs.xml
80 |
81 | # lint
82 | lint/intermediates/
83 | lint/generated/
84 | lint/outputs/
85 | lint/tmp/
86 | # lint/reports/
87 |
--------------------------------------------------------------------------------
/README.MD:
--------------------------------------------------------------------------------
1 | # MediaCodecExample
2 |
3 | A simple demo of how to use the MediaCodec APIs.
4 |
5 | It covers:
6 |
7 | 1. Listing all encoders;
8 |
9 | 2. Listing all decoders;
10 |
11 | 3. Decoding and playing a video (no audio playback);
12 |
13 | 4. Hardware transcoding/compression of a video (HDR can be preserved; only the video track is kept).
14 |
15 |
16 |
--------------------------------------------------------------------------------
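
The two codec-listing screens below (DecodersInfoActivity and EncodersInfoActivity) are built on MediaCodecList. As a quick orientation, here is a minimal sketch of that enumeration using only android.media APIs; the class name CodecLister is illustrative and not part of this repo:

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import java.util.Arrays;

// Minimal sketch: enumerate every codec on the device and report whether it
// encodes or decodes, plus the MIME types it supports.
public final class CodecLister {
    public static String listAll() {
        StringBuilder s = new StringBuilder();
        MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
        for (MediaCodecInfo info : list.getCodecInfos()) {
            s.append(info.isEncoder() ? "encoder " : "decoder ")
                    .append(info.getName())
                    .append(" types=")
                    .append(Arrays.toString(info.getSupportedTypes()))
                    .append('\n');
        }
        return s.toString();
    }
}
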
/app/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/joechan-cq/MediaCodecExample/3dd6485e17bc29ffc7c01c47f842ca61e4c12b78/app/.DS_Store
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | plugins {
2 | id 'com.android.application'
3 | }
4 |
5 | android {
6 | namespace 'com.demo.mediacodec'
7 | compileSdk 32
8 |
9 | defaultConfig {
10 | applicationId "com.demo.mediacodec"
11 | minSdk 23
12 | targetSdk 32
13 | versionCode 1
14 | versionName "1.0"
15 | }
16 |
17 | buildTypes {
18 | release {
19 | minifyEnabled false
20 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
21 | }
22 | }
23 | }
24 |
25 | dependencies {
26 | implementation 'androidx.appcompat:appcompat:1.4.1'
27 | implementation 'com.google.android.material:material:1.5.0'
28 | }
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/joechan-cq/MediaCodecExample/3dd6485e17bc29ffc7c01c47f842ca61e4c12b78/app/src/main/AndroidManifest.xml
--------------------------------------------------------------------------------
/app/src/main/java/com/demo/mediacodec/AspectRatioFrameLayout.java:
--------------------------------------------------------------------------------
81 |      * This tolerance allows the view to occupy the whole of the screen when the requested aspect
82 | * ratio is very close, but not exactly equal to, the aspect ratio of the screen. This may
83 | * reduce
84 | * the number of view layers that need to be composited by the underlying system, which can help
85 | * to reduce power consumption.
86 | */
87 | private static final float MAX_ASPECT_RATIO_DEFORMATION_FRACTION = 0.01f;
88 |
89 | private final AspectRatioUpdateDispatcher aspectRatioUpdateDispatcher;
90 |
91 | private AspectRatioListener aspectRatioListener;
92 |
93 | private float videoAspectRatio;
94 | private @ResizeMode
95 | int resizeMode;
96 |
97 | public AspectRatioFrameLayout(Context context) {
98 | this(context, null);
99 | }
100 |
101 | public AspectRatioFrameLayout(Context context, AttributeSet attrs) {
102 | super(context, attrs);
103 | resizeMode = RESIZE_MODE_FIXED_WIDTH;
104 | videoAspectRatio = 0f;
105 | aspectRatioUpdateDispatcher = new AspectRatioUpdateDispatcher();
106 | }
107 |
108 | /**
109 | * Sets the aspect ratio that this view should satisfy.
110 | *
111 | * @param widthHeightRatio The width to height ratio.
112 | */
113 | public void setAspectRatio(float widthHeightRatio) {
114 | if (this.videoAspectRatio != widthHeightRatio) {
115 | this.videoAspectRatio = widthHeightRatio;
116 | requestLayout();
117 | }
118 | }
119 |
120 | public void setAspectRatioListener(AspectRatioListener listener) {
121 | this.aspectRatioListener = listener;
122 | }
123 |
124 | /**
125 | * Returns the {@link ResizeMode}.
126 | */
127 | public @ResizeMode
128 | int getResizeMode() {
129 | return resizeMode;
130 | }
131 |
132 | /**
133 | * Sets the {@link ResizeMode}
134 | *
135 | * @param resizeMode The {@link ResizeMode}.
136 | */
137 | public void setResizeMode(@ResizeMode int resizeMode) {
138 | if (this.resizeMode != resizeMode) {
139 | this.resizeMode = resizeMode;
140 | requestLayout();
141 | }
142 | }
143 |
144 | @Override
145 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
146 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
147 | if (videoAspectRatio <= 0) {
148 | // Aspect ratio not set.
149 | return;
150 | }
151 |
152 | int width = getMeasuredWidth();
153 | int height = getMeasuredHeight();
154 | float viewAspectRatio = (float) width / height;
155 | float aspectDeformation = videoAspectRatio / viewAspectRatio - 1;
156 | if (Math.abs(aspectDeformation) <= MAX_ASPECT_RATIO_DEFORMATION_FRACTION) {
157 | // We're within the allowed tolerance.
158 | aspectRatioUpdateDispatcher.scheduleUpdate(videoAspectRatio, viewAspectRatio, false);
159 | return;
160 | }
161 |
162 | switch (resizeMode) {
163 | case RESIZE_MODE_FIXED_WIDTH:
164 | height = (int) (width / videoAspectRatio + 0.5f);
165 | break;
166 | case RESIZE_MODE_FIXED_HEIGHT:
167 | width = (int) (height * videoAspectRatio + 0.5f);
168 | break;
169 | case RESIZE_MODE_ZOOM:
170 | if (aspectDeformation > 0) {
171 | width = (int) (height * videoAspectRatio + 0.5f);
172 | } else {
173 | height = (int) (width / videoAspectRatio + 0.5f);
174 | }
175 | break;
176 | case RESIZE_MODE_FIT:
177 | if (aspectDeformation > 0) {
178 | height = (int) (width / videoAspectRatio + 0.5f);
179 | } else {
180 | width = (int) (height * videoAspectRatio + 0.5f);
181 | }
182 | break;
183 | case RESIZE_MODE_FILL:
184 | default:
185 | // Ignore target aspect ratio
186 | break;
187 | }
188 | aspectRatioUpdateDispatcher.scheduleUpdate(videoAspectRatio, viewAspectRatio, true);
189 | super.onMeasure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
190 | MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
191 | }
192 |
193 | /**
194 | * Dispatches updates to {@link AspectRatioListener}.
195 | */
196 | private final class AspectRatioUpdateDispatcher implements Runnable {
197 |
198 | private float targetAspectRatio;
199 | private float naturalAspectRatio;
200 | private boolean aspectRatioMismatch;
201 | private boolean isScheduled;
202 |
203 | public void scheduleUpdate(
204 | float targetAspectRatio, float naturalAspectRatio, boolean aspectRatioMismatch) {
205 | this.targetAspectRatio = targetAspectRatio;
206 | this.naturalAspectRatio = naturalAspectRatio;
207 | this.aspectRatioMismatch = aspectRatioMismatch;
208 |
209 | if (!isScheduled) {
210 | isScheduled = true;
211 | post(this);
212 | }
213 | }
214 |
215 | @Override
216 | public void run() {
217 | isScheduled = false;
218 | if (aspectRatioListener == null) {
219 | return;
220 | }
221 | aspectRatioListener.onAspectRatioUpdated(
222 | targetAspectRatio, naturalAspectRatio, aspectRatioMismatch);
223 | }
224 | }
225 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/demo/mediacodec/BaseActivity.java:
--------------------------------------------------------------------------------
1 | package com.demo.mediacodec;
2 |
3 | import android.content.Intent;
4 | import android.net.Uri;
5 |
6 | import androidx.annotation.Nullable;
7 | import androidx.appcompat.app.AppCompatActivity;
8 |
9 | /**
10 | * @author : chenqiao
11 | * @date : 2022/12/27 4:01 PM
12 | */
13 | public class BaseActivity extends AppCompatActivity {
14 | public static final int REQUEST_VIDEO_PICKER = 0xaa;
15 |
16 | public void openPicker() {
17 | Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
18 | intent.setType("video/*");
19 | intent.addCategory(Intent.CATEGORY_OPENABLE);
20 | startActivityForResult(intent, REQUEST_VIDEO_PICKER);
21 | }
22 |
23 | @Override
24 | protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
25 | super.onActivityResult(requestCode, resultCode, data);
26 | if (REQUEST_VIDEO_PICKER == requestCode && resultCode == RESULT_OK && data != null) {
27 | Uri videoUri = data.getData();
28 | onVideoCallback(videoUri);
29 | }
30 | }
31 |
32 | protected void onVideoCallback(Uri videoUri) {
33 |
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
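
BaseActivity above uses the classic startActivityForResult/onActivityResult pair, which recent AndroidX releases deprecate. For comparison, a sketch of the same picker on the Activity Result API, assuming the androidx.activity library that appcompat 1.4.1 pulls in transitively; the class name VideoPickerActivity is hypothetical:

import android.net.Uri;
import androidx.activity.result.ActivityResultLauncher;
import androidx.activity.result.contract.ActivityResultContracts;
import androidx.appcompat.app.AppCompatActivity;

// Hypothetical rewrite of BaseActivity using the Activity Result API.
public class VideoPickerActivity extends AppCompatActivity {

    // GetContent wraps ACTION_GET_CONTENT, the same intent BaseActivity builds by hand.
    private final ActivityResultLauncher<String> mPicker =
            registerForActivityResult(new ActivityResultContracts.GetContent(), uri -> {
                if (uri != null) { // null means the user cancelled the picker
                    onVideoCallback(uri);
                }
            });

    public void openPicker() {
        mPicker.launch("video/*"); // MIME filter, equivalent to intent.setType("video/*")
    }

    protected void onVideoCallback(Uri videoUri) {
        // subclasses override
    }
}
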
/app/src/main/java/com/demo/mediacodec/DecodersInfoActivity.java:
--------------------------------------------------------------------------------
1 | package com.demo.mediacodec;
2 |
3 | import android.media.MediaCodecInfo;
4 | import android.media.MediaCodecList;
5 | import android.media.MediaFormat;
6 | import android.os.Bundle;
7 | import android.widget.TextView;
8 |
9 | import androidx.annotation.Nullable;
10 |
11 | /**
12 | * @author : chenqiao
13 | * @date : 2023/1/6 1:45 PM
14 | */
15 | public class DecodersInfoActivity extends BaseActivity {
16 |
17 | TextView decodersInfoTv;
18 |
19 | @Override
20 | protected void onCreate(@Nullable Bundle savedInstanceState) {
21 | super.onCreate(savedInstanceState);
22 | setContentView(R.layout.activity_decoders_info);
23 | decodersInfoTv = findViewById(R.id.tv_decoders_info);
24 | MediaCodecList mediaCodecList = new MediaCodecList(MediaCodecList.ALL_CODECS);
25 | MediaCodecInfo[] codecInfos = mediaCodecList.getCodecInfos();
26 | StringBuilder s = new StringBuilder();
27 | for (MediaCodecInfo codecInfo : codecInfos) {
28 | if (!codecInfo.isEncoder()) {
29 | s.append(codecInfo.getName()).append(":\n");
30 | String[] supportedTypes = codecInfo.getSupportedTypes();
31 | for (String type : supportedTypes) {
32 | try {
33 | MediaCodecInfo.CodecCapabilities cap =
34 | codecInfo.getCapabilitiesForType(type);
35 | s.append("\t").append(type).append(":");
36 | MediaFormat format = cap.getDefaultFormat();
37 | s.append(format).append("\n");
38 |
39 | if (cap.colorFormats != null && cap.colorFormats.length > 0) {
40 | s.append("colorFormat:[\n");
41 | for (int colorFormat : cap.colorFormats) {
42 | s.append(colorFormat).append(" ");
43 | }
44 | s.append("\n]\n");
45 | }
46 |
47 | MediaCodecInfo.VideoCapabilities videoCap =
48 | cap.getVideoCapabilities();
49 | if (videoCap != null) {
50 | s.append("\twidthRange:").append(videoCap.getSupportedWidths()).append(" heightRange:").append(videoCap.getSupportedHeights()).append("\n");
51 | } else {
52 | s.append("\n");
53 | }
54 | } catch (Exception ignore) {
55 | }
56 | }
57 | s.append("\n");
58 | }
59 | }
60 | decodersInfoTv.setText(s);
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
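
Beyond dumping default formats as DecodersInfoActivity does, the capabilities objects can answer concrete questions. A sketch with illustrative names that asks whether any decoder handles 4K HEVC at 30 fps; isSizeSupported guards the frame-rate query, which throws IllegalArgumentException for unsupported sizes:

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.util.Range;

// Sketch (not part of this repo): probe decoder capabilities directly.
public final class CapabilityProbe {
    /** Returns true if any decoder claims 3840x2160 HEVC at 30 fps or more. */
    public static boolean canDecode4kHevc30fps() {
        MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
        for (MediaCodecInfo info : list.getCodecInfos()) {
            if (info.isEncoder()) continue;
            for (String type : info.getSupportedTypes()) {
                if (!MediaFormat.MIMETYPE_VIDEO_HEVC.equalsIgnoreCase(type)) continue;
                MediaCodecInfo.VideoCapabilities v =
                        info.getCapabilitiesForType(type).getVideoCapabilities();
                if (v != null && v.isSizeSupported(3840, 2160)) {
                    Range<Double> fps = v.getSupportedFrameRatesFor(3840, 2160);
                    if (fps.getUpper() >= 30.0) return true;
                }
            }
        }
        return false;
    }
}
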
/app/src/main/java/com/demo/mediacodec/EncodersInfoActivity.java:
--------------------------------------------------------------------------------
1 | package com.demo.mediacodec;
2 |
3 | import android.media.MediaCodecInfo;
4 | import android.media.MediaCodecList;
5 | import android.media.MediaFormat;
6 | import android.os.Bundle;
7 | import android.util.Range;
8 | import android.widget.TextView;
9 |
10 | import androidx.annotation.Nullable;
11 |
12 | /**
13 | * @author : chenqiao
14 | * @date : 2023/1/6 1:45 PM
15 | */
16 | public class EncodersInfoActivity extends BaseActivity {
17 |
18 | TextView encodersInfoTv;
19 |
20 | @Override
21 | protected void onCreate(@Nullable Bundle savedInstanceState) {
22 | super.onCreate(savedInstanceState);
23 | setContentView(R.layout.activity_encoders_info);
24 | encodersInfoTv = findViewById(R.id.tv_encoders_info);
25 |
26 | MediaCodecList mediaCodecList = new MediaCodecList(MediaCodecList.ALL_CODECS);
27 | MediaCodecInfo[] codecInfos = mediaCodecList.getCodecInfos();
28 | StringBuilder s = new StringBuilder();
29 | for (MediaCodecInfo codecInfo : codecInfos) {
30 | if (codecInfo.isEncoder()) {
31 | s.append(codecInfo.getName()).append(":\n");
32 | String[] supportedTypes = codecInfo.getSupportedTypes();
33 | if (supportedTypes != null) {
34 | for (String type : supportedTypes) {
35 | MediaCodecInfo.CodecCapabilities caps = codecInfo.getCapabilitiesForType(type);
36 | if (caps == null) {
37 | continue;
38 | }
39 | MediaFormat defaultFormat = caps.getDefaultFormat();
40 | String mimeType = caps.getMimeType();
41 | s.append("defaultFormat: ").append(defaultFormat).append("\n");
42 | MediaCodecInfo.VideoCapabilities vcaps = caps.getVideoCapabilities();
43 | if (vcaps != null) {
44 |                             Range<Integer> widths = vcaps.getSupportedWidths();
45 |                             Range<Integer> heights = vcaps.getSupportedHeights();
46 |                             s.append("\t").append(mimeType).append(" widthRange:").append(widths).append(" heightRange:").append(heights).append("\n");
47 |                         }
48 |                     }
49 |                 }
50 |                 s.append("\n");
51 |             }
52 |         }
53 |         encodersInfoTv.setText(s);
54 |     }
55 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/demo/mediacodec/decode/DecodePlayActivity.java:
--------------------------------------------------------------------------------
30 |  * Flow:
31 |  * MediaExtractor picks the video track -> prepare a suitable decoder -> decode and render onto a Surface.
32 |  * Playback is only possible with a decoder that fully supports the given video.
33 |  * Playing an HDR video on a non-HDR device will fail to find a decoder.
34 | *
35 | * @author : chenqiao
36 | * @date : 2022/12/27 3:58 PM
37 | */
38 | public class DecodePlayActivity extends BaseActivity {
39 |
40 | private SurfaceView mSurfaceView;
41 | private AspectRatioFrameLayout mContainer;
42 | private TextView mDebugTv;
43 |
44 | @Override
45 | protected void onCreate(@Nullable Bundle savedInstanceState) {
46 | super.onCreate(savedInstanceState);
47 | setContentView(R.layout.activity_decode_play);
48 | findViewById(R.id.btn_select_video).setOnClickListener(new View.OnClickListener() {
49 | @Override
50 | public void onClick(View view) {
51 | openPicker();
52 | }
53 | });
54 | mSurfaceView = findViewById(R.id.surface);
55 | mContainer = findViewById(R.id.video_container);
56 | mDebugTv = findViewById(R.id.tv_debug_info);
57 | }
58 |
59 | @Override
60 | protected void onDestroy() {
61 | super.onDestroy();
62 | if (mMediaExtractor != null) {
63 | mMediaExtractor.release();
64 | }
65 | if (pf != null) {
66 | try {
67 | pf.close();
68 | } catch (IOException ignore) {
69 | }
70 | }
71 | if (mMediaCodec != null) {
72 | mMediaCodec.release();
73 | }
74 |
75 | }
76 |
77 | @Override
78 | protected void onVideoCallback(Uri videoUri) {
79 | decodeAndPlay(videoUri);
80 | }
81 |
82 | private void decodeAndPlay(Uri videoUri) {
83 | new Thread(new Runnable() {
84 | @Override
85 | public void run() {
86 | StringBuilder log = new StringBuilder();
87 | selectVideoTrack(videoUri, log);
88 | prepareDecoder(log);
89 | }
90 | }).start();
91 | }
92 |
93 |     /**
94 |      * Track selector
95 |      */
96 |     private MediaExtractor mMediaExtractor;
97 |
98 |     /**
99 |      * Video format
100 |      */
101 |     private MediaFormat mVideoFormat;
102 |
103 |     /**
104 |      * Decoder
105 |      */
106 |     private MediaCodec mMediaCodec;
107 |
108 |     private ParcelFileDescriptor pf;
109 |
110 |     /**
111 |      * Picks the video track
112 |      */
113 | private void selectVideoTrack(Uri videoUri, StringBuilder log) {
114 | if (mMediaExtractor != null) {
115 | mMediaExtractor.release();
116 | }
117 | mMediaExtractor = new MediaExtractor();
118 | if (pf != null) {
119 | try {
120 | pf.close();
121 | } catch (IOException ignore) {
122 | }
123 | }
124 | try {
125 | ContentResolver contentResolver = getContentResolver();
126 | pf = contentResolver.openFileDescriptor(videoUri, "r");
127 | mMediaExtractor.setDataSource(pf.getFileDescriptor());
128 | int trackCount = mMediaExtractor.getTrackCount();
129 | for (int i = 0; i < trackCount; i++) {
130 | MediaFormat format = mMediaExtractor.getTrackFormat(i);
131 | String mime = format.getString(MediaFormat.KEY_MIME);
132 | if (!TextUtils.isEmpty(mime) && mime.startsWith("video")) {
133 |                     //Found the video track
134 |                     mVideoFormat = format;
135 |                     //Select this track; subsequent sample reads will come from it
136 |                     mMediaExtractor.selectTrack(i);
137 |                     log.append("Found video track: ").append(mVideoFormat).append("\n");
138 | setDebugLog(log.toString());
139 | return;
140 | }
141 | }
142 | } catch (IOException e) {
143 | e.printStackTrace();
144 | }
145 |         log.append("No video track was found!").append("\n");
146 | setDebugLog(log.toString());
147 | }
148 |
149 | /**
150 |      * Prepares the decoder
151 | */
152 | private void prepareDecoder(StringBuilder log) {
153 | if (mMediaCodec != null) {
154 | mMediaCodec.release();
155 | mMediaCodec = null;
156 | }
157 | if (mVideoFormat == null) {
158 | return;
159 | }
160 | boolean maybeSwitchWH = false;
161 |
162 | String mime = mVideoFormat.getString(MediaFormat.KEY_MIME);
163 | int width = mVideoFormat.getInteger(MediaFormat.KEY_WIDTH);
164 | int height = mVideoFormat.getInteger(MediaFormat.KEY_HEIGHT);
165 | int rotation;
166 | if (mVideoFormat.containsKey(MediaFormat.KEY_ROTATION)) {
167 | rotation = mVideoFormat.getInteger(MediaFormat.KEY_ROTATION);
168 | } else {
169 | rotation = 0;
170 | if (width < height) {
171 | maybeSwitchWH = true;
172 | }
173 | }
174 | int maxCache;
175 | if (mVideoFormat.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)) {
176 |             //Use the size read from the format as the buffer capacity, to avoid overflow
177 | maxCache = mVideoFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
178 | } else {
179 | maxCache = 500 * 1024;
180 | }
181 |
182 |         //Adjust the Surface size
183 | runOnUiThread(new Runnable() {
184 | @Override
185 | public void run() {
186 | mContainer.setResizeMode(AspectRatioFrameLayout.RESIZE_MODE_FIT);
187 | if (rotation == 0 || rotation == 180) {
188 | mContainer.setAspectRatio(width * 1f / height);
189 | } else {
190 | mContainer.setAspectRatio(height * 1f / width);
191 | }
192 | }
193 | });
194 |
195 | String codecName = MediaCodecUtils.findDecoderByFormat(mVideoFormat);
196 | if (TextUtils.isEmpty(codecName)) {
197 |             log.append("prepareDecoder: no decoder found for the full format!\n");
198 |             log.append("prepareDecoder: trying to fall back!\n");
199 |             if (MediaFormat.MIMETYPE_VIDEO_DOLBY_VISION.equals(mime)) {
200 |                 //For Dolby Vision, try decoding with an HEVC decoder instead
201 |                 mVideoFormat.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_VIDEO_HEVC);
202 |                 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
203 |                     //Dolby Vision defines its own profile/level constants, so when falling back to HEVC the profile and level must be removed as well, otherwise no decoder will be found
204 |                     mVideoFormat.removeKey(MediaFormat.KEY_PROFILE);
205 |                     mVideoFormat.removeKey(MediaFormat.KEY_LEVEL);
206 |                 }
207 |                 codecName = MediaCodecUtils.findDecoderByFormat(mVideoFormat);
208 |             } else if (MediaFormat.MIMETYPE_VIDEO_HEVC.equals(mime)) {
209 |                 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
210 |                     log.append("prepareDecoder: removing profile and level\n");
211 |                     //For HEVC, try removing the profile and level
212 |                     mVideoFormat.removeKey(MediaFormat.KEY_PROFILE);
213 |                     mVideoFormat.removeKey(MediaFormat.KEY_LEVEL);
214 |                 }
215 |                 codecName = MediaCodecUtils.findDecoderByFormat(mVideoFormat);
216 |                 if (TextUtils.isEmpty(codecName)) {
217 |                     log.append("prepareDecoder: still no decoder after removing profile/level!:").append(mVideoFormat).append("\n");
218 |                     if (maybeSwitchWH) {
219 |                         //Some Oppo devices record portrait video without writing rotation into the metadata, swapping width and height instead (a normal portrait video is 1920x1080 + 90°, these special ones are 1080x1920 + 0°),
220 |                         //so the decoder's width/height limits prevent us from finding a decoder here.
221 |                         log.append("prepareDecoder: trying to swap width and height\n");
222 |                         MediaFormat simpleFormat = MediaFormat.createVideoFormat(mime, height, width);
223 |                         codecName = MediaCodecUtils.findDecoderByFormat(simpleFormat);
224 |                         if (TextUtils.isEmpty(codecName)) {
225 |                             log.append("prepareDecoder: still no decoder after swapping width/height!").append(simpleFormat).append("\n");
226 | }
227 | }
228 | }
229 | }
230 | }
231 | if (TextUtils.isEmpty(codecName)) {
232 |             log.append("No decoder could be found!").append("\n");
233 | setDebugLog(log.toString());
234 | return;
235 | }
236 |
237 |         log.append("Found decoder: ").append(codecName).append("\n");
238 | setDebugLog(log.toString());
239 |
240 | try {
241 |             //Decode in synchronous mode
242 | MediaCodec decoder = MediaCodec.createByCodecName(codecName);
243 | mMediaCodec = decoder;
244 | decoder.configure(mVideoFormat, mSurfaceView.getHolder().getSurface(), null, 0);
245 | decoder.start();
246 |
247 | ByteBuffer byteBuffer = ByteBuffer.allocate(maxCache);
248 | int sampleSize;
249 | MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
250 |
251 | long startTime = System.nanoTime(); //ns
252 |
253 |             //Keep reading sample data from the track
254 |             while ((sampleSize = mMediaExtractor.readSampleData(byteBuffer, 0)) > 0) {
255 |                 long sampleTime = mMediaExtractor.getSampleTime(); //us
256 |
257 |                 //Dequeue an input buffer from the decoder
258 |                 int index = decoder.dequeueInputBuffer(10 * 1000L);
259 |                 if (index > -1) {
260 |                     ByteBuffer inputBuffer = decoder.getInputBuffer(index);
261 |                     //Copy the sample data read from the track into the input buffer
262 |                     inputBuffer.clear();
263 |                     inputBuffer.put(byteBuffer);
264 |                     //Hand the filled input buffer back to the decoder
265 | decoder.queueInputBuffer(index, 0, sampleSize, sampleTime, 0);
266 | }
267 |
268 |                 //Drain decoded output from the decoder
269 |                 int outIndex = decoder.dequeueOutputBuffer(bufferInfo, 10 * 1000L);
270 |                 if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
271 |                     //do nothing
272 |                 } else if (outIndex > -1) {
273 |                     //Check whether it is time to render this frame; if not, sleep until its presentation time
274 |                     if (System.nanoTime() - startTime < bufferInfo.presentationTimeUs * 1000L) {
275 |                         SystemClock.sleep((bufferInfo.presentationTimeUs - (System.nanoTime() - startTime) / 1000) / 1000);
276 |                     }
277 |                     if (isFinishing() || isDestroyed()) {
278 |                         break;
279 |                     }
280 |                     //Releasing the buffer with render=true pushes the decoded frame to the Surface
281 | try {
282 | decoder.releaseOutputBuffer(outIndex, true);
283 | } catch (Exception e) {
284 | e.printStackTrace();
285 | }
286 | }
287 |
288 |                 //Advance to the next sample in the track
289 | boolean hasNext = mMediaExtractor.advance();
290 | if (hasNext) {
291 | byteBuffer.clear();
292 | } else {
293 | break;
294 | }
295 | }
296 |
297 | if (mMediaCodec != null) {
298 | mMediaCodec.release();
299 | }
300 | if (mMediaExtractor != null) {
301 | mMediaExtractor.release();
302 | }
303 |             log.append("Decoding finished, resources released!").append("\n");
304 |         } catch (Exception e) {
305 |             e.printStackTrace();
306 |             log.append("Error during decoding: ").append(e.getMessage());
307 | } finally {
308 | setDebugLog(log.toString());
309 | }
310 | }
311 |
312 | private void setDebugLog(String debugInfo) {
313 | runOnUiThread(() -> {
314 | mDebugTv.setText(debugInfo);
315 | });
316 | }
317 | }
318 |
--------------------------------------------------------------------------------
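
MediaCodecUtils.findDecoderByFormat, which DecodePlayActivity leans on for every fallback step, is not included in this excerpt. A plausible minimal version built on the platform lookup is sketched below; the class name and the exact behavior are assumptions, and the KEY_FRAME_RATE clearing follows the MediaCodecList documentation for API 21:

import android.media.MediaCodecList;
import android.media.MediaFormat;

// Sketch only: a guess at the core of the missing helper, not the repo's code.
public final class DecoderLookupSketch {
    /**
     * Asks the platform for a decoder matching the full MediaFormat.
     * Returns null when nothing matches, which is what triggers the
     * fallback chain in DecodePlayActivity.
     */
    public static String findDecoderByFormat(MediaFormat format) {
        // Per the MediaCodecList docs, the frame rate must not be set when
        // querying on API 21; clearing it avoids spurious lookup failures.
        format.setString(MediaFormat.KEY_FRAME_RATE, null);
        MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        return list.findDecoderForFormat(format);
    }
}
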
/app/src/main/java/com/demo/mediacodec/transcode/GLUtils.java:
--------------------------------------------------------------------------------
1 | package com.demo.mediacodec.transcode;
2 |
3 | /**
4 | * @author : chenqiao
5 | * @date : 2023/6/1 13:32
6 | */
7 | public class GLUtils {
8 | //region YUV EXT
9 | //https://registry.khronos.org/EGL/extensions/EXT/EGL_EXT_yuv_surface.txt
10 | public static final String EGL_YUV_EXT_NAME = "EGL_EXT_yuv_surface";
11 |
12 | public static final int EGL_YUV_BUFFER_EXT = 0x3300;
13 | public static final int EGL_YUV_ORDER_EXT = 0x3301;
14 | public static final int EGL_YUV_ORDER_YUV_EXT = 0x3302;
15 | public static final int EGL_YUV_NUMBER_OF_PLANES_EXT = 0x3311;
16 | public static final int EGL_YUV_SUBSAMPLE_EXT = 0x3312;
17 | public static final int EGL_YUV_DEPTH_RANGE_EXT = 0x3317;
18 | public static final int EGL_YUV_CSC_STANDARD_EXT = 0x330A;
19 | public static final int EGL_YUV_PLANE_BPP_EXT = 0x331A;
20 | public static final int EGL_YUV_SUBSAMPLE_4_2_0_EXT = 0x3313;
21 | public static final int EGL_YUV_DEPTH_RANGE_LIMITED_EXT = 0x3318;
22 | public static final int EGL_YUV_DEPTH_RANGE_FULL_EXT = 0x3319;
23 | public static final int EGL_YUV_CSC_STANDARD_601_EXT = 0x330B;
24 | public static final int EGL_YUV_CSC_STANDARD_709_EXT = 0x330C;
25 | public static final int EGL_YUV_CSC_STANDARD_2020_EXT = 0x330D;
26 | public static final int EGL_YUV_PLANE_BPP_0_EXT = 0x331B;
27 | public static final int EGL_YUV_PLANE_BPP_8_EXT = 0x331C;
28 | public static final int EGL_YUV_PLANE_BPP_10_EXT = 0x331D;
29 | //endregion
30 |
31 | //region rgba glsl
32 | public static final String RGBA_VERTEX_SHADER = "" +
33 | "uniform mat4 uMVPMatrix; \n" +
34 | "uniform mat4 uSTMatrix; \n" +
35 | "attribute vec4 aPosition; \n" +
36 | "attribute vec4 aTextureCoord; \n" +
37 | "varying vec2 vTextureCoord; \n" +
38 | "void main() { \n" +
39 | " gl_Position = uMVPMatrix * aPosition; \n" +
40 | " vTextureCoord = (uSTMatrix * aTextureCoord).xy; \n" +
41 | "} \n";
42 |
43 | public static final String RGBA_FRAGMENT_SHADER = "" +
44 | "#extension GL_OES_EGL_image_external : require \n" +
45 | "precision mediump float; \n" +
46 | "varying vec2 vTextureCoord; \n" +
47 | "uniform samplerExternalOES sTexture; \n" +
48 | "void main() { \n" +
49 | " gl_FragColor = texture2D(sTexture, vTextureCoord); \n" +
50 | "} \n";
51 | //endregion
52 |
53 | //region yuvp10 glsl
54 |
55 | public static final String YUV_VERTEX_SHADER = "" +
56 | "#version 300 es \n" +
57 | "precision highp float; \n" +
58 | "uniform mat4 uMVPMatrix; \n" +
59 | "uniform mat4 uSTMatrix; \n" +
60 | "layout(location = 0) in vec4 aPosition; \n" +
61 | "layout(location = 1) in vec4 aTextureCoord; \n" +
62 | " \n" +
63 | "out vec2 vTextureCoord; \n" +
64 | " \n" +
65 | "void main() \n" +
66 | "{ \n" +
67 | " gl_Position = uMVPMatrix * aPosition; \n" +
68 | " vTextureCoord = (uSTMatrix * aTextureCoord).xy; \n" +
69 | "} \n";
70 |
71 | // public static final String YUV_FRAGMENT_SHADER = "" +
72 | // "#version 300 es \n" +
73 | // "#extension GL_EXT_YUV_target : require \n" +
74 | // "#extension GL_OES_EGL_image_external : require \n" +
75 | // "#extension GL_OES_EGL_image_external_essl3 : require \n" +
76 | // "precision highp float; \n" +
77 | // " \n" +
78 | // "uniform samplerExternalOES sTexture; \n" +
79 | // " \n" +
80 | // "in vec2 vTextureCoord; \n" +
81 | // "layout (yuv) out vec4 color; \n" +
82 | // " \n" +
83 | // "void main() \n" +
84 | // "{ \n" +
85 | // " vec3 rgbColor = texture(sTexture, vTextureCoord).rgb; \n" +
86 | // " color = vec4(rgb_2_yuv(rgbColor, itu_601_full_range), 1.0); \n" +
87 | // "} \n";
88 |
89 | public static final String YUV_FRAGMENT_SHADER = "" +
90 | "#version 300 es \n" +
91 | "#extension GL_EXT_YUV_target : require \n" +
92 | "#extension GL_OES_EGL_image_external : require \n" +
93 | "#extension GL_OES_EGL_image_external_essl3 : require \n" +
94 | "precision highp float; \n" +
95 | " \n" +
96 | "uniform __samplerExternal2DY2YEXT sTexture; \n" +
97 | " \n" +
98 | "in vec2 vTextureCoord; \n" +
99 | "layout (yuv) out vec4 color; \n" +
100 | " \n" +
101 | "void main() \n" +
102 | "{ \n" +
103 | " color = texture(sTexture, vTextureCoord); \n" +
104 | "} \n";
105 |
106 | //endregion
107 |
108 | }
109 |
--------------------------------------------------------------------------------
/app/src/main/java/com/demo/mediacodec/transcode/InputSurface.java:
--------------------------------------------------------------------------------
1 | package com.demo.mediacodec.transcode;
2 |
3 |
4 | import android.media.MediaCodec;
5 | import android.opengl.EGL14;
6 | import android.opengl.EGLConfig;
7 | import android.opengl.EGLContext;
8 | import android.opengl.EGLDisplay;
9 | import android.opengl.EGLExt;
10 | import android.opengl.EGLSurface;
11 | import android.text.TextUtils;
12 | import android.util.Log;
13 | import android.view.Surface;
14 |
15 | import com.demo.mediacodec.MediaCodecUtils;
16 |
17 | //Code adapted from http://androidxref.com/9.0.0_r3/xref/cts/tests/tests/media/src/android/media/cts
18 |
19 | /**
20 | * Holds state associated with a Surface used for MediaCodec encoder input.
21 | *
22 | * The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that
23 | * to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent
24 | * to the video encoder.
25 | */
26 | public class InputSurface {
27 | private static final String TAG = "InputSurface";
28 |
29 | private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
30 | private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
31 | private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
32 | private EGLConfig[] mConfigs = new EGLConfig[1];
33 |
34 | private Surface mSurface;
35 | private int mWidth;
36 | private int mHeight;
37 |
38 | /**
39 | * Creates an InputSurface from a Surface.
40 | */
41 | public InputSurface(Surface surface, VideoOutputConfig config) {
42 | if (surface == null) {
43 | throw new NullPointerException();
44 | }
45 | mSurface = surface;
46 |
47 | eglSetup(config);
48 | }
49 |
50 | /**
51 | * Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
52 | */
53 | private void eglSetup(VideoOutputConfig config) {
54 | mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
55 | if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
56 | throw new RuntimeException("unable to get EGL14 display");
57 | }
58 | int[] version = new int[2];
59 | if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
60 | mEGLDisplay = null;
61 | throw new RuntimeException("unable to initialize EGL14");
62 | }
63 |
64 |         if (!config.isHDR || config.force8Bit) {
65 |             createSdrEGLContextAndWindow();
66 |             config.eglColorSpace = MediaCodecUtils.EGLColorSpace.RGB888;
67 |             Log.i("InputSurface", "Using RGBA8888");
68 |         } else {
69 |             try {
70 |                 if (config.isDolby) {
71 |                     //Dolby Vision
72 |                     Log.i("InputSurface", "Using RGBA1010102");
73 |                     createRGBA1010102EGLContextAndWindow();
74 |                     config.eglColorSpace = MediaCodecUtils.EGLColorSpace.RGBA1010102;
75 |                 } else if (config.isHDRVivid) {
76 |                     //HDR Vivid
77 |                     Log.i("InputSurface", "Using YUVP10");
78 |                     createYUVP10EGLContextAndWindow();
79 |                     config.eglColorSpace = MediaCodecUtils.EGLColorSpace.YUVP10;
80 |                 } else {
81 |                     //Neither Dolby Vision nor HDR Vivid.
82 |                     Log.i("InputSurface", "Not Dolby and not Vivid, using RGBA1010102");
83 |                     createRGBA1010102EGLContextAndWindow();
84 |                     config.eglColorSpace = MediaCodecUtils.EGLColorSpace.RGBA1010102;
85 |                 }
86 |             } catch (Exception e) {
87 |                 e.printStackTrace();
88 |                 Log.i("InputSurface", "eglSetup: 10-bit EGL init failed, falling back to RGBA8888");
89 |                 createSdrEGLContextAndWindow();
90 |                 config.eglColorSpace = MediaCodecUtils.EGLColorSpace.RGB888;
91 |             }
92 | }
93 |
94 |
95 | mWidth = getWidth();
96 | mHeight = getHeight();
97 | }
98 |
99 | public void updateSize(int width, int height) {
100 | if (width != mWidth || height != mHeight) {
101 | Log.d(TAG, "re-create EGLSurface");
102 | releaseEGLSurface();
103 | createEGLSurface();
104 | mWidth = getWidth();
105 | mHeight = getHeight();
106 | }
107 | }
108 |
109 | private void createEGLSurface() {
110 | //EGLConfig[] configs = new EGLConfig[1];
111 | int[] surfaceAttribs = {
112 | EGL14.EGL_NONE
113 | };
114 | mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mConfigs[0], mSurface,
115 | surfaceAttribs, 0);
116 | checkEglError("eglCreateWindowSurface");
117 | if (mEGLSurface == null) {
118 | throw new RuntimeException("surface was null");
119 | }
120 | }
121 |
122 | private void releaseEGLSurface() {
123 | if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
124 | EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
125 | mEGLSurface = EGL14.EGL_NO_SURFACE;
126 | }
127 | }
128 |
129 | /**
130 | * Discard all resources held by this class, notably the EGL context. Also releases the
131 | * Surface that was passed to our constructor.
132 | */
133 | public void release() {
134 | if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
135 | EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
136 | EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
137 | EGL14.eglReleaseThread();
138 | EGL14.eglTerminate(mEGLDisplay);
139 | }
140 |
141 | mSurface.release();
142 |
143 | mEGLDisplay = EGL14.EGL_NO_DISPLAY;
144 | mEGLContext = EGL14.EGL_NO_CONTEXT;
145 | mEGLSurface = EGL14.EGL_NO_SURFACE;
146 |
147 | mSurface = null;
148 | }
149 |
150 | /**
151 | * Makes our EGL context and surface current.
152 | */
153 | public void makeCurrent() {
154 | if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
155 | throw new RuntimeException("eglMakeCurrent failed");
156 | }
157 | }
158 |
159 | public void makeUnCurrent() {
160 | if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
161 | EGL14.EGL_NO_CONTEXT)) {
162 | throw new RuntimeException("eglMakeCurrent failed");
163 | }
164 | }
165 |
166 | /**
167 | * Calls eglSwapBuffers. Use this to "publish" the current frame.
168 | */
169 | public boolean swapBuffers() {
170 | return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
171 | }
172 |
173 | /**
174 | * Returns the Surface that the MediaCodec receives buffers from.
175 | */
176 | public Surface getSurface() {
177 | return mSurface;
178 | }
179 |
180 | /**
181 | * Queries the surface's width.
182 | */
183 | public int getWidth() {
184 | int[] value = new int[1];
185 | EGL14.eglQuerySurface(mEGLDisplay, mEGLSurface, EGL14.EGL_WIDTH, value, 0);
186 | return value[0];
187 | }
188 |
189 | /**
190 | * Queries the surface's height.
191 | */
192 | public int getHeight() {
193 | int[] value = new int[1];
194 | EGL14.eglQuerySurface(mEGLDisplay, mEGLSurface, EGL14.EGL_HEIGHT, value, 0);
195 | return value[0];
196 | }
197 |
198 | /**
199 | * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
200 | */
201 | public void setPresentationTime(long nsecs) {
202 | EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
203 | }
204 |
205 | /**
206 | * Checks for EGL errors.
207 | */
208 | private void checkEglError(String msg) {
209 | int error;
210 | if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
211 | throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
212 | }
213 | }
214 |
215 | public void configure(MediaCodec codec) {
216 | codec.setInputSurface(mSurface);
217 | }
218 |
219 | private void createSdrEGLContextAndWindow() {
220 | int[] attribList = {
221 | EGL14.EGL_RED_SIZE, 8,
222 | EGL14.EGL_GREEN_SIZE, 8,
223 | EGL14.EGL_BLUE_SIZE, 8,
224 | EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
225 | EGLExt.EGL_RECORDABLE_ANDROID, 1,
226 | EGL14.EGL_NONE
227 | };
228 | int[] numConfigs = new int[1];
229 | if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, mConfigs, 0, mConfigs.length,
230 | numConfigs, 0)) {
231 | throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
232 | }
233 |
234 | // Configure context for OpenGL ES 2.0.
235 | int[] attrib_list = {
236 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
237 | EGL14.EGL_NONE
238 | };
239 | mEGLContext = EGL14.eglCreateContext(mEGLDisplay, mConfigs[0], EGL14.EGL_NO_CONTEXT,
240 | attrib_list, 0);
241 | checkEglError("eglCreateContext");
242 | if (mEGLContext == null) {
243 | throw new RuntimeException("null context");
244 | }
245 |
246 | // Create a window surface, and attach it to the Surface we received.
247 | createEGLSurface();
248 | }
249 |
250 | private void createRGBA1010102EGLContextAndWindow() {
251 |         //TODO: need to check whether this config is actually supported
252 | int[] attribList = {
253 | EGL14.EGL_RED_SIZE, 10,
254 | EGL14.EGL_GREEN_SIZE, 10,
255 | EGL14.EGL_BLUE_SIZE, 10,
256 | EGL14.EGL_ALPHA_SIZE, 2,
257 | EGL14.EGL_RENDERABLE_TYPE, EGLExt.EGL_OPENGL_ES3_BIT_KHR,
258 | EGL14.EGL_NONE
259 | };
260 | int[] numConfigs = new int[1];
261 | if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, mConfigs, 0, mConfigs.length,
262 | numConfigs, 0)) {
263 | throw new RuntimeException("unable to find RGB1010102 recordable ES2 EGL config");
264 | }
265 |
266 | // Configure context for OpenGL ES 2.0.
267 | int[] attrib_list = {
268 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
269 | EGL14.EGL_NONE
270 | };
271 | mEGLContext = EGL14.eglCreateContext(mEGLDisplay, mConfigs[0], EGL14.EGL_NO_CONTEXT,
272 | attrib_list, 0);
273 | checkEglError("eglCreateContext");
274 | if (mEGLContext == null) {
275 | throw new RuntimeException("null context");
276 | }
277 |
278 | // Create a window surface, and attach it to the Surface we received.
279 | createEGLSurface();
280 | }
281 |
282 | private void createYUVP10EGLContextAndWindow() {
283 | String extensions = EGL14.eglQueryString(mEGLDisplay, EGL14.EGL_EXTENSIONS);
284 | if (TextUtils.isEmpty(extensions) || !extensions.contains(GLUtils.EGL_YUV_EXT_NAME)) {
285 | throw new RuntimeException("EGL not support YUV EXT");
286 | }
287 | int[] attribList = {
288 | EGL14.EGL_SURFACE_TYPE, EGL14.EGL_WINDOW_BIT,
289 | EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
290 | EGL14.EGL_COLOR_BUFFER_TYPE, GLUtils.EGL_YUV_BUFFER_EXT,
291 | GLUtils.EGL_YUV_ORDER_EXT, GLUtils.EGL_YUV_ORDER_YUV_EXT,
292 | GLUtils.EGL_YUV_NUMBER_OF_PLANES_EXT, 2,
293 | GLUtils.EGL_YUV_SUBSAMPLE_EXT, GLUtils.EGL_YUV_SUBSAMPLE_4_2_0_EXT,
294 | GLUtils.EGL_YUV_DEPTH_RANGE_EXT, GLUtils.EGL_YUV_DEPTH_RANGE_LIMITED_EXT,
295 | GLUtils.EGL_YUV_CSC_STANDARD_EXT, GLUtils.EGL_YUV_CSC_STANDARD_2020_EXT,
296 | GLUtils.EGL_YUV_PLANE_BPP_EXT, GLUtils.EGL_YUV_PLANE_BPP_10_EXT,
297 | EGL14.EGL_NONE
298 | };
299 | int[] numConfigs = new int[1];
300 | if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, mConfigs, 0, mConfigs.length,
301 | numConfigs, 0)) {
302 | throw new RuntimeException("unable to find YUVP10 ES2 EGL config");
303 | }
304 | int[] v = new int[1];
305 | EGL14.eglGetConfigAttrib(mEGLDisplay, mConfigs[0], EGL14.EGL_NATIVE_VISUAL_ID, v, 0);
306 |
307 | int[] attrib_list = {
308 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 3,
309 | EGL14.EGL_NONE
310 | };
311 | mEGLContext = EGL14.eglCreateContext(mEGLDisplay, mConfigs[0], EGL14.EGL_NO_CONTEXT,
312 | attrib_list, 0);
313 | checkEglError("eglCreateContext");
314 | if (mEGLContext == null) {
315 | throw new RuntimeException("null context");
316 | }
317 |
318 | // Create a window surface, and attach it to the Surface we received.
319 | createEGLSurface();
320 | }
321 |
322 | }
--------------------------------------------------------------------------------
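
Grounded in the class comment above (the Surface comes from MediaCodec.createInputSurface(), and each eglSwapBuffers() sends one frame to the encoder), here is a schematic wiring sketch. The format values (AVC, 1280x720, 4 Mbps, 30 fps) are illustrative only, and the VideoOutputConfig instance is a placeholder; in this repo the real one is produced by the transcode pipeline:

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import java.io.IOException;

// Schematic use of InputSurface with a Surface-input encoder (error handling omitted).
public final class EncoderWiringSketch {
    public static InputSurface start(VideoOutputConfig config) throws IOException {
        MediaFormat fmt = MediaFormat.createVideoFormat(
                MediaFormat.MIMETYPE_VIDEO_AVC, 1280, 720);
        fmt.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        fmt.setInteger(MediaFormat.KEY_BIT_RATE, 4_000_000);
        fmt.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        fmt.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

        MediaCodec encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        encoder.configure(fmt, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

        // createInputSurface() is only valid between configure() and start().
        InputSurface input = new InputSurface(encoder.createInputSurface(), config);
        input.makeCurrent();
        encoder.start();
        return input;

        // Per frame afterwards: render with GL, then
        //   input.setPresentationTime(ptsUs * 1000L);  // EGL takes nanoseconds
        //   input.swapBuffers();                       // publishes the frame to the encoder
    }
}
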
/app/src/main/java/com/demo/mediacodec/transcode/NoSupportMediaCodecException.java:
--------------------------------------------------------------------------------
1 | package com.demo.mediacodec.transcode;
2 |
3 | import com.demo.mediacodec.MediaCodecUtils;
4 |
5 | /**
6 | * @author : chenqiao
7 | * @date : 2023/6/2 10:11
8 | */
9 | public class NoSupportMediaCodecException extends Exception {
10 |
11 | private final MediaCodecUtils.OutputLevel outputLevel;
12 |
13 | public NoSupportMediaCodecException(String msg, MediaCodecUtils.OutputLevel outputLevel) {
14 | super(msg);
15 | this.outputLevel = outputLevel;
16 | }
17 |
18 | public NoSupportMediaCodecException(String message, Throwable cause,
19 | MediaCodecUtils.OutputLevel outputLevel) {
20 | super(message, cause);
21 | this.outputLevel = outputLevel;
22 | }
23 |
24 | public NoSupportMediaCodecException(Throwable cause, MediaCodecUtils.OutputLevel outputLevel) {
25 | super(cause);
26 | this.outputLevel = outputLevel;
27 | }
28 |
29 | public MediaCodecUtils.OutputLevel getOutputLevel() {
30 | return outputLevel;
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/app/src/main/java/com/demo/mediacodec/transcode/OutputSurface.java:
--------------------------------------------------------------------------------
1 | package com.demo.mediacodec.transcode;
2 |
3 |
4 | import android.graphics.SurfaceTexture;
5 | import android.opengl.EGL14;
6 | import android.opengl.EGLContext;
7 | import android.opengl.EGLDisplay;
8 | import android.opengl.EGLSurface;
9 | import android.util.Log;
10 | import android.view.Surface;
11 |
12 | //Code adapted from http://androidxref.com/9.0.0_r3/xref/cts/tests/tests/media/src/android/media/cts
13 | /**
14 | * Holds state associated with a Surface used for MediaCodec decoder output.
15 | *
16 | * The (width,height) constructor for this class will prepare GL, create a SurfaceTexture,
17 | * and then create a Surface for that SurfaceTexture. The Surface can be passed to
18 | * MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the
19 | * texture with updateTexImage, then render the texture with GL to a pbuffer.
20 | *
21 | * The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer.
22 | * Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives
23 | * we just draw it on whatever surface is current.
24 | *
25 | * By default, the Surface will be using a BufferQueue in asynchronous mode, so we
26 | * can potentially drop frames.
27 | */
28 | public class OutputSurface implements SurfaceTexture.OnFrameAvailableListener {
29 | private static final String TAG = "OutputSurface";
30 | private static final boolean VERBOSE = false;
31 |
32 | private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
33 | private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
34 | private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
35 |
36 | private SurfaceTexture mSurfaceTexture;
37 | private Surface mSurface;
38 |
39 | private Object mFrameSyncObject = new Object(); // guards mFrameAvailable
40 | private boolean mFrameAvailable;
41 |
42 | private TextureRender mTextureRender;
43 |
44 | /**
45 | * Creates an OutputSurface using the current EGL context (rather than establishing a
46 | * new one). Creates a Surface that can be passed to MediaCodec.configure().
47 | * @param outputConfig
48 | */
49 | public OutputSurface(VideoOutputConfig outputConfig) {
50 | setup(this, outputConfig);
51 | }
52 |
53 | /**
54 | * Creates instances of TextureRender and SurfaceTexture, and a Surface associated
55 | * with the SurfaceTexture.
56 | */
57 | private void setup(SurfaceTexture.OnFrameAvailableListener listener,
58 | VideoOutputConfig outputConfig) {
59 | mTextureRender = new TextureRender(outputConfig);
60 | mTextureRender.surfaceCreated();
61 |
62 | // Even if we don't access the SurfaceTexture after the constructor returns, we
63 | // still need to keep a reference to it. The Surface doesn't retain a reference
64 | // at the Java level, so if we don't either then the object can get GCed, which
65 | // causes the native finalizer to run.
66 | if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
67 | mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
68 |
69 | // This doesn't work if OutputSurface is created on the thread that CTS started for
70 | // these test cases.
71 | //
72 | // The CTS-created thread has a Looper, and the SurfaceTexture constructor will
73 | // create a Handler that uses it. The "frame available" message is delivered
74 | // there, but since we're not a Looper-based thread we'll never see it. For
75 | // this to do anything useful, OutputSurface must be created on a thread without
76 | // a Looper, so that SurfaceTexture uses the main application Looper instead.
77 | //
78 | // Java language note: passing "this" out of a constructor is generally unwise,
79 | // but we should be able to get away with it here.
80 | mSurfaceTexture.setOnFrameAvailableListener(listener);
81 |
82 | mSurface = new Surface(mSurfaceTexture);
83 | }
84 |
85 | /**
86 | * Discard all resources held by this class, notably the EGL context.
87 | */
88 | public void release() {
89 | if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
90 | EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
91 | EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
92 | EGL14.eglReleaseThread();
93 | EGL14.eglTerminate(mEGLDisplay);
94 | }
95 |
96 | mSurface.release();
97 |
98 | // this causes a bunch of warnings that appear harmless but might confuse someone:
99 | // W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
100 | //mSurfaceTexture.release();
101 |
102 | mEGLDisplay = EGL14.EGL_NO_DISPLAY;
103 | mEGLContext = EGL14.EGL_NO_CONTEXT;
104 | mEGLSurface = EGL14.EGL_NO_SURFACE;
105 |
106 | mTextureRender = null;
107 | mSurface = null;
108 | mSurfaceTexture = null;
109 | }
110 |
111 | /**
112 | * Makes our EGL context and surface current.
113 | */
114 | public void makeCurrent() {
115 | if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
116 | throw new RuntimeException("eglMakeCurrent failed");
117 | }
118 | }
119 |
120 | /**
121 | * Returns the Surface that we draw onto.
122 | */
123 | public Surface getSurface() {
124 | return mSurface;
125 | }
126 |
127 | /**
128 | * Latches the next buffer into the texture. Must be called from the thread that created
129 | * the OutputSurface object, after the onFrameAvailable callback has signaled that new
130 | * data is available.
131 | */
132 | public void awaitNewImage() {
133 | final int TIMEOUT_MS = 500;
134 |
135 | synchronized (mFrameSyncObject) {
136 | while (!mFrameAvailable) {
137 | try {
138 | // Wait for onFrameAvailable() to signal us. Use a timeout to avoid
139 | // stalling the test if it doesn't arrive.
140 | mFrameSyncObject.wait(TIMEOUT_MS);
141 | if (!mFrameAvailable) {
142 | // TODO: if "spurious wakeup", continue while loop
143 | throw new RuntimeException("Surface frame wait timed out");
144 | }
145 | } catch (InterruptedException ie) {
146 | // shouldn't happen
147 | throw new RuntimeException(ie);
148 | }
149 | }
150 | mFrameAvailable = false;
151 | }
152 |
153 | // Latch the data.
154 | mTextureRender.checkGlError("before updateTexImage");
155 | mSurfaceTexture.updateTexImage();
156 | }
157 |
158 | /**
159 | * Wait up to given timeout until new image become available.
160 | * @param timeoutMs
161 | * @return true if new image is available. false for no new image until timeout.
162 | */
163 | public boolean checkForNewImage(int timeoutMs) {
164 | synchronized (mFrameSyncObject) {
165 | while (!mFrameAvailable) {
166 | try {
167 | // Wait for onFrameAvailable() to signal us. Use a timeout to avoid
168 | // stalling the test if it doesn't arrive.
169 | mFrameSyncObject.wait(timeoutMs);
170 | if (!mFrameAvailable) {
171 | return false;
172 | }
173 | } catch (InterruptedException ie) {
174 | // shouldn't happen
175 | throw new RuntimeException(ie);
176 | }
177 | }
178 | mFrameAvailable = false;
179 | }
180 |
181 | // Latch the data.
182 | mTextureRender.checkGlError("before updateTexImage");
183 | mSurfaceTexture.updateTexImage();
184 | return true;
185 | }
186 |
187 | /**
188 | * Draws the data from SurfaceTexture onto the current EGL surface.
189 | */
190 | public void drawImage() {
191 | mTextureRender.drawFrame(mSurfaceTexture);
192 | }
193 |
194 | public void latchImage() {
195 | mTextureRender.checkGlError("before updateTexImage");
196 | mSurfaceTexture.updateTexImage();
197 | }
198 |
199 | @Override
200 | public void onFrameAvailable(SurfaceTexture st) {
201 | if (VERBOSE) Log.d(TAG, "new frame available");
202 | synchronized (mFrameSyncObject) {
203 | if (mFrameAvailable) {
204 | throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
205 | }
206 | mFrameAvailable = true;
207 | mFrameSyncObject.notifyAll();
208 | }
209 | }
210 |
211 | /**
212 | * Checks for EGL errors.
213 | */
214 | private void checkEglError(String msg) {
215 | int error;
216 | if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
217 | throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
218 | }
219 | }
220 | }
--------------------------------------------------------------------------------
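
OutputSurface is the decoder-side half of the GL bridge. A condensed sketch of one drain iteration follows, assuming the decoder was configured with outputSurface.getSurface() and that the OutputSurface was created on a thread without a Looper, per the comment in setup(); the class and method names are illustrative:

import android.media.MediaCodec;

// Sketch: one iteration of a synchronous output-drain loop.
public final class DecodeDrainSketch {
    public static void drainOnce(MediaCodec decoder, OutputSurface outputSurface) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int outIndex = decoder.dequeueOutputBuffer(info, 10_000L);
        if (outIndex >= 0) {
            boolean render = info.size > 0;
            // render == true feeds the frame into OutputSurface's SurfaceTexture
            decoder.releaseOutputBuffer(outIndex, render);
            if (render) {
                outputSurface.awaitNewImage(); // blocks until onFrameAvailable() fires
                outputSurface.drawImage();     // TextureRender draws onto the current EGL surface
                // then stamp the PTS and swapBuffers() on the encoder's InputSurface
            }
        }
    }
}
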
/app/src/main/java/com/demo/mediacodec/transcode/TextureRender.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2013 The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.demo.mediacodec.transcode;
18 |
19 | import android.graphics.Bitmap;
20 | import android.graphics.SurfaceTexture;
21 | import android.opengl.GLES11Ext;
22 | import android.opengl.GLES20;
23 | import android.opengl.Matrix;
24 | import android.util.Log;
25 |
26 | import com.demo.mediacodec.MediaCodecUtils;
27 |
28 | import java.io.FileOutputStream;
29 | import java.io.IOException;
30 | import java.nio.ByteBuffer;
31 | import java.nio.ByteOrder;
32 | import java.nio.FloatBuffer;
33 |
34 | //Code adapted from http://androidxref.com/9.0.0_r3/xref/cts/tests/tests/media/src/android/media/cts
35 |
36 | /**
37 | * Code for rendering a texture onto a surface using OpenGL ES 2.0.
38 | */
39 | class TextureRender {
40 | private static final String TAG = "TextureRender";
41 |
42 | private final VideoOutputConfig mOutputConfig;
43 |
44 | private static final int FLOAT_SIZE_BYTES = 4;
45 | private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
46 | private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
47 | private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
48 | private final float[] mTriangleVerticesData = {
49 | // X, Y, Z, U, V
50 | -1.0f, -1.0f, 0, 0.f, 0.f,
51 | 1.0f, -1.0f, 0, 1.f, 0.f,
52 | -1.0f, 1.0f, 0, 0.f, 1.f,
53 | 1.0f, 1.0f, 0, 1.f, 1.f,
54 | };
55 |
56 | private FloatBuffer mTriangleVertices;
57 |
58 | private float[] mMVPMatrix = new float[16];
59 | private float[] mSTMatrix = new float[16];
60 |
61 | private int mProgram;
62 | private int mTextureID = -12345;
63 | private int muMVPMatrixHandle;
64 | private int muSTMatrixHandle;
65 | private int maPositionHandle;
66 | private int maTextureHandle;
67 |
68 | public TextureRender(VideoOutputConfig outputConfig) {
69 | mOutputConfig = outputConfig;
70 | mTriangleVertices = ByteBuffer.allocateDirect(
71 | mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
72 | .order(ByteOrder.nativeOrder()).asFloatBuffer();
73 | mTriangleVertices.put(mTriangleVerticesData).position(0);
74 |
75 | Matrix.setIdentityM(mSTMatrix, 0);
76 | }
77 |
78 | public int getTextureId() {
79 | return mTextureID;
80 | }
81 |
82 | public void drawFrame(SurfaceTexture st) {
83 | checkGlError("onDrawFrame start");
84 | st.getTransformMatrix(mSTMatrix);
85 |
86 | GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
87 | GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
88 |
89 | GLES20.glUseProgram(mProgram);
90 | checkGlError("glUseProgram");
91 |
92 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
93 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
94 |
95 | mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
96 | GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
97 | TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
98 | checkGlError("glVertexAttribPointer maPosition");
99 | GLES20.glEnableVertexAttribArray(maPositionHandle);
100 | checkGlError("glEnableVertexAttribArray maPositionHandle");
101 |
102 | mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
103 | GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
104 | TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
105 | checkGlError("glVertexAttribPointer maTextureHandle");
106 | GLES20.glEnableVertexAttribArray(maTextureHandle);
107 | checkGlError("glEnableVertexAttribArray maTextureHandle");
108 |
109 | Matrix.setIdentityM(mMVPMatrix, 0);
110 | GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
111 | GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
112 |
113 | GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
114 | checkGlError("glDrawArrays");
115 | GLES20.glFinish();
116 | }
117 |
118 | /**
119 | * Initializes GL state. Call this after the EGL surface has been created and made current.
120 | */
121 | public void surfaceCreated() {
122 | if (mOutputConfig.eglColorSpace == MediaCodecUtils.EGLColorSpace.YUVP10) {
123 | mProgram = createProgram(GLUtils.YUV_VERTEX_SHADER, GLUtils.YUV_FRAGMENT_SHADER);
124 | } else {
125 | mProgram = createProgram(GLUtils.RGBA_VERTEX_SHADER, GLUtils.RGBA_FRAGMENT_SHADER);
126 | }
127 | if (mProgram == 0) {
128 | throw new RuntimeException("failed creating program");
129 | }
130 | maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
131 | checkGlError("glGetAttribLocation aPosition");
132 | if (maPositionHandle == -1) {
133 | throw new RuntimeException("Could not get attrib location for aPosition");
134 | }
135 | maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
136 | checkGlError("glGetAttribLocation aTextureCoord");
137 | if (maTextureHandle == -1) {
138 | throw new RuntimeException("Could not get attrib location for aTextureCoord");
139 | }
140 |
141 | muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
142 | checkGlError("glGetUniformLocation uMVPMatrix");
143 | if (muMVPMatrixHandle == -1) {
144 | throw new RuntimeException("Could not get attrib location for uMVPMatrix");
145 | }
146 |
147 | muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
148 | checkGlError("glGetUniformLocation uSTMatrix");
149 | if (muSTMatrixHandle == -1) {
150 | throw new RuntimeException("Could not get attrib location for uSTMatrix");
151 | }
152 |
153 |
154 | int[] textures = new int[1];
155 | GLES20.glGenTextures(1, textures, 0);
156 |
157 | mTextureID = textures[0];
158 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
159 | checkGlError("glBindTexture mTextureID");
160 |
161 | GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
162 | GLES20.GL_LINEAR);
163 | GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
164 | GLES20.GL_LINEAR);
165 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
166 | GLES20.GL_CLAMP_TO_EDGE);
167 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
168 | GLES20.GL_CLAMP_TO_EDGE);
169 | checkGlError("glTexParameter");
170 | }
171 |
172 | private int loadShader(int shaderType, String source) {
173 | int shader = GLES20.glCreateShader(shaderType);
174 | checkGlError("glCreateShader type=" + shaderType);
175 | GLES20.glShaderSource(shader, source);
176 | GLES20.glCompileShader(shader);
177 | int[] compiled = new int[1];
178 | GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
179 | if (compiled[0] == 0) {
180 | Log.e(TAG, "Could not compile shader " + shaderType + ":");
181 | Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
182 | GLES20.glDeleteShader(shader);
183 | shader = 0;
184 | }
185 | return shader;
186 | }
187 |
188 | private int createProgram(String vertexSource, String fragmentSource) {
189 | int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
190 | if (vertexShader == 0) {
191 | return 0;
192 | }
193 | int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
194 | if (pixelShader == 0) {
195 | return 0;
196 | }
197 |
198 | int program = GLES20.glCreateProgram();
199 | checkGlError("glCreateProgram");
200 |         if (program == 0) {
201 |             Log.e(TAG, "Could not create program");
    |             return 0; // bail out; attaching shaders to program 0 would only raise more GL errors
202 |         }
203 | GLES20.glAttachShader(program, vertexShader);
204 | checkGlError("glAttachShader");
205 | GLES20.glAttachShader(program, pixelShader);
206 | checkGlError("glAttachShader");
207 | GLES20.glLinkProgram(program);
208 | int[] linkStatus = new int[1];
209 | GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
210 | if (linkStatus[0] != GLES20.GL_TRUE) {
211 | Log.e(TAG, "Could not link program: ");
212 | Log.e(TAG, GLES20.glGetProgramInfoLog(program));
213 | GLES20.glDeleteProgram(program);
214 | program = 0;
215 | }
216 | return program;
217 | }
218 |
219 | public void checkGlError(String op) {
220 | int error;
221 | while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
222 | Log.e(TAG, op + ": glError " + error);
223 | throw new RuntimeException(op + ": glError " + error);
224 | }
225 | }
226 |
227 | /**
228 | * Saves the current frame to disk as a PNG image. Frame starts from (0,0).
229 | *
230 | * Useful for debugging.
231 | */
232 | public static void saveFrame(String filename, int width, int height) {
233 | // glReadPixels gives us a ByteBuffer filled with what is essentially big-endian RGBA
234 | // data (i.e. a byte of red, followed by a byte of green...). We need an int[] filled
235 | // with native-order ARGB data to feed to Bitmap.
236 | //
237 | // If we implement this as a series of buf.get() calls, we can spend 2.5 seconds just
238 | // copying data around for a 720p frame. It's better to do a bulk get() and then
239 | // rearrange the data in memory. (For comparison, the PNG compress takes about 500ms
240 | // for a trivial frame.)
241 | //
242 | // So... we set the ByteBuffer to little-endian, which should turn the bulk IntBuffer
243 | // get() into a straight memcpy on most Android devices. Our ints will hold ABGR data.
244 | // Swapping B and R gives us ARGB. We need about 30ms for the bulk get(), and another
245 | // 270ms for the color swap.
246 | //
247 | // Making this even more interesting is the upside-down nature of GL, which means we
248 | // may want to flip the image vertically here.
249 |
250 | ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
251 | buf.order(ByteOrder.LITTLE_ENDIAN);
252 | GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
253 | buf.rewind();
254 |
255 | int pixelCount = width * height;
256 | int[] colors = new int[pixelCount];
257 | buf.asIntBuffer().get(colors);
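    |         // Each int now holds ABGR (little-endian RGBA); swap the B and R channels to get ARGB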
258 | for (int i = 0; i < pixelCount; i++) {
259 | int c = colors[i];
260 | colors[i] = (c & 0xff00ff00) | ((c & 0x00ff0000) >> 16) | ((c & 0x000000ff) << 16);
261 | }
262 |
263 | FileOutputStream fos = null;
264 | try {
265 | fos = new FileOutputStream(filename);
266 | Bitmap bmp = Bitmap.createBitmap(colors, width, height, Bitmap.Config.ARGB_8888);
267 | bmp.compress(Bitmap.CompressFormat.PNG, 90, fos);
268 | bmp.recycle();
269 | } catch (IOException ioe) {
270 | throw new RuntimeException("Failed to write file " + filename, ioe);
271 | } finally {
272 | try {
273 | if (fos != null) fos.close();
274 | } catch (IOException ioe2) {
275 | throw new RuntimeException("Failed to close file " + filename, ioe2);
276 | }
277 | }
278 | Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
279 | }
280 | }
281 |
--------------------------------------------------------------------------------
/app/src/main/java/com/demo/mediacodec/transcode/TranscodeActivity.java:
--------------------------------------------------------------------------------
1 | package com.demo.mediacodec.transcode;
2 |
3 | import android.app.ProgressDialog;
4 | import android.media.MediaFormat;
5 | import android.net.Uri;
6 | import android.os.Bundle;
7 | import android.text.Editable;
8 | import android.text.TextWatcher;
9 | import android.widget.Button;
10 | import android.widget.CompoundButton;
11 | import android.widget.TextView;
12 | import android.widget.Toast;
13 |
14 | import com.demo.mediacodec.BaseActivity;
15 | import com.demo.mediacodec.R;
16 | import com.google.android.material.checkbox.MaterialCheckBox;
17 | import com.google.android.material.dialog.MaterialAlertDialogBuilder;
18 | import com.google.android.material.textfield.TextInputEditText;
19 |
20 | import java.io.File;
21 | import java.io.IOException;
22 | import java.io.PrintWriter;
23 | import java.io.StringWriter;
24 |
25 | import androidx.annotation.Nullable;
26 | import androidx.annotation.UiThread;
27 |
28 | /**
29 |  * Video transcoding (video only; audio is not handled)
30 | *
31 | * @author : chenqiao
32 | * @date : 2023/1/29 10:19 AM
33 | */
34 | public class TranscodeActivity extends BaseActivity implements TranscodeRunner.OnTranscodeListener {
35 |
36 | private TextView mVideoInfoTv, mErrorTv;
37 | private TextInputEditText mDstWidthEdt, mDstHeightEdt, mDstBitrateEdt, mDstFpsEdt;
38 | private Button mTransCodeBtn;
39 |
40 | private TranscodeRunner transcodeRunner;
41 | private ProgressDialog mProgressDialog;
42 |
43 | private MaterialCheckBox mH265Cb, mKeepHdrCb, mForce8BitCb;
44 |
45 | @Override
46 | protected void onCreate(@Nullable Bundle savedInstanceState) {
47 | super.onCreate(savedInstanceState);
48 | setContentView(R.layout.activity_transcode);
49 |
50 | findViewById(R.id.btn_select_video).setOnClickListener(v -> {
51 | openPicker();
52 | });
53 |
54 | mErrorTv = findViewById(R.id.tv_errorInfo);
55 | mH265Cb = findViewById(R.id.cb_h265);
56 | mH265Cb.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
57 | @Override
58 | public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
59 | if (!isChecked) {
60 | mKeepHdrCb.setChecked(false);
61 | mForce8BitCb.setChecked(false);
62 | }
63 | }
64 | });
65 | mKeepHdrCb = findViewById(R.id.cb_keep_hdr);
66 | mForce8BitCb = findViewById(R.id.cb_force_8_bit);
67 | mVideoInfoTv = findViewById(R.id.tv_ori_video_info);
68 | mDstWidthEdt = findViewById(R.id.edt_dst_width);
69 | mDstHeightEdt = findViewById(R.id.edt_dst_height);
70 | mDstBitrateEdt = findViewById(R.id.edt_dst_bitrate);
71 | mDstFpsEdt = findViewById(R.id.edt_dst_fps);
72 |
73 | TextWatcher watcher = new TextWatcher() {
74 | @Override
75 | public void beforeTextChanged(CharSequence s, int start, int count, int after) {
76 | }
77 |
78 | @Override
79 | public void onTextChanged(CharSequence s, int start, int before, int count) {
80 | }
81 |
82 | @Override
83 | public void afterTextChanged(Editable s) {
84 | if (mDstWidthEdt.getEditableText().length() > 0 && mDstHeightEdt.getEditableText().length() > 0) {
85 | mTransCodeBtn.setEnabled(true);
86 | } else {
87 | mTransCodeBtn.setEnabled(false);
88 | }
89 | }
90 | };
91 | mDstWidthEdt.addTextChangedListener(watcher);
92 | mDstHeightEdt.addTextChangedListener(watcher);
93 | mTransCodeBtn = findViewById(R.id.btn_transcode);
94 |
95 | mTransCodeBtn.setOnClickListener(v -> {
96 | mErrorTv.setText(null);
97 | File dstDir = getExternalCacheDir();
98 | TranscodeConfig config = new TranscodeConfig();
99 | config.dstPath = new File(dstDir, "output.mp4");
100 | config.h265 = mH265Cb.isChecked();
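    |             // NOTE: only width/height gate the button via the TextWatcher; an empty bitrate
    |             // or fps field makes Integer.parseInt() below throw NumberFormatException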
101 | config.outWidth = Integer.parseInt(mDstWidthEdt.getEditableText().toString());
102 | config.outHeight = Integer.parseInt(mDstHeightEdt.getEditableText().toString());
103 | config.bitrate = Integer.parseInt(mDstBitrateEdt.getEditableText().toString());
104 | config.fps = Integer.parseInt(mDstFpsEdt.getEditableText().toString());
105 | config.keepHdr = mKeepHdrCb.isChecked();
106 | if (config.keepHdr && !config.h265) {
107 |                 Toast.makeText(this, "Keeping HDR is only supported with H265 encoding", Toast.LENGTH_SHORT).show();
108 | }
109 | config.force8Bit = mForce8BitCb.isChecked();
110 | try {
111 | if (config.dstPath.exists()) {
112 | config.dstPath.delete();
113 | }
114 | } catch (Exception ignore) {
115 | }
116 | transcodeRunner.startTranscode(config);
117 | });
118 | }
119 |
120 | @Override
121 | protected void onVideoCallback(Uri videoUri) {
122 | if (transcodeRunner != null) {
123 | transcodeRunner.release();
124 | }
125 | transcodeRunner = new TranscodeRunner(this, videoUri);
126 | transcodeRunner.setTransCodeListener(this);
127 | transcodeRunner.prepareAsync();
128 | }
129 |
130 | @Override
131 | public void onPrepareDone(MediaFormat videoFormat) {
132 | runOnUiThread(() -> {
133 |             mVideoInfoTv.setText("Video track info: " + videoFormat);
134 | int width = videoFormat.getInteger(MediaFormat.KEY_WIDTH);
135 | int height = videoFormat.getInteger(MediaFormat.KEY_HEIGHT);
136 | int rotation = 0;
137 | if (videoFormat.containsKey(MediaFormat.KEY_ROTATION)) {
138 | rotation = videoFormat.getInteger(MediaFormat.KEY_ROTATION);
139 | }
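    |             // 90°/270° rotation: the display size is transposed, so swap width and height
    |             // (arithmetic swap, no temp variable)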
140 | if (rotation == 90 || rotation == 270) {
141 | width += height;
142 | height = width - height;
143 | width = width - height;
144 | }
145 | mDstWidthEdt.setText(String.valueOf(width));
146 | mDstHeightEdt.setText(String.valueOf(height));
147 | if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.N && videoFormat.containsKey(MediaFormat.KEY_COLOR_STANDARD)) {
148 | int colorStandard = videoFormat.getInteger(MediaFormat.KEY_COLOR_STANDARD);
149 | if (colorStandard == MediaFormat.COLOR_STANDARD_BT2020) {
150 | mH265Cb.setChecked(true);
151 | mKeepHdrCb.setEnabled(true);
152 | mKeepHdrCb.setChecked(true);
153 | } else {
154 | mH265Cb.setChecked(false);
155 | mKeepHdrCb.setEnabled(false);
156 | mKeepHdrCb.setChecked(false);
157 | mForce8BitCb.setEnabled(false);
158 | mForce8BitCb.setChecked(false);
159 | }
160 | } else {
161 | mKeepHdrCb.setEnabled(false);
162 | mKeepHdrCb.setChecked(false);
163 | mForce8BitCb.setEnabled(false);
164 | mForce8BitCb.setChecked(false);
165 | }
166 | });
167 | }
168 |
169 | @Override
170 | public void onError(Exception e) {
171 | runOnUiThread(() -> {
172 | StringWriter sw = new StringWriter();
173 | PrintWriter writer = new PrintWriter(sw);
174 | e.printStackTrace(writer);
175 |             mErrorTv.setText("Error: " + sw);
176 | try {
177 | sw.close();
178 | } catch (IOException ex) {
179 | ex.printStackTrace();
180 | }
181 | dismissProgressDialog();
182 | });
183 | }
184 |
185 | @Override
186 | public void onTranscodeProgress(int current) {
187 | runOnUiThread(() -> {
188 | showOrUpdateProgress(current);
189 | });
190 | }
191 |
192 | @Override
193 | public void onTranscodeDone(File output) {
194 | runOnUiThread(() -> {
195 | dismissProgressDialog();
196 | showOpenVideoDialog(output);
197 | });
198 | }
199 |
200 | private void showOpenVideoDialog(File videoFile) {
201 | MaterialAlertDialogBuilder builder = new MaterialAlertDialogBuilder(this);
202 |         builder.setTitle("Transcoding complete").setMessage("File path: " + videoFile.getAbsolutePath()).setCancelable(true);
203 | builder.show();
204 | }
205 |
206 | @UiThread
207 | private void showOrUpdateProgress(int current) {
208 | if (mProgressDialog == null) {
209 | mProgressDialog = new ProgressDialog(this);
210 | mProgressDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
211 | mProgressDialog.setCancelable(false);
212 | mProgressDialog.setCanceledOnTouchOutside(false);
213 | mProgressDialog.setMax(100);
214 |             mProgressDialog.setTitle("Transcoding...");
215 | }
216 | mProgressDialog.setProgress(current);
217 | if (!mProgressDialog.isShowing()) {
218 | mProgressDialog.show();
219 | }
220 | }
221 |
222 | @UiThread
223 | private void dismissProgressDialog() {
224 | if (mProgressDialog != null && mProgressDialog.isShowing()) {
225 | mProgressDialog.dismiss();
226 | }
227 | }
228 | }
229 |
--------------------------------------------------------------------------------
/app/src/main/java/com/demo/mediacodec/transcode/TranscodeConfig.java:
--------------------------------------------------------------------------------
1 | package com.demo.mediacodec.transcode;
2 |
3 | import java.io.File;
4 |
5 | /**
6 | * @author : chenqiao
7 | * @date : 2023/1/29 1:40 PM
8 | */
9 | public class TranscodeConfig {
10 |     public File dstPath;      // destination .mp4 file
11 |     public boolean h265;      // encode as HEVC (H.265) instead of AVC (H.264)
12 |     public int outWidth;      // output width in pixels
13 |     public int outHeight;     // output height in pixels
14 |     public int bitrate;       // target video bitrate, in bits per second
15 |     public int fps;           // target frame rate
16 |     public boolean force8Bit; // force 8-bit output even for 10-bit HDR sources
17 |     public boolean keepHdr;   // try to preserve HDR (HEVC only)
18 | }
19 |
--------------------------------------------------------------------------------
/app/src/main/java/com/demo/mediacodec/transcode/TranscodeRunner.java:
--------------------------------------------------------------------------------
1 | package com.demo.mediacodec.transcode;
2 |
3 | import android.content.ContentResolver;
4 | import android.content.Context;
5 | import android.media.MediaCodec;
6 | import android.media.MediaExtractor;
7 | import android.media.MediaFormat;
8 | import android.media.MediaMuxer;
9 | import android.net.Uri;
10 | import android.os.Build;
11 | import android.os.Bundle;
12 | import android.os.Handler;
13 | import android.os.HandlerThread;
14 | import android.os.ParcelFileDescriptor;
15 | import android.text.TextUtils;
16 | import android.util.Log;
17 | import android.view.Surface;
18 |
19 | import com.demo.mediacodec.MediaCodecUtils;
20 |
21 | import java.io.File;
22 | import java.io.IOException;
23 | import java.nio.ByteBuffer;
24 |
25 | import androidx.annotation.NonNull;
26 |
27 | /**
28 |  * How video transcoding works:
29 |  * Prepare an off-screen rendering Surface.
30 |  * The decoder parses the video data and draws each frame onto that Surface.
31 |  * The encoder uses that Surface as its input and outputs encoded frame data.
32 |  * A MediaMuxer writes the encoded frames to a file, producing the transcoded video.
33 | *
34 | * @author : chenqiao
35 | * @date : 2023/1/29 10:45 AM
36 | */
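    | // A rough sketch of the wiring in MediaCodec API terms (simplified from the code below;
    | // variable names are illustrative):
    | //   encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    | //   Surface encoderInput = encoder.createInputSurface();  // wrapped by InputSurface (EGL)
    | //   decoder.configure(srcFormat, decoderOutputSurface.getSurface(), null, 0); // SurfaceTexture-backed
    | //   decoded frame -> GL draw -> swapBuffers() -> encoder output -> muxer.writeSampleData(...)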
37 | public class TranscodeRunner {
38 |
39 | private InputSurface mEncoderInputSurface;
40 | private OutputSurface mDecoderOutputSurface;
41 |
42 | public interface OnTranscodeListener {
43 |
44 | void onPrepareDone(MediaFormat videoFormat);
45 |
46 | void onError(Exception e);
47 |
48 | void onTranscodeProgress(int current);
49 |
50 | void onTranscodeDone(File output);
51 |
52 | }
53 |
54 | private final Context mContext;
55 | private final Uri mVideoUri;
56 | private TranscodeConfig mConfig;
57 | private ParcelFileDescriptor pf;
58 | private MediaFormat mOriVideoFormat;
59 | private MediaFormat mOutputFormat;
60 | private MediaFormat mRealOutputFormat;
61 |
62 | private String mOriVideoMime;
63 | private int mOriVideoWidth, mOriVideoHeight;
64 | private int mOriVideoRotation;
65 | private int mOriVideoFps;
66 | private long mVideoDurationUs;
67 |
68 | private boolean mMaybeSwitchWH;
69 |
70 | private OnTranscodeListener listener;
71 |
72 |     // Media extractor
73 | private MediaExtractor mMediaExtractor;
74 |     // Video track index
75 | private int mVideoTrackerIndex;
76 |
77 |     // Decoder and encoder
78 | private MediaCodec mDecoder, mEncoder;
79 |
80 | private MediaMuxer mMuxer;
81 | private int mVideoOutputTrackIndex;
82 |
83 |     // Decoder callback thread
84 | private HandlerThread mDecodeCodecThread;
85 | private Handler mDecodeCodecHandler;
86 |
87 |     // Encoder callback thread
88 | private HandlerThread mEncodeCodecThread;
89 | private Handler mEncodeCodecHandler;
90 |
91 | public TranscodeRunner(Context context, Uri uri) {
92 | mContext = context;
93 | mVideoUri = uri;
94 | }
95 |
96 | public void setTransCodeListener(OnTranscodeListener listener) {
97 | this.listener = listener;
98 | }
99 |
100 | public void prepareAsync() {
101 | new Thread(new Runnable() {
102 | @Override
103 | public void run() {
104 | mMediaExtractor = new MediaExtractor();
105 | try {
106 | ContentResolver contentResolver = mContext.getContentResolver();
107 | pf = contentResolver.openFileDescriptor(mVideoUri, "r");
108 | mMediaExtractor.setDataSource(pf.getFileDescriptor());
109 | int trackCount = mMediaExtractor.getTrackCount();
110 | for (int i = 0; i < trackCount; i++) {
111 | MediaFormat format = mMediaExtractor.getTrackFormat(i);
112 | String mime = format.getString(MediaFormat.KEY_MIME);
113 | if (!TextUtils.isEmpty(mime) && mime.startsWith("video")) {
114 |                         // Found the video track
115 | mOriVideoFormat = format;
116 | _getOriVideoInfo();
117 |                         // Select this track; subsequent sample reads come from it
118 | mVideoTrackerIndex = i;
119 | mMediaExtractor.selectTrack(i);
120 | if (listener != null) {
121 | listener.onPrepareDone(mOriVideoFormat);
122 | }
123 | return;
124 | }
125 | }
126 |                     callError(new IOException("No video track found!"));
127 | } catch (IOException e) {
128 | e.printStackTrace();
129 | callError(e);
130 | }
131 | }
132 | }).start();
133 | }
134 |
135 | public void startTranscode(@NonNull TranscodeConfig transcodeConfig) {
136 | mConfig = transcodeConfig;
137 | new Thread(new Runnable() {
138 |
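    |             // Retry ladder for encoder setup: DEFAULT -> NO_PROFILE (drop profile/level)
    |             // -> NO_HDR (give up HDR output); only the weakest level rethrows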
139 | private void innerPrepareEncoder(VideoOutputConfig outputConfig) throws Exception {
140 | try {
141 | prepareEncoder(outputConfig);
142 | } catch (NoSupportMediaCodecException e) {
143 | if (outputConfig.outputLevel == MediaCodecUtils.OutputLevel.DEFAULT) {
144 |                     // Downgrade to NO_PROFILE mode
145 | outputConfig.outputLevel = MediaCodecUtils.OutputLevel.NO_PROFILE;
146 | e.printStackTrace();
147 |                     Log.w("TranscodeRunner", "prepareEncoder: downgrading to NO_PROFILE mode");
148 | innerPrepareEncoder(outputConfig);
149 | } else if (outputConfig.outputLevel == MediaCodecUtils.OutputLevel.NO_PROFILE) {
150 |                     // Downgrade to NO_HDR mode
151 | outputConfig.outputLevel = MediaCodecUtils.OutputLevel.NO_HDR;
152 | e.printStackTrace();
153 |                     Log.w("TranscodeRunner", "prepareEncoder: downgrading to NO_HDR mode");
154 | innerPrepareEncoder(outputConfig);
155 | } else {
156 | throw e;
157 | }
158 | }
159 | }
160 |
161 | @Override
162 | public void run() {
163 | if (mOriVideoFormat == null) {
164 |                     callError(new IOException("No video track found!"));
165 | return;
166 | }
167 | try {
168 | VideoOutputConfig outputConfig =
169 | new VideoOutputConfig(MediaCodecUtils.OutputLevel.DEFAULT);
170 | if (!mConfig.keepHdr) {
171 | outputConfig.outputLevel = MediaCodecUtils.OutputLevel.NO_HDR;
172 | }
173 | outputConfig.force8Bit = mConfig.force8Bit;
174 | innerPrepareEncoder(outputConfig);
175 | prepareDecoder(outputConfig);
176 | _start();
177 | } catch (Exception e) {
178 | e.printStackTrace();
179 | callError(e);
180 | }
181 | }
182 | }).start();
183 | }
184 |
185 | public void reset() {
186 |         if (mMediaExtractor != null) { // guard: reset() can run before prepareAsync() creates the extractor
    |             mMediaExtractor.unselectTrack(mVideoTrackerIndex);
187 |             mMediaExtractor.selectTrack(mVideoTrackerIndex);
188 |             mMediaExtractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
    |         }
189 |
190 | if (mEncoderInputSurface != null) {
191 | mEncoderInputSurface.release();
192 | }
193 | mEncoderInputSurface = null;
194 | if (mDecoderOutputSurface != null) {
195 | mDecoderOutputSurface.release();
196 | }
197 | mDecoderOutputSurface = null;
198 |
199 | if (mDecoder != null) {
200 | try {
201 | mDecoder.release();
202 | } catch (Exception e) {
203 | Log.w("TranscodeRunner", "release Decoder: ", e);
204 | }
205 | mDecoder = null;
206 | }
207 | if (mDecodeCodecThread != null) {
208 | mDecodeCodecThread.quitSafely();
209 | }
210 | mDecodeCodecThread = null;
211 | mDecodeCodecHandler = null;
212 |
213 | if (mEncoder != null) {
214 | try {
215 | mEncoder.stop();
216 | } catch (Exception e) {
217 | Log.w("TranscodeRunner", "stop Encoder: ", e);
218 | }
219 | try {
220 | mEncoder.release();
221 | } catch (Exception e) {
222 | Log.w("TranscodeRunner", "release Encoder: ", e);
223 | }
224 | mEncoder = null;
225 | }
226 | if (mEncodeCodecThread != null) {
227 | mEncodeCodecThread.quitSafely();
228 | }
229 | mEncodeCodecThread = null;
230 | mEncodeCodecHandler = null;
231 |
232 | if (mMuxer != null) {
233 | try {
234 | mMuxer.stop();
235 | } catch (Exception e) {
236 | Log.w("TranscodeRunner", "stop Muxer: ", e);
237 | }
238 | try {
239 | mMuxer.release();
240 | } catch (Exception e) {
241 | Log.w("TranscodeRunner", "release Muxer: ", e);
242 | }
243 | mMuxer = null;
244 | }
245 | mVideoOutputTrackIndex = 0;
246 | }
247 |
248 | public void release() {
249 | reset();
250 | if (pf != null) {
251 | try {
252 | pf.close();
253 | } catch (IOException e) {
254 | e.printStackTrace();
255 | }
256 | }
257 | if (mMediaExtractor != null) {
258 | mMediaExtractor.release();
259 | }
260 | }
261 |
262 | private void callProgress(int current) {
263 | if (listener != null) {
264 | listener.onTranscodeProgress(current);
265 | }
266 | }
267 |
268 | private void callError(Exception e) {
269 | reset();
270 | if (listener != null) {
271 | listener.onError(e);
272 | }
273 | }
274 |
275 | private void _getOriVideoInfo() {
276 | mOriVideoMime = mOriVideoFormat.getString(MediaFormat.KEY_MIME);
277 | mOriVideoWidth = mOriVideoFormat.getInteger(MediaFormat.KEY_WIDTH);
278 | mOriVideoHeight = mOriVideoFormat.getInteger(MediaFormat.KEY_HEIGHT);
279 |         // KEY_FRAME_RATE is optional; default to 0 so the fps-based frame dropping is skipped
    |         mOriVideoFps = mOriVideoFormat.containsKey(MediaFormat.KEY_FRAME_RATE)
    |                 ? mOriVideoFormat.getInteger(MediaFormat.KEY_FRAME_RATE) : 0;
280 | mMaybeSwitchWH = false;
281 | if (mOriVideoFormat.containsKey(MediaFormat.KEY_ROTATION)) {
282 | mOriVideoRotation = mOriVideoFormat.getInteger(MediaFormat.KEY_ROTATION);
283 | } else {
284 | mOriVideoRotation = 0;
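    |             // Portrait dimensions without rotation metadata: the recorder may have swapped
    |             // width/height instead of writing rotation, so remember to retry the codec
    |             // lookup with swapped dimensions later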
285 | if (mOriVideoWidth < mOriVideoHeight) {
286 | mMaybeSwitchWH = true;
287 | }
288 | }
289 | mVideoDurationUs = mOriVideoFormat.getLong(MediaFormat.KEY_DURATION);
290 | }
291 |
292 | /**
293 |      * Prepare the encoder
294 | */
295 | private void prepareEncoder(VideoOutputConfig outputConfig) throws Exception {
296 | mOutputFormat = MediaCodecUtils.createOutputFormat(mContext, mVideoUri, mOriVideoFormat,
297 | mConfig, outputConfig);
298 |
299 | String codecName = MediaCodecUtils.findEncoderByFormat(mOutputFormat);
300 | if (TextUtils.isEmpty(codecName)) {
301 | if (mConfig.outWidth < mConfig.outHeight) {
302 |                 // Some devices implement the size-support check poorly; proactively swap
    |                 // width and height and see whether an encoder turns up
303 | MediaFormat tempF = MediaCodecUtils.createOutputFormat(mContext, mVideoUri
304 | , mOriVideoFormat, mConfig, outputConfig);
305 | tempF.setInteger(MediaFormat.KEY_WIDTH, mConfig.outHeight);
306 | tempF.setInteger(MediaFormat.KEY_HEIGHT, mConfig.outWidth);
307 | codecName = MediaCodecUtils.findEncoderByFormat(tempF);
308 | }
309 | }
310 | if (TextUtils.isEmpty(codecName)) {
311 |             throw new NoSupportMediaCodecException("No suitable encoder found! outputFormat:" + mOutputFormat,
312 | outputConfig.outputLevel);
313 | }
314 |         Log.i("TranscodeRunner", "Using encoder: " + codecName);
316 | mEncodeCodecThread = new HandlerThread("EncodeCodecThread");
317 | mEncodeCodecThread.start();
318 | mEncodeCodecHandler = new Handler(mEncodeCodecThread.getLooper());
319 |
320 | if (mEncoder != null) {
321 | try {
322 | mEncoder.release();
323 | } catch (Exception ignore) {
324 | }
325 | }
326 | mEncoder = MediaCodec.createByCodecName(codecName);
327 |
328 | mEncoder.setCallback(new MediaCodec.Callback() {
329 | @Override
330 | public void onInputBufferAvailable(@NonNull MediaCodec codec, int index) {
331 | }
332 |
333 | @Override
334 | public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index,
335 | @NonNull MediaCodec.BufferInfo info) {
336 | ByteBuffer outputBuffer = codec.getOutputBuffer(index);
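    |                 // Write only real samples: codec-config buffers carry csd data that already
    |                 // reaches the muxer via onOutputFormatChanged / addTrack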
337 | if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
338 | mMuxer.writeSampleData(mVideoOutputTrackIndex, outputBuffer, info);
339 | long presentationTimeUs = info.presentationTimeUs;
340 | callProgress((int) (presentationTimeUs * 100 / mVideoDurationUs));
341 |                     Log.i("Encoder", "encode pts: " + presentationTimeUs);
342 |
343 | synchronized (hdrInfoLock) {
344 | hdrInfoLock.notifyAll();
345 | }
346 | }
347 | codec.releaseOutputBuffer(index, false);
348 | if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
349 |                     Log.i("Encoder", "Encoding finished");
350 | _transcodeComplete();
351 | }
352 | }
353 |
354 | @Override
355 | public void onError(@NonNull MediaCodec codec,
356 | @NonNull MediaCodec.CodecException e) {
357 | e.printStackTrace();
358 | callError(e);
359 | }
360 |
361 | @Override
362 | public void onOutputFormatChanged(@NonNull MediaCodec codec,
363 | @NonNull MediaFormat format) {
364 | Log.i("Encoder", "encoder output format: " + format);
365 | mRealOutputFormat = format;
366 | if (mMuxer == null) {
367 | try {
368 | prepareMuxer();
369 | } catch (Exception e) {
370 | e.printStackTrace();
371 | callError(e);
372 | }
373 | }
374 | }
375 | }, mEncodeCodecHandler);
376 | try {
377 | mEncoder.configure(mOutputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
378 | } catch (Exception e) {
379 |             throw new NoSupportMediaCodecException("Encoder configure() failed! outputFormat:" + mOutputFormat
380 | , e, outputConfig.outputLevel);
381 | }
382 |
383 | Surface surface = mEncoder.createInputSurface();
384 | try {
385 | mEncoderInputSurface = new InputSurface(surface, outputConfig);
386 |             // The InputSurface constructor created the EGL context; make it current right away,
    |             // the OutputSurface initialization below depends on it
387 | mEncoderInputSurface.makeCurrent();
388 | } catch (RuntimeException e) {
389 |             throw new NoSupportMediaCodecException("EGL initialization failed! outputFormat:" + mOutputFormat, e,
390 | outputConfig.outputLevel);
391 | }
392 | }
393 |
394 | private int decodeFrameIndex;
395 | private int encodeFrameIndex;
396 | private final Object hdrInfoLock = new Object();
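    |     // Decoder/encoder handshake for HDR10+ metadata: after attaching hdr10Info the decoder
    |     // waits up to 100 ms on this lock; the encoder's output callback notifies once a sample
    |     // has been written, keeping the metadata near its intended frame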
397 |
398 | /**
399 |      * Prepare the decoder
400 | */
401 | private void prepareDecoder(VideoOutputConfig outputConfig) throws Exception {
402 | decodeFrameIndex = 0;
403 | encodeFrameIndex = 0;
404 | boolean isDolby = MediaFormat.MIMETYPE_VIDEO_DOLBY_VISION.equals(mOriVideoMime);
405 | boolean useDolbyDec = false;
406 | String codecName = MediaCodecUtils.findDecoderByFormat(mOriVideoFormat);
407 | if (TextUtils.isEmpty(codecName)) {
408 | if (isDolby) {
409 |                 // For Dolby Vision, try decoding with a plain HEVC decoder
410 | mOriVideoFormat.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_VIDEO_HEVC);
411 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
412 |                     // Dolby Vision defines its own profile/level constants, so when falling back
    |                     // to HEVC those keys must be removed as well, otherwise no decoder will be found
413 | mOriVideoFormat.removeKey(MediaFormat.KEY_PROFILE);
414 | mOriVideoFormat.removeKey(MediaFormat.KEY_LEVEL);
415 | }
416 | codecName = MediaCodecUtils.findDecoderByFormat(mOriVideoFormat);
417 | } else if (MediaFormat.MIMETYPE_VIDEO_HEVC.equals(mOriVideoMime)) {
418 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
419 |                     // For HEVC, try removing profile and level
420 | mOriVideoFormat.removeKey(MediaFormat.KEY_PROFILE);
421 | mOriVideoFormat.removeKey(MediaFormat.KEY_LEVEL);
422 | }
423 | codecName = MediaCodecUtils.findDecoderByFormat(mOriVideoFormat);
424 |
425 | if (TextUtils.isEmpty(codecName)) {
426 | if (mMaybeSwitchWH) {
427 |                         // Some Oppo devices record portrait video without writing rotation metadata,
428 |                         // swapping width/height instead (a normal portrait clip is 1920x1080 + 90°,
429 |                         // these are 1080x1920 + 0°), so the decoder lookup fails on width/height limits
430 | MediaFormat simpleFormat = MediaFormat.createVideoFormat(mOriVideoMime,
431 | mOriVideoHeight, mOriVideoWidth);
432 | codecName = MediaCodecUtils.findDecoderByFormat(simpleFormat);
433 | }
434 | }
435 | } else {
436 |                 throw new RuntimeException("No suitable decoder found! videoFormat:" + mOriVideoFormat);
437 | }
438 | } else {
439 | if (isDolby) {
440 | useDolbyDec = true;
441 | }
442 | }
443 | if (TextUtils.isEmpty(codecName)) {
444 |             throw new RuntimeException("No suitable decoder found! videoFormat:" + mOriVideoFormat);
445 | }
446 |         Log.i("TranscodeRunner", "Using decoder: " + codecName);
447 | mDecodeCodecThread = new HandlerThread("DecodeCodecThread");
448 | mDecodeCodecThread.start();
449 | mDecodeCodecHandler = new Handler(mDecodeCodecThread.getLooper());
450 |
451 | mDecoder = MediaCodec.createByCodecName(codecName);
452 |         // Asynchronous mode
453 | mDecoder.setCallback(new MediaCodec.Callback() {
454 |
455 | @Override
456 | public void onInputBufferAvailable(@NonNull MediaCodec codec, int index) {
457 | ByteBuffer inputBuffer = null;
458 | try {
459 | inputBuffer = codec.getInputBuffer(index);
460 | } catch (Exception ignore) {
461 | }
462 | if (inputBuffer == null) {
463 | return;
464 | }
465 | int sampleSize = mMediaExtractor.readSampleData(inputBuffer, 0);
466 | if (sampleSize > 0) {
467 | long sampleTime = mMediaExtractor.getSampleTime();
468 | int flags = mMediaExtractor.getSampleFlags();
469 | try {
470 | codec.queueInputBuffer(index, 0, sampleSize, sampleTime, flags);
471 | } catch (Exception ignore) {
472 | }
473 | mMediaExtractor.advance();
474 | } else {
475 | codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
476 | }
477 | }
478 |
479 | @Override
480 | public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index,
481 | @NonNull MediaCodec.BufferInfo info) {
482 | if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
483 | boolean render = info.size > 0;
484 | if (render && Build.VERSION.SDK_INT < Build.VERSION_CODES.O) {
485 |                         // Below Android O, drop frames manually to reduce the frame rate
486 | if (Math.abs(info.presentationTimeUs - mVideoDurationUs) < 100_000L) {
487 |                             // Within the last 100 ms of the video, never drop frames
488 | } else {
489 | if (mOriVideoFps > 0 && mConfig.fps < mOriVideoFps) {
490 | //如果相比原视频需要降低帧率,那么需要计算是否需要丢帧
491 | long oriTimeInternal = 1000000000L / mOriVideoFps;
492 | long dstTimeInternal = 1000000000L / mConfig.fps;
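    |                                 // e.g. 30 -> 15 fps: output frame k lands nearest source frame 2k,
    |                                 // so roughly every other source frame gets dropped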
493 | long dstTime = encodeFrameIndex * dstTimeInternal;
494 | int indexPre = (int) (dstTime / oriTimeInternal);
495 | int indexAfter = indexPre + 1;
496 |                                 // Compare the source timestamps at indexPre and indexAfter, keep the closer one
497 |                                 long offset1 = Math.abs(oriTimeInternal * indexPre - dstTime);
498 |                                 long offset2 = Math.abs(oriTimeInternal * indexAfter - dstTime);
499 |                                 if (offset1 <= offset2) {
500 |                                     // indexPre wins
501 |                                     if (decodeFrameIndex != indexPre) {
502 |                                         // not the indexPre frame: drop it
503 |                                         render = false;
504 |                                     }
505 |                                 } else {
506 |                                     // indexAfter wins
507 |                                     if (decodeFrameIndex != indexAfter) {
508 |                                         // not the indexAfter frame: drop it
509 |                                         render = false;
510 |                                     }
511 | }
512 | }
513 | }
514 | }
515 | byte[] hdr10Info = null;
516 | if (outputConfig.isHDR && android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.Q) {
517 | try {
518 | MediaFormat format = codec.getOutputFormat();
519 | ByteBuffer hdrByteBuffer =
520 | format.getByteBuffer(MediaFormat.KEY_HDR10_PLUS_INFO);
521 | if (hdrByteBuffer != null) {
522 | int limit = hdrByteBuffer.limit();
523 | if (limit > 0) {
524 | hdr10Info = new byte[limit];
525 | hdrByteBuffer.get(hdr10Info);
526 | }
527 | }
528 | } catch (Exception ignore) {
529 | }
530 | }
531 | try {
532 | codec.releaseOutputBuffer(index, render);
533 | if (render) {
534 |                             // Switch to the GL context. mDecoderOutputSurface.makeCurrent() is not
535 |                             // used because OutputSurface does not create an EGLContext of its own;
536 |                             // the context lives in mEncoderInputSurface
537 | mEncoderInputSurface.makeCurrent();
538 |                             // Draw the new decoded image using the OutputSurface
539 | mDecoderOutputSurface.awaitNewImage();
540 | mDecoderOutputSurface.drawImage();
541 |                             // Present the frame (eglPresentationTimeANDROID expects nanoseconds, hence * 1000)
542 | mEncoderInputSurface.setPresentationTime(info.presentationTimeUs * 1000);
543 | mEncoderInputSurface.swapBuffers();
544 | mEncoderInputSurface.makeUnCurrent();
545 | encodeFrameIndex++;
546 |
547 | if (hdr10Info != null) {
548 |                                 // HDR10+ metadata must be handed to the encoder manually
549 | Bundle codecParameters = new Bundle();
550 | codecParameters.putByteArray(MediaCodec.PARAMETER_KEY_HDR10_PLUS_INFO, hdr10Info);
551 | if (mEncoder != null) {
552 | mEncoder.setParameters(codecParameters);
553 | }
554 | }
555 | }
556 | decodeFrameIndex++;
557 |                         Log.i("Decoder", "decode pts: " + info.presentationTimeUs);
558 | } catch (Exception ignore) {
559 | }
560 | if (hdr10Info != null) {
561 |                         // Decoding and encoding run asynchronously: the HDR10+ info set above attaches
562 |                         // to a frame the encoder outputs later, so if decoding outruns encoding the info
    |                         // can land on the wrong frame. Deliberately slow decoding down here; the encoder
    |                         // callback calls notifyAll() after each written sample
563 | synchronized (hdrInfoLock) {
564 | try {
565 | hdrInfoLock.wait(100);
566 | } catch (InterruptedException e) {
567 | e.printStackTrace();
568 | }
569 | }
570 | }
571 | } else {
572 | codec.releaseOutputBuffer(index, false);
573 | if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
574 | if (mEncoder != null) {
575 | mEncoder.signalEndOfInputStream();
576 | }
577 | codec.stop();
578 | codec.release();
579 |                         Log.i("Decoder", "Decoding finished");
580 | }
581 | }
582 | }
583 |
584 | @Override
585 | public void onError(@NonNull MediaCodec codec, @NonNull MediaCodec.CodecException e) {
586 | e.printStackTrace();
587 | callError(e);
588 | }
589 |
590 | @Override
591 | public void onOutputFormatChanged(@NonNull MediaCodec codec,
592 | @NonNull MediaFormat format) {
593 | Log.i("Decoder", "decoder output format: " + format);
594 | }
595 | }, mDecodeCodecHandler);
596 | mDecoderOutputSurface = new OutputSurface(outputConfig);
597 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
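    |             // "allow-frame-drop" is MediaFormat.KEY_ALLOW_FRAME_DROP (API 29); 0 keeps the
    |             // decoder from dropping late frames when rendering to the Surface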
598 | mOriVideoFormat.setInteger("allow-frame-drop", 0);
599 | }
600 | if (isDolby && useDolbyDec) {
601 | Bundle transferBundle = new Bundle();
602 |             String value = "transfer.hlg"; // the other option is "transfer.dolby"
603 | transferBundle.putString("vendor.dolby.codec.transfer.value", value);
604 | mDecoder.setParameters(transferBundle);
605 | }
606 | mDecoder.configure(mOriVideoFormat, mDecoderOutputSurface.getSurface(), null, 0);
607 | }
608 |
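    |     // Called from the encoder's onOutputFormatChanged: the track must be added with the
    |     // encoder's actual output format (it carries csd-0/csd-1), not the configured format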
609 | private void prepareMuxer() throws Exception {
610 | mMuxer = new MediaMuxer(mConfig.dstPath.getAbsolutePath(),
611 | MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
612 | mVideoOutputTrackIndex = mMuxer.addTrack(mRealOutputFormat);
613 | mMuxer.start();
614 | }
615 |
616 | private void _start() {
617 | if (mEncoder != null) {
618 | mEncoder.start();
619 | }
620 | mDecoder.start();
621 | }
622 |
623 | private void _transcodeComplete() {
624 | reset();
625 | if (listener != null) {
626 | listener.onTranscodeDone(mConfig.dstPath);
627 | }
628 | }
629 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/demo/mediacodec/transcode/VideoOutputConfig.java:
--------------------------------------------------------------------------------
1 | package com.demo.mediacodec.transcode;
2 |
3 | import com.demo.mediacodec.MediaCodecUtils;
4 |
5 | /**
6 | * @author : chenqiao
7 | * @date : 2023/6/1 10:44
8 | */
9 | public class VideoOutputConfig {
10 |
11 |     public MediaCodecUtils.OutputLevel outputLevel; // DEFAULT -> NO_PROFILE -> NO_HDR fallback level
12 |
13 |     public boolean isHDR;      // output keeps HDR
14 |
15 |     public boolean isHDRVivid; // source is HDR Vivid
16 |
17 |     public boolean isDolby;    // source is Dolby Vision
18 |
19 |     public boolean force8Bit;  // force 8-bit output even for 10-bit sources
20 |
21 |     public MediaCodecUtils.EGLColorSpace eglColorSpace; // color space used for the EGL surfaces
22 |
23 | public VideoOutputConfig(MediaCodecUtils.OutputLevel outputLevel) {
24 | this.outputLevel = outputLevel;
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |