├── .gitignore
├── MediaSamples
├── .gitignore
├── LICENSE
├── app
│ ├── .gitignore
│ ├── build.gradle
│ ├── proguard-rules.pro
│ └── src
│ │ ├── androidTest
│ │ └── java
│ │ │ └── com
│ │ │ └── manu
│ │ │ └── mediasamples
│ │ │ └── ExampleInstrumentedTest.kt
│ │ ├── main
│ │ ├── AndroidManifest.xml
│ │ ├── cpp
│ │ │ ├── CMakeLists.txt
│ │ │ └── native-yuv-to-buffer-lib.cpp
│ │ ├── java
│ │ │ └── com
│ │ │ │ └── manu
│ │ │ │ └── mediasamples
│ │ │ │ ├── MainActivity.kt
│ │ │ │ ├── PermissionActivity.kt
│ │ │ │ ├── app
│ │ │ │ └── MediaApplication.kt
│ │ │ │ ├── others
│ │ │ │ └── Encode.kt
│ │ │ │ ├── samples
│ │ │ │ ├── async
│ │ │ │ │ ├── AsyncActivity.kt
│ │ │ │ │ └── AsyncEncodeManager.kt
│ │ │ │ ├── audio
│ │ │ │ │ └── AudioTrackActivity.kt
│ │ │ │ ├── frame
│ │ │ │ │ ├── AsyncInputEncodeManager.kt
│ │ │ │ │ ├── IMediaNative.java
│ │ │ │ │ ├── ImageActivity.kt
│ │ │ │ │ └── YUVTools.java
│ │ │ │ ├── opengl
│ │ │ │ │ ├── Config.kt
│ │ │ │ │ ├── IRender.kt
│ │ │ │ │ ├── OpenGLActivity.kt
│ │ │ │ │ ├── PlayRenderer.kt
│ │ │ │ │ └── VideoRender.kt
│ │ │ │ ├── record
│ │ │ │ │ ├── AudioEncode.kt
│ │ │ │ │ ├── AudioEncode2.kt
│ │ │ │ │ ├── AudioThread.kt
│ │ │ │ │ ├── EncodeManager.kt
│ │ │ │ │ ├── RecordActivity.kt
│ │ │ │ │ ├── RecordConfig.kt
│ │ │ │ │ └── VideoEncode.kt
│ │ │ │ └── sync
│ │ │ │ │ ├── EncodeManager.kt
│ │ │ │ │ ├── SyncActivity.kt
│ │ │ │ │ └── SyncEncodeThread.kt
│ │ │ │ └── util
│ │ │ │ ├── AutoFitTextureView.kt
│ │ │ │ ├── CameraSizes.kt
│ │ │ │ ├── CodecUtil.kt
│ │ │ │ ├── GLUtil.kt
│ │ │ │ ├── L.kt
│ │ │ │ └── TextureHelper.kt
│ │ └── res
│ │ │ ├── drawable-v24
│ │ │ └── ic_launcher_foreground.xml
│ │ │ ├── drawable
│ │ │ └── ic_launcher_background.xml
│ │ │ ├── layout
│ │ │ ├── activity_camera.xml
│ │ │ ├── activity_main.xml
│ │ │ ├── activity_open_g_l.xml
│ │ │ └── activity_track_audio.xml
│ │ │ ├── mipmap-anydpi-v26
│ │ │ ├── ic_launcher.xml
│ │ │ └── ic_launcher_round.xml
│ │ │ ├── mipmap-hdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-mdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-xhdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-xxhdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-xxxhdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ ├── raw
│ │ │ ├── video_fragment_shader_default.glsl
│ │ │ ├── video_vertex_shader.glsl
│ │ │ └── video_vertex_shader_default.glsl
│ │ │ ├── values-night
│ │ │ └── themes.xml
│ │ │ ├── values
│ │ │ ├── colors.xml
│ │ │ ├── strings.xml
│ │ │ └── themes.xml
│ │ │ └── xml
│ │ │ └── network_security_config.xml
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── manu
│ │ └── mediasamples
│ │ └── ExampleUnitTest.kt
├── build.gradle
├── gradle.properties
├── gradle
│ └── wrapper
│ │ ├── gradle-wrapper.jar
│ │ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── screenshot
│ └── media_record.gif
└── settings.gradle
├── README.md
├── ffmpeg-decode-av
├── CMakeLists.txt
├── main.c
└── src
│ ├── audio
│ ├── audio_sample.c
│ └── audio_sample.h
│ ├── avio
│ ├── avio_audio_sample.c
│ └── avio_audio_sample.h
│ ├── base
│ ├── av_base.c
│ └── av_base.h
│ └── video
│ ├── video_sample.c
│ └── video_sample.h
├── ffmpeg-demux-mp4
├── CMakeLists.txt
└── main.c
├── ffmpeg-resample-audio
├── CMakeLists.txt
├── main.c
└── src
│ ├── base
│ ├── audio_resample_base.c
│ └── audio_resample_base.h
│ ├── resampler
│ ├── audio_resample.c
│ └── audio_resample.h
│ └── sample
│ ├── sample.c
│ └── sample.h
└── sdf-pcm
├── CMakeLists.txt
└── main.c
/.gitignore:
--------------------------------------------------------------------------------
1 | /*/cmake-build-debug/
2 | /*/.idea
3 |
--------------------------------------------------------------------------------
/MediaSamples/.gitignore:
--------------------------------------------------------------------------------
1 | # Built application files
2 | *.apk
3 | *.ap_
4 | *.aab
5 |
6 | # Files for the ART/Dalvik VM
7 | *.dex
8 |
9 | # Java class files
10 | *.class
11 |
12 | # Generated files
13 | bin/
14 | gen/
15 | out/
16 |
17 | # Gradle files
18 | .gradle/
19 | build/
20 |
21 | # Local configuration file (sdk path, etc)
22 | local.properties
23 |
24 | # Proguard folder generated by Eclipse
25 | proguard/
26 |
27 | # Log Files
28 | *.log
29 |
30 | # Android Studio Navigation editor temp files
31 | .navigation/
32 |
33 |
34 | # IntelliJ
35 | *.iml
36 | .idea/workspace.xml
37 | .idea/tasks.xml
38 | .idea/gradle.xml
39 | .idea/assetWizardSettings.xml
40 | .idea/dictionaries
41 | .idea/libraries
42 | .idea/caches
43 |
44 | # Keystore files
45 | # Uncomment the following lines if you do not want to check your keystore files in.
46 | #*.jks
47 | #*.keystore
48 |
49 | # External native build folder generated in Android Studio 2.2 and later
50 | .externalNativeBuild
51 |
52 | # Google Services (e.g. APIs or Firebase)
53 | # google-services.json
54 |
55 | # Freeline
56 | freeline.py
57 | freeline/
58 | freeline_project_description.json
59 |
60 | # fastlane
61 | fastlane/report.xml
62 | fastlane/Preview.html
63 | fastlane/screenshots
64 | fastlane/test_output
65 | fastlane/readme.md
66 |
67 | # Version control
68 | vcs.xml
69 |
70 | # lint
71 | lint/intermediates/
72 | lint/generated/
73 | lint/outputs/
74 | lint/tmp/
75 | # lint/reports/
76 | /.idea/
77 | /build/
78 |
--------------------------------------------------------------------------------
/MediaSamples/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
--------------------------------------------------------------------------------
/MediaSamples/app/build.gradle:
--------------------------------------------------------------------------------
1 | plugins {
2 | id 'com.android.application'
3 | id 'kotlin-android'
4 | }
5 |
6 | android {
7 | compileSdkVersion 28
8 | buildToolsVersion "28.0.3"
9 |
10 | defaultConfig {
11 | applicationId "com.manu.mediasamples"
12 | minSdkVersion 21
13 | targetSdkVersion 28
14 | versionCode 1
15 | versionName "1.0"
16 |
17 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
18 | }
19 |
20 | buildTypes {
21 | release {
22 | minifyEnabled false
23 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
24 | }
25 | }
26 |
27 | // externalNativeBuild {
28 | // cmake {
29 | // version "3.10.2"
30 | // path "src/main/cpp/CMakeLists.txt"
31 | // }
32 | // }
33 |
34 | compileOptions {
35 | sourceCompatibility JavaVersion.VERSION_1_8
36 | targetCompatibility JavaVersion.VERSION_1_8
37 | }
38 | kotlinOptions {
39 | jvmTarget = '1.8'
40 | }
41 |
42 | buildFeatures {
43 | viewBinding true
44 | }
45 | ndkVersion '20.0.5594570'
46 | }
47 |
48 | def kotlin_coroutines = "1.3.4"
49 |
50 | dependencies {
51 |
52 | implementation 'androidx.core:core-ktx:1.3.2'
53 | implementation "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}"
54 | implementation "org.jetbrains.kotlinx:kotlinx-coroutines-core:$kotlin_coroutines"
55 | implementation "org.jetbrains.kotlinx:kotlinx-coroutines-android:$kotlin_coroutines"
56 | implementation 'androidx.lifecycle:lifecycle-runtime-ktx:2.2.0'
57 | implementation 'androidx.appcompat:appcompat:1.2.0'
58 | implementation 'com.google.android.material:material:1.2.1'
59 | implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
60 | implementation 'pub.devrel:easypermissions:3.0.0'
61 | testImplementation 'junit:junit:4.13.1'
62 | androidTestImplementation 'androidx.test.ext:junit:1.1.2'
63 | androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
64 | }
--------------------------------------------------------------------------------
/MediaSamples/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
--------------------------------------------------------------------------------
/MediaSamples/app/src/androidTest/java/com/manu/mediasamples/ExampleInstrumentedTest.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples
2 |
3 | import androidx.test.platform.app.InstrumentationRegistry
4 | import androidx.test.ext.junit.runners.AndroidJUnit4
5 |
6 | import org.junit.Test
7 | import org.junit.runner.RunWith
8 |
9 | import org.junit.Assert.*
10 |
11 | /**
12 | * Instrumented test, which will execute on an Android device.
13 | *
14 | * See [testing documentation](http://d.android.com/tools/testing).
15 | */
/**
 * Instrumented test that runs on an Android device.
 *
 * See [testing documentation](http://d.android.com/tools/testing).
 */
@RunWith(AndroidJUnit4::class)
class ExampleInstrumentedTest {

    @Test
    fun useAppContext() {
        // Target context of the app under test; its package name must
        // match the applicationId.
        val targetContext = InstrumentationRegistry.getInstrumentation().targetContext
        assertEquals("com.manu.mediasamples", targetContext.packageName)
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
24 |
26 |
27 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/cpp/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | # Sets the minimum version of CMake required to build your native library.
2 | # This ensures that a certain set of CMake features is available to
3 | # your build.
4 |
5 | cmake_minimum_required(VERSION 3.4.1)
6 |
7 | # Specifies a library name, specifies whether the library is STATIC or
8 | # SHARED, and provides relative paths to the source code. You can
9 | # define multiple libraries by adding multiple add_library() commands,
10 | # and CMake builds them for you. When you build your app, Gradle
11 | # automatically packages shared libraries with your APK.
12 |
13 | add_library( native-yuv-to-buffer-lib
14 | SHARED
15 | native-yuv-to-buffer-lib.cpp )
16 |
17 | target_link_libraries( # Specifies the target library.
18 | native-yuv-to-buffer-lib
19 | android
20 | log
21 | )
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/cpp/native-yuv-to-buffer-lib.cpp:
--------------------------------------------------------------------------------
1 | //
2 | // Created by awk-asst on 11.04.19.
3 | //
4 |
5 | #include
6 | #include
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 |
14 | #include
15 | #include
16 |
17 | #define TAG "CamCapture"
18 |
19 | uint8_t *buf;
20 | uint8_t *bbuf_yIn;
21 | uint8_t *bbuf_uIn;
22 | uint8_t *bbuf_vIn;
23 |
// Convert a semi-planar 4:2:0 frame (NV21 or NV12) to planar I420.
//
// src    : Y plane followed by interleaved chroma pairs (VU for NV21,
//          UV for NV12); width * height * 3 / 2 bytes.
// dst    : receives I420 data (Y plane, then U plane, then V plane),
//          same total size as src.
// width/height : luma dimensions; assumed even — no odd-size rounding here.
// isNV21 : true when the source chroma pairs are (V, U); false for (U, V).
// Returns false if either buffer pointer is null.
//
// Fix: the NV12 branch comments were swapped (labelled the U copy as
// "copy V" and vice versa); the code itself was correct.
bool SPtoI420(const uint8_t *src, uint8_t *dst, int width, int height, bool isNV21)
{
    if (!src || !dst) {
        return false;
    }

    unsigned int YSize = width * height;
    unsigned int UVSize = (YSize >> 1);

    // Source layout: Y plane, then interleaved chroma pairs.
    const uint8_t *pSrcY = src;
    const uint8_t *pSrcUV = src + YSize;

    // I420 layout: Y plane, then U plane, then V plane.
    uint8_t *pDstY = dst;
    uint8_t *pDstU = dst + YSize;
    uint8_t *pDstV = dst + YSize + (UVSize >> 1);

    // Luma is identical in both layouts; copy it wholesale.
    memcpy(pDstY, pSrcY, YSize);

    // De-interleave the chroma pairs into separate planes.
    for (unsigned int k = 0; k < (UVSize >> 1); k++) {
        if (isNV21) {
            pDstV[k] = pSrcUV[k * 2];     // NV21 pair is (V, U): copy V
            pDstU[k] = pSrcUV[k * 2 + 1]; // copy U
        } else {
            pDstU[k] = pSrcUV[k * 2];     // NV12 pair is (U, V): copy U
            pDstV[k] = pSrcUV[k * 2 + 1]; // copy V
        }
    }

    return true;
}
58 |
// Swap the interleaved chroma byte order of a semi-planar 4:2:0 frame,
// turning NV21 (VU pairs) into NV12 (UV pairs). The Y plane is copied
// unchanged. Returns false if either pointer is null.
bool NV21toNV12(const uint8_t *src, uint8_t *dst, int width, int height)
{
    if (src == nullptr || dst == nullptr) {
        return false;
    }

    const unsigned int lumaBytes = width * height;
    const unsigned int chromaPairs = lumaBytes >> 2;

    // Luma is identical in both layouts; copy it in one shot.
    memcpy(dst, src, lumaBytes);

    // Swap each (V, U) pair into (U, V).
    const uint8_t *srcUV = src + lumaBytes;
    uint8_t *dstUV = dst + lumaBytes;
    for (unsigned int k = 0; k < chromaPairs; ++k) {
        dstUV[2 * k] = srcUV[2 * k + 1]; // U
        dstUV[2 * k + 1] = srcUV[2 * k]; // V
    }

    return true;
}
87 |
88 |
89 | /*-----------------------------------------------------------------------*/
90 | /*-----------------------------------------------------------------------*/
91 | /*------------------------- send frame planes ---------------------------*/
92 | /*-----------------------------------------------------------------------*/
93 | /*-----------------------------------------------------------------------*/
94 | extern "C"
95 | JNIEXPORT jbyteArray JNICALL
96 | Java_com_manu_mediasamples_frame_IMediaNative_yuvToBuffer(JNIEnv *env, jobject instance,
97 | jobject yPlane, jobject uPlane, jobject vPlane,
98 | jint yPixelStride, jint yRowStride,
99 | jint uPixelStride, jint uRowStride,
100 | jint vPixelStride, jint vRowStride,
101 | jint imgWidth, jint imgHeight) {
102 |
103 | bbuf_yIn = static_cast(env->GetDirectBufferAddress(yPlane));
104 | bbuf_uIn = static_cast(env->GetDirectBufferAddress(uPlane));
105 | bbuf_vIn = static_cast(env->GetDirectBufferAddress(vPlane));
106 |
107 | buf = (uint8_t *) malloc(sizeof(uint8_t) * imgWidth * imgHeight +
108 | 2 * (imgWidth + 1) / 2 * (imgHeight + 1) / 2);
109 |
110 | // __android_log_print(ANDROID_LOG_INFO, "YUVTOBUFFER", "yPixelStride: %d, yRowStride: %d", yPixelStride, yRowStride);
111 | // __android_log_print(ANDROID_LOG_INFO, "YUVTOBUFFER", "uPixelStride: %d, uRowStride: %d", uPixelStride, uRowStride);
112 | // __android_log_print(ANDROID_LOG_INFO, "YUVTOBUFFER", "vPixelStride: %d, vRowStride: %d", vPixelStride, vRowStride);
113 | // __android_log_print(ANDROID_LOG_INFO, "YUVTOBUFFER", "bbuf_yIn: %p, bbuf_uIn: %p, bbuf_vIn: %p", bbuf_yIn, bbuf_uIn, bbuf_vIn);
114 | // __android_log_print(ANDROID_LOG_INFO, "YUVTOBUFFER", "imgWidth: %d, imgHeight: %d", imgWidth, imgHeight);
115 |
116 | bool isNV21;
117 | if (yPixelStride == 1) {
118 | // All pixels in a row are contiguous; copy one line at a time.
119 | for (int y = 0; y < imgHeight; y++)
120 | memcpy(buf + y * imgWidth, bbuf_yIn + y * yRowStride,
121 | static_cast(imgWidth));
122 | } else {
123 | // Highly improbable, but not disallowed by the API. In this case
124 | // individual pixels aren't stored consecutively but sparsely with
125 | // other data inbetween each pixel.
126 | for (int y = 0; y < imgHeight; y++)
127 | for (int x = 0; x < imgWidth; x++)
128 | buf[y * imgWidth + x] = bbuf_yIn[y * yRowStride + x * yPixelStride];
129 | }
130 |
131 | uint8_t *chromaBuf = &buf[imgWidth * imgHeight];
132 | int chromaBufStride = 2 * ((imgWidth + 1) / 2);
133 |
134 |
135 |
136 | if (uPixelStride == 2 && vPixelStride == 2 &&
137 | uRowStride == vRowStride && bbuf_uIn == bbuf_vIn + 1) {
138 |
139 |
140 | isNV21 = true;
141 | // The actual cb/cr planes happened to be laid out in
142 | // exact NV21 form in memory; copy them as is
143 | for (int y = 0; y < (imgHeight + 1) / 2; y++)
144 | memcpy(chromaBuf + y * chromaBufStride, bbuf_vIn + y * vRowStride,
145 | static_cast(chromaBufStride));
146 |
147 |
148 | } else if (vPixelStride == 2 && uPixelStride == 2 &&
149 | uRowStride == vRowStride && bbuf_vIn == bbuf_uIn + 1) {
150 |
151 |
152 | isNV21 = false;
153 | // The cb/cr planes happened to be laid out in exact NV12 form
154 | // in memory; if the destination API can use NV12 in addition to
155 | // NV21 do something similar as above, but using cbPtr instead of crPtr.
156 | // If not, remove this clause and use the generic code below.
157 | }
158 | else {
159 | isNV21 = true;
160 | if (vPixelStride == 1 && uPixelStride == 1) {
161 | // Continuous cb/cr planes; the input data was I420/YV12 or similar;
162 | // copy it into NV21 form
163 | for (int y = 0; y < (imgHeight + 1) / 2; y++) {
164 | for (int x = 0; x < (imgWidth + 1) / 2; x++) {
165 | chromaBuf[y * chromaBufStride + 2 * x + 0] = bbuf_vIn[y * vRowStride + x];
166 | chromaBuf[y * chromaBufStride + 2 * x + 1] = bbuf_uIn[y * uRowStride + x];
167 | }
168 | }
169 | } else {
170 | // Generic data copying into NV21
171 | for (int y = 0; y < (imgHeight + 1) / 2; y++) {
172 | for (int x = 0; x < (imgWidth + 1) / 2; x++) {
173 | chromaBuf[y * chromaBufStride + 2 * x + 0] = bbuf_vIn[y * vRowStride +
174 | x * uPixelStride];
175 | chromaBuf[y * chromaBufStride + 2 * x + 1] = bbuf_uIn[y * uRowStride +
176 | x * vPixelStride];
177 | }
178 | }
179 | }
180 | }
181 |
182 | if (isNV21) {
183 | __android_log_print(ANDROID_LOG_INFO, "YUVTOBUFFER", "isNV21");
184 | }
185 | else {
186 | __android_log_print(ANDROID_LOG_INFO, "YUVTOBUFFER", "isNV12");
187 | }
188 |
189 | // uint8_t *I420Buff = (uint8_t *) malloc(sizeof(uint8_t) * imgWidth * imgHeight +
190 | // 2 * (imgWidth + 1) / 2 * (imgHeight + 1) / 2);
191 | uint8_t *NV12Buff = (uint8_t *) malloc(sizeof(uint8_t) * imgWidth * imgHeight +
192 | 2 * (imgWidth + 1) / 2 * (imgHeight + 1) / 2);
193 | // SPtoI420(buf,I420Buff,imgWidth,imgHeight,isNV21);
194 | if (isNV21) {
195 | NV21toNV12(buf, NV12Buff, imgWidth, imgHeight);
196 | }
197 | else {
198 | NV12Buff = buf;
199 | }
200 |
201 | jbyteArray ret = env->NewByteArray(imgWidth * imgHeight *
202 | 3/2);
203 | env->SetByteArrayRegion (ret, 0, imgWidth * imgHeight *
204 | 3/2, (jbyte*)NV12Buff);
205 | free(buf);
206 | free(NV12Buff);
207 | // free (I420Buff);
208 | return ret;
209 | }
210 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/MainActivity.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples
2 |
3 | import android.content.Intent
4 | import android.hardware.camera2.CameraCharacteristics
5 | import android.os.Bundle
6 | import android.view.View
7 | import android.view.Window
8 | import android.view.WindowManager
9 | import androidx.appcompat.app.AppCompatActivity
10 | import com.manu.mediasamples.samples.async.AsyncActivity
11 | import com.manu.mediasamples.databinding.ActivityMainBinding
12 | import com.manu.mediasamples.samples.audio.AudioTrackActivity
13 | import com.manu.mediasamples.samples.frame.ImageActivity
14 | import com.manu.mediasamples.samples.opengl.OpenGLActivity
15 | import com.manu.mediasamples.samples.record.RecordActivity
16 | import com.manu.mediasamples.samples.sync.SyncActivity
17 |
18 | /**
19 | * @Desc: MainActivity
20 | * @Author: jzman
21 | */
/**
 * @Desc: Entry screen — a list of buttons, each launching one media sample.
 * @Author: jzman
 */
class MainActivity : AppCompatActivity(), View.OnClickListener {
    private lateinit var binding: ActivityMainBinding

    companion object {
        private const val TAG = "main_activity"
        const val CAMERA_ID = "camera_id"

        /** Camera id */
        private const val CAMERA_TYPE = CameraCharacteristics.LENS_FACING_BACK.toString()
    }

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        requestWindowFeature(Window.FEATURE_NO_TITLE)
        window.setFlags(
            WindowManager.LayoutParams.FLAG_FULLSCREEN,
            WindowManager.LayoutParams.FLAG_FULLSCREEN
        )
        binding = ActivityMainBinding.inflate(layoutInflater)
        setContentView(binding.root)
        // Wire every sample button to this activity's click handler.
        listOf(
            binding.btnImageReader,
            binding.btnSync,
            binding.btnAsync,
            binding.btnRecord,
            binding.btnAudioTrack,
            binding.btnOpenGL
        ).forEach { it.setOnClickListener(this) }
    }

    override fun onClick(v: View?) {
        when (v?.id) {
            R.id.btnSync -> startSampleActivity(CAMERA_TYPE, SyncActivity::class.java)
            R.id.btnAsync -> startSampleActivity(CAMERA_TYPE, AsyncActivity::class.java)
            R.id.btnImageReader -> startSampleActivity(CAMERA_TYPE, ImageActivity::class.java)
            R.id.btnRecord -> startSampleActivity(CAMERA_TYPE, RecordActivity::class.java)
            R.id.btnAudioTrack -> startSampleActivity(CAMERA_TYPE, AudioTrackActivity::class.java)
            // The OpenGL sample does not need a camera id extra.
            R.id.btnOpenGL -> startSampleActivity(OpenGLActivity::class.java)
        }
    }

    /** Launch a sample that expects a camera id extra. */
    private fun startSampleActivity(cameraId: String, clazz: Class<*>) {
        val launch = Intent(this, clazz)
        launch.putExtra(CAMERA_ID, cameraId)
        startActivity(launch)
    }

    /** Launch a sample without extras. */
    private fun startSampleActivity(clazz: Class<*>) {
        startActivity(Intent(this, clazz))
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/PermissionActivity.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples
2 |
3 | import android.Manifest
4 | import android.content.Intent
5 | import android.os.Bundle
6 | import android.os.Handler
7 | import android.os.Looper
8 | import android.util.Log
9 | import androidx.appcompat.app.AppCompatActivity
10 | import pub.devrel.easypermissions.AppSettingsDialog
11 | import pub.devrel.easypermissions.EasyPermissions
12 | import pub.devrel.easypermissions.PermissionRequest
13 |
/**
 * Requests the runtime permissions the samples need (camera, microphone,
 * external storage) via EasyPermissions and forwards to [MainActivity]
 * once they are granted.
 */
class PermissionActivity : AppCompatActivity(), EasyPermissions.PermissionCallbacks {

    companion object {
        private const val TAG = "PermissionActivity"

        /** Request code for the camera permission group. */
        private const val REQUEST_CODE_CAMERA = 0
    }

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        requestPermission()
    }

    override fun onPermissionsGranted(requestCode: Int, perms: MutableList<String>) {
        Log.d(TAG, "onPermissionsGranted")
        startMainActivity()
    }

    override fun onPermissionsDenied(requestCode: Int, perms: MutableList<String>) {
        Log.d(TAG, "onPermissionsDenied:${perms}")
        if (EasyPermissions.somePermissionPermanentlyDenied(this, perms)) {
            // Permanently denied: send the user to the app settings page.
            AppSettingsDialog.Builder(this).build().show()
        } else {
            finish()
        }
    }

    override fun onRequestPermissionsResult(
        requestCode: Int,
        permissions: Array<out String>,
        grantResults: IntArray
    ) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults)
        // Delegate to EasyPermissions, which dispatches to the
        // granted/denied callbacks above.
        EasyPermissions.onRequestPermissionsResult(requestCode, permissions, grantResults, this)
    }

    override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
        super.onActivityResult(requestCode, resultCode, data)
        when (requestCode) {
            AppSettingsDialog.DEFAULT_SETTINGS_REQ_CODE -> {
                // Returned from the app settings screen.
                startMainActivity()
            }
        }
    }

    private fun requestPermission() {
        val permissions = arrayOf(
            Manifest.permission.CAMERA,
            Manifest.permission.RECORD_AUDIO,
            Manifest.permission.WRITE_EXTERNAL_STORAGE
        )
        if (EasyPermissions.hasPermissions(this, *permissions)) {
            startMainActivity()
        } else {
            EasyPermissions.requestPermissions(
                PermissionRequest.Builder(this, REQUEST_CODE_CAMERA, *permissions)
                    .setNegativeButtonText(getString(R.string.permission_negative))
                    .setPositiveButtonText(getString(R.string.permission_positive))
                    .setRationale(getString(R.string.request_camera_permission))
                    .build()
            )
        }
    }

    private fun startMainActivity() {
        startActivity(Intent(this, MainActivity::class.java))
        finish()
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/app/MediaApplication.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.app
2 |
3 | import android.annotation.SuppressLint
4 | import android.app.Application
5 | import android.content.Context
6 |
7 | /**
8 | * @Desc: App
9 | * @Author: jzman
10 | */
/**
 * @Desc: Application subclass exposing a process-wide [Context].
 * @Author: jzman
 */
class MediaApplication : Application() {

    companion object {
        // Application context; holding it for the process lifetime is safe.
        @SuppressLint("StaticFieldLeak")
        lateinit var context: Context
    }

    override fun onCreate() {
        super.onCreate()
        context = this
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/others/Encode.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.others
2 |
3 | import android.content.Context
4 | import android.media.*
5 | import android.util.Log
6 | import java.util.concurrent.LinkedBlockingQueue
7 | import kotlin.properties.Delegates
8 |
9 | /**
10 | * @Desc: EncodeManager
11 | * @Author: jzman
12 | */
/**
 * @Desc: Simple video encode pipeline — frames are queued by the camera
 * callback ([offer]) and drained by a worker thread that feeds MediaCodec
 * and writes the encoded stream into an MP4 file via MediaMuxer.
 * @Author: jzman
 */
object Encode {

    private const val TAG = "EncodeManager"
    private const val MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC
    private const val COLOR_FORMAT = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible

    /** Timeout (microseconds) for dequeueing codec buffers. */
    private const val TIMES_OUT = 10000L

    private lateinit var mMediaCodec: MediaCodec
    // Muxer track index; assigned on INFO_OUTPUT_FORMAT_CHANGED.
    private var mTrackIndex by Delegates.notNull<Int>()
    private var isEncodeStart = false
    private var isStop = false
    private lateinit var mMediaMuxer: MediaMuxer
    private lateinit var mVideoEncodeThread: Thread

    // Pending camera frames waiting to be encoded.
    private var mQuene: LinkedBlockingQueue<FrameData> = LinkedBlockingQueue()

    fun init(context: Context) {
        mMediaMuxer = MediaMuxer(
            "${context.filesDir}/test.mp4",
            MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4
        )
    }

    /**
     * Configure and start the H.264 encoder.
     */
    fun startEncode() {
        Log.i(TAG, "startEncode start")
        try {
            val codecInfo = matchSupportCodec(MIME_TYPE)
                ?: error("${MediaFormat.MIMETYPE_VIDEO_AVC} not support")
            // Create the MediaCodec encoder.
            // mMediaCodec = MediaCodec.createByCodecName(codecInfo.name)
            mMediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC)
            // Encoding format.
            val mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, 1080, 1920)
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, COLOR_FORMAT) // color sampling format
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 3000000) // bit rate
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30) // frame rate
            mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1) // I-frame interval
            mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
            mMediaCodec.start()
            isEncodeStart = true
        } catch (e: Exception) {
            Log.i(TAG, "startEncode fail:${e.message} ")
            e.printStackTrace()
        }
    }

    /** Spawn the worker thread that runs the encode loop. */
    fun start() {
        mVideoEncodeThread = Thread(VideoEncodeRunnable())
        mVideoEncodeThread.start()
    }

    /**
     * Stop encoding and release the codec and the muxer.
     */
    fun stop() {
        try {
            isEncodeStart = false
            isStop = true
            mMediaCodec.stop()
            mMediaCodec.release()
            mMediaMuxer.stop()
            mMediaMuxer.release()
        } catch (e: Exception) {
            e.printStackTrace()
            Log.i(TAG, "codec stop fail:${e.message}")
        }
    }

    /**
     * Queue a new raw frame for encoding.
     */
    fun offer(byteArray: ByteArray) {
        val temp = mQuene.offer(FrameData(byteArray, System.nanoTime()))
        Log.i(TAG, "offer return:$temp")
    }

    /**
     * Take the next pending frame, or null when the queue is empty.
     */
    fun poll(): FrameData? {
        return mQuene.poll()
    }

    /**
     * Find a codec on this device that can ENCODE the given mime type.
     * Fix: the original skipped encoders (`if (codeInfo.isEncoder)
     * continue`), so an encoder lookup always returned null; the
     * condition is inverted here.
     */
    private fun matchSupportCodec(mimeType: String): MediaCodecInfo? {
        val mediaCodecList = MediaCodecList(MediaCodecList.REGULAR_CODECS)
        val codeInfos = mediaCodecList.codecInfos
        for (codeInfo in codeInfos) {
            if (!codeInfo.isEncoder) continue
            val types = codeInfo.supportedTypes
            for (type in types) {
                if (type.equals(mimeType, true)) {
                    return codeInfo
                }
            }
        }
        return null
    }

    /**
     * Encode worker: drains the frame queue into the codec's input
     * buffers and muxes the encoded output.
     */
    private class VideoEncodeRunnable : Runnable {
        override fun run() {
            if (!isEncodeStart) {
                Thread.sleep(200)
                startEncode()
            }

            // Consume the frame data produced by ImageReader.
            while (!isStop && isEncodeStart) {
                val data = poll()
                if (data != null) {
                    // Index of a free input buffer, or -1 on timeout.
                    val inputBufferId = mMediaCodec.dequeueInputBuffer(TIMES_OUT)
                    Log.i(TAG, "VideoEncodeRunnable > inputBufferId:$inputBufferId ")
                    if (inputBufferId >= 0) {
                        // Fill the empty input buffer with the raw frame.
                        val inputBuffer = mMediaCodec.getInputBuffer(inputBufferId)
                        val buffer = data.buffer
                        if (inputBuffer != null) {
                            inputBuffer.clear()
                            inputBuffer.put(buffer)
                        }
                        // Flags are 0 for raw encoder input; the original
                        // passed BUFFER_FLAG_KEY_FRAME, which only has
                        // meaning on encoded (decoder-input) buffers.
                        mMediaCodec.queueInputBuffer(
                            inputBufferId,
                            0,
                            buffer.size,
                            System.nanoTime() / 1000,
                            0
                        )
                    }
                }
                val bufferInfo = MediaCodec.BufferInfo()
                // Index of an encoded output buffer, or a status code.
                val outputBufferId = mMediaCodec.dequeueOutputBuffer(bufferInfo, TIMES_OUT)
                Log.i(TAG, "VideoEncodeRunnable > outputBufferId:$outputBufferId ")
                if (outputBufferId >= 0) {
                    val outputBuffer = mMediaCodec.getOutputBuffer(outputBufferId)
                    outputBuffer?.position(bufferInfo.offset)
                    outputBuffer?.limit(bufferInfo.offset + bufferInfo.size)
                    // Write the encoded sample into the MP4 muxer.
                    if (outputBuffer != null && bufferInfo.size != 0) {
                        mMediaMuxer.writeSampleData(mTrackIndex, outputBuffer, bufferInfo)
                        Log.i(
                            TAG,
                            "VideoEncodeRunnable > writeSampleData: size:${bufferInfo.size} "
                        )
                    }
                    // Return the output buffer to the codec.
                    mMediaCodec.releaseOutputBuffer(outputBufferId, false)
                } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // Actual output format is known; add the track and
                    // start muxing.
                    Log.i(TAG, "VideoEncodeRunnable > format_change ")
                    val mediaFormat = mMediaCodec.outputFormat
                    mTrackIndex = mMediaMuxer.addTrack(mediaFormat)
                    mMediaMuxer.start()
                }
            }
        }
    }

    /**
     * One raw frame from ImageReader plus its capture timestamp (ns).
     */
    class FrameData(var buffer: ByteArray, var timeStamp: Long)
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/async/AsyncActivity.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.async
2 |
3 | import android.annotation.SuppressLint
4 | import android.content.Context
5 | import android.graphics.SurfaceTexture
6 | import android.hardware.camera2.*
7 | import android.hardware.camera2.params.OutputConfiguration
8 | import android.hardware.camera2.params.SessionConfiguration
9 | import android.os.Build
10 | import android.os.Bundle
11 | import android.os.Handler
12 | import android.os.HandlerThread
13 | import android.util.Log
14 | import android.util.Size
15 | import android.view.Surface
16 | import android.view.TextureView
17 | import android.view.View
18 | import androidx.annotation.RequiresApi
19 | import androidx.appcompat.app.AppCompatActivity
20 | import com.google.android.material.snackbar.Snackbar
21 | import com.manu.mediasamples.MainActivity
22 | import com.manu.mediasamples.R
23 | import com.manu.mediasamples.databinding.ActivityCameraBinding
24 | import java.util.concurrent.ExecutorService
25 | import java.util.concurrent.Executors
26 |
27 | /**
28 | * @Desc: Camera2
29 | * @Author: jzman
30 | */
class AsyncActivity : AppCompatActivity(), View.OnClickListener {

    private lateinit var binding: ActivityCameraBinding
    private lateinit var mCameraId: String

    private lateinit var mCaptureRequestBuild: CaptureRequest.Builder
    private lateinit var mExecutor: ExecutorService

    private lateinit var mCameraDevice: CameraDevice
    private lateinit var mCameraCaptureSession: CameraCaptureSession
    private lateinit var mSurfaceTexture: SurfaceTexture
    private lateinit var mSurface: Surface
    private lateinit var previewSize: Size

    /** 相机回调专用线程,避免阻塞主线程 */
    private var mCameraThread = HandlerThread("CameraThread").apply { start() }
    private var mCameraHandler = Handler(mCameraThread.looper)

    /** 是否正在录制 */
    private var isRecordState = false

    /** 相机是否处于已打开状态 */
    private var isCameraState = false

    /**
     * 获取CameraManager
     */
    private val mCameraManager: CameraManager by lazy {
        application.getSystemService(Context.CAMERA_SERVICE) as CameraManager
    }

    /**
     * 获取CameraCharacteristics
     */
    private val mCameraCharacteristics: CameraCharacteristics by lazy {
        mCameraManager.getCameraCharacteristics(mCameraId)
    }

    companion object {
        private const val TAG = "CameraActivity2"
    }

    @RequiresApi(Build.VERSION_CODES.P)
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        binding = ActivityCameraBinding.inflate(layoutInflater)
        setContentView(binding.root)
        binding.btnRecord.setOnClickListener(this)
        binding.btnStop.setOnClickListener(this)
        mCameraId = intent.getStringExtra(MainActivity.CAMERA_ID).toString()
        mExecutor = Executors.newSingleThreadExecutor()
        previewSize = Size(1920, 1080)
        binding.textureView.setAspectRatio(previewSize.width, previewSize.height)
        binding.textureView.surfaceTextureListener = TextureListener()
    }

    @RequiresApi(Build.VERSION_CODES.P)
    override fun onClick(v: View?) {
        when (v?.id) {
            R.id.btnRecord -> startRecord()
            R.id.btnStop -> stop()
        }
    }

    override fun onStop() {
        super.onStop()
        try {
            mCameraDevice.close()
        } catch (exc: Throwable) {
            Log.e(TAG, "Error closing camera", exc)
        }
    }

    override fun onDestroy() {
        super.onDestroy()
        mCameraThread.quitSafely()
        mExecutor.shutdownNow()
    }

    @SuppressLint("MissingPermission", "Recycle")
    private fun initCamera() {
        Log.i(TAG, "initCamera")
        // 打开Camera
        openCamera()
    }

    /**
     * 打开Camera,在[mCameraHandler]所在线程回调打开结果
     */
    @SuppressLint("MissingPermission")
    private fun openCamera() {
        mCameraManager.openCamera(mCameraId, object : CameraDevice.StateCallback() {
            override fun onOpened(camera: CameraDevice) {
                // 设备开启
                Log.i(TAG, "onOpened")
                mCameraDevice = camera
                isCameraState = true
            }

            override fun onDisconnected(camera: CameraDevice) {
                // 设备断开
                Log.i(TAG, "onDisconnected")
                isCameraState = false
                finish()
            }

            override fun onError(camera: CameraDevice, error: Int) {
                // 意外错误
                Log.i(TAG, "onError:$error")
                isCameraState = false
                val msg = when (error) {
                    ERROR_CAMERA_DEVICE -> "Fatal (device)"
                    ERROR_CAMERA_DISABLED -> "Device policy"
                    ERROR_CAMERA_IN_USE -> "Camera in use"
                    ERROR_CAMERA_SERVICE -> "Fatal (service)"
                    ERROR_MAX_CAMERAS_IN_USE -> "Maximum cameras in use"
                    else -> "Unknown"
                }
                val exc = RuntimeException("Camera error: ($error) $msg")
                Log.e(TAG, exc.message, exc)
            }

            override fun onClosed(camera: CameraDevice) {
                super.onClosed(camera)
                // 设备关闭,CameraDevice的close方法触发
                Log.i(TAG, "onClosed")
                isCameraState = false
            }
        }, mCameraHandler)
    }

    /**
     * 开启录制:创建包含预览Surface与编码器输入Surface的会话并下发重复请求
     */
    @RequiresApi(Build.VERSION_CODES.P)
    private fun startRecord() {
        AsyncEncodeManager.init(previewSize.width, previewSize.height)
        if (!isCameraState) {
            Snackbar.make(
                binding.container,
                getString(R.string.camera_error),
                Snackbar.LENGTH_LONG
            ).show()
            return
        }

        Snackbar.make(
            binding.container,
            getString(if (isRecordState) R.string.record_now else R.string.record_start),
            Snackbar.LENGTH_LONG
        ).show()
        if (isRecordState) return

        mSurfaceTexture = binding.textureView.surfaceTexture!!
        mSurface = Surface(mSurfaceTexture)

        mSurfaceTexture.setDefaultBufferSize(previewSize.width, previewSize.height)

        // 添加预览的Surface和作为输入的Surface
        mCaptureRequestBuild = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)
        mCaptureRequestBuild.addTarget(mSurface)
        mCaptureRequestBuild.addTarget(AsyncEncodeManager.getSurface())

        // 显式指定元素类型,空的mutableListOf()无法推断类型,编译不通过
        val outputs = mutableListOf<OutputConfiguration>()
        outputs.add(OutputConfiguration(mSurface))
        outputs.add(OutputConfiguration(AsyncEncodeManager.getSurface()))
        val sessionConfiguration = SessionConfiguration(
            SessionConfiguration.SESSION_REGULAR,
            outputs, mExecutor, object : CameraCaptureSession.StateCallback() {

                override fun onActive(session: CameraCaptureSession) {
                    super.onActive(session)
                    // 会话主动处理Capture Request
                    Log.i(TAG, "onActive")
                }

                override fun onReady(session: CameraCaptureSession) {
                    super.onReady(session)
                    // 每次会话没有更多的Capture Request时调用
                    // Camera完成自身配置没有Capture Request提交至会话也会调用
                    // 会话完成所有的Capture Request会回调
                    Log.i(TAG, "onReady")
                }

                override fun onConfigureFailed(session: CameraCaptureSession) {
                    val exc = RuntimeException("Camera $mCameraId session configuration failed")
                    Log.e(TAG, exc.message, exc)
                }

                override fun onConfigured(session: CameraCaptureSession) {
                    // Camera完成自身配置,会话开始处理请求
                    // Capture Request已经在会话中排队,则立即调用onActive
                    // 没有提交Capture Request则调用onReady
                    Log.i(TAG, "onConfigured")
                    mCameraCaptureSession = session

                    // 视频稳定功能是否激活,使用具名常量代替魔法值1
                    mCaptureRequestBuild.set(
                        CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                        CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON
                    )
                    // 发送CaptureRequest
                    mCameraCaptureSession.setRepeatingRequest(
                        mCaptureRequestBuild.build(),
                        null,
                        mCameraHandler
                    )
                    // 开始编码
                    AsyncEncodeManager.startEncode()
                    isRecordState = true
                }
            })
        mCameraDevice.createCaptureSession(sessionConfiguration)
    }

    /**
     * 关闭CaptureSession
     */
    private fun closeCaptureSession() {
        mCameraCaptureSession.stopRepeating()
        mCameraCaptureSession.close()
    }

    /** 停止录制:结束编码并关闭会话 */
    private fun stop() {
        Snackbar
            .make(
                binding.container,
                getString(if (isRecordState) R.string.record_end else R.string.record_none),
                Snackbar.LENGTH_LONG
            ).show()
        if (!isRecordState) return
        AsyncEncodeManager.stopEncode()
        closeCaptureSession()
        isRecordState = false
    }

    /**
     * TextureView关联的surfaceTexture可用时通知的回调
     */
    private inner class TextureListener : TextureView.SurfaceTextureListener {
        override fun onSurfaceTextureSizeChanged(surface: SurfaceTexture, width: Int, height: Int) {
            // surfaceTexture的缓冲区大小变化时调用
            Log.i(TAG, "onSurfaceTextureSizeChanged")
        }

        override fun onSurfaceTextureUpdated(surface: SurfaceTexture) {
            // surfaceTexture的updateTextImage更新指定的surfaceTexture时调用
            Log.i(TAG, "onSurfaceTextureUpdated")
        }

        override fun onSurfaceTextureDestroyed(surface: SurfaceTexture): Boolean {
            // surfaceTexture销毁的时候调用
            // 返回true表示surfaceTexture将不进行渲染,false表示需调用surfaceTexture的release方法进行destroy
            Log.i(TAG, "onSurfaceTextureDestroyed")
            return true
        }

        override fun onSurfaceTextureAvailable(surface: SurfaceTexture, width: Int, height: Int) {
            // surfaceTexture可用的时候调用
            Log.i(TAG, "onSurfaceTextureAvailable")

            // 获取合适的预览大小
            // previewSize = getPreviewOutputSize(
            //     binding.textureView.display,
            //     mCameraCharacteristics,
            //     mSurfaceTexture::class.java
            // )

            initCamera()
        }
    }
}
300 |
301 |
302 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/async/AsyncEncodeManager.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.async
2 |
3 | import android.media.MediaCodec
4 | import android.media.MediaCodecInfo
5 | import android.media.MediaFormat
6 | import android.media.MediaMuxer
7 | import android.os.Build
8 | import android.util.Log
9 | import android.view.Surface
10 | import com.manu.mediasamples.app.MediaApplication
11 | import java.io.IOException
12 | import kotlin.properties.Delegates
13 |
14 | /**
15 | * @Desc:AsyncEncodeManager
16 | * @Author: jzman
17 | */
object AsyncEncodeManager : MediaCodec.Callback() {
    private const val TAG = "AsyncEncodeManager"
    private const val MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC
    private const val COLOR_FORMAT_SURFACE =
        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
    private lateinit var mMediaCodec: MediaCodec
    private lateinit var mMediaMuxer: MediaMuxer

    /** 用作数据流输入的Surface */
    private lateinit var mSurface: Surface

    /** 第一帧的pts,用于把输出时间戳归一化为从0开始 */
    private var pts: Long = 0

    /** Muxer是否已启动,addTrack/start之前不允许writeSampleData */
    private var isMuxerStarted = false

    /** 轨道索引,Delegates.notNull必须带类型参数才能编译 */
    private var mTrackIndex by Delegates.notNull<Int>()

    /**
     * 初始化编码器与复用器,重复调用时重置状态以支持再次录制
     */
    fun init(width: Int, height: Int) {
        Log.d(TAG, "init")
        pts = 0
        isMuxerStarted = false
        initCodec(width, height)
        initMuxer()
    }

    /**
     * 获取用作输入流输入的Surface
     */
    fun getSurface(): Surface {
        return mSurface
    }

    /**
     * 开始编码
     */
    fun startEncode() {
        Log.d(TAG, "startEncode")
        mMediaCodec.start()
    }

    /**
     * 结束编码并释放编码器/复用器资源;
     * Muxer未启动(从未收到输出格式)时跳过stop,避免IllegalStateException
     */
    fun stopEncode() {
        Log.d(TAG, "stopEncode")
        mMediaCodec.stop()
        mMediaCodec.release()
        if (isMuxerStarted) {
            mMediaMuxer.stop()
            mMediaMuxer.release()
            isMuxerStarted = false
        }
    }

    /**
     * 初始化MediaCodec
     */
    private fun initCodec(width: Int, height: Int) {
        Log.d(TAG, "initCodec start")
        try {
            // 创建MediaCodec
            mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE)
            // 参数设置
            val mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height)
            mediaFormat.setInteger(
                MediaFormat.KEY_COLOR_FORMAT,
                COLOR_FORMAT_SURFACE
            ) // 颜色采样格式
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 4) // 比特率
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30) // 帧率
            mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1) // I帧间隔

            mediaFormat.setInteger(
                MediaFormat.KEY_PROFILE,
                MediaCodecInfo.CodecProfileLevel.AVCProfileHigh
            )
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                mediaFormat.setInteger(
                    MediaFormat.KEY_LEVEL,
                    MediaCodecInfo.CodecProfileLevel.AVCLevel31
                )
            }
            // 设置Callback
            mMediaCodec.setCallback(this)
            // 配置状态
            mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
            // 创建Surface作为MediaCodec的输入,createInputSurface只能在configure与start之间调用创建Surface
            mSurface = mMediaCodec.createInputSurface()
        } catch (e: Exception) {
            Log.i(TAG, "initCodec fail:${e.message} ")
            e.printStackTrace()
        }
    }

    /**
     * 初始化MediaMuxer
     */
    private fun initMuxer() {
        Log.d(TAG, "initMuxer start")
        try {
            val path = "${MediaApplication.context.filesDir}/test.mp4"
            mMediaMuxer = MediaMuxer(path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
        } catch (e: IOException) {
            Log.e(
                TAG,
                "initMuxer fail: ${e.message}"
            )
        }
    }

    override fun onOutputBufferAvailable(
        codec: MediaCodec,
        index: Int,
        info: MediaCodec.BufferInfo
    ) {
        Log.d(TAG,"onOutputBufferAvailable index:$index, info->offset:${info.offset},size:${info.size},pts:${info.presentationTimeUs/1000000}")
        val outputBuffer = codec.getOutputBuffer(index) ?: return
        // 有效数据区间是[offset, offset + size),原来的limit(info.size)会截掉数据
        outputBuffer.position(info.offset)
        outputBuffer.limit(info.offset + info.size)
        // 记录第一帧的pts并归一化,原实现从未给pts赋值,减0是无效操作
        if (pts == 0L) {
            pts = info.presentationTimeUs
        }
        info.presentationTimeUs = info.presentationTimeUs - pts
        // 编码器配置数据(csd)已随addTrack写入,这里跳过;Muxer未启动时写入会抛异常
        if (isMuxerStarted && info.size > 0 &&
            (info.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0
        ) {
            mMediaMuxer.writeSampleData(mTrackIndex, outputBuffer, info)
        }
        codec.releaseOutputBuffer(index, false)
    }

    override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
        // 输入来自Surface,无需手动填充输入缓冲区
        Log.d(TAG,"onInputBufferAvailable index:$index")
    }

    override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
        Log.d(TAG,"onOutputFormatChanged format:${format}")
        mTrackIndex = mMediaMuxer.addTrack(format)
        mMediaMuxer.start()
        isMuxerStarted = true
    }

    override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
        Log.d(TAG,"onError e:${e.message}")
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/audio/AudioTrackActivity.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.audio
2 |
3 | import android.media.*
4 | import android.os.Bundle
5 | import android.os.Environment
6 | import android.util.Log
7 | import android.view.View
8 | import androidx.appcompat.app.AppCompatActivity
9 | import com.manu.mediasamples.R
10 | import com.manu.mediasamples.databinding.ActivityTrackAudioBinding
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.MainScope
import kotlinx.coroutines.cancel
import kotlinx.coroutines.launch
14 | import java.io.File
15 | import java.io.FileInputStream
16 | import kotlin.properties.Delegates
17 |
18 | /**
19 | * AudioTrack
20 | */
class AudioTrackActivity : AppCompatActivity(), View.OnClickListener {

    companion object {
        const val TAG = "AudioTrackActivity"

        /**
         * 音频格式。原实现用8BIT计算最小缓冲区却用16BIT构建AudioTrack,
         * 两处必须一致,否则缓冲区只有所需的一半;此处统一为16BIT
         * (assumes test.pcm为16位小端PCM — TODO confirm)
         */
        const val AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT

        /** 采样率 */
        const val SAMPLE_RATE = 8000

        /** 声道配置 */
        const val CHANNEL_CONFIG = AudioFormat.CHANNEL_OUT_MONO
    }

    private lateinit var binding: ActivityTrackAudioBinding
    private val scope = MainScope()

    private lateinit var audioTrack: AudioTrack
    private lateinit var attributes: AudioAttributes
    private lateinit var audioFormat: AudioFormat
    // Delegates.notNull必须带类型参数才能编译
    private var bufferSize by Delegates.notNull<Int>()
    private lateinit var pcmFilePath: String

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        binding = ActivityTrackAudioBinding.inflate(layoutInflater)
        setContentView(binding.root)

        binding.btnPlay.setOnClickListener(this)
        binding.btnStop.setOnClickListener(this)
        binding.btnPause.setOnClickListener(this)

        pcmFilePath = Environment.getExternalStorageDirectory()
            .path + File.separator + "test.pcm"
        Log.i(TAG, "pcmFilePath:$pcmFilePath")
        initAudioTrack()
    }

    override fun onClick(v: View?) {
        when (v?.id) {
            R.id.btnPlay -> start()
            R.id.btnStop -> audioTrack.stop()
            R.id.btnPause -> audioTrack.pause()
        }
    }

    override fun onDestroy() {
        super.onDestroy()
        // 取消仍在写入的协程,避免向已release的AudioTrack写数据
        scope.cancel()
        audioTrack.release()
    }

    /** 开始播放并在后台协程中持续写入PCM数据 */
    private fun start() {
        audioTrack.play()
        writeAudioData()
    }

    /** 以与getMinBufferSize相同的一组参数构建AudioTrack */
    private fun initAudioTrack() {
        bufferSize = AudioTrack
            .getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT)
        attributes = AudioAttributes.Builder()
            .setUsage(AudioAttributes.USAGE_MEDIA) // 设置音频的用途
            .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC) // 设置音频的内容类型
            .build()
        audioFormat = AudioFormat.Builder()
            .setSampleRate(SAMPLE_RATE)
            .setChannelMask(CHANNEL_CONFIG)
            .setEncoding(AUDIO_FORMAT)
            .build()
        audioTrack = AudioTrack(
            attributes, audioFormat, bufferSize,
            AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE
        )
    }

    /** 在IO线程循环读取PCM文件并写入AudioTrack;use确保输入流总会被关闭 */
    private fun writeAudioData() {
        scope.launch(Dispatchers.IO) {
            FileInputStream(File(pcmFilePath)).use { ins ->
                val bytes = ByteArray(bufferSize)
                var len: Int
                while (ins.read(bytes).also { len = it } > 0) {
                    audioTrack.write(bytes, 0, len)
                }
            }
            audioTrack.stop()
        }
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/frame/AsyncInputEncodeManager.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.frame
2 |
3 | import android.media.MediaCodec
4 | import android.media.MediaCodecInfo
5 | import android.media.MediaFormat
6 | import android.media.MediaMuxer
7 | import android.os.Build
8 | import android.util.Log
9 | import com.manu.mediasamples.app.MediaApplication
10 | import java.io.IOException
11 | import java.util.concurrent.LinkedBlockingQueue
12 | import kotlin.properties.Delegates
13 |
14 |
15 | /**
16 | * @Desc:AsyncInputEncodeManager
17 | * @Author: jzman
18 | */
object AsyncInputEncodeManager : MediaCodec.Callback() {
    private const val TAG = "AsyncInputEncodeManager"
    private const val MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC
    private lateinit var mMediaCodec: MediaCodec
    private lateinit var mMediaMuxer: MediaMuxer

    /** 第一帧的pts,用于把输出时间戳归一化为从0开始 */
    private var pts: Long = 0

    /** Muxer是否已启动(onOutputFormatChanged后才允许writeSampleData) */
    private var isMuxer = false
    private var isStop = false

    /** 轨道索引,Delegates.notNull必须带类型参数才能编译 */
    private var mTrackIndex by Delegates.notNull<Int>()

    /** 待编码帧队列,LinkedBlockingQueue必须带元素类型才能编译 */
    private var mQuene: LinkedBlockingQueue<FrameData> = LinkedBlockingQueue()

    /**
     * 初始化编码器与复用器
     */
    fun init(width: Int, height: Int) {
        Log.d(TAG, "init")
        pts = 0
        initCodec(width, height)
        initMuxer()
    }

    /**
     * 开始编码
     */
    fun startEncode() {
        Log.d(TAG, "startEncode")
        mMediaCodec.start()
        isStop = false
    }

    /**
     * 结束编码并释放资源;Muxer未启动时跳过stop,避免IllegalStateException
     */
    fun stopEncode() {
        Log.d(TAG, "stopEncode")
        mMediaCodec.stop()
        mMediaCodec.release()
        if (isMuxer) {
            mMediaMuxer.stop()
            mMediaMuxer.release()
            isMuxer = false
        }
        isStop = true
    }

    /**
     * 添加新帧(非阻塞,队列满时丢帧并返回false)
     */
    fun offer(byteArray: ByteArray, timeStamp: Long) {
        val temp = mQuene.offer(FrameData(byteArray, timeStamp))
        Log.i(TAG, "offer return:$temp")
    }

    /**
     * 取出新帧,队列为空时返回null
     */
    private fun poll(): FrameData? {
        return mQuene.poll()
    }

    /** 当前是否已停止编码 */
    fun isStop(): Boolean {
        return isStop
    }

    /**
     * 初始化MediaCodec
     */
    private fun initCodec(width: Int, height: Int) {
        Log.d(TAG, "initCodec start")
        try {
            // 创建MediaCodec
            mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE)
            // 参数设置:使用调用方传入的宽高,原实现硬编码为1920x1080
            val mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height)
            mediaFormat.setInteger(
                MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible
            ) // 颜色采样格式
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2000000) // 比特率
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15) // 帧率
            mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10) // I帧间隔

            mediaFormat.setInteger(
                MediaFormat.KEY_PROFILE,
                MediaCodecInfo.CodecProfileLevel.AVCProfileHigh
            )
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                mediaFormat.setInteger(
                    MediaFormat.KEY_LEVEL,
                    MediaCodecInfo.CodecProfileLevel.AVCLevel31
                )
            }
            // 设置Callback
            mMediaCodec.setCallback(this)
            // 配置状态
            mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
        } catch (e: Exception) {
            Log.i(TAG, "initCodec fail:${e.message} ")
            e.printStackTrace()
        }
    }

    /**
     * 初始化MediaMuxer
     */
    private fun initMuxer() {
        Log.d(TAG, "initMuxer start")
        try {
            val path = "${MediaApplication.context.filesDir}/test.mp4"
            mMediaMuxer = MediaMuxer(path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
        } catch (e: IOException) {
            Log.e(
                TAG,
                "initMuxer fail: ${e.message}"
            )
        }
    }

    override fun onOutputBufferAvailable(
        codec: MediaCodec,
        index: Int,
        info: MediaCodec.BufferInfo
    ) {
        Log.d(TAG,"onOutputBufferAvailable index:$index, info->offset:${info.offset},size:${info.size},pts:${info.presentationTimeUs/1000000}")
        val outputBuffer = codec.getOutputBuffer(index) ?: return
        // 有效数据区间是[offset, offset + size),原来的limit(info.size)会截掉数据
        outputBuffer.position(info.offset)
        outputBuffer.limit(info.offset + info.size)
        // 记录第一帧的pts并归一化,原实现从未给pts赋值,减0是无效操作
        if (pts == 0L) {
            pts = info.presentationTimeUs
        }
        info.presentationTimeUs = info.presentationTimeUs - pts
        // Muxer未启动或csd配置帧时不写入,否则writeSampleData会抛异常
        if (isMuxer && info.size > 0 &&
            (info.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0
        ) {
            mMediaMuxer.writeSampleData(mTrackIndex, outputBuffer, info)
        }
        codec.releaseOutputBuffer(index, false)
    }

    override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
        Log.d(TAG,"onInputBufferAvailable index:$index")

        // 获取空的缓冲区
        val inputBuffer = codec.getInputBuffer(index) ?: return

        val data = poll()
        try {
            if (data != null) {
                inputBuffer.clear()
                inputBuffer.put(data.buffer)
                // NOTE(review): 这里沿用系统时钟作为pts,忽略了data.timeStamp;
                // 若其为相机时间戳(纳秒)应改用data.timeStamp / 1000 — 待确认单位
                codec.queueInputBuffer(
                    index,
                    0,
                    data.buffer.size,
                    System.nanoTime() / 1000,
                    0
                )
            } else {
                // 队列为空视作流结束:必须携带EOS标志,原实现传0不会结束码流
                codec.queueInputBuffer(
                    index,
                    0, 0, 0,
                    MediaCodec.BUFFER_FLAG_END_OF_STREAM
                )
            }
        } catch (e: Exception) {
            Log.e(TAG, "queueInputBuffer error:${e.message}", e)
            inputBuffer.clear()
        }
    }

    override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
        Log.d(TAG,"onOutputFormatChanged format:${format}")
        mTrackIndex = mMediaMuxer.addTrack(format)
        mMediaMuxer.start()
        isMuxer = true
    }

    override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
        Log.d(TAG,"onError e:${e.message}")
    }

    /**
     * ImageReader获取的帧数据的封装
     */
    class FrameData(var buffer: ByteArray, var timeStamp: Long)
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/frame/IMediaNative.java:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.frame;
2 |
3 | import java.nio.ByteBuffer;
4 |
/**
 * JNI bridge to the native YUV conversion routine
 * (implemented in cpp/native-yuv-to-buffer-lib.cpp).
 */
class IMediaNative {
    /**
     * Packs the three planes of a YUV image into a single byte array in native code.
     *
     * @param y            direct buffer holding the Y plane
     * @param u            direct buffer holding the U plane
     * @param v            direct buffer holding the V plane
     * @param yPixelStride distance in bytes between adjacent pixels in the Y plane
     * @param yRowStride   distance in bytes between adjacent rows in the Y plane
     * @param uPixelStride distance in bytes between adjacent pixels in the U plane
     * @param uRowStride   distance in bytes between adjacent rows in the U plane
     * @param vPixelStride distance in bytes between adjacent pixels in the V plane
     * @param vRowStride   distance in bytes between adjacent rows in the V plane
     * @param imgWidth     image width in pixels
     * @param imgHeight    image height in pixels
     * @return the packed frame bytes
     *         (layout defined by the native implementation — see the .cpp file)
     */
    public native byte[] yuvToBuffer(ByteBuffer y, ByteBuffer u, ByteBuffer v, int yPixelStride, int yRowStride,
            int uPixelStride, int uRowStride, int vPixelStride, int vRowStride, int imgWidth, int imgHeight);
}
9 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/opengl/Config.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.opengl
2 |
3 | /**
4 | * @Desc: Config
5 | * @Author: jzman
6 | * @Date: 2021/8/13.
7 | */
object Config {
    // 渲染配置开关:为false时按视频宽高比计算投影矩阵(见PlayRenderer.onVideoSizeChanged),
    // 为true时跳过矩阵计算,使用默认矩阵渲染
    const val DEFAULT = false
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/opengl/IRender.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.opengl
2 |
3 | import android.view.Surface
4 |
5 | /**
6 | * @Desc:
7 | * @Author: jzman
8 | * @Date: 2021/8/5.
9 | */
interface IRender {
    /** 执行一帧绘制,mvpMatrix为投影与视图变换的组合矩阵 */
    fun draw(mvpMatrix: FloatArray)

    /** 返回供MediaPlayer输出视频帧的Surface */
    fun getSurface(): Surface

    /** 设置外部创建的OpenGL纹理ID */
    fun setTextureID(id: Int)
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/opengl/OpenGLActivity.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.opengl
2 |
3 | import android.media.MediaPlayer
4 | import android.opengl.GLSurfaceView
5 | import androidx.appcompat.app.AppCompatActivity
6 | import android.os.Bundle
7 | import android.view.View
8 | import android.view.Window
9 | import android.view.WindowManager
10 | import com.manu.mediasamples.databinding.ActivityOpenGLBinding
11 | import com.manu.mediasamples.util.L
12 |
13 | /**
14 | * openGL渲染视频
15 | */
class OpenGLActivity : AppCompatActivity() {

    companion object {
        const val TAG = "OpenGLActivity"
    }

    private lateinit var binding: ActivityOpenGLBinding
    private lateinit var playRenderer: PlayRenderer

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // 隐藏标题栏并全屏显示
        requestWindowFeature(Window.FEATURE_NO_TITLE)
        window.setFlags(
            WindowManager.LayoutParams.FLAG_FULLSCREEN,
            WindowManager.LayoutParams.FLAG_FULLSCREEN
        )
        binding = ActivityOpenGLBinding.inflate(layoutInflater)
        setContentView(binding.root)
        // 配置GLSurfaceView:OpenGL ES 2.0 + 按需渲染(由渲染器在新帧到达时触发)
        binding.glSurfaceView.run {
            setEGLContextClientVersion(2)
            playRenderer = PlayRenderer(this@OpenGLActivity, this)
            setRenderer(playRenderer)
            renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY
        }
    }

    override fun onDestroy() {
        // 先释放播放器资源再走Activity销毁流程
        playRenderer.destroy()
        super.onDestroy()
    }

    /** 布局中按钮的点击入口:停止播放 */
    fun test(view: View) {
        playRenderer.stop()
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/opengl/PlayRenderer.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.opengl
2 |
3 | import android.content.Context
4 | import android.media.MediaPlayer
5 | import android.opengl.GLES20
6 | import android.opengl.GLSurfaceView
7 | import android.opengl.Matrix
8 | import android.os.Environment
9 | import com.manu.mediasamples.util.L
10 | import com.manu.mediasamples.util.TextureHelper
11 | import javax.microedition.khronos.egl.EGLConfig
12 | import javax.microedition.khronos.opengles.GL10
13 |
14 | /**
15 | * @Desc: PlayRenderer
16 | * @Author: jzman
17 | * @Date: 2021/8/2.
18 | */
class PlayRenderer(
    private var context: Context,
    private var glSurfaceView: GLSurfaceView
) : GLSurfaceView.Renderer,
    VideoRender.OnNotifyFrameUpdateListener, MediaPlayer.OnPreparedListener,
    MediaPlayer.OnVideoSizeChangedListener, MediaPlayer.OnCompletionListener,
    MediaPlayer.OnErrorListener {
    companion object {
        private const val TAG = "PlayRenderer"
    }

    private lateinit var videoRender: VideoRender
    private lateinit var mediaPlayer: MediaPlayer

    /** 投影矩阵 */
    private val projectionMatrix = FloatArray(16)

    /** 视图(相机)矩阵 */
    private val viewMatrix = FloatArray(16)

    /** 投影矩阵与视图矩阵的乘积,传给VideoRender绘制 */
    private val vPMatrix = FloatArray(16)

    private var screenWidth: Int = -1
    private var screenHeight: Int = -1
    private var videoWidth: Int = -1
    private var videoHeight: Int = -1

    override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
        L.i(TAG, "onSurfaceCreated")
        GLES20.glClearColor(0f, 0f, 0f, 0f)
        videoRender = VideoRender(context)
        videoRender.setTextureID(TextureHelper.createTextureId())
        videoRender.onNotifyFrameUpdateListener = this
        initMediaPlayer()
    }

    override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
        L.i(TAG, "onSurfaceChanged > width:$width,height:$height")
        screenWidth = width
        screenHeight = height
        GLES20.glViewport(0, 0, width, height)
    }

    override fun onDrawFrame(gl: GL10) {
        L.i(TAG, "onDrawFrame")
        gl.glClear(GL10.GL_COLOR_BUFFER_BIT or GL10.GL_DEPTH_BUFFER_BIT)
        videoRender.draw(vPMatrix)
    }

    // 以下MediaPlayer回调统一使用本类TAG打日志(原实现误用了OpenGLActivity.TAG)
    override fun onPrepared(mp: MediaPlayer?) {
        L.i(TAG, "onPrepared")
        mediaPlayer.start()
    }

    override fun onVideoSizeChanged(mp: MediaPlayer?, width: Int, height: Int) {
        L.i(TAG, "onVideoSizeChanged > width:$width ,height:$height")
        this.videoWidth = width
        this.videoHeight = height
        if (!Config.DEFAULT) initMatrix()
    }

    override fun onCompletion(mp: MediaPlayer?) {
        L.i(TAG, "onCompletion")
    }

    override fun onError(mp: MediaPlayer?, what: Int, extra: Int): Boolean {
        L.i(TAG, "error > what:$what,extra:$extra")
        return true
    }

    /** 创建MediaPlayer,绑定各回调并异步准备网络视频源 */
    private fun initMediaPlayer() {
        mediaPlayer = MediaPlayer()
        mediaPlayer.setOnPreparedListener(this)
        mediaPlayer.setOnVideoSizeChangedListener(this)
        mediaPlayer.setOnCompletionListener(this)
        mediaPlayer.setOnErrorListener(this)
        mediaPlayer.setDataSource("http://vfx.mtime.cn/Video/2019/02/04/mp4/190204084208765161.mp4")
        // mediaPlayer.setDataSource(Environment.getExternalStorageDirectory().absolutePath + "/video.mp4")
        mediaPlayer.setSurface(videoRender.getSurface())
        mediaPlayer.prepareAsync()
    }

    /**
     * 根据屏幕与视频宽高比计算正交投影矩阵,保持视频比例不被拉伸。
     * 原实现的四个分支两两重复,这里合并为两个分支,比较运算保持原样:
     * 横屏(screenWidth >= screenHeight)时用严格大于,竖屏时用大于等于。
     */
    private fun initMatrix() {
        // 设置相机位置(视图矩阵)
        Matrix.setLookAtM(
            viewMatrix, 0,
            0.0f, 0.0f, 5.0f, // 相机位置
            0.0f, 0.0f, 0.0f, // 目标位置
            0.0f, 1.0f, 0.0f // 相机正上方向量
        )
        // 计算视频缩放比例(投影矩阵)
        val screenRatio = screenWidth / screenHeight.toFloat()
        val videoRatio = videoWidth / videoHeight.toFloat()
        val videoWider = if (screenWidth >= screenHeight) {
            videoRatio > screenRatio
        } else {
            videoRatio >= screenRatio
        }
        val ratio: Float
        if (videoWider) {
            ratio = videoRatio / screenRatio
            Matrix.orthoM(
                projectionMatrix, 0,
                -1f, 1f, -ratio, ratio, -1f, 5f
            )
        } else {
            ratio = screenRatio / videoRatio
            Matrix.orthoM(
                projectionMatrix, 0,
                -ratio, ratio, -1f, 1f, -1f, 5f
            )
        }
        // 计算投影和视图变换
        Matrix.multiplyMM(vPMatrix, 0, projectionMatrix, 0, viewMatrix, 0)
        L.i(TAG, "initMatrix > screenRatio:$screenRatio,videoRatio:$videoRatio,ratio:$ratio")
    }

    /** VideoRender有新帧可用时触发一次按需渲染 */
    override fun onNotifyUpdate() {
        glSurfaceView.requestRender()
    }

    /** 释放播放器资源,宿主Activity销毁时调用 */
    fun destroy() {
        mediaPlayer.stop()
        mediaPlayer.release()
    }

    /** 仅停止播放,不释放资源 */
    fun stop() {
        mediaPlayer.stop()
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/opengl/VideoRender.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.opengl
2 |
3 | import android.content.Context
4 | import android.graphics.SurfaceTexture
5 | import android.opengl.GLES20
6 | import android.util.Log
7 | import android.view.Surface
8 | import com.manu.mediasamples.R
9 | import com.manu.mediasamples.util.GLUtil
10 | import com.manu.mediasamples.util.TextureHelper
11 | import java.nio.ByteBuffer
12 | import java.nio.ByteOrder
13 | import kotlin.properties.Delegates
14 |
/**
 * Renders decoded video frames onto a GL surface via an OES external texture.
 *
 * Flow: [setTextureID] binds the OES texture and wraps it in a [SurfaceTexture];
 * [getSurface] exposes that texture as a [Surface] for a player to draw into;
 * each arriving frame fires [onFrameAvailable], which asks the host (through
 * [OnNotifyFrameUpdateListener]) to schedule a render pass ending in [draw].
 *
 * @Author: Administrator
 * @Date: 2021/8/5 14:56.
 */
class VideoRender(private var context: Context) : IRender,SurfaceTexture.OnFrameAvailableListener {
    companion object{
        private const val TAG = "VideoRender"
    }

    // GL handles; -1 means "not yet created / not yet located".
    private var programHandler = -1
    private var vertexPositionHandler = -1
    private var texturePositionHandler = -1
    private var vertexMatrixHandle: Int = -1
    private var textureId:Int = -1;
    private lateinit var surfaceTexture : SurfaceTexture
    // Host callback used to request a new render pass when a frame arrives.
    var onNotifyFrameUpdateListener: OnNotifyFrameUpdateListener? = null

    // Earlier coordinate variant, kept for reference.
//    private val vertexCoordinates = floatArrayOf(
//        1.0f, 1.0f,
//        -1.0f, 1.0f,
//        -1.0f, -1.0f,
//        1.0f, -1.0f
//    )
//
//    // Earlier texture-coordinate variant, kept for reference.
//    private val textureCoordinates = floatArrayOf(
//        1.0f, 0.0f,
//        0.0f, 0.0f,
//        0.0f, 1.0f,
//        1.0f, 1.0f
//    )

    // Vertex coordinates of the full-screen quad, ordered for a triangle strip.
    private val vertexCoordinates = floatArrayOf(
        -1f, -1f,
        1f, -1f,
        -1f, 1f,
        1f, 1f
    )

    // Texture coordinates; V is flipped relative to vertex Y so the image is upright.
    private val textureCoordinates = floatArrayOf(
        0f, 1f,
        1f, 1f,
        0f, 0f,
        1f, 0f
    )

    // Direct, native-byte-order float buffer holding the vertex coordinates.
    private var vertexBuffer =
        ByteBuffer.allocateDirect(vertexCoordinates.size * 4).run {
            this.order(ByteOrder.nativeOrder())
            this.asFloatBuffer().apply {
                put(vertexCoordinates)
                position(0)
            }
        }

    // Direct, native-byte-order float buffer holding the texture coordinates.
    private var textureBuffer =
        ByteBuffer.allocateDirect(textureCoordinates.size * 4).run {
            this.order(ByteOrder.nativeOrder())
            this.asFloatBuffer().apply {
                put(textureCoordinates)
                position(0)
            }
        }

    /**
     * Draws the latest video frame. Must run on the GL thread.
     *
     * @param mvpMatrix combined model-view-projection matrix; it is only
     *        uploaded when Config.DEFAULT is false (the non-default vertex
     *        shader is the one that declares uMVPMatrix).
     */
    override fun draw(mvpMatrix: FloatArray) {
        // Lazily compile/link the shader program on first draw.
        shaderAndProgram()
        // Latch the newest frame from the SurfaceTexture into the OES texture.
        surfaceTexture.updateTexImage()
        // Look up the shader attribute handles.
        vertexPositionHandler = GLES20.glGetAttribLocation(programHandler, "aPosition")
        texturePositionHandler = GLES20.glGetAttribLocation(programHandler, "aCoordinate")
        // Enable both vertex attribute arrays.
        GLES20.glEnableVertexAttribArray(vertexPositionHandler)
        GLES20.glEnableVertexAttribArray(texturePositionHandler)
        // Point the attributes at the coordinate buffers (2 floats per vertex, tightly packed).
        GLES20.glVertexAttribPointer(vertexPositionHandler,2,GLES20.GL_FLOAT,false,0, vertexBuffer)
        GLES20.glVertexAttribPointer(texturePositionHandler,2,GLES20.GL_FLOAT,false,0, textureBuffer)

        if (!Config.DEFAULT){
            // Get the handle to the shape's transformation matrix.
            vertexMatrixHandle = GLES20.glGetUniformLocation(programHandler, "uMVPMatrix")
            // Pass the projection and view transformation to the shader.
            GLES20.glUniformMatrix4fv(vertexMatrixHandle, 1, false, mvpMatrix, 0)
        }
        // Draw the quad as a 4-vertex triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP,0,4)
    }

    // NOTE(review): a new Surface is created on every call — callers should
    // cache the result rather than calling this repeatedly.
    override fun getSurface(): Surface {
        return Surface(surfaceTexture)
    }

    /**
     * Binds [textureId] as the active OES texture and creates the
     * SurfaceTexture that receives frames. Must run (on the GL thread)
     * before [getSurface] or [draw] is used.
     */
    override fun setTextureID(textureId: Int) {
        this.textureId = textureId
        TextureHelper.activeBindOESTexture(textureId)
        surfaceTexture = SurfaceTexture(textureId)
        surfaceTexture.setOnFrameAvailableListener(this)
    }

    // A new frame landed in the SurfaceTexture: forward to the host listener.
    override fun onFrameAvailable(surfaceTexture: SurfaceTexture?) {
        Log.d(TAG,"onFrameAvailable")
        onNotifyFrameUpdateListener?.onNotifyUpdate()
    }

    /**
     * Compiles the vertex/fragment shaders and links them into a GL program on
     * first use (programHandler == -1), then installs it with glUseProgram.
     * The vertex shader variant is chosen by Config.DEFAULT.
     */
    private fun shaderAndProgram(){
        if (programHandler == -1){
            // Read the shader source code from the raw resources.
            val vertexShaderCode = if (Config.DEFAULT){
                GLUtil.readShaderSourceCodeFromRaw(context, R.raw.video_vertex_shader_default)
            } else{
                GLUtil.readShaderSourceCodeFromRaw(context, R.raw.video_vertex_shader)
            }
            val fragmentShaderCode =
                GLUtil.readShaderSourceCodeFromRaw(context, R.raw.video_fragment_shader_default)
            // Fail fast if either shader resource is missing or empty.
            if (vertexShaderCode.isNullOrEmpty() || fragmentShaderCode.isNullOrEmpty()) {
                throw RuntimeException("vertexShaderCode or fragmentShaderCode is null or empty")
            }
            // Compile both shaders.
            val vertexShaderHandler = GLUtil.compileShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode)
            val fragmentShaderHandler =
                GLUtil.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode)
            // Link into a program and make it current.
            programHandler = GLUtil.createAndLinkProgram(vertexShaderHandler, fragmentShaderHandler)
            GLES20.glUseProgram(programHandler)
        }
    }

    /** Callback used to tell the host view that a fresh frame should be rendered. */
    interface OnNotifyFrameUpdateListener{
        fun onNotifyUpdate();
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/record/AudioEncode.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.record
2 |
3 | import android.media.MediaCodec
4 | import android.media.MediaCodecInfo
5 | import android.media.MediaFormat
6 | import android.media.MediaMuxer
7 | import android.widget.Toast
8 | import com.manu.mediasamples.util.L
9 |
/**
 * AAC audio encoder driven by [MediaCodec] asynchronous callbacks.
 *
 * PCM frames are pulled from an [AudioThread] queue in
 * [onInputBufferAvailable]; encoded output is written to the shared
 * [MediaMuxer] in [onOutputBufferAvailable]. The muxer is started only once
 * both the audio and the video track have been added, and it is stopped by
 * whichever of the audio/video encoders stops last (see [stopAudioEncode]).
 *
 * @Author: jzman
 */
object AudioEncode : MediaCodec.Callback() {
    private const val TAG = "AudioEncode"
    private lateinit var mAudioCodec: MediaCodec
    private lateinit var mAudioMuxer: MediaMuxer

    // Timestamp (us) of the most recently queued input buffer.
    private var pts: Long = 0
    // Set once AudioThread delivers its end-of-stream marker frame.
    private var isAudioStreamEnd = false
    private lateinit var mAudioThread: AudioThread

    /**
     * Keep a reference to the shared muxer, configure the codec and create
     * the recording thread. Must be called before [startAudioEncode].
     */
    fun initAudio(muxer: MediaMuxer) {
        L.i(TAG, "initAudio")
        this.mAudioMuxer = muxer
        initAudioCodec()
        mAudioThread = AudioThread()
    }

    /**
     * Start encoding: start the codec, then start capturing PCM data.
     */
    fun startAudioEncode() {
        L.i(TAG, "startEncode > mAudioMuxer:$mAudioMuxer")
        mAudioCodec.start()
        mAudioThread.startRecord()
    }

    /**
     * Stop encoding and release the codec. The muxer is stopped/released only
     * when the video side has already stopped (the last one out closes it).
     */
    fun stopAudioEncode() {
        L.i(TAG, "stopEncode")
        mAudioCodec.stop()
        mAudioCodec.release()
        mAudioThread.stopRecord()
        RecordConfig.isAudioStop = true
        if (RecordConfig.isVideoStop) {
            mAudioMuxer.stop()
            mAudioMuxer.release()
            RecordConfig.isAudioStop = false
        }
    }

    override fun onOutputBufferAvailable(
        codec: MediaCodec,
        index: Int,
        info: MediaCodec.BufferInfo
    ) {
        L.i(
            TAG,
            "onOutputBufferAvailable index:$index, info->offset:${info.offset},size:${info.size}" +
                    ",pts:${info.presentationTimeUs / 1000000} , isMuxerStart:${RecordConfig.isMuxerStart}"
        )
        // If the MediaMuxer has not started yet, just release this output buffer.
        if (!RecordConfig.isMuxerStart) {
            mAudioCodec.releaseOutputBuffer(index, false)
            return
        }
        val outputBuffer = codec.getOutputBuffer(index)
        if (outputBuffer != null && info.size > 0) {
            outputBuffer.position(info.offset)
            // Fix: the limit must be offset + size; using size alone truncates
            // the sample whenever the codec reports a non-zero offset.
            outputBuffer.limit(info.offset + info.size)
            mAudioMuxer.writeSampleData(RecordConfig.audioTrackIndex, outputBuffer, info)
        }
        // Fix: always return the buffer to the codec. The original leaked
        // buffers with size == 0 (and on a null getOutputBuffer), which
        // eventually starves the codec of output buffers.
        mAudioCodec.releaseOutputBuffer(index, false)
    }

    override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
        L.i(
            TAG,
            "onInputBufferAvailable index:$index ,isMuxerStart:${RecordConfig.isMuxerStart}"
        )

        val inputBuffer = codec.getInputBuffer(index)
        val result = mAudioThread.poll()

        // A single-byte frame of -100 is AudioThread's end-of-stream marker.
        if (result != null && result.size == 1 && result[0] == (-100).toByte()) {
            isAudioStreamEnd = true
        }

        L.i(TAG, "result:$result , isAudioStreamEnd:$isAudioStreamEnd")
        if (result != null && !isAudioStreamEnd) {
            val readSize = result.size
            inputBuffer?.clear()
            inputBuffer?.limit(readSize)
            inputBuffer?.put(result, 0, readSize)
            // Wall-clock based timestamp in microseconds.
            pts = System.nanoTime() / 1000
            L.i(
                TAG,
                "pcm一帧时间戳 = ${pts / 1000000.0f}---pts:$pts"
            )
            mAudioCodec.queueInputBuffer(index, 0, readSize, pts, 0)
        }

        // If no PCM data is available we must still queue an (empty) buffer;
        // otherwise, after a few callbacks, the codec runs out of input
        // buffers and the encode task stalls.
        if (result == null && !isAudioStreamEnd) {
            codec.queueInputBuffer(
                index,
                0,
                0,
                0,
                0
            )
        }

        if (isAudioStreamEnd) {
            codec.queueInputBuffer(
                index,
                0,
                0,
                0,
                MediaCodec.BUFFER_FLAG_END_OF_STREAM
            )
        }
    }

    override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
        L.i(TAG, "onOutputFormatChanged format:${format}")
        addAudioTrack(format)
        // Start the muxer only when the video track has been added too.
        if (RecordConfig.videoTrackIndex != -1) {
            mAudioMuxer.start()
            RecordConfig.isMuxerStart = true
            L.i(TAG, "onOutputFormatChanged isMuxerStart:${RecordConfig.isMuxerStart}")
        }
    }

    override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
        L.i(TAG, "onError e:${e.message}")
    }

    /**
     * Create and configure the AAC encoder (44.1 kHz stereo, 96 kbps, AAC-LC)
     * in asynchronous mode; errors are logged rather than rethrown.
     */
    private fun initAudioCodec() {
        L.i(TAG, "init Codec start")
        try {
            val mediaFormat =
                MediaFormat.createAudioFormat(
                    MediaFormat.MIMETYPE_AUDIO_AAC,
                    RecordConfig.SAMPLE_RATE,
                    2
                )
            mAudioCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC)
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 96000)
            mediaFormat.setInteger(
                MediaFormat.KEY_AAC_PROFILE,
                MediaCodecInfo.CodecProfileLevel.AACObjectLC
            )
            mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 8192)
            mAudioCodec.setCallback(this)
            mAudioCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
        } catch (e: Exception) {
            L.i(TAG, "init error:${e.message}")
        }
        L.i(TAG, "init Codec end")
    }

    /** Add the audio track to the muxer and record its index in RecordConfig. */
    private fun addAudioTrack(format: MediaFormat) {
        L.i(TAG, "addAudioTrack format:${format}")
        RecordConfig.audioTrackIndex = mAudioMuxer.addTrack(format)
        RecordConfig.isAddAudioTrack = true
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/record/AudioEncode2.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.record
2 |
3 | import android.media.*
4 | import android.os.Build
5 | import android.util.Log
6 | import com.manu.mediasamples.util.L
7 | import java.nio.ByteBuffer
8 | import java.util.*
9 | import kotlin.properties.Delegates
10 |
/**
 * AAC audio encoder (variant 2) that reads PCM directly from an
 * [AudioRecord] inside [onInputBufferAvailable] instead of going through an
 * [AudioThread] queue.
 *
 * @Author: jzman
 */
object AudioEncode2 : MediaCodec.Callback() {
    private const val TAG = "AudioEncode2"
    /** Record from the main microphone. */
    private const val AUDIO_SOURCE = MediaRecorder.AudioSource.MIC
    /** PCM sample format. */
    private const val AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT
    /** Sample rate in Hz. */
    private const val SAMPLE_RATE = 44100
    /** Channel configuration. */
    private const val CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_STEREO

    // Fix: Delegates.notNull() needs an explicit type argument to compile.
    private var bufferSize by Delegates.notNull<Int>()
    private lateinit var mAudioCodec: MediaCodec
    private lateinit var mAudioMuxer: MediaMuxer
    private lateinit var mAudioRecord: AudioRecord

    // Timestamp (us) of the most recently queued input buffer.
    private var pts: Long = 0
    private lateinit var mAudioThread: AudioThread

    /**
     * Compute the minimum AudioRecord buffer size, keep a reference to the
     * shared muxer and configure the codec.
     */
    fun initAudio(muxer: MediaMuxer) {
        L.i(TAG, "initAudio")
        bufferSize = AudioRecord.getMinBufferSize(
            SAMPLE_RATE,
            CHANNEL_CONFIG,
            AUDIO_FORMAT
        )
        this.mAudioMuxer = muxer
        initAudioCodec()
        mAudioThread = AudioThread()
    }

    /**
     * Start encoding: start the codec, then start the AudioRecord whose data
     * is consumed in onInputBufferAvailable.
     */
    fun startAudioEncode() {
        L.i(TAG, "startEncode")
        mAudioCodec.start()
        // mAudioThread.startRecord()
        startAudioRecord()
    }

    /**
     * Stop encoding and release the resources this object started.
     */
    fun stopAudioEncode() {
        L.i(TAG, "stopEncode")
        mAudioCodec.stop()
        // Fix: release the codec as well, mirroring AudioEncode.
        mAudioCodec.release()
        // Fix: stop the AudioRecord this object actually started in
        // startAudioRecord(). The original called mAudioThread.stopRecord(),
        // but that thread's own AudioRecord was never started (startRecord()
        // is commented out above), so stop() would throw IllegalStateException
        // while this object's recorder kept running.
        if (::mAudioRecord.isInitialized) {
            mAudioRecord.stop()
            mAudioRecord.release()
        }
    }

    /**
     * Create and configure the AAC encoder (44.1 kHz stereo, 96 kbps, AAC-LC)
     * in asynchronous mode; errors are logged rather than rethrown.
     */
    private fun initAudioCodec() {
        L.i(TAG, "init Codec start")
        try {
            val mediaFormat =
                MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_AAC, RecordConfig.SAMPLE_RATE, 2)
            mAudioCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC)
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 96000)
            mediaFormat.setInteger(
                MediaFormat.KEY_AAC_PROFILE,
                MediaCodecInfo.CodecProfileLevel.AACObjectLC
            )
            // NOTE(review): bufferSize may exceed this max-input-size; a full
            // read could then overflow the codec input buffer — verify.
            mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 8192)
            mAudioCodec.setCallback(this)
            mAudioCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
        } catch (e: Exception) {
            L.i(TAG, "init error:${e.message}")
        }
        L.i(TAG, "init Codec end")
    }

    override fun onOutputBufferAvailable(
        codec: MediaCodec,
        index: Int,
        info: MediaCodec.BufferInfo
    ) {
        L.i(
            TAG,
            "onOutputBufferAvailable index:$index, info->offset:${info.offset},size:${info.size}" +
                    ",pts:${info.presentationTimeUs / 1000000} , isMuxerStart:${RecordConfig.isMuxerStart}"
        )
        // If the MediaMuxer has not started yet, just release this output buffer.
        if (!RecordConfig.isMuxerStart){
            mAudioCodec.releaseOutputBuffer(index, false)
            return
        }
        val outputBuffer = codec.getOutputBuffer(index)
        if (outputBuffer != null && info.size > 0) {
            outputBuffer.position(info.offset)
            // Fix: the limit must be offset + size, not size alone, or the
            // sample is truncated whenever the codec reports a non-zero offset.
            outputBuffer.limit(info.offset + info.size)
            mAudioMuxer.writeSampleData(RecordConfig.audioTrackIndex, outputBuffer, info)
        }
        // Fix: always return the buffer to the codec; the original leaked
        // zero-sized buffers, eventually starving the codec.
        mAudioCodec.releaseOutputBuffer(index, false)
    }

    override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
        Log.d(TAG,"onInputBufferAvailable index:$index")
        val byteArray = ByteArray(bufferSize)
        // Read one chunk of PCM; negative results are AudioRecord error codes.
        when (val result = mAudioRecord.read(byteArray, 0, bufferSize)) {
            AudioRecord.ERROR_INVALID_OPERATION -> {
                Log.i(TAG,"ERROR_INVALID_OPERATION")
            }
            AudioRecord.ERROR_BAD_VALUE -> {
                Log.i(TAG,"ERROR_BAD_VALUE")
            }
            AudioRecord.ERROR_DEAD_OBJECT -> {
                Log.i(TAG,"ERROR_DEAD_OBJECT")
            }
            AudioRecord.ERROR -> {
                Log.i(TAG,"ERROR")
            }
            else -> {
                encodePcmSource(index, byteArray, result)
            }
        }
    }

    override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
        L.i(TAG, "onOutputFormatChanged format:${format}")
        RecordConfig.audioTrackIndex = mAudioMuxer.addTrack(format)
        L.i(TAG, "onOutputFormatChanged mAudioTrackIndex:${RecordConfig.audioTrackIndex}")
        // Start the muxer only when the video track has been added too.
        if (RecordConfig.videoTrackIndex != -1) {
            mAudioMuxer.start()
            RecordConfig.isMuxerStart = true
            L.i(TAG, "onOutputFormatChanged isMuxerStart:${RecordConfig.isMuxerStart}")
        }
    }

    override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
        L.i(TAG, "onError e:${e.message}")
    }

    /** Create the AudioRecord and start capturing; bails out on a bad buffer size. */
    fun startAudioRecord(){
        if (bufferSize == AudioRecord.ERROR_BAD_VALUE){
            Log.i(TAG,"参数异常")
            return
        }

        mAudioRecord = AudioRecord(
            AUDIO_SOURCE,
            SAMPLE_RATE,
            CHANNEL_CONFIG,
            AUDIO_FORMAT,
            bufferSize
        )
        mAudioRecord.startRecording()
    }

    /**
     * Copy one PCM chunk into the codec input buffer at [buffIndex] and queue
     * it with a wall-clock timestamp. Exceptions (e.g. the codec was released
     * concurrently) are logged and swallowed.
     */
    private fun encodePcmSource(buffIndex:Int,pcmBuffer: ByteArray, buffSize: Int) {
        try {
            val byteBuffer = mAudioCodec.getInputBuffer(buffIndex) ?: return
            byteBuffer.clear()
            // NOTE(review): this puts the whole pcmBuffer even when the read
            // returned fewer bytes (buffSize < pcmBuffer.size) — verify.
            byteBuffer.put(pcmBuffer)
            // Wall-clock timestamp in microseconds. The duration-based formula
            // (1_000_000 * bytes / (sampleRate * channels * bytesPerSample))
            // is kept below for reference.
            // pts += (1.0 * buffSize / (SAMPLE_RATE * 2 * (AUDIO_FORMAT / 8)) * 1000000.0).toLong()
            pts = System.nanoTime() / 1000
            Log.i(TAG,
                "pcm一帧时间戳 = " + pts / 1000000.0f
            )
            mAudioCodec.queueInputBuffer(buffIndex, 0, buffSize, pts, 0)
        } catch (e: Exception) {
            // The codec may already have been released by the stop path.
            Log.i(TAG,"encodePcmSource: ${e.message}")
        }
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/record/AudioThread.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.record
2 |
3 | import android.media.AudioRecord
4 | import com.manu.mediasamples.util.L
5 | import java.util.concurrent.LinkedBlockingQueue
6 | import kotlin.properties.Delegates
7 |
/**
 * Captures PCM audio from the microphone on a background thread and buffers
 * each chunk in a queue for an encoder to consume via [poll].
 *
 * @Author: jzman
 * @Date: 2021/4/28 22:45.
 */
class AudioThread {
    private val tag = "AudioThread"
    // Fix: Delegates.notNull() requires an explicit type argument to compile.
    private var bufferSize by Delegates.notNull<Int>()
    // Fix: the queue needs an element type argument (also renamed from "quene").
    private var queue: LinkedBlockingQueue<ByteArray> = LinkedBlockingQueue()

    /** Recording state: -1 = idle, 1 = recording, 0 = stopped. */
    private var recording = -1

    init {
        bufferSize = AudioRecord.getMinBufferSize(
            RecordConfig.SAMPLE_RATE,
            RecordConfig.CHANNEL_CONFIG,
            RecordConfig.AUDIO_FORMAT
        )
    }

    // Created lazily so a bad bufferSize can be rejected in startRecord first.
    private val mAudioRecord: AudioRecord by lazy {
        AudioRecord(
            RecordConfig.AUDIO_SOURCE,
            RecordConfig.SAMPLE_RATE,
            RecordConfig.CHANNEL_CONFIG,
            RecordConfig.AUDIO_FORMAT,
            bufferSize
        )
    }

    /**
     * Start capturing: begin recording and spawn the reader thread.
     * Does nothing if the minimum buffer size could not be determined.
     */
    fun startRecord(){
        L.i(tag, "startAudioRecord")
        if (bufferSize == AudioRecord.ERROR_BAD_VALUE) {
            L.i(tag, "参数异常")
            return
        }
        recording = 1
        mAudioRecord.startRecording()
        // Fix: removed a dead read of recordingState that had no effect.
        Thread(RecordRunnable()).start()
    }

    /**
     * Stop capturing.
     */
    fun stopRecord(){
        // Fix: flip the flag before stopping the recorder so the reader loop
        // exits cleanly instead of calling read() on a stopped AudioRecord.
        recording = 0
        mAudioRecord.stop()
    }

    /**
     * Fetch the next buffered PCM chunk, or null if none is available.
     * A single-byte array containing -100 marks end of stream.
     */
    fun poll():ByteArray?{
        return queue.poll()
    }

    /**
     * Reader loop: drains the AudioRecord into the queue while recording,
     * then enqueues the custom end-of-stream marker.
     */
    inner class RecordRunnable : Runnable{

        override fun run() {
            val byteArray = ByteArray(bufferSize)
            while (recording == 1){
                val result = mAudioRecord.read(byteArray, 0, bufferSize)
                if (result > 0){
                    // Copy out only the bytes actually read.
                    val resultArray = ByteArray(result)
                    System.arraycopy(byteArray, 0, resultArray, 0, result)
                    queue.offer(resultArray)
                }
            }
            // Enqueue the custom end-of-stream marker frame.
            if (recording == 0){
                val stopArray = byteArrayOf((-100).toByte())
                queue.offer(stopArray)
            }
        }
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/record/EncodeManager.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.record
2 |
3 | import android.media.MediaMuxer
4 | import android.util.Log
5 | import android.widget.Toast
6 | import com.manu.mediasamples.app.MediaApplication
7 | import com.manu.mediasamples.util.L
8 | import java.io.IOException
9 |
/**
 * Coordinates the audio and video encoders that share a single [MediaMuxer]:
 * creates the muxer, hands it to both encoders, and starts/stops them
 * together while resetting the shared RecordConfig track state.
 *
 * @Author: jzman
 */
object EncodeManager {
    private const val TAG = "EncodeManager"
    private lateinit var mMediaMuxer: MediaMuxer

    /**
     * Create the muxer and initialize both encoders with it.
     */
    fun init(width: Int, height: Int){
        L.i(TAG, "init")
        initMuxer()
        AudioEncode.initAudio(mMediaMuxer)
        VideoEncode.initVideo(width, height, mMediaMuxer)
    }

    /**
     * Kick off audio and video encoding.
     */
    fun startEncode(){
        L.i(TAG, "startEncode")
        AudioEncode.startAudioEncode()
        VideoEncode.startVideoEncode()
    }

    /**
     * Stop both encoders and reset the shared muxer/track bookkeeping.
     */
    fun stopEncode(){
        L.i(TAG, "stopEncode")
        AudioEncode.stopAudioEncode()
        VideoEncode.stopVideoEncode()
        with(RecordConfig) {
            isMuxerStart = false
            audioTrackIndex = -1
            videoTrackIndex = -1
        }
    }

    /**
     * Build the MP4 muxer writing to the app-private files directory;
     * on failure, surface a toast and log the error.
     */
    private fun initMuxer() {
        L.i(TAG, "initMuxer")
        val outputPath = "${MediaApplication.context.filesDir}/test.mp4"
        try {
            mMediaMuxer = MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
        } catch (e: IOException) {
            Toast.makeText(RecordActivity.activity, "initMuxer error", Toast.LENGTH_LONG).show()
            Log.e(TAG, "initMuxer fail: ${e.message}")
        }
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/record/RecordActivity.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.record
2 |
3 | import android.annotation.SuppressLint
4 | import android.content.Context
5 | import android.graphics.SurfaceTexture
6 | import android.hardware.camera2.*
7 | import android.hardware.camera2.params.OutputConfiguration
8 | import android.hardware.camera2.params.SessionConfiguration
9 | import android.os.Build
10 | import android.os.Bundle
11 | import android.os.Handler
12 | import android.os.HandlerThread
13 | import android.util.Log
14 | import android.util.Size
15 | import android.view.Surface
16 | import android.view.TextureView
17 | import android.view.View
18 | import androidx.annotation.RequiresApi
19 | import androidx.appcompat.app.AppCompatActivity
20 | import com.google.android.material.snackbar.Snackbar
21 | import com.manu.mediasamples.MainActivity
22 | import com.manu.mediasamples.R
23 | import com.manu.mediasamples.databinding.ActivityCameraBinding
24 | import com.manu.mediasamples.util.L
25 | import java.util.concurrent.ExecutorService
26 | import java.util.concurrent.Executors
27 |
/**
 * Camera2 recording screen: previews the camera on a TextureView and, on
 * demand, adds the video encoder's input Surface as a second capture target
 * so EncodeManager can record the stream to MP4.
 *
 * @Author: jzman
 */
class RecordActivity : AppCompatActivity(), View.OnClickListener {

    private lateinit var binding: ActivityCameraBinding
    private lateinit var mCameraId: String

    private lateinit var mCaptureRequestBuild: CaptureRequest.Builder
    private lateinit var mExecutor: ExecutorService

    private lateinit var mCameraDevice: CameraDevice
    private lateinit var mCameraCaptureSession: CameraCaptureSession
    private lateinit var mSurfaceTexture: SurfaceTexture
    private lateinit var mSurface: Surface
    private lateinit var previewSize: Size

    // Dedicated camera callback thread/handler.
    private var mCameraThread = HandlerThread("CameraThread").apply { start() }
    private var mCameraHandler = Handler(mCameraThread.looper)

    // True while a recording session is active.
    private var isRecordState = false
    // True while the camera device is open.
    private var isCameraState = false

    /**
     * Lazily obtained CameraManager.
     */
    private val mCameraManager: CameraManager by lazy {
        application.getSystemService(Context.CAMERA_SERVICE) as CameraManager
    }

    /**
     * Lazily obtained CameraCharacteristics for the selected camera.
     */
    private val mCameraCharacteristics: CameraCharacteristics by lazy {
        mCameraManager.getCameraCharacteristics(mCameraId)
    }

    companion object {
        private const val TAG = "RecordActivity"
        lateinit var activity: RecordActivity
    }

    @RequiresApi(Build.VERSION_CODES.P)
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        binding = ActivityCameraBinding.inflate(layoutInflater)
        setContentView(binding.root)
        activity = this
        binding.btnRecord.setOnClickListener(this)
        binding.btnStop.setOnClickListener(this)
        mCameraId = intent.getStringExtra(MainActivity.CAMERA_ID).toString()
        mExecutor = Executors.newSingleThreadExecutor()
        previewSize = Size(1920, 1080)
        binding.textureView.setAspectRatio(previewSize.width, previewSize.height)
        binding.textureView.surfaceTextureListener = TextureListener()
    }

    @RequiresApi(Build.VERSION_CODES.P)
    override fun onClick(v: View?) {
        when (v?.id) {
            R.id.btnRecord -> startRecord()
            R.id.btnStop -> stop()
        }
    }

    override fun onStop() {
        super.onStop()
        try {
            mCameraDevice.close()
        } catch (exc: Throwable) {
            L.e(TAG, "Error closing camera", exc)
        }
    }

    override fun onDestroy() {
        super.onDestroy()
        mCameraThread.quitSafely()
        mExecutor.shutdownNow()
    }

    @SuppressLint("MissingPermission", "Recycle")
    private fun initCamera() {
        L.i(TAG, "initCamera")
        // Open the camera device.
        openCamera()
    }

    /**
     * Open the camera and track its open/closed state via the StateCallback.
     */
    @SuppressLint("MissingPermission")
    private fun openCamera() {
        mCameraManager.openCamera(mCameraId, object : CameraDevice.StateCallback() {
            override fun onOpened(camera: CameraDevice) {
                // Device opened.
                L.i(TAG, "onOpened")
                mCameraDevice = camera
                isCameraState = true
            }

            override fun onDisconnected(camera: CameraDevice) {
                // Device disconnected.
                L.i(TAG, "onDisconnected")
                isCameraState = false
                finish()
            }

            override fun onError(camera: CameraDevice, error: Int) {
                // Unexpected error.
                L.i(TAG, "onError:$error")
                isCameraState = false
                val msg = when (error) {
                    ERROR_CAMERA_DEVICE -> "Fatal (device)"
                    ERROR_CAMERA_DISABLED -> "Device policy"
                    ERROR_CAMERA_IN_USE -> "Camera in use"
                    ERROR_CAMERA_SERVICE -> "Fatal (service)"
                    ERROR_MAX_CAMERAS_IN_USE -> "Maximum cameras in use"
                    else -> "Unknown"
                }
                val exc = RuntimeException("Camera error: ($error) $msg")
                L.e(TAG, exc.message, exc)
            }

            override fun onClosed(camera: CameraDevice) {
                super.onClosed(camera)
                // Device closed; triggered by CameraDevice.close().
                L.i(TAG, "onClosed")
                isCameraState = false
            }
        }, mCameraHandler)
    }

    /**
     * Start recording: build a capture session that targets both the preview
     * Surface and the video encoder's input Surface, then start encoding.
     */
    @RequiresApi(Build.VERSION_CODES.P)
    private fun startRecord() {
        L.i(TAG, "startRecord")
        // NOTE(review): encoders are initialized before the camera-state check,
        // so a failed start still allocates muxer/codecs — verify intent.
        EncodeManager.init(previewSize.width, previewSize.height)
        if (!isCameraState) {
            Snackbar.make(
                binding.container,
                getString(R.string.camera_error),
                Snackbar.LENGTH_LONG
            ).show()
            return
        }

        Snackbar.make(
            binding.container,
            getString(if (isRecordState) R.string.record_now else R.string.record_start),
            Snackbar.LENGTH_LONG
        ).show()
        if (isRecordState) return

        mSurfaceTexture = binding.textureView.surfaceTexture!!
        mSurface = Surface(mSurfaceTexture)

        mSurfaceTexture.setDefaultBufferSize(previewSize.width, previewSize.height)

        // Add the preview Surface and the encoder input Surface as targets.
        mCaptureRequestBuild = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)
        mCaptureRequestBuild.addTarget(mSurface)
        mCaptureRequestBuild.addTarget(VideoEncode.getSurface())

        // Fix: mutableListOf() needs an explicit element type here — the
        // original bare call has no inferable type argument and fails to
        // compile; SessionConfiguration expects List<OutputConfiguration>.
        val outputs = mutableListOf<OutputConfiguration>()
        outputs.add(OutputConfiguration(mSurface))
        outputs.add(OutputConfiguration(VideoEncode.getSurface()))
        val sessionConfiguration = SessionConfiguration(
            SessionConfiguration.SESSION_REGULAR,
            outputs, mExecutor, object : CameraCaptureSession.StateCallback() {

                override fun onActive(session: CameraCaptureSession) {
                    super.onActive(session)
                    // The session is actively processing capture requests.
                    L.i(TAG, "onActive")
                }

                override fun onReady(session: CameraCaptureSession) {
                    super.onReady(session)
                    // Called whenever the session has no more capture requests:
                    // after configuration with nothing submitted, and after all
                    // submitted capture requests have completed.
                    L.i(TAG, "onReady")
                }

                override fun onConfigureFailed(session: CameraCaptureSession) {
                    val exc = RuntimeException("Camera $mCameraId session configuration failed")
                    L.e(TAG, exc.message, exc)
                }

                override fun onConfigured(session: CameraCaptureSession) {
                    // The session is configured and starts handling requests.
                    // If requests are already queued onActive follows; with
                    // none queued, onReady follows instead.
                    L.i(TAG, "onConfigured")
                    mCameraCaptureSession = session

                    // Request parameters: enable video stabilization.
                    mCaptureRequestBuild.set(
                        CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                        1
                    )
                    // Submit the repeating capture request.
                    mCameraCaptureSession.setRepeatingRequest(
                        mCaptureRequestBuild.build(),
                        null,
                        mCameraHandler
                    )
                    // Start encoding.
                    EncodeManager.startEncode()
                    isRecordState = true
                }
            })
        mCameraDevice.createCaptureSession(sessionConfiguration)
    }

    /**
     * Stop the repeating request and close the capture session.
     */
    private fun closeCaptureSession() {
        mCameraCaptureSession.stopRepeating()
        mCameraCaptureSession.close()
    }

    /** Stop recording if active; otherwise just inform the user. */
    private fun stop() {
        Snackbar
            .make(
                binding.container,
                getString(if (isRecordState) R.string.record_end else R.string.record_none),
                Snackbar.LENGTH_LONG
            ).show()
        if (!isRecordState) return
        EncodeManager.stopEncode()
        closeCaptureSession()
        isRecordState = false
    }

    /**
     * Callbacks for the TextureView's SurfaceTexture lifecycle.
     */
    private inner class TextureListener : TextureView.SurfaceTextureListener {
        override fun onSurfaceTextureSizeChanged(surface: SurfaceTexture, width: Int, height: Int) {
            // Called when the surfaceTexture's buffer size changes.
            Log.i(TAG, "onSurfaceTextureSizeChanged")
        }

        override fun onSurfaceTextureUpdated(surface: SurfaceTexture) {
            // Called when updateTexImage() updates the surfaceTexture.
            Log.i(TAG, "onSurfaceTextureUpdated")
        }

        override fun onSurfaceTextureDestroyed(surface: SurfaceTexture): Boolean {
            // Called when the surfaceTexture is about to be destroyed.
            // Returning true means no further rendering happens; returning
            // false means the caller must release() the surfaceTexture itself.
            Log.i(TAG, "onSurfaceTextureDestroyed")
            return true
        }

        override fun onSurfaceTextureAvailable(surface: SurfaceTexture, width: Int, height: Int) {
            // Called once the surfaceTexture is ready for use.
            Log.i(TAG, "onSurfaceTextureAvailable")

            // Pick a suitable preview size (currently hard-coded in onCreate).
            // previewSize = getPreviewOutputSize(
            //    binding.textureView.display,
            //    mCameraCharacteristics,
            //    mSurfaceTexture::class.java
            // )

            initCamera()
        }
    }
}
303 |
304 |
305 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/record/RecordConfig.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.record
2 |
3 | import android.media.AudioFormat
4 | import android.media.MediaRecorder
5 |
/**
 * Shared recording configuration and mutable muxer/track state used by the
 * audio and video encoders.
 *
 * @Author: jzman
 * @Date: 2021/4/24 15:47.
 */
object RecordConfig {

    /** Record from the main microphone. */
    const val AUDIO_SOURCE = MediaRecorder.AudioSource.MIC

    /** PCM sample format. */
    const val AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT

    /** Sample rate in Hz. */
    const val SAMPLE_RATE = 44100

    /** Channel configuration. */
    const val CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_STEREO

    /** Muxer index of the audio track; -1 until the track is added. */
    var audioTrackIndex = -1

    /** Muxer index of the video track; -1 until the track is added. */
    var videoTrackIndex = -1

    /** Whether the shared MediaMuxer has been started. */
    var isMuxerStart: Boolean = false

    /** Whether the audio encoder has stopped. (Original comment had audio/video labels swapped.) */
    var isAudioStop: Boolean = false

    /** Whether the video encoder has stopped. (Original comment had audio/video labels swapped.) */
    var isVideoStop: Boolean = false

    /** Whether the audio track was added to the muxer successfully. */
    var isAddAudioTrack: Boolean = false

    /** Whether the video track was added to the muxer successfully. */
    var isAddVideoTrack: Boolean = false
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/record/VideoEncode.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.record
2 |
3 | import android.media.MediaCodec
4 | import android.media.MediaCodecInfo
5 | import android.media.MediaFormat
6 | import android.media.MediaMuxer
7 | import android.os.Build
8 | import android.view.Surface
9 | import android.widget.Toast
10 | import com.manu.mediasamples.util.L
11 |
12 | /**
13 | * @Desc:VideoEncode
14 | * @Author: jzman
15 | */
object VideoEncode : MediaCodec.Callback() {
    private const val TAG = "VideoEncode"
    private const val MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC
    private const val COLOR_FORMAT_SURFACE =
        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
    private lateinit var mVideoCodec: MediaCodec
    private lateinit var mVideoMuxer: MediaMuxer

    /** Surface used as encoder input. */
    private lateinit var mSurface: Surface

    /** First presentation timestamp seen; later frames are rebased against it. */
    private var pts: Long = 0

    /**
     * Initializes the video encoder.
     *
     * @param width  video width in pixels
     * @param height video height in pixels
     * @param muxer  shared MediaMuxer that also receives the audio track
     */
    fun initVideo(width: Int, height: Int, muxer: MediaMuxer) {
        L.i(TAG, "initVideo")
        this.mVideoMuxer = muxer
        initCodec(width, height)
    }

    /**
     * Returns the Surface used to feed frames into the encoder.
     */
    fun getSurface(): Surface {
        return mSurface
    }

    /**
     * Starts encoding.
     */
    fun startVideoEncode() {
        L.i(TAG, "startEncode > mVideoMuxer:${mVideoMuxer}")
        mVideoCodec.start()
    }

    /**
     * Stops encoding. The muxer is shared with the audio side, so it is only
     * stopped once the audio encoder has stopped as well.
     */
    fun stopVideoEncode() {
        L.i(TAG, "stopEncode")
        mVideoCodec.stop()
        mVideoCodec.release()
        RecordConfig.isVideoStop = true
        if (RecordConfig.isAudioStop) {
            mVideoMuxer.stop()
            mVideoMuxer.release()
            RecordConfig.isVideoStop = false
        }
    }

    override fun onOutputBufferAvailable(
        codec: MediaCodec,
        index: Int,
        info: MediaCodec.BufferInfo
    ) {
        L.i(
            TAG,
            "onOutputBufferAvailable index:$index, info->offset:${info.offset},size:${info.size}" +
                    ",pts:${info.presentationTimeUs / 1000000} , isMuxerStart:${RecordConfig.isMuxerStart}"
        )
        // Muxer not started yet: just hand the buffer back to the codec.
        if (!RecordConfig.isMuxerStart) {
            codec.releaseOutputBuffer(index, false)
            return
        }
        val outputBuffer = codec.getOutputBuffer(index)
        if (outputBuffer != null && info.size > 0) {
            outputBuffer.position(info.offset)
            // Limit must be offset + size; the original used size alone,
            // which drops data whenever offset > 0.
            outputBuffer.limit(info.offset + info.size)
            // Record the first pts, then rebase every frame so the stream
            // starts at 0. (The original subtracted pts while it was still 0,
            // a no-op — pts was never assigned.)
            if (pts == 0L) {
                pts = info.presentationTimeUs
            }
            info.presentationTimeUs = info.presentationTimeUs - pts
            mVideoMuxer.writeSampleData(RecordConfig.videoTrackIndex, outputBuffer, info)
        }
        // Always return the buffer, even when empty; otherwise the codec
        // eventually runs out of output buffers and stalls.
        codec.releaseOutputBuffer(index, false)
    }

    override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
        L.i(TAG, "onInputBufferAvailable index:$index")
    }

    override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
        L.i(TAG, "onOutputFormatChanged format:${format}")
        addVideoTrack(format)
        // Start the muxer only after the audio track has been added too.
        if (RecordConfig.audioTrackIndex != -1) {
            mVideoMuxer.start()
            RecordConfig.isMuxerStart = true
        }
    }

    override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
        L.i(TAG, "onError e:${e.message}")
    }

    /**
     * Creates and configures the MediaCodec encoder and its input Surface.
     */
    private fun initCodec(width: Int, height: Int) {
        L.i(TAG, "initCodec start")
        try {
            // Create the encoder via the shared MIME_TYPE constant
            // (the original repeated the literal).
            mVideoCodec = MediaCodec.createEncoderByType(MIME_TYPE)
            val mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height)
            mediaFormat.setInteger(
                MediaFormat.KEY_COLOR_FORMAT,
                COLOR_FORMAT_SURFACE
            ) // color format: input arrives through a Surface
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 4) // bit rate
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30) // frame rate
            mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1) // I-frame interval

            mediaFormat.setInteger(
                MediaFormat.KEY_PROFILE,
                MediaCodecInfo.CodecProfileLevel.AVCProfileHigh
            )
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                mediaFormat.setInteger(
                    MediaFormat.KEY_LEVEL,
                    MediaCodecInfo.CodecProfileLevel.AVCLevel31
                )
            }
            // Register the async callback before configure().
            mVideoCodec.setCallback(this)
            mVideoCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
            // createInputSurface() is only valid between configure() and start().
            mSurface = mVideoCodec.createInputSurface()
        } catch (e: Exception) {
            L.i(TAG, "initCodec fail:${e.message} ")
            e.printStackTrace()
        }
        L.i(TAG, "initCodec end")
    }

    /**
     * Adds the video track to the muxer and records its index in RecordConfig.
     */
    private fun addVideoTrack(format: MediaFormat) {
        L.i(TAG, "addVideoTrack format:${format}")
        RecordConfig.videoTrackIndex = mVideoMuxer.addTrack(format)
        RecordConfig.isAddVideoTrack = true
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/sync/EncodeManager.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.sync
2 |
3 | import android.media.MediaCodec
4 | import android.media.MediaCodecInfo
5 | import android.media.MediaFormat
6 | import android.media.MediaMuxer
7 | import android.os.Build
8 | import android.util.Log
9 | import android.view.Surface
10 | import com.manu.mediasamples.app.MediaApplication
11 | import java.io.IOException
12 |
13 | /**
14 | * @Desc:EncodeManager
15 | * @Author: jzman
16 | */
object EncodeManager {
    private const val TAG = "EncodeManager"
    private const val MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC
    private const val COLOR_FORMAT_SURFACE =
        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
    private lateinit var mMediaCodec: MediaCodec
    private lateinit var mMediaMuxer: MediaMuxer

    /** Surface used as encoder input. */
    private lateinit var mSurface: Surface

    /** Worker thread that drains the encoder synchronously. */
    private var mEncodeThread: SyncEncodeThread? = null

    /**
     * Initializes the codec, the muxer and the encode thread.
     *
     * @param width  video width in pixels
     * @param height video height in pixels
     */
    fun init(width: Int, height: Int) {
        initCodec(width, height)
        initMuxer()
        mEncodeThread =
            SyncEncodeThread(
                mMediaCodec,
                mMediaMuxer
            )
    }

    /**
     * Returns the Surface used to feed frames into the encoder.
     */
    fun getSurface(): Surface {
        return mSurface
    }

    /**
     * Starts encoding; [init] must have been called first.
     */
    fun startEncode() {
        Log.d(TAG, "startEncode")
        if (mEncodeThread == null) error("not call init method.")
        mEncodeThread?.start()
    }

    /**
     * Requests the encode thread to stop; the thread releases the codec and
     * the muxer when it observes the flag.
     */
    fun stopEncode() {
        Log.d(TAG, "stopEncode")
        if (mEncodeThread == null) error("not call init method.")
        mEncodeThread?.isStop = true
        mEncodeThread = null
    }

    /**
     * Creates and configures the MediaCodec encoder and its input Surface.
     */
    private fun initCodec(width: Int, height: Int) {
        Log.i(TAG, "initCodec start")
        try {
            // Use the shared MIME_TYPE constant (the original repeated the literal).
            mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE)
            val mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height)
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                COLOR_FORMAT_SURFACE
            ) // color format: input arrives through a Surface
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 4) // bit rate
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30) // frame rate
            mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1) // I-frame interval

            mediaFormat.setInteger(
                MediaFormat.KEY_PROFILE,
                MediaCodecInfo.CodecProfileLevel.AVCProfileHigh
            )
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                mediaFormat.setInteger(
                    MediaFormat.KEY_LEVEL,
                    MediaCodecInfo.CodecProfileLevel.AVCLevel31
                )
            }
            mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
            // createInputSurface() is only valid between configure() and start().
            mSurface = mMediaCodec.createInputSurface()
        } catch (e: Exception) {
            Log.i(TAG, "initCodec fail:${e.message} ")
            e.printStackTrace()
        }
    }

    /**
     * Creates the MediaMuxer writing an MP4 into the app's files directory.
     */
    private fun initMuxer() {
        try {
            val path = "${MediaApplication.context.filesDir}/test.mp4"
            mMediaMuxer = MediaMuxer(path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
        } catch (e: IOException) {
            Log.e(
                TAG,
                "initMuxer fail: ${e.message}"
            )
        }
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/samples/sync/SyncEncodeThread.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.samples.sync
2 |
3 | import android.media.MediaCodec
4 | import android.media.MediaMuxer
5 | import android.util.Log
6 | import kotlin.properties.Delegates
7 |
8 | /**
9 | * @Desc: 编码线程
10 | * @Author: jzman
11 | */
class SyncEncodeThread(var mMediaCodec: MediaCodec, var mMediaMuxer: MediaMuxer) : Thread() {
    // Stop flag; written from another thread, so it must be volatile
    // to be reliably observed by this thread's loop.
    @Volatile
    var isStop = false
    /** Whether the muxer has been started. */
    private var mStartMuxer = false
    /** Buffer metadata: size, offset, pts. */
    private var bufferInfo = MediaCodec.BufferInfo()
    /** First presentation timestamp; used to rebase pts so the stream starts at 0. */
    private var pts: Long = 0
    // Muxer track index. Delegates.notNull needs the explicit <Int> type
    // argument — the original bare notNull() cannot be inferred and fails to compile.
    private var mTrackIndex by Delegates.notNull<Int>()

    companion object {
        private const val TAG = "EncodeManager"
    }

    override fun run() {
        super.run()
        mMediaCodec.start()
        while (true) {
            if (isStop) {
                mMediaCodec.stop()
                mMediaCodec.release()

                mMediaMuxer.stop()
                mMediaMuxer.release()
                break
            }

            // Index of an output buffer that has been successfully encoded.
            var outputBufferId: Int = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0)
            if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // Add the video track and start the muxer.
                mTrackIndex = mMediaMuxer.addTrack(mMediaCodec.outputFormat)
                mMediaMuxer.start()
                mStartMuxer = true
            } else {
                while (outputBufferId >= 0) {
                    if (!mStartMuxer) {
                        Log.i(TAG, "MediaMuxer not start")
                        // Return the buffer and fetch the next one; the original
                        // bare `continue` spun forever on the same buffer id.
                        mMediaCodec.releaseOutputBuffer(outputBufferId, false)
                        outputBufferId = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0)
                        continue
                    }
                    // Get the valid data span of the buffer.
                    val outputBuffer = mMediaCodec.getOutputBuffer(outputBufferId)
                    if (outputBuffer != null) {
                        outputBuffer.position(bufferInfo.offset)
                        outputBuffer.limit(bufferInfo.offset + bufferInfo.size)
                        if (pts == 0L) {
                            pts = bufferInfo.presentationTimeUs
                        }
                        bufferInfo.presentationTimeUs = bufferInfo.presentationTimeUs - pts
                        // Write the sample into the muxer to build the file.
                        mMediaMuxer.writeSampleData(mTrackIndex, outputBuffer, bufferInfo)
                        Log.d(
                            TAG,
                            "pts = ${bufferInfo.presentationTimeUs / 1000000.0f} s ,${pts / 1000} ms"
                        )
                    }
                    // Release unconditionally (the original `?: continue` could also spin).
                    mMediaCodec.releaseOutputBuffer(outputBufferId, false)
                    outputBufferId = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0)
                }
            }
        }
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/util/AutoFitTextureView.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.util
2 |
3 | import android.content.Context
4 | import android.util.AttributeSet
5 | import android.util.Log
6 | import android.view.TextureView
7 |
8 | /**
9 | * @Desc: AutoFitTexture
10 | * @Author: jzman
11 | */
class AutoFitTextureView @JvmOverloads constructor(
    context: Context,
    attrs: AttributeSet? = null,
    defStyleAttr: Int = 0
) : TextureView(context, attrs, defStyleAttr) {

    companion object{
        private val TAG = AutoFitTextureView::class.java.simpleName
    }

    private var mRatioWidth = 0
    private var mRatioHeight = 0

    /**
     * Sets the aspect ratio; the view is re-measured to fit width:height.
     *
     * @throws IllegalArgumentException if either dimension is not positive
     */
    fun setAspectRatio(width:Int, height:Int){
        require(width > 0 && height > 0){"width and height cannot be negative or zero."}
        this.mRatioWidth = width
        this.mRatioHeight = height
        requestLayout()
    }

    override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec)
        val width = MeasureSpec.getSize(widthMeasureSpec)
        val height = MeasureSpec.getSize(heightMeasureSpec)
        Log.d(TAG, "Measured dimensions origin size: $width x $height")
        val newWidth: Int
        val newHeight: Int
        if (mRatioWidth == 0 || mRatioHeight == 0){
            newWidth = width
            newHeight = height
        }else{
            // Fit the configured aspect ratio inside the measured bounds.
            if (width < height * mRatioWidth / mRatioHeight) {
                newWidth = width
                newHeight = width * mRatioHeight / mRatioWidth
            } else {
                newWidth = height * mRatioWidth / mRatioHeight
                newHeight = height
            }
        }
        // Apply in both branches; the original only called this when a ratio
        // was set, silently relying on super.onMeasure() otherwise.
        setMeasuredDimension(newWidth, newHeight)
        Log.d(TAG, "Measured dimensions set: $newWidth x $newHeight")
    }

}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/util/CameraSizes.kt:
--------------------------------------------------------------------------------
1 |
2 | package com.manu.mediasamples.util
3 |
4 | import android.graphics.Point
5 | import android.hardware.camera2.CameraCharacteristics
6 | import android.hardware.camera2.params.StreamConfigurationMap
7 | import android.util.Size
8 | import android.view.Display
9 | import kotlin.math.max
10 | import kotlin.math.min
11 |
12 | /**
13 | * Size辅助类
14 | */
15 | class SmartSize(width: Int, height: Int) {
16 | var size = Size(width, height)
17 | var long = max(size.width, size.height)
18 | var short = min(size.width, size.height)
19 | override fun toString() = "SmartSize(${long}x${short})"
20 | }
21 |
/** Standard high-definition (1080p) size for images and video. */
val SIZE_1080P: SmartSize =
    SmartSize(1920, 1080)
25 |
26 | /**
27 | * 获得给定Display对应屏幕真实尺寸的SmartSize
28 | */
29 | fun getDisplaySmartSize(display: Display): SmartSize {
30 | val outPoint = Point()
31 | display.getRealSize(outPoint)
32 | return SmartSize(outPoint.x, outPoint.y)
33 | }
34 |
35 | /**
36 | * 获取可用的最大预览尺寸
37 | */
38 | fun getPreviewOutputSize(
39 | display: Display,
40 | characteristics: CameraCharacteristics,
41 | targetClass: Class,
42 | format: Int? = null
43 | ): Size {
44 |
45 | val screenSize = getDisplaySmartSize(display)
46 | val hdScreen = screenSize.long >= SIZE_1080P.long || screenSize.short >= SIZE_1080P.short
47 | val maxSize = if (hdScreen) SIZE_1080P else screenSize
48 |
49 | // 如果提供图像格式则由具体格式决定预览大小,否则则由targetClass决定
50 | val config = characteristics.get(
51 | CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!
52 | // 检查提供的target和format是否支持
53 | if (format == null) {
54 | if (!StreamConfigurationMap.isOutputSupportedFor(targetClass)) error("$targetClass not support.")
55 | }else{
56 | if (!config.isOutputSupportedFor(format)) error("$format not support.")
57 | }
58 | val allSizes = if (format == null)
59 | config.getOutputSizes(targetClass) else config.getOutputSizes(format)
60 |
61 | // 根据Size从大到小排序
62 | val validSizes = allSizes
63 | .sortedWith(compareBy { it.height * it.width })
64 | .map { SmartSize(it.width, it.height) }
65 | .reversed()
66 |
67 | return validSizes.first { it.long <= maxSize.long && it.short <= maxSize.short }.size
68 | }
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/util/CodecUtil.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.util
2 |
3 | import android.media.MediaCodecInfo
4 | import android.media.MediaCodecList
5 |
6 |
7 | /**
8 | * @Desc:
9 | * @Author: jzman
10 | */
object CodecUtil {

    /**
     * Returns the first encoder that supports the given MIME type
     * (case-insensitive), or null when none is found.
     */
    fun selectCodec(mimeType: String): MediaCodecInfo? {
        val codecInfos = MediaCodecList(MediaCodecList.REGULAR_CODECS).codecInfos
        return codecInfos.firstOrNull { info ->
            info.isEncoder && info.supportedTypes.any { it.equals(mimeType, true) }
        }
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/util/GLUtil.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.util
2 |
3 | import android.app.ActivityManager
4 | import android.content.Context
5 | import android.opengl.GLES20
6 | import android.opengl.GLUtils
7 | import android.opengl.Matrix
8 | import android.util.Log
9 | import java.io.BufferedReader
10 | import java.io.IOException
11 | import java.io.InputStreamReader
12 |
13 | /**
14 | * @author : jzman
15 | * @desc: GLUtil
16 | * @date: 2021/7/3 20:46.
17 | */
object GLUtil {
    private val TAG = GLUtil::class.java.simpleName
    private val sIdentityMatrix = FloatArray(16)

    /** Returns the shared 4x4 identity matrix (initialized in `init`). */
    fun identityMatrix(): FloatArray {
        return sIdentityMatrix
    }

    /**
     * Whether the device supports OpenGL ES 2.0.
     *
     * @param context Context
     * @return true when supported
     */
    fun supportOpenGLES2(context: Context): Boolean {
        val activityManager = context.getSystemService(Context.ACTIVITY_SERVICE) as ActivityManager
        val configurationInfo = activityManager.deviceConfigurationInfo
        return configurationInfo.reqGlEsVersion >= 0x20000
    }

    /**
     * Logs every pending GL error for the given operation.
     *
     * @param operation name of the GL call being checked
     */
    fun glCheck(operation: String) {
        var error: Int
        while (GLES20.glGetError().also { error = it } != GLES20.GL_NO_ERROR) {
            Log.e(TAG, operation + ": glError " + GLUtils.getEGLErrorString(error))
        }
    }

    /**
     * Reads shader source code from a raw resource.
     *
     * @param context Context
     * @param resId raw resource id
     * @return shader source code, or null on I/O failure
     */
    fun readShaderSourceCodeFromRaw(context: Context, resId: Int): String?{
        return try {
            // use {} closes the stream; the original never closed its reader,
            // leaking the raw-resource stream on every call.
            context.resources.openRawResource(resId).bufferedReader().use { reader ->
                val body = StringBuilder()
                reader.forEachLine { line ->
                    body.append(line)
                    body.append('\n')
                }
                body.toString()
            }
        } catch (e: IOException) {
            e.printStackTrace()
            null
        }
    }

    /**
     * Compiles a shader.
     *
     * @param shaderType Shader type, e.g. [GLES20.GL_VERTEX_SHADER]
     * @param shaderSource Shader Source Code
     * @return An OpenGL handle to the shader.
     * @throws RuntimeException when creation or compilation fails
     */
    fun compileShader(shaderType: Int, shaderSource: String): Int {
        // Create the shader object.
        var shaderHandle = GLES20.glCreateShader(shaderType)
        if (shaderHandle != 0) {
            // Upload the source and compile.
            GLES20.glShaderSource(shaderHandle, shaderSource)
            GLES20.glCompileShader(shaderHandle)
            // Query the compile status; 0 means failure.
            val compileStatus = IntArray(1)
            GLES20.glGetShaderiv(shaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0)
            if (compileStatus[0] == 0) {
                Log.e(TAG, "compile shader error: " + GLES20.glGetShaderInfoLog(shaderHandle))
                GLES20.glDeleteShader(shaderHandle)
                shaderHandle = 0
            }
        }
        if (shaderHandle == 0){
            throw RuntimeException("create shader error.")
        }
        return shaderHandle
    }

    /**
     * Creates a program, attaches both shaders and links them.
     *
     * @throws RuntimeException when creation or linking fails
     */
    fun createAndLinkProgram(vertexShaderHandle: Int, fragmentShaderHandle: Int): Int {
        var programHandle = GLES20.glCreateProgram()
        if (programHandle != 0) {
            GLES20.glAttachShader(programHandle, vertexShaderHandle)
            GLES20.glAttachShader(programHandle, fragmentShaderHandle)
            GLES20.glLinkProgram(programHandle)

            // Get the link status.
            val linkStatus = IntArray(1)
            GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0)
            // If the link failed, delete the program.
            if (linkStatus[0] == 0) {
                Log.e(TAG, "program link error: " + GLES20.glGetProgramInfoLog(programHandle))
                GLES20.glDeleteProgram(programHandle)
                programHandle = 0
            }
        }
        if (programHandle == 0){
            throw RuntimeException("create program error")
        }
        return programHandle
    }

    init {
        Matrix.setIdentityM(sIdentityMatrix, 0)
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/util/L.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.util
2 |
3 | import android.util.Log
4 | import com.manu.mediasamples.BuildConfig
5 |
6 | /**
7 | * @Desc:日志工具类
8 | * @Author: jzman
9 | * @Date: 2021/4/24 11:57.
10 | */
object L {
    private const val TAG = "MLog"

    /** Info-level log, emitted only in debug builds. */
    fun i(tag: String, msg: String) {
        if (!BuildConfig.DEBUG) return
        Log.i(TAG, "$tag > $msg")
    }

    /** Error-level log with throwable, emitted only in debug builds. */
    fun e(tag: String, msg: String?,tr:Throwable) {
        if (!BuildConfig.DEBUG) return
        Log.e(TAG, "$tag > $msg",tr)
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/java/com/manu/mediasamples/util/TextureHelper.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples.util
2 |
3 | import android.opengl.GLES11Ext
4 | import android.opengl.GLES20
5 | import android.util.Log
6 | import java.lang.RuntimeException
7 | import javax.microedition.khronos.opengles.GL10
8 |
9 | /**
10 | * @Desc: TextureHelper
11 | * @Author: jzman
12 | * @Date: 2021/8/5 11:17.
13 | */
object TextureHelper {
    const val TAG = "TextureHelper"

    /**
     * Generates one texture object and returns its id.
     *
     * @throws RuntimeException when texture generation fails (id 0)
     */
    fun createTextureId(): Int {
        val ids = IntArray(1)
        GLES20.glGenTextures(1, ids, 0)
        val textureId = ids[0]
        if (textureId == 0) {
            throw RuntimeException("create OES texture failed, ${Thread.currentThread().name}")
        }
        return textureId
    }

    /**
     * Activates texture unit 0 and binds [textureId] to the external OES
     * texture target (YUV is converted to RGB automatically when sampled).
     */
    fun activeBindOESTexture(textureId:Int) {
        // Activate the texture unit, then bind the id to the OES target.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId)
        // Filtering and wrapping parameters.
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST.toFloat())
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR.toFloat())
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE.toFloat())
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE.toFloat())
        Log.d(TAG, "activeBindOESTexture: texture id $textureId")
    }
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |
7 |
8 |
9 |
15 |
18 |
21 |
22 |
23 |
24 |
30 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
10 |
15 |
20 |
25 |
30 |
35 |
40 |
45 |
50 |
55 |
60 |
65 |
70 |
75 |
80 |
85 |
90 |
95 |
100 |
105 |
110 |
115 |
120 |
125 |
130 |
135 |
140 |
145 |
150 |
155 |
160 |
165 |
170 |
171 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/layout/activity_camera.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
18 |
19 |
28 |
29 |
39 |
40 |
50 |
51 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
20 |
21 |
30 |
31 |
40 |
41 |
50 |
51 |
60 |
61 |
70 |
71 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/layout/activity_open_g_l.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
13 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/layout/activity_track_audio.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
20 |
21 |
32 |
33 |
43 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jzmanu/MediaSamples/262587298d4665e3951c693e7a7ee410ccc49c42/MediaSamples/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jzmanu/MediaSamples/262587298d4665e3951c693e7a7ee410ccc49c42/MediaSamples/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jzmanu/MediaSamples/262587298d4665e3951c693e7a7ee410ccc49c42/MediaSamples/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jzmanu/MediaSamples/262587298d4665e3951c693e7a7ee410ccc49c42/MediaSamples/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jzmanu/MediaSamples/262587298d4665e3951c693e7a7ee410ccc49c42/MediaSamples/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jzmanu/MediaSamples/262587298d4665e3951c693e7a7ee410ccc49c42/MediaSamples/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jzmanu/MediaSamples/262587298d4665e3951c693e7a7ee410ccc49c42/MediaSamples/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jzmanu/MediaSamples/262587298d4665e3951c693e7a7ee410ccc49c42/MediaSamples/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jzmanu/MediaSamples/262587298d4665e3951c693e7a7ee410ccc49c42/MediaSamples/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jzmanu/MediaSamples/262587298d4665e3951c693e7a7ee410ccc49c42/MediaSamples/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/raw/video_fragment_shader_default.glsl:
--------------------------------------------------------------------------------
// Samples an external OES texture (GL_TEXTURE_EXTERNAL_OES), e.g. camera/video frames.
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTextureCoordinate;
uniform samplerExternalOES uTexture;
void main() {
    gl_FragColor=texture2D(uTexture, vTextureCoordinate);
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/raw/video_vertex_shader.glsl:
--------------------------------------------------------------------------------
attribute vec4 aPosition;
attribute vec2 aCoordinate;
uniform mat4 uMVPMatrix;
// varying variables pass data from the vertex shader to the fragment shader:
// the vertex shader writes them and the fragment shader reads them,
// so their declarations must be identical in both shaders.
varying vec2 vTextureCoordinate; // texture coordinate
void main() {
    gl_Position = uMVPMatrix * aPosition;
    vTextureCoordinate = aCoordinate;
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/raw/video_vertex_shader_default.glsl:
--------------------------------------------------------------------------------
attribute vec4 aPosition;
attribute vec2 aCoordinate;
// varying variables pass data from the vertex shader to the fragment shader:
// the vertex shader writes them and the fragment shader reads them,
// so their declarations must be identical in both shaders.
varying vec2 vTextureCoordinate; // texture coordinate
void main() {
    gl_Position = aPosition;
    vTextureCoordinate = aCoordinate;
}
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/values-night/themes.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
16 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #FFBB86FC
4 | #FF6200EE
5 | #FF3700B3
6 | #FF03DAC5
7 | #FF018786
8 | #FF000000
9 | #FFFFFFFF
10 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | MediaSamples
3 | 请求相机权限
4 | 授予
5 | 取消
6 | 录制开始…
7 | 正在录制…
8 | 还没开始录制…
9 | 录制结束…
10 |
11 | Camera2 open error.
12 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/values/themes.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
16 |
17 |
20 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/main/res/xml/network_security_config.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/MediaSamples/app/src/test/java/com/manu/mediasamples/ExampleUnitTest.kt:
--------------------------------------------------------------------------------
1 | package com.manu.mediasamples
2 |
3 | import org.junit.Test
4 |
5 | import org.junit.Assert.*
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 | * See [testing documentation](http://d.android.com/tools/testing).
11 | */
class ExampleUnitTest {
    /** Sanity-check test: verifies basic integer addition. */
    @Test
    fun addition_isCorrect() {
        val sum = 2 + 2
        assertEquals(4, sum)
    }
}
--------------------------------------------------------------------------------
/MediaSamples/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 | buildscript {
3 | ext.kotlin_version = "1.3.72"
4 | repositories {
5 | google()
6 | jcenter()
7 | }
8 | dependencies {
9 | classpath "com.android.tools.build:gradle:4.1.3"
10 | classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
11 |
12 | // NOTE: Do not place your application dependencies here; they belong
13 | // in the individual module build.gradle files
14 | }
15 | }
16 |
17 | allprojects {
18 | repositories {
19 | google()
20 | jcenter()
21 | }
22 | }
23 |
24 | task clean(type: Delete) {
25 | delete rootProject.buildDir
26 | }
--------------------------------------------------------------------------------
/MediaSamples/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | # AndroidX package structure to make it clearer which packages are bundled with the
15 | # Android operating system, and which are packaged with your app's APK
16 | # https://developer.android.com/topic/libraries/support-library/androidx-rn
17 | android.useAndroidX=true
18 | # Automatically convert third-party libraries to use AndroidX
19 | android.enableJetifier=true
20 | # Kotlin code style for this project: "official" or "obsolete":
21 | kotlin.code.style=official
--------------------------------------------------------------------------------
/MediaSamples/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jzmanu/MediaSamples/262587298d4665e3951c693e7a7ee410ccc49c42/MediaSamples/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/MediaSamples/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Fri Dec 11 11:20:01 CST 2020
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-all.zip
7 |
--------------------------------------------------------------------------------
/MediaSamples/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Attempt to set APP_HOME
10 | # Resolve links: $0 may be a link
11 | PRG="$0"
12 | # Need this for relative symlinks.
13 | while [ -h "$PRG" ] ; do
14 | ls=`ls -ld "$PRG"`
15 | link=`expr "$ls" : '.*-> \(.*\)$'`
16 | if expr "$link" : '/.*' > /dev/null; then
17 | PRG="$link"
18 | else
19 | PRG=`dirname "$PRG"`"/$link"
20 | fi
21 | done
22 | SAVED="`pwd`"
23 | cd "`dirname \"$PRG\"`/" >/dev/null
24 | APP_HOME="`pwd -P`"
25 | cd "$SAVED" >/dev/null
26 |
27 | APP_NAME="Gradle"
28 | APP_BASE_NAME=`basename "$0"`
29 |
30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
31 | DEFAULT_JVM_OPTS=""
32 |
33 | # Use the maximum available, or set MAX_FD != -1 to use that value.
34 | MAX_FD="maximum"
35 |
36 | warn () {
37 | echo "$*"
38 | }
39 |
40 | die () {
41 | echo
42 | echo "$*"
43 | echo
44 | exit 1
45 | }
46 |
47 | # OS specific support (must be 'true' or 'false').
48 | cygwin=false
49 | msys=false
50 | darwin=false
51 | nonstop=false
52 | case "`uname`" in
53 | CYGWIN* )
54 | cygwin=true
55 | ;;
56 | Darwin* )
57 | darwin=true
58 | ;;
59 | MINGW* )
60 | msys=true
61 | ;;
62 | NONSTOP* )
63 | nonstop=true
64 | ;;
65 | esac
66 |
67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
68 |
69 | # Determine the Java command to use to start the JVM.
70 | if [ -n "$JAVA_HOME" ] ; then
71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
72 | # IBM's JDK on AIX uses strange locations for the executables
73 | JAVACMD="$JAVA_HOME/jre/sh/java"
74 | else
75 | JAVACMD="$JAVA_HOME/bin/java"
76 | fi
77 | if [ ! -x "$JAVACMD" ] ; then
78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
79 |
80 | Please set the JAVA_HOME variable in your environment to match the
81 | location of your Java installation."
82 | fi
83 | else
84 | JAVACMD="java"
85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
86 |
87 | Please set the JAVA_HOME variable in your environment to match the
88 | location of your Java installation."
89 | fi
90 |
91 | # Increase the maximum file descriptors if we can.
92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
93 | MAX_FD_LIMIT=`ulimit -H -n`
94 | if [ $? -eq 0 ] ; then
95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
96 | MAX_FD="$MAX_FD_LIMIT"
97 | fi
98 | ulimit -n $MAX_FD
99 | if [ $? -ne 0 ] ; then
100 | warn "Could not set maximum file descriptor limit: $MAX_FD"
101 | fi
102 | else
103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
104 | fi
105 | fi
106 |
107 | # For Darwin, add options to specify how the application appears in the dock
108 | if $darwin; then
109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
110 | fi
111 |
112 | # For Cygwin, switch paths to Windows format before running java
113 | if $cygwin ; then
114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
116 | JAVACMD=`cygpath --unix "$JAVACMD"`
117 |
118 | # We build the pattern for arguments to be converted via cygpath
119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 | SEP=""
121 | for dir in $ROOTDIRSRAW ; do
122 | ROOTDIRS="$ROOTDIRS$SEP$dir"
123 | SEP="|"
124 | done
125 | OURCYGPATTERN="(^($ROOTDIRS))"
126 | # Add a user-defined pattern to the cygpath arguments
127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 | fi
130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 | i=0
132 | for arg in "$@" ; do
133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 |
136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 | else
139 | eval `echo args$i`="\"$arg\""
140 | fi
141 | i=$((i+1))
142 | done
143 | case $i in
144 | (0) set -- ;;
145 | (1) set -- "$args0" ;;
146 | (2) set -- "$args0" "$args1" ;;
147 | (3) set -- "$args0" "$args1" "$args2" ;;
148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 | esac
155 | fi
156 |
157 | # Escape application args
158 | save () {
159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
160 | echo " "
161 | }
162 | APP_ARGS=$(save "$@")
163 |
164 | # Collect all arguments for the java command, following the shell quoting and substitution rules
165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
166 |
167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
169 | cd "$(dirname "$0")"
170 | fi
171 |
172 | exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/MediaSamples/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS=
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/MediaSamples/screenshot/media_record.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jzmanu/MediaSamples/262587298d4665e3951c693e7a7ee410ccc49c42/MediaSamples/screenshot/media_record.gif
--------------------------------------------------------------------------------
/MediaSamples/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':mylibrary'
2 | include ':app'
3 | rootProject.name = "MediaSamples"
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # MediaSamples
2 |
3 |
4 | #### 主要内容
5 |
6 | `Android` 音视频学习案例,你将从这个 `Samples` 中学到如下内容:
7 |
8 | - [x] `MediaCodec` 同步、异步处理方式
9 | - [x] `MediaCodec` 编码 `Camera2` 数据录制 `MP4`
10 | - [x] `MediaMuxer` 的使用
11 | - [x] `AudioRecord` 采集音频数据,
12 | - [x] 音频数据与视频数据通过 `MediaMuxer` 合成 `MP4`
13 | - [x] OpenGL ES视频渲染及画面矫正
14 | - [ ] `Camera` 帧数据处理,后续补充完善
15 | - [ ] 继续完善和补充
16 |
17 | #### 文章链接
18 |
19 | - [音视频开发基础知识](https://mp.weixin.qq.com/s/pcKIorxMdpUoZN2nNYeqLg)
20 | - [音频帧、视频帧及其同步](https://mp.weixin.qq.com/s/4FRuU92wDm1zfvzgQRnS6w)
21 | - [Camera2、MediaCodec录制mp4](https://mp.weixin.qq.com/s/uCHyY6NlajHrhjI5dHI1ng)
22 | - [Android原生编解码接口MediaCodec详解](https://mp.weixin.qq.com/s/TDgoDfC2K00iH2e9g_mb8Q)
23 | - [音频基础知识](https://mp.weixin.qq.com/s/SOh2BQHpXZGFj2jcvqyzgA)
24 | - [AudioRecord采集音频数据及合成](https://mp.weixin.qq.com/s/VjZiqUvZXBx261NOkxHkGA)
25 | - [OpenGL ES渲染播放视频](https://mp.weixin.qq.com/s/V1VMDappRxs94ZaZMk-Uwg)
26 | - [如何正确编译ijkplayer](https://mp.weixin.qq.com/s/i_c7DbB5H7bE4X13hH83Bw)
27 |
28 |
29 | #### 作者
30 |
31 | 个人微信公众号: **躬行之**
32 |
33 | 
34 |
--------------------------------------------------------------------------------
/ffmpeg-decode-av/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.26)
2 | project(ffmpeg_decode_av C)
3 |
4 | set(CMAKE_C_STANDARD 11)
5 | # 头文件
6 | include_directories(
7 | E:/msys64/home/ffmpeg/build/ffmepg-4.2/include
8 | src/base
9 | src/video
10 | src/audio
11 | src/avio
12 | )
13 | # 库文件
14 | link_directories(E:/msys64/home/ffmpeg/build/ffmepg-4.2/lib)
15 |
16 | add_executable(ffmpeg_decode_av
17 | main.c
18 | src/base/av_base.c
19 | src/audio/audio_sample.c
20 | src/video/video_sample.c
21 | src/avio/avio_audio_sample.c
22 | )
23 |
24 | target_link_libraries(
25 | ffmpeg_decode_av
26 | avcodec
27 | avdevice
28 | avfilter
29 | avformat
30 | avutil
31 | postproc
32 | swresample
33 | swscale
34 | )
35 |
--------------------------------------------------------------------------------
/ffmpeg-decode-av/main.c:
--------------------------------------------------------------------------------
1 | #include <stdio.h>
2 | #include "libavformat/avformat.h"
3 | #include "audio_sample.h"
4 | #include "video_sample.h"
5 | #include "avio_audio_sample.h"
6 |
7 | int main(int argc, char **argv) {
8 | // 从命令行接收参数
9 | if (argc <= 2) {
10 | fprintf(stderr, "Usage: %s