├── .gitignore
├── README.md
├── app
│   ├── .gitignore
│   ├── build.gradle
│   ├── proguard-rules.pro
│   └── src
│       ├── androidTest
│       │   └── java
│       │       └── com
│       │           └── icechn
│       │               └── videorecorder
│       │                   └── ExampleInstrumentedTest.java
│       ├── main
│       │   ├── AndroidManifest.xml
│       │   ├── assets
│       │   │   ├── drawimage_fragment.sh
│       │   │   ├── drawimage_vertex.sh
│       │   │   ├── gaussian_fragment.sh
│       │   │   ├── gaussian_vertex.sh
│       │   │   ├── mirror_fragment.sh
│       │   │   ├── mirror_vertex.sh
│       │   │   ├── skinblur_fragment.sh
│       │   │   ├── split_fragment.sh
│       │   │   └── split_mirror_fragment.sh
│       │   ├── java
│       │   │   └── com
│       │   │       └── icechn
│       │   │           └── videorecorder
│       │   │               ├── client
│       │   │               │   ├── AudioClient.java
│       │   │               │   ├── CallbackDelivery.java
│       │   │               │   ├── RecorderClient.java
│       │   │               │   └── VideoClient.java
│       │   │               ├── core
│       │   │               │   ├── CameraHelper.java
│       │   │               │   ├── GLHelper.java
│       │   │               │   ├── MediaCodecHelper.java
│       │   │               │   ├── Packager.java
│       │   │               │   ├── audio
│       │   │               │   │   ├── AudioCore.java
│       │   │               │   │   └── AudioSenderThread.java
│       │   │               │   ├── listener
│       │   │               │   │   └── IVideoChange.java
│       │   │               │   └── video
│       │   │               │       ├── IVideoCore.java
│       │   │               │       ├── VideoCore.java
│       │   │               │       └── VideoSenderThread.java
│       │   │               ├── demo
│       │   │               │   ├── App.java
│       │   │               │   └── MainActivity.java
│       │   │               ├── encoder
│       │   │               │   └── MediaMuxerWrapper.java
│       │   │               ├── filter
│       │   │               │   ├── gpuimage
│       │   │               │   │   ├── GPUImageColorInvertFilter.java
│       │   │               │   │   ├── GPUImageCompatibleFilter.java
│       │   │               │   │   └── GPUImageFilter.java
│       │   │               │   ├── hardvideofilter
│       │   │               │   │   ├── BaseHardVideoFilter.java
│       │   │               │   │   ├── GaussianBlurHardFilter.java
│       │   │               │   │   ├── HardVideoGroupFilter.java
│       │   │               │   │   ├── MirrorHardVideoFilter.java
│       │   │               │   │   ├── OriginalHardVideoFilter.java
│       │   │               │   │   ├── SkinBlurHardVideoFilter.java
│       │   │               │   │   ├── SplitHardVideoFilter.java
│       │   │               │   │   ├── SplitMirrorHardVideoFilter.java
│       │   │               │   │   └── WhiteningHardVideoFilter.java
│       │   │               │   ├── image
│       │   │               │   │   ├── BaseDrawImageFilter.java
│       │   │               │   │   ├── DrawMultiImageFilter.java
│       │   │               │   │   ├── ImageDrawConstants.java
│       │   │               │   │   └── ImageTexture.java
│       │   │               │   └── softaudiofilter
│       │   │               │       ├── BaseSoftAudioFilter.java
│       │   │               │       └── SetVolumeAudioFilter.java
│       │   │               ├── model
│       │   │               │   ├── AudioBuff.java
│       │   │               │   ├── MediaCodecGLWapper.java
│       │   │               │   ├── MediaConfig.java
│       │   │               │   ├── MediaMakerConfig.java
│       │   │               │   ├── OffScreenGLWapper.java
│       │   │               │   ├── RecordConfig.java
│       │   │               │   ├── ScreenGLWapper.java
│       │   │               │   └── Size.java
│       │   │               ├── tools
│       │   │               │   ├── BitmapUtils.java
│       │   │               │   ├── ByteArrayTools.java
│       │   │               │   ├── GLESTools.java
│       │   │               │   ├── TimeHandler.java
│       │   │               │   └── VideoSplicer.java
│       │   │               └── ui
│       │   │                   ├── AspectTextureView.java
│       │   │                   ├── RecordingActivity.java
│       │   │                   └── RecordingActivity2.java
│       │   └── res
│       │       ├── drawable-xhdpi
│       │       │   ├── nose_0.png
│       │       │   ├── t.png
│       │       │   └── teeth_0.png
│       │       ├── layout
│       │       │   ├── activity_main.xml
│       │       │   ├── activity_streaming.xml
│       │       │   └── activity_video_record.xml
│       │       ├── mipmap-hdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-mdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xhdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xxhdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xxxhdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       └── values
│       │           ├── colors.xml
│       │           ├── dimens.xml
│       │           ├── strings.xml
│       │           └── styles.xml
│       └── test
│           └── java
│               └── com
│                   └── icechn
│                       └── videorecorder
│                           └── ExampleUnitTest.java
├── build.gradle
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle

/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/workspace.xml
5 | /.idea/libraries
6 | .DS_Store
7 | /build
8 | /captures
9 | .externalNativeBuild
10 | /idea
11 | /.idea
12 | /.idea/*
13 | /app/build
14 | idea
15 | idea/*
16 | *.iml
17 | /app/*.iml
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # VideoRecorderWithOpenGL
2 | Android video recording, using OpenGL.
3 | 
4 | Two articles related to this demo give an introduction:
5 | (http://www.jianshu.com/p/113e4eedb752)
6 | 
7 | Supports segmented recording with pause/resume and rollback deletion of recorded segments (https://www.jianshu.com/p/6d2d4f00b43c)
8 | 
9 | Reference: https://github.com/lakeinchina/librestreaming
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 | /idea
3 | /.idea
4 | /.idea/*
5 | /app/build
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 | 
3 | android {
4 |     compileSdkVersion 26
5 |     buildToolsVersion "26.0.0"
6 |     defaultConfig {
7 |         applicationId "com.icechn.videorecorder"
8 |         minSdkVersion 18
9 |         targetSdkVersion 26
10 |         versionCode 1
11 |         versionName "1.0"
12 |         testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
13 | 
14 |         ndk {
15 |             abiFilters "armeabi-v7a"
16 |         }
17 |     }
18 |     buildTypes {
19 |         release {
20 |             minifyEnabled false
21 |             proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
22 |         }
23 |     }
24 | 
25 |     sourceSets {
26 |         main {
27 |             jniLibs.srcDirs = ['libs', 'src/main/libs', 'src/main/jniLibs']
28 |         }
29 |     }
30 | }
31 | 
32 | dependencies {
33 |     compile fileTree(dir: 'libs', include: ['*.jar'])
34 |     androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
35 |         exclude group: 'com.android.support', module: 'support-annotations'
36 |     })
37 |     compile 'com.android.support:appcompat-v7:26.+'
38 |     compile 'com.android.support:design:26.+'
39 |     testCompile 'junit:junit:4.12'
40 | }
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in D:\work\android\android-sdk-windows/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | #    http://developer.android.com/guide/developing/tools/proguard.html
9 | 
10 | # Add any project specific keep options here:
11 | 
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | #   public *;
17 | #}
18 | 
19 | # Uncomment this to preserve the line number information for
20 | # debugging stack traces.
21 | #-keepattributes SourceFile,LineNumberTable
22 | 
23 | # If you keep the line number information, uncomment this to
24 | # hide the original source file name.
25 | #-renamesourcefileattribute SourceFile
26 | 
--------------------------------------------------------------------------------
/app/src/androidTest/java/com/icechn/videorecorder/ExampleInstrumentedTest.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder;
2 | 
3 | import android.content.Context;
4 | import android.support.test.InstrumentationRegistry;
5 | import android.support.test.runner.AndroidJUnit4;
6 | 
7 | import org.junit.Test;
8 | import org.junit.runner.RunWith;
9 | 
10 | import static org.junit.Assert.*;
11 | 
12 | /**
13 |  * Instrumentation test, which will execute on an Android device.
14 |  *
15 |  * @see Testing documentation
16 |  */
17 | @RunWith(AndroidJUnit4.class)
18 | public class ExampleInstrumentedTest {
19 |     @Test
20 |     public void useAppContext() throws Exception {
21 |         // Context of the app under test.
22 |         Context appContext = InstrumentationRegistry.getTargetContext();
23 | 
24 |         assertEquals("com.icechn.videorecorder", appContext.getPackageName());
25 |     }
26 | }
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
(XML markup lost in extraction; only empty numbered lines 1-49 survive)
--------------------------------------------------------------------------------
/app/src/main/assets/drawimage_fragment.sh:
--------------------------------------------------------------------------------
1 | precision highp float;
2 | varying highp vec2 vCamTextureCoord;
3 | uniform sampler2D uCamTexture;
4 | uniform sampler2D uImageTexture;
5 | uniform vec4 imageRect;
6 | uniform float imageAngel;
7 | 
8 | vec2 rotate(vec2 p0, vec2 center, float angel)
9 | {
10 |     float x2 = (p0.x - center.x)*cos(angel) - (p0.y - center.y)*sin(angel) + center.x ;
11 |     float y2 = (p0.x - center.x)*sin(angel) + (p0.y - center.y)*cos(angel) + center.y ;
12 |     return vec2(x2, y2);
13 | }
14 | void main(){
15 |     lowp vec4 c1 = texture2D(uCamTexture, vCamTextureCoord);
16 |     lowp vec2 vCamTextureCoord2 = vec2(vCamTextureCoord.x,1.0-vCamTextureCoord.y);
17 |     vec2 point = vCamTextureCoord2;
18 |     if(imageAngel != 0.0)
19 |     {
20 |         vec2 center = vec2((imageRect.r+imageRect.b)/2.0, (imageRect.g+imageRect.a)/2.0);
21 |         vec2 p2 = rotate(vCamTextureCoord2, center, -imageAngel);
22 |         point = p2;
23 |     }
24 |     if(point.x>imageRect.r && point.x<imageRect.b && point.y>imageRect.g && point.y<imageRect.a){
(the rest of this file, plus drawimage_vertex.sh, gaussian_fragment.sh, gaussian_vertex.sh and the head of mirror_fragment.sh, was lost in extraction; only fragments of an "ignoreRect" bounds check survive)
--------------------------------------------------------------------------------
/app/src/main/assets/mirror_fragment.sh:
--------------------------------------------------------------------------------
(lines 1-8 lost in extraction)
9 |     if (vCamTextureCoord.x > 0.5)
10 |     {
11 |         c1 = texture2D(uCamTexture, vec2(1.0 - vCamTextureCoord.x, vCamTextureCoord.y));
12 |     }
13 | 
14 |     gl_FragColor = c1;
15 | }
--------------------------------------------------------------------------------
/app/src/main/assets/mirror_vertex.sh:
--------------------------------------------------------------------------------
1 | attribute vec4 aCamPosition;
2 | attribute vec2 aCamTextureCoord;
3 | varying vec2 vCamTextureCoord;
4 | 
5 | void main()
6 | {
7 |     gl_Position= aCamPosition;
8 |     vCamTextureCoord = aCamTextureCoord;
9 | }
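The skin-blur fragment shader that follows samples a 5x5 neighborhood around each texel, stepping by two uniforms, xStep and yStep (one texel in x and y). A minimal host-side sketch of how such uniforms are typically uploaded with GLES20; the class name and the idea of deriving the steps from the texture size are assumptions here, only the uniform names come from the shader:

import android.opengl.GLES20;

public final class SkinBlurUniforms {
    private SkinBlurUniforms() {}

    // Uploads the texel-step uniforms the skin-blur shader expects.
    // One step is 1/width (or 1/height) of the sampled texture, so the
    // shader's +-2-step offsets cover a 5x5 texel neighborhood.
    public static void upload(int program, int texWidth, int texHeight) {
        int xStepLoc = GLES20.glGetUniformLocation(program, "xStep");
        int yStepLoc = GLES20.glGetUniformLocation(program, "yStep");
        GLES20.glUseProgram(program);
        GLES20.glUniform1f(xStepLoc, 1.0f / texWidth);
        GLES20.glUniform1f(yStepLoc, 1.0f / texHeight);
    }
}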
--------------------------------------------------------------------------------
/app/src/main/assets/skinblur_fragment.sh:
--------------------------------------------------------------------------------
1 | precision highp float;
2 | uniform sampler2D uCamTexture;
3 | varying highp vec2 vCamTextureCoord;
4 | const float maxdelta = 0.08;
5 | uniform highp float xStep;
6 | uniform highp float yStep;
7 | const highp mat3 rgb2yuv = mat3(0.299,-0.147,0.615,0.587,-0.289,-0.515,0.114,0.436,-0.1);
8 | const highp mat3 gaussianMap = mat3(0.142,0.131,0.104,0.131,0.122,0.096,0.104,0.096,0.075);
9 | void main(){
10 |     vec4 color = texture2D(uCamTexture,vCamTextureCoord);
11 |     vec3 yuv = rgb2yuv*color.rgb;
12 |     if(yuv.g<-0.225 || yuv.g>0.0 || yuv.b<0.022 || yuv.b>0.206){
13 |         gl_FragColor = color;
14 |         return;
15 |     }
16 |     float xfS = vCamTextureCoord.x - xStep*2.0;
17 |     float yf = vCamTextureCoord.y - yStep*2.0;
18 |     int x,y;
19 |     float xf=xfS;
20 |     vec4 sum=vec4(0.0,0.0,0.0,0.0);
21 |     vec4 fact=vec4(0.0,0.0,0.0,0.0);
22 |     vec4 tmp;
23 |     vec4 color2;
24 |     float gauss;
25 |     for(y=-2;y<3;y+=1){
26 |         if (yf < 0.0 || yf > 1.0){
27 |             yf+=yStep;
28 |             continue;
29 |         }
30 |         for(x=-2;x<3;x+=1){
31 |             if (xf < 0.0 || xf > 1.0){
32 |                 xf+=xStep;
33 |                 continue;
34 |             }
35 |             color2 = texture2D(uCamTexture,vec2(xf,yf));
36 |             tmp = color - color2;
37 |             gauss = gaussianMap[x<0?-x:x][y<0?-y:y];
38 |             if (abs(tmp.r) < maxdelta){
39 |                 sum.r += (color2.r*gauss);
40 |                 fact.r +=gauss;
41 |             }
42 |             if (abs(tmp.g) < maxdelta){
43 |                 sum.g += color2.g*gauss;
44 |                 fact.g +=gauss;
45 |             }
46 |             if (abs(tmp.b) < maxdelta){
47 |                 sum.b += color2.b*gauss;
48 |                 fact.b +=gauss;
49 |             }
50 |             xf+=xStep;
51 |         }
52 |         yf+=yStep;
53 |         xf=xfS;
54 |     }
55 |     vec4 res = sum/fact;
56 |     if(fact.r<1.0){
57 |         tmp.r = color.r;
58 |     }else{
59 |         tmp.r = res.r;
60 |     }
61 |     if(fact.g<1.0){
62 |         tmp.g = color.g;
63 |     }else{
64 |         tmp.g = res.g;
65 |     }
66 |     if(fact.b<1.0){
67 |         tmp.b = color.b;
68 |     }else{
69 |         tmp.b = res.b;
70 |     }
71 |     gl_FragColor = vec4(tmp.rgb,1.0);
72 | }
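The split filter that follows tiles the frame into an NUM x NUM grid by mapping each grid cell back onto the full source texture. A small worked sketch of the same arithmetic in Java (a hypothetical helper mirroring the shader, not part of the project): for NUM = 2, a coordinate of 0.75 falls in cell index floor(0.75/0.5) = 1, that cell starts at 0.5, and the remapped coordinate is (0.75 - 0.5)/0.5 = 0.5.

public final class SplitMapping {
    private SplitMapping() {}

    // Mirrors split_fragment.sh: maps a full-frame texture coordinate (0..1)
    // to the coordinate used to sample the source inside its grid cell.
    public static float remap(float coord, float num) {
        float subLen = 1.0f / num;                      // width of one cell
        float idx = (float) Math.floor(coord / subLen); // which cell the coord falls in
        float cellStart = idx * subLen;                 // left/bottom edge of that cell
        return (coord - cellStart) / subLen;            // position inside the cell, rescaled to 0..1
    }

    public static void main(String[] args) {
        // 2x2 grid: 0.75 sits in the second cell and remaps to its middle.
        System.out.println(remap(0.75f, 2.0f)); // prints 0.5
    }
}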
--------------------------------------------------------------------------------
/app/src/main/assets/split_fragment.sh:
--------------------------------------------------------------------------------
1 | #ifdef SPLIT_SQUARE_BASE
2 | #define NUM SPLIT_SQUARE_BASE
3 | #else
4 | #define NUM 2.
5 | #endif
6 | 
7 | precision highp float;
8 | varying highp vec2 vCamTextureCoord;
9 | uniform sampler2D uCamTexture;
10 | 
11 | void main(){
12 |     float subLen = 1.0 / NUM;
13 | 
14 |     float xIdx = floor(vCamTextureCoord.x / subLen);
15 |     float yIdx = floor(vCamTextureCoord.y / subLen);
16 | 
17 |     float rLeft = xIdx * subLen;
18 |     float rBottom = yIdx * subLen;
19 | 
20 |     float x = (vCamTextureCoord.x - rLeft) / subLen;
21 |     float y = (vCamTextureCoord.y - rBottom) / subLen;
22 | 
23 |     gl_FragColor = texture2D(uCamTexture, vec2(x, y));
24 | }
--------------------------------------------------------------------------------
/app/src/main/assets/split_mirror_fragment.sh:
--------------------------------------------------------------------------------
1 | precision highp float;
2 | varying highp vec2 vCamTextureCoord;
3 | uniform sampler2D uCamTexture;
4 | 
5 | void main(){
6 |     float x;
7 |     float y;
8 |     if (vCamTextureCoord.x <= 0.5)
9 |     {
10 |         x = vCamTextureCoord.x / 0.5;
11 |         if (vCamTextureCoord.y > 0.5)
12 |         {
13 |             y = (vCamTextureCoord.y - 0.5) / 0.5;
14 |         }
15 |         else
16 |         {
17 |             y = (0.5 - vCamTextureCoord.y) / 0.5;
18 |         }
19 |     }
20 |     else
21 |     {
22 |         x = (1.0 - vCamTextureCoord.x) / 0.5;
23 |         if (vCamTextureCoord.y > 0.5)
24 |         {
25 |             y = (vCamTextureCoord.y - 0.5) / 0.5;
26 |         }
27 |         else
28 |         {
29 |             y = (0.5 - vCamTextureCoord.y) / 0.5;
30 |         }
31 |     }
32 | 
33 |     gl_FragColor = texture2D(uCamTexture, vec2(x, y));
34 | }
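AudioClient (below) slices microphone input into 100 ms chunks: with the 44.1 kHz mono 16-bit configuration set in its prepare(), one slice is 44100 / 10 = 4410 samples, and the byte buffer is twice that, 8820 bytes. A small sketch of the same arithmetic (the class name is hypothetical; the constants come from the code below):

public final class AudioSliceMath {
    private AudioSliceMath() {}

    public static void main(String[] args) {
        int sampleRate = 44100;             // mediacodecAACSampleRate in prepare()
        int sliceSamples = sampleRate / 10; // audioRecoderSliceSize: one 100 ms slice
        int bytesPerSample = 2;             // ENCODING_PCM_16BIT, mono
        int bufferBytes = sliceSamples * bytesPerSample; // audioRecoderBufferSize

        System.out.println(sliceSamples); // 4410 samples per slice
        System.out.println(bufferBytes);  // 8820 bytes, matching mediacodecAACMaxInputSize
    }
}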
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/client/AudioClient.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.client;
2 | 
3 | import android.media.AudioFormat;
4 | import android.media.AudioRecord;
5 | import android.media.MediaRecorder;
6 | import android.util.Log;
7 | 
8 | import com.icechn.videorecorder.core.audio.AudioCore;
9 | import com.icechn.videorecorder.encoder.MediaMuxerWrapper;
10 | import com.icechn.videorecorder.filter.softaudiofilter.BaseSoftAudioFilter;
11 | import com.icechn.videorecorder.model.MediaMakerConfig;
12 | import com.icechn.videorecorder.model.RecordConfig;
13 | 
14 | /**
15 |  * Created by lake on 16-5-24.
16 |  */
17 | public class AudioClient {
18 |     MediaMakerConfig mediaMakerConfig;
19 |     private final Object syncOp = new Object();
20 |     private AudioRecordThread audioRecordThread;
21 |     private AudioRecord audioRecord;
22 |     private byte[] audioBuffer;
23 |     private AudioCore softAudioCore;
24 | 
25 |     public AudioClient(MediaMakerConfig parameters) {
26 |         mediaMakerConfig = parameters;
27 |     }
28 | 
29 |     public boolean prepare(RecordConfig recordConfig) {
30 |         synchronized (syncOp) {
31 |             mediaMakerConfig.audioBufferQueueNum = 5;
32 |             softAudioCore = new AudioCore(mediaMakerConfig);
33 |             if (!softAudioCore.prepare(recordConfig)) {
34 |                 Log.e("", "AudioClient prepare failed");
35 |                 return false;
36 |             }
37 |             mediaMakerConfig.audioRecoderFormat = AudioFormat.ENCODING_PCM_16BIT;
38 |             mediaMakerConfig.audioRecoderChannelConfig = AudioFormat.CHANNEL_IN_MONO;
39 |             mediaMakerConfig.audioRecoderSliceSize = mediaMakerConfig.mediacodecAACSampleRate / 10;
40 |             mediaMakerConfig.audioRecoderBufferSize = mediaMakerConfig.audioRecoderSliceSize * 2;
41 |             mediaMakerConfig.audioRecoderSource = MediaRecorder.AudioSource.DEFAULT;
42 |             mediaMakerConfig.audioRecoderSampleRate = mediaMakerConfig.mediacodecAACSampleRate;
43 |             // propagate AudioRecord setup failures instead of always returning true
44 |             return prepareAudio();
45 |         }
46 |     }
47 | 
48 |     public boolean startRecording(MediaMuxerWrapper muxer) {
49 |         synchronized (syncOp) {
50 |             softAudioCore.startRecording(muxer);
51 |             audioRecord.startRecording();
52 |             audioRecordThread = new AudioRecordThread();
53 |             audioRecordThread.start();
54 |             Log.d("","AudioClient,start()");
55 |             return true;
56 |         }
57 |     }
58 | 
59 |     public boolean stopRecording() {
60 |         synchronized (syncOp) {
61 |             if (audioRecordThread != null) {
62 |                 audioRecordThread.quit();
63 |                 try {
64 |                     audioRecordThread.join();
65 |                 } catch (InterruptedException ignored) {
66 |                 }
67 |                 audioRecordThread = null;
68 |             }
69 |             softAudioCore.stop();
70 |             audioRecord.stop();
71 |             return true;
72 |         }
73 |     }
74 | 
75 |     public boolean destroy() {
76 |         synchronized (syncOp) {
77 |             audioRecord.release();
78 |             return true;
79 |         }
80 |     }
81 |     public void setSoftAudioFilter(BaseSoftAudioFilter baseSoftAudioFilter) {
82 |         softAudioCore.setAudioFilter(baseSoftAudioFilter);
83 |     }
84 |     public BaseSoftAudioFilter acquireSoftAudioFilter() {
85 |         return softAudioCore.acquireAudioFilter();
86 |     }
87 | 
88 |     public void releaseSoftAudioFilter() {
89 |         softAudioCore.releaseAudioFilter();
90 |     }
91 | 
92 |     private boolean prepareAudio() {
93 |         int minBufferSize = AudioRecord.getMinBufferSize(mediaMakerConfig.audioRecoderSampleRate,
94 |                 mediaMakerConfig.audioRecoderChannelConfig,
95 |                 mediaMakerConfig.audioRecoderFormat);
96 |         audioRecord = new AudioRecord(mediaMakerConfig.audioRecoderSource,
97 |                 mediaMakerConfig.audioRecoderSampleRate,
98 |                 mediaMakerConfig.audioRecoderChannelConfig,
99 |                 mediaMakerConfig.audioRecoderFormat,
100 |                 minBufferSize * 5);
101 |         audioBuffer = new byte[mediaMakerConfig.audioRecoderBufferSize];
102 |         if (AudioRecord.STATE_INITIALIZED != audioRecord.getState()) {
103 |             Log.e("","audioRecord.getState()!=AudioRecord.STATE_INITIALIZED!");
104 |             return false;
105 |         }
106 |         if (AudioRecord.SUCCESS != audioRecord.setPositionNotificationPeriod(mediaMakerConfig.audioRecoderSliceSize)) {
107 |             Log.e("","AudioRecord.SUCCESS != audioRecord.setPositionNotificationPeriod(" + mediaMakerConfig.audioRecoderSliceSize + ")");
108 |             return false;
109 |         }
110 |         return true;
111 |     }
112 | 
113 |     class AudioRecordThread extends Thread {
114 |         private boolean isRunning = true;
115 | 
116 |         AudioRecordThread() {
117 | isRunning = true; 118 | } 119 | 120 | public void quit() { 121 | isRunning = false; 122 | } 123 | 124 | @Override 125 | public void run() { 126 | Log.d("","AudioRecordThread,tid=" + Thread.currentThread().getId()); 127 | while (isRunning) { 128 | int size = audioRecord.read(audioBuffer, 0, audioBuffer.length); 129 | if (isRunning && softAudioCore != null && size > 0) { 130 | softAudioCore.queueAudio(audioBuffer); 131 | } 132 | } 133 | } 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/client/CallbackDelivery.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.client; 2 | 3 | import android.os.Handler; 4 | import android.os.Looper; 5 | 6 | import java.util.concurrent.Executor; 7 | 8 | /** 9 | * Created by lake on 16-4-11. 10 | */ 11 | public class CallbackDelivery { 12 | static private CallbackDelivery instance; 13 | private final Executor mCallbackPoster; 14 | private final Handler handler = new Handler(Looper.getMainLooper()); 15 | 16 | public static CallbackDelivery i() { 17 | return instance == null ? instance = new CallbackDelivery() : instance; 18 | } 19 | 20 | private CallbackDelivery() { 21 | mCallbackPoster = new Executor() { 22 | @Override 23 | public void execute(Runnable command) { 24 | handler.post(command); 25 | } 26 | }; 27 | } 28 | 29 | public void post(Runnable runnable) { 30 | mCallbackPoster.execute(runnable); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/client/RecorderClient.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.client; 2 | 3 | 4 | import android.content.Context; 5 | import android.graphics.SurfaceTexture; 6 | import android.util.Log; 7 | 8 | import com.icechn.videorecorder.core.listener.IVideoChange; 9 | import com.icechn.videorecorder.encoder.MediaMuxerWrapper; 10 | import com.icechn.videorecorder.filter.hardvideofilter.BaseHardVideoFilter; 11 | import com.icechn.videorecorder.filter.softaudiofilter.BaseSoftAudioFilter; 12 | import com.icechn.videorecorder.model.MediaMakerConfig; 13 | import com.icechn.videorecorder.model.RecordConfig; 14 | import com.icechn.videorecorder.model.Size; 15 | 16 | import java.io.IOException; 17 | 18 | public class RecorderClient { 19 | private VideoClient videoClient; 20 | private AudioClient audioClient; 21 | private final Object SyncOp; 22 | private MediaMakerConfig mediaMakerConfig; 23 | 24 | public RecorderClient() { 25 | SyncOp = new Object(); 26 | mediaMakerConfig = new MediaMakerConfig(); 27 | CallbackDelivery.i(); 28 | } 29 | 30 | /** 31 | * prepare to stream 32 | * 33 | * @param config config 34 | * @return true if prepare success 35 | */ 36 | public boolean prepare(Context context, RecordConfig config) { 37 | synchronized (SyncOp) { 38 | try { 39 | checkDirection(config); 40 | } catch (RuntimeException e) { 41 | e.printStackTrace(); 42 | return false; 43 | } 44 | mediaMakerConfig.printDetailMsg = config.isPrintDetailMsg(); 45 | mediaMakerConfig.isSquare = config.isSquare(); 46 | mediaMakerConfig.saveVideoEnable = config.isSaveVideoEnable(); 47 | mediaMakerConfig.saveVideoPath = config.getSaveVideoPath(); 48 | videoClient = new VideoClient(context, mediaMakerConfig); 49 | audioClient = new AudioClient(mediaMakerConfig); 50 | if (!videoClient.prepare(config)) { 51 | 
Log.d("","!!!!!videoClient.prepare()failed"); 52 | Log.d("", mediaMakerConfig.toString()); 53 | return false; 54 | } 55 | if (!audioClient.prepare(config)) { 56 | Log.d("","!!!!!audioClient.prepare()failed"); 57 | Log.d("", mediaMakerConfig.toString()); 58 | return false; 59 | } 60 | mediaMakerConfig.done = true; 61 | Log.d("","===INFO===coreParametersReady:"); 62 | Log.d("", mediaMakerConfig.toString()); 63 | return true; 64 | } 65 | } 66 | 67 | 68 | public void updatePath(String path) { 69 | mediaMakerConfig.saveVideoPath = path; 70 | } 71 | public String getFilePath() { 72 | return mediaMakerConfig.saveVideoEnable ? mediaMakerConfig.saveVideoPath : null; 73 | } 74 | 75 | /** 76 | * start recording 77 | */ 78 | public void startRecording() { 79 | synchronized (SyncOp) { 80 | prepareMuxer(); 81 | videoClient.startRecording(mMuxer); 82 | audioClient.startRecording(mMuxer); 83 | Log.d("","RecorderClient,startRecording()"); 84 | } 85 | } 86 | 87 | /** 88 | * stop recording 89 | */ 90 | public void stopRecording() { 91 | synchronized (SyncOp) { 92 | videoClient.stopRecording(); 93 | audioClient.stopRecording(); 94 | Log.d("","RecorderClient,stopRecording()"); 95 | } 96 | } 97 | 98 | /** 99 | * clean up 100 | */ 101 | public void destroy() { 102 | synchronized (SyncOp) { 103 | videoClient.destroy(); 104 | audioClient.destroy(); 105 | videoClient = null; 106 | audioClient = null; 107 | Log.d("","RecorderClient,destroy()"); 108 | } 109 | } 110 | 111 | /** 112 | * call it AFTER {@link #prepare} 113 | * 114 | * @param surfaceTexture to rendering preview 115 | */ 116 | public void startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight) { 117 | videoClient.startPreview(surfaceTexture, visualWidth, visualHeight); 118 | Log.d("","RecorderClient,startPreview()"); 119 | } 120 | 121 | public void updatePreview(int visualWidth, int visualHeight) { 122 | videoClient.updatePreview(visualWidth, visualHeight); 123 | Log.d("","RecorderClient,updatePreview()"); 124 | } 125 | 126 | /** 127 | * @param releaseTexture true if you won`t reuse this surfaceTexture later 128 | */ 129 | public void stopPreview(boolean releaseTexture) { 130 | videoClient.stopPreview(releaseTexture); 131 | Log.d("","RecorderClient,stopPreview()"); 132 | } 133 | 134 | /** 135 | * change camera on running.
136 |      */
137 |     public boolean swapCamera() {
138 |         synchronized (SyncOp) {
139 |             Log.d("","RecorderClient,swapCamera()");
140 |             return videoClient.swapCamera();
141 |         }
142 |     }
143 | 
144 |     public boolean isFrontCamera() {
145 |         return videoClient.isFrontCamera();
146 |     }
147 | 
148 |     /**
149 |      * get the real video size; call after prepare()
150 |      *
151 |      * @return the actual recorded video size
152 |      */
153 |     public Size getVideoSize() {
154 |         return new Size(mediaMakerConfig.videoWidth, mediaMakerConfig.videoHeight);
155 |     }
156 | 
157 |     /**
158 |      * only for hard filter mode.
159 |      * set video filter.
160 |      * can be called repeatedly.
161 |      *
162 |      * @param baseHardVideoFilter video filter to apply
163 |      */
164 |     public void setHardVideoFilter(BaseHardVideoFilter baseHardVideoFilter) {
165 |         videoClient.setHardVideoFilter(baseHardVideoFilter);
166 |     }
167 | 
168 |     /**
169 |      * set audio filter.
170 |      * can be called repeatedly.
171 | * 172 | * @param baseSoftAudioFilter audiofilter to apply 173 | */ 174 | public void setSoftAudioFilter(BaseSoftAudioFilter baseSoftAudioFilter) { 175 | audioClient.setSoftAudioFilter(baseSoftAudioFilter); 176 | } 177 | 178 | /** 179 | * listener for video size change 180 | * 181 | * @param videoChangeListener 182 | */ 183 | public void setVideoChangeListener(IVideoChange videoChangeListener) { 184 | videoClient.setVideoChangeListener(videoChangeListener); 185 | } 186 | 187 | /** 188 | * toggle flash light 189 | * 190 | * @return true if operation success 191 | */ 192 | public boolean toggleFlashLight() { 193 | return videoClient.toggleFlashLight(); 194 | } 195 | public boolean toggleFlashLight(boolean on) { 196 | return videoClient.toggleFlashLight(on); 197 | } 198 | 199 | /** 200 | * =====================PRIVATE================= 201 | **/ 202 | private void checkDirection(RecordConfig config) { 203 | int frontFlag = config.getFrontCameraDirectionMode(); 204 | int backFlag = config.getBackCameraDirectionMode(); 205 | int fbit = 0; 206 | int bbit = 0; 207 | //check or set default value 208 | if ((frontFlag >> 4) == 0) { 209 | frontFlag |= MediaMakerConfig.FLAG_DIRECTION_ROATATION_0; 210 | } 211 | if ((backFlag >> 4) == 0) { 212 | backFlag |= MediaMakerConfig.FLAG_DIRECTION_ROATATION_0; 213 | } 214 | //make sure only one direction 215 | for (int i = 4; i <= 8; ++i) { 216 | if (((frontFlag >> i) & 0x1) == 1) { 217 | fbit++; 218 | } 219 | if (((backFlag >> i) & 0x1) == 1) { 220 | bbit++; 221 | } 222 | } 223 | if (fbit != 1 || bbit != 1) { 224 | throw new RuntimeException("invalid direction rotation flag:frontFlagNum=" + fbit + ",backFlagNum=" + bbit); 225 | } 226 | if (((frontFlag & MediaMakerConfig.FLAG_DIRECTION_ROATATION_0) != 0) || ((frontFlag & MediaMakerConfig.FLAG_DIRECTION_ROATATION_180) != 0)) { 227 | fbit = 0; 228 | } else { 229 | fbit = 1; 230 | } 231 | if (((backFlag & MediaMakerConfig.FLAG_DIRECTION_ROATATION_0) != 0) || ((backFlag & MediaMakerConfig.FLAG_DIRECTION_ROATATION_180) != 0)) { 232 | bbit = 0; 233 | } else { 234 | bbit = 1; 235 | } 236 | if (bbit != fbit) { 237 | if (bbit == 0) { 238 | throw new RuntimeException("invalid direction rotation flag:back camera is landscape but front camera is portrait"); 239 | } else { 240 | throw new RuntimeException("invalid direction rotation flag:back camera is portrait but front camera is landscape"); 241 | } 242 | } 243 | if (fbit == 1) { 244 | mediaMakerConfig.isPortrait = true; 245 | } else { 246 | mediaMakerConfig.isPortrait = false; 247 | } 248 | mediaMakerConfig.backCameraDirectionMode = backFlag; 249 | mediaMakerConfig.frontCameraDirectionMode = frontFlag; 250 | } 251 | 252 | private MediaMuxerWrapper mMuxer = null; 253 | 254 | private void prepareMuxer() { 255 | if (!mediaMakerConfig.saveVideoEnable) { 256 | return; 257 | } 258 | try { 259 | mMuxer = new MediaMuxerWrapper(mediaMakerConfig.saveVideoPath); 260 | mMuxer.setTrackCount(2); 261 | } catch (IOException e) { 262 | e.printStackTrace(); 263 | } 264 | } 265 | } 266 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/core/CameraHelper.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.core; 2 | 3 | import android.graphics.ImageFormat; 4 | import android.hardware.Camera; 5 | import android.util.Log; 6 | 7 | import com.icechn.videorecorder.model.MediaMakerConfig; 8 | import com.icechn.videorecorder.model.Size; 9 | 10 | 
import java.util.Collections;
11 | import java.util.Comparator;
12 | import java.util.LinkedList;
13 | import java.util.List;
14 | 
15 | /**
16 |  * Created by lake on 16-3-16.
17 |  */
18 | public class CameraHelper {
19 |     public static int targetFps = 30000;
20 |     private static int[] supportedSrcVideoFrameColorType = new int[]{ImageFormat.NV21, ImageFormat.YV12};
21 | 
22 |     public static boolean configCamera(Camera camera, MediaMakerConfig config) {
23 |         Camera.Parameters parameters = camera.getParameters();
24 |         parameters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
25 |         List<String> focusModes = parameters.getSupportedFocusModes();
26 |         if (focusModes != null) {
27 |             if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
28 |                 parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
29 |             } else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
30 |                 parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
31 |             } else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_FIXED)) {
32 |                 parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
33 |             }
34 |         }
35 |         parameters.setPreviewSize(config.previewVideoWidth, config.previewVideoHeight);
36 |         parameters.setPreviewFpsRange(config.previewMinFps, config.previewMaxFps);
37 |         try {
38 |             camera.setParameters(parameters);
39 |         } catch (Exception e) {
40 |             camera.release();
41 |             return false;
42 |         }
43 |         return true;
44 |     }
45 | 
46 |     public static void selectCameraFpsRange(Camera.Parameters parameters, MediaMakerConfig config) {
47 |         List<int[]> fpsRanges = parameters.getSupportedPreviewFpsRange();
48 |         Collections.sort(fpsRanges, new Comparator<int[]>() {
49 |             @Override
50 |             public int compare(int[] lhs, int[] rhs) {
51 |                 int r = Math.abs(lhs[0] - targetFps) + Math.abs(lhs[1] - targetFps);
52 |                 int l = Math.abs(rhs[0] - targetFps) + Math.abs(rhs[1] - targetFps);
53 |                 if (r > l) {
54 |                     return 1;
55 |                 } else if (r < l) {
56 |                     return -1;
57 |                 } else {
58 |                     return 0;
59 |                 }
60 |             }
61 |         });
62 |         config.previewMinFps = fpsRanges.get(0)[0];
63 |         config.previewMaxFps = fpsRanges.get(0)[1];
64 |     }
65 | 
66 |     public static void selectCameraPreviewWH(Camera.Parameters parameters, MediaMakerConfig config, Size targetSize) {
67 |         List<Camera.Size> previewsSizes = parameters.getSupportedPreviewSizes();
68 |         Collections.sort(previewsSizes, new Comparator<Camera.Size>() {
69 |             @Override
70 |             public int compare(Camera.Size lhs, Camera.Size rhs) {
71 |                 if ((lhs.width * lhs.height) > (rhs.width * rhs.height)) {
72 |                     return 1;
73 |                 } else {
74 |                     return -1;
75 |                 }
76 |             }
77 |         });
78 |         for (Camera.Size size : previewsSizes) {
79 |             if (size.width >= targetSize.getWidth() && size.height >= targetSize.getHeight()) {
80 |                 config.previewVideoWidth = size.width;
81 |                 config.previewVideoHeight = size.height;
82 |                 return;
83 |             }
84 |         }
85 |     }
86 | 
87 |     public static boolean selectCameraColorFormat(Camera.Parameters parameters, MediaMakerConfig config) {
88 |         List<Integer> srcColorTypes = new LinkedList<>();
89 |         List<Integer> supportedPreviewFormates = parameters.getSupportedPreviewFormats();
90 |         for (int colortype : supportedSrcVideoFrameColorType) {
91 |             if (supportedPreviewFormates.contains(colortype)) {
92 |                 srcColorTypes.add(colortype);
93 |             }
94 |         }
95 |         //select preview colorformat
96 |         if (srcColorTypes.contains(config.previewColorFormat = ImageFormat.NV21)) {
97 |             config.previewColorFormat = ImageFormat.NV21;
98 |         } else if ((srcColorTypes.contains(config.previewColorFormat = ImageFormat.YV12))) {
99 |             config.previewColorFormat = ImageFormat.YV12;
100 |         } else {
101 | 
Log.e("","!!!!!!!!!!!UnSupport,previewColorFormat"); 102 | return false; 103 | } 104 | return true; 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/core/MediaCodecHelper.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.core; 2 | 3 | import android.media.MediaCodec; 4 | import android.media.MediaCodecInfo; 5 | import android.media.MediaFormat; 6 | import android.util.Log; 7 | 8 | import com.icechn.videorecorder.model.MediaMakerConfig; 9 | 10 | import java.io.IOException; 11 | 12 | 13 | /** 14 | * Created by lake on 16-3-16. 15 | */ 16 | public class MediaCodecHelper { 17 | public static MediaCodec createSoftVideoMediaCodec(MediaMakerConfig config, MediaFormat videoFormat) { 18 | videoFormat.setString(MediaFormat.KEY_MIME, "video/avc"); 19 | videoFormat.setInteger(MediaFormat.KEY_WIDTH, config.videoWidth); 20 | videoFormat.setInteger(MediaFormat.KEY_HEIGHT, config.videoHeight); 21 | videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, config.mediacdoecAVCBitRate); 22 | videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, config.mediacodecAVCFrameRate); 23 | videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, config.mediacodecAVCIFrameInterval); 24 | videoFormat.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline); 25 | videoFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel31); 26 | videoFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR); 27 | MediaCodec result = null; 28 | try { 29 | result = MediaCodec.createEncoderByType(videoFormat.getString(MediaFormat.KEY_MIME)); 30 | //select color 31 | int[] colorful = result.getCodecInfo().getCapabilitiesForType(videoFormat.getString(MediaFormat.KEY_MIME)).colorFormats; 32 | int dstVideoColorFormat = -1; 33 | //select mediacodec colorformat 34 | if (isArrayContain(colorful, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar)) { 35 | dstVideoColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar; 36 | config.mediacodecAVCColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar; 37 | } 38 | if (dstVideoColorFormat == -1 && isArrayContain(colorful, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar)) { 39 | dstVideoColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar; 40 | config.mediacodecAVCColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar; 41 | } 42 | if (dstVideoColorFormat == -1) { 43 | Log.e("","!!!!!!!!!!!UnSupport,mediaCodecColorFormat"); 44 | return null; 45 | } 46 | videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, dstVideoColorFormat); 47 | //selectprofile 48 | // if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { 49 | // MediaCodecInfo.CodecProfileLevel[] profileLevels = result.getCodecInfo().getCapabilitiesForType(videoFormat.getString(MediaFormat.KEY_MIME)).profileLevels; 50 | // if (isProfileContain(profileLevels, MediaCodecInfo.CodecProfileLevel.AVCProfileMain)) { 51 | // config.mediacodecAVCProfile = MediaCodecInfo.CodecProfileLevel.AVCProfileMain; 52 | // config.mediacodecAVClevel = MediaCodecInfo.CodecProfileLevel.AVCLevel31; 53 | // } else { 54 | // config.mediacodecAVCProfile = MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline; 55 | // config.mediacodecAVClevel = MediaCodecInfo.CodecProfileLevel.AVCLevel31; 56 | // } 57 | // 
videoFormat.setInteger(MediaFormat.KEY_PROFILE, config.mediacodecAVCProfile); 58 | // //level must be set even below M 59 | // videoFormat.setInteger(MediaFormat.KEY_LEVEL, config.mediacodecAVClevel); 60 | // } 61 | } catch (IOException e) { 62 | e.printStackTrace(); 63 | return null; 64 | } 65 | return result; 66 | } 67 | 68 | public static MediaCodec createAudioMediaCodec(MediaMakerConfig config, MediaFormat audioFormat) { 69 | //Audio 70 | MediaCodec result; 71 | audioFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm"); 72 | audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, config.mediacodecAACProfile); 73 | audioFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, config.mediacodecAACSampleRate); 74 | audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, config.mediacodecAACChannelCount); 75 | audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, config.mediacodecAACBitRate); 76 | audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, config.mediacodecAACMaxInputSize); 77 | Log.d("","creatingAudioEncoder,format=" + audioFormat.toString()); 78 | try { 79 | result = MediaCodec.createEncoderByType(audioFormat.getString(MediaFormat.KEY_MIME)); 80 | } catch (Exception e) { 81 | e.printStackTrace(); 82 | return null; 83 | } 84 | return result; 85 | } 86 | 87 | public static MediaCodec createHardVideoMediaCodec(MediaMakerConfig config, MediaFormat videoFormat) { 88 | videoFormat.setString(MediaFormat.KEY_MIME, "video/avc"); 89 | videoFormat.setInteger(MediaFormat.KEY_WIDTH, config.videoWidth); 90 | videoFormat.setInteger(MediaFormat.KEY_HEIGHT, config.videoHeight); 91 | videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); 92 | videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, config.mediacdoecAVCBitRate); 93 | videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, config.mediacodecAVCFrameRate); 94 | videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, config.mediacodecAVCIFrameInterval); 95 | videoFormat.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline); 96 | videoFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel31); 97 | videoFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR); 98 | MediaCodec result = null; 99 | try { 100 | result = MediaCodec.createEncoderByType(videoFormat.getString(MediaFormat.KEY_MIME)); 101 | //selectprofile 102 | // if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { 103 | // MediaCodecInfo.CodecProfileLevel[] profileLevels = result.getCodecInfo().getCapabilitiesForType(videoFormat.getString(MediaFormat.KEY_MIME)).profileLevels; 104 | // if (isProfileContain(profileLevels, MediaCodecInfo.CodecProfileLevel.AVCProfileMain)) { 105 | // config.mediacodecAVCProfile = MediaCodecInfo.CodecProfileLevel.AVCProfileMain; 106 | // config.mediacodecAVClevel = MediaCodecInfo.CodecProfileLevel.AVCLevel31; 107 | // } else { 108 | // config.mediacodecAVCProfile = MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline; 109 | // config.mediacodecAVClevel = MediaCodecInfo.CodecProfileLevel.AVCLevel31; 110 | // } 111 | // videoFormat.setInteger(MediaFormat.KEY_PROFILE, config.mediacodecAVCProfile); 112 | // //level must be set even below M 113 | // videoFormat.setInteger(MediaFormat.KEY_LEVEL, config.mediacodecAVClevel); 114 | // } 115 | } catch (IOException e) { 116 | e.printStackTrace(); 117 | return null; 118 | } 119 | return result; 120 | } 121 | 122 | private static boolean isArrayContain(int[] src, int 
target) { 123 | for (int color : src) { 124 | if (color == target) { 125 | return true; 126 | } 127 | } 128 | return false; 129 | } 130 | 131 | private static boolean isProfileContain(MediaCodecInfo.CodecProfileLevel[] src, int target) { 132 | for (MediaCodecInfo.CodecProfileLevel color : src) { 133 | if (color.profile == target) { 134 | return true; 135 | } 136 | } 137 | return false; 138 | } 139 | } -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/core/Packager.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.core; 2 | 3 | import android.media.MediaFormat; 4 | 5 | import com.icechn.videorecorder.tools.ByteArrayTools; 6 | 7 | import java.nio.ByteBuffer; 8 | 9 | 10 | /** 11 | * Created by lake on 16-3-30. 12 | */ 13 | public class Packager { 14 | public static class H264Packager { 15 | 16 | public static byte[] generateAVCDecoderConfigurationRecord(MediaFormat mediaFormat) { 17 | ByteBuffer SPSByteBuff = mediaFormat.getByteBuffer("csd-0"); 18 | SPSByteBuff.position(4); 19 | ByteBuffer PPSByteBuff = mediaFormat.getByteBuffer("csd-1"); 20 | PPSByteBuff.position(4); 21 | int spslength = SPSByteBuff.remaining(); 22 | int ppslength = PPSByteBuff.remaining(); 23 | int length = 11 + spslength + ppslength; 24 | byte[] result = new byte[length]; 25 | SPSByteBuff.get(result, 8, spslength); 26 | PPSByteBuff.get(result, 8 + spslength + 3, ppslength); 27 | /** 28 | * UB[8]configurationVersion 29 | * UB[8]AVCProfileIndication 30 | * UB[8]profile_compatibility 31 | * UB[8]AVCLevelIndication 32 | * UB[8]lengthSizeMinusOne 33 | */ 34 | result[0] = 0x01; 35 | result[1] = result[9]; 36 | result[2] = result[10]; 37 | result[3] = result[11]; 38 | result[4] = (byte) 0xFF; 39 | /** 40 | * UB[8]numOfSequenceParameterSets 41 | * UB[16]sequenceParameterSetLength 42 | */ 43 | result[5] = (byte) 0xE1; 44 | ByteArrayTools.intToByteArrayTwoByte(result, 6, spslength); 45 | /** 46 | * UB[8]numOfPictureParameterSets 47 | * UB[16]pictureParameterSetLength 48 | */ 49 | int pos = 8 + spslength; 50 | result[pos] = (byte) 0x01; 51 | ByteArrayTools.intToByteArrayTwoByte(result, pos + 1, ppslength); 52 | 53 | return result; 54 | } 55 | } 56 | 57 | public static class FLVPackager { 58 | public static final int FLV_TAG_LENGTH = 11; 59 | public static final int FLV_VIDEO_TAG_LENGTH = 5; 60 | public static final int FLV_AUDIO_TAG_LENGTH = 2; 61 | public static final int FLV_TAG_FOOTER_LENGTH = 4; 62 | public static final int NALU_HEADER_LENGTH = 4; 63 | 64 | public static void fillFlvVideoTag(byte[] dst, int pos, boolean isAVCSequenceHeader, boolean isIDR, int readDataLength) { 65 | //FrameType&CodecID 66 | dst[pos] = isIDR ? (byte) 0x17 : (byte) 0x27; 67 | //AVCPacketType 68 | dst[pos + 1] = isAVCSequenceHeader ? (byte) 0x00 : (byte) 0x01; 69 | //LAKETODO CompositionTime 70 | dst[pos + 2] = 0x00; 71 | dst[pos + 3] = 0x00; 72 | dst[pos + 4] = 0x00; 73 | if (!isAVCSequenceHeader) { 74 | //NALU HEADER 75 | ByteArrayTools.intToByteArrayFull(dst, pos + 5, readDataLength); 76 | } 77 | } 78 | 79 | public static void fillFlvAudioTag(byte[] dst, int pos, boolean isAACSequenceHeader) { 80 | /** 81 | * UB[4] 10=AAC 82 | * UB[2] 3=44kHz 83 | * UB[1] 1=16-bit 84 | * UB[1] 0=MonoSound 85 | */ 86 | dst[pos] = (byte) 0xAE; 87 | dst[pos + 1] = isAACSequenceHeader ? 
(byte) 0x00 : (byte) 0x01; 88 | } 89 | } 90 | 91 | } 92 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/core/audio/AudioCore.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.core.audio; 2 | 3 | import android.media.AudioFormat; 4 | import android.media.MediaCodec; 5 | import android.media.MediaCodecInfo; 6 | import android.media.MediaFormat; 7 | import android.os.Handler; 8 | import android.os.HandlerThread; 9 | import android.os.Looper; 10 | import android.os.Message; 11 | import android.os.SystemClock; 12 | import android.util.Log; 13 | 14 | import com.icechn.videorecorder.core.MediaCodecHelper; 15 | import com.icechn.videorecorder.encoder.MediaMuxerWrapper; 16 | import com.icechn.videorecorder.filter.softaudiofilter.BaseSoftAudioFilter; 17 | import com.icechn.videorecorder.model.AudioBuff; 18 | import com.icechn.videorecorder.model.MediaMakerConfig; 19 | import com.icechn.videorecorder.model.RecordConfig; 20 | 21 | import java.nio.ByteBuffer; 22 | import java.util.concurrent.TimeUnit; 23 | import java.util.concurrent.locks.Lock; 24 | import java.util.concurrent.locks.ReentrantLock; 25 | 26 | /** 27 | * Created by lake on 16-5-24. 28 | */ 29 | public class AudioCore { 30 | MediaMakerConfig mediaMakerConfig; 31 | private final Object syncOp = new Object(); 32 | private MediaCodec dstAudioEncoder; 33 | private MediaFormat dstAudioFormat; 34 | //filter 35 | private Lock lockAudioFilter = null; 36 | private BaseSoftAudioFilter audioFilter; 37 | //AudioBuffs 38 | //buffers to handle buff from queueAudio 39 | private AudioBuff[] orignAudioBuffs; 40 | private int lastAudioQueueBuffIndex; 41 | //buffer to handle buff from orignAudioBuffs 42 | private AudioBuff orignAudioBuff; 43 | private AudioBuff filteredAudioBuff; 44 | private AudioFilterHandler audioFilterHandler; 45 | private HandlerThread audioFilterHandlerThread; 46 | private AudioSenderThread audioSenderThread; 47 | 48 | public AudioCore(MediaMakerConfig parameters) { 49 | mediaMakerConfig = parameters; 50 | lockAudioFilter = new ReentrantLock(false); 51 | } 52 | 53 | public void queueAudio(byte[] rawAudioFrame) { 54 | int targetIndex = (lastAudioQueueBuffIndex + 1) % orignAudioBuffs.length; 55 | if (orignAudioBuffs[targetIndex].isReadyToFill) { 56 | Log.d("","queueAudio,accept ,targetIndex" + targetIndex); 57 | System.arraycopy(rawAudioFrame, 0, orignAudioBuffs[targetIndex].buff, 0, mediaMakerConfig.audioRecoderBufferSize); 58 | orignAudioBuffs[targetIndex].isReadyToFill = false; 59 | lastAudioQueueBuffIndex = targetIndex; 60 | audioFilterHandler.sendMessage(audioFilterHandler.obtainMessage(AudioFilterHandler.WHAT_INCOMING_BUFF, targetIndex, 0)); 61 | } else { 62 | Log.d("","queueAudio,abandon,targetIndex" + targetIndex); 63 | } 64 | } 65 | 66 | public boolean prepare(RecordConfig resConfig) { 67 | synchronized (syncOp) { 68 | mediaMakerConfig.mediacodecAACProfile = MediaCodecInfo.CodecProfileLevel.AACObjectLC; 69 | mediaMakerConfig.mediacodecAACSampleRate = 44100; 70 | mediaMakerConfig.mediacodecAACChannelCount = 1; 71 | mediaMakerConfig.mediacodecAACBitRate = 32 * 1024; 72 | mediaMakerConfig.mediacodecAACMaxInputSize = 8820; 73 | 74 | dstAudioFormat = new MediaFormat(); 75 | dstAudioEncoder = MediaCodecHelper.createAudioMediaCodec(mediaMakerConfig, dstAudioFormat); 76 | if (dstAudioEncoder == null) { 77 | Log.e("","create Audio MediaCodec failed"); 78 | return false; 79 | } 80 | 
//audio 81 | //44100/10=4410,4410*2 = 8820 82 | int audioQueueNum = mediaMakerConfig.audioBufferQueueNum; 83 | int orignAudioBuffSize = mediaMakerConfig.mediacodecAACSampleRate / 5; 84 | orignAudioBuffs = new AudioBuff[audioQueueNum]; 85 | for (int i = 0; i < audioQueueNum; i++) { 86 | orignAudioBuffs[i] = new AudioBuff(AudioFormat.ENCODING_PCM_16BIT, orignAudioBuffSize); 87 | } 88 | orignAudioBuff = new AudioBuff(AudioFormat.ENCODING_PCM_16BIT, orignAudioBuffSize); 89 | filteredAudioBuff = new AudioBuff(AudioFormat.ENCODING_PCM_16BIT, orignAudioBuffSize); 90 | return true; 91 | } 92 | } 93 | 94 | public void startRecording(MediaMuxerWrapper muxer) { 95 | synchronized (syncOp) { 96 | try { 97 | for (AudioBuff buff : orignAudioBuffs) { 98 | buff.isReadyToFill = true; 99 | } 100 | if (dstAudioEncoder == null) { 101 | dstAudioEncoder = MediaCodec.createEncoderByType(dstAudioFormat.getString(MediaFormat.KEY_MIME)); 102 | } 103 | dstAudioEncoder.configure(dstAudioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); 104 | dstAudioEncoder.start(); 105 | lastAudioQueueBuffIndex = 0; 106 | audioFilterHandlerThread = new HandlerThread("audioFilterHandlerThread"); 107 | audioSenderThread = new AudioSenderThread("AudioSenderThread", dstAudioEncoder, muxer); 108 | audioFilterHandlerThread.start(); 109 | audioSenderThread.start(); 110 | audioFilterHandler = new AudioFilterHandler(audioFilterHandlerThread.getLooper()); 111 | } catch (Exception e) { 112 | e.printStackTrace(); 113 | } 114 | } 115 | } 116 | 117 | public void stop() { 118 | synchronized (syncOp) { 119 | audioFilterHandler.removeCallbacksAndMessages(null); 120 | if (audioFilterHandlerThread != null) { 121 | audioFilterHandlerThread.quit(); 122 | } 123 | try { 124 | if (audioFilterHandlerThread != null) { 125 | audioFilterHandlerThread.join(); 126 | } 127 | if(audioSenderThread != null) { 128 | audioSenderThread.quit(); 129 | audioSenderThread.join(); 130 | } 131 | } catch (InterruptedException e) { 132 | e.printStackTrace(); 133 | } 134 | audioFilterHandlerThread = null; 135 | audioSenderThread = null; 136 | if (dstAudioEncoder != null) { 137 | dstAudioEncoder.stop(); 138 | dstAudioEncoder.release(); 139 | dstAudioEncoder = null; 140 | } 141 | } 142 | } 143 | 144 | public BaseSoftAudioFilter acquireAudioFilter() { 145 | lockAudioFilter.lock(); 146 | return audioFilter; 147 | } 148 | 149 | public void releaseAudioFilter() { 150 | lockAudioFilter.unlock(); 151 | } 152 | 153 | public void setAudioFilter(BaseSoftAudioFilter baseSoftAudioFilter) { 154 | lockAudioFilter.lock(); 155 | if (audioFilter != null) { 156 | audioFilter.onDestroy(); 157 | } 158 | audioFilter = baseSoftAudioFilter; 159 | if (audioFilter != null) { 160 | audioFilter.onInit(mediaMakerConfig.mediacodecAACSampleRate / 5); 161 | } 162 | lockAudioFilter.unlock(); 163 | } 164 | 165 | public void destroy() { 166 | synchronized (syncOp) { 167 | lockAudioFilter.lock(); 168 | if (audioFilter != null) { 169 | audioFilter.onDestroy(); 170 | } 171 | lockAudioFilter.unlock(); 172 | } 173 | } 174 | 175 | private class AudioFilterHandler extends Handler { 176 | public static final int FILTER_LOCK_TOLERATION = 3;//3ms 177 | public static final int WHAT_INCOMING_BUFF = 1; 178 | private int sequenceNum; 179 | 180 | AudioFilterHandler(Looper looper) { 181 | super(looper); 182 | sequenceNum = 0; 183 | } 184 | 185 | @Override 186 | public void handleMessage(Message msg) { 187 | if (msg.what != WHAT_INCOMING_BUFF) { 188 | return; 189 | } 190 | sequenceNum++; 191 | int targetIndex = msg.arg1; 192 
| long nowTimeMs = SystemClock.uptimeMillis();
193 |             System.arraycopy(orignAudioBuffs[targetIndex].buff, 0,
194 |                     orignAudioBuff.buff, 0, orignAudioBuff.buff.length);
195 |             orignAudioBuffs[targetIndex].isReadyToFill = true;
196 |             boolean isFilterLocked = lockAudioFilter();
197 |             boolean filtered = false;
198 |             if (isFilterLocked) {
199 |                 filtered = audioFilter.onFrame(orignAudioBuff.buff, filteredAudioBuff.buff, nowTimeMs, sequenceNum);
200 |                 unlockAudioFilter();
201 |             } else {
202 |                 System.arraycopy(orignAudioBuffs[targetIndex].buff, 0,
203 |                         orignAudioBuff.buff, 0, orignAudioBuff.buff.length);
204 |                 orignAudioBuffs[targetIndex].isReadyToFill = true;
205 |             }
206 |             //orignAudioBuff is ready
207 |             int eibIndex = dstAudioEncoder.dequeueInputBuffer(-1);
208 |             if (eibIndex >= 0) {
209 |                 ByteBuffer dstAudioEncoderIBuffer = dstAudioEncoder.getInputBuffers()[eibIndex];
210 |                 dstAudioEncoderIBuffer.position(0);
211 |                 dstAudioEncoderIBuffer.put(filtered?filteredAudioBuff.buff:orignAudioBuff.buff, 0, orignAudioBuff.buff.length);
212 |                 dstAudioEncoder.queueInputBuffer(eibIndex, 0, orignAudioBuff.buff.length, nowTimeMs * 1000, 0);
213 |             } else {
214 |                 Log.d("","dstAudioEncoder.dequeueInputBuffer(-1)<0");
215 |             }
216 |             Log.d("","AudioFilterHandler,ProcessTime:" + (System.currentTimeMillis() - nowTimeMs));
217 |         }
218 | 
219 |         /**
220 |          * @return true if filter locked & filter!=null
221 |          */
222 | 
223 |         private boolean lockAudioFilter() {
224 |             try {
225 |                 boolean locked = lockAudioFilter.tryLock(FILTER_LOCK_TOLERATION, TimeUnit.MILLISECONDS);
226 |                 if (locked) {
227 |                     if (audioFilter != null) {
228 |                         return true;
229 |                     } else {
230 |                         lockAudioFilter.unlock();
231 |                         return false;
232 |                     }
233 |                 } else {
234 |                     return false;
235 |                 }
236 |             } catch (InterruptedException e) {
237 |             }
238 |             return false;
239 |         }
240 | 
241 |         private void unlockAudioFilter() {
242 |             lockAudioFilter.unlock();
243 |         }
244 |     }
245 | }
246 | 
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/core/audio/AudioSenderThread.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.core.audio;
2 | 
3 | import android.media.MediaCodec;
4 | import android.media.MediaFormat;
5 | import android.util.Log;
6 | 
7 | import com.icechn.videorecorder.encoder.MediaMuxerWrapper;
8 | 
9 | import java.lang.ref.WeakReference;
10 | import java.nio.ByteBuffer;
11 | 
12 | /**
13 |  * Created by lakeinchina on 26/05/16.
14 |  */
15 | public class AudioSenderThread extends Thread {
16 |     private static final long WAIT_TIME = 5000;//5ms; dequeueOutputBuffer timeout is in microseconds
17 |     private MediaCodec.BufferInfo eInfo;
18 |     private long startTime = 0;
19 |     private MediaCodec dstAudioEncoder;
20 | 
21 |     AudioSenderThread(String name, MediaCodec encoder, MediaMuxerWrapper muxer) {
22 |         super(name);
23 |         eInfo = new MediaCodec.BufferInfo();
24 |         startTime = 0;
25 |         dstAudioEncoder = encoder;
26 |         mWeakMuxer = new WeakReference<MediaMuxerWrapper>(muxer);
27 |     }
28 | 
29 |     private boolean shouldQuit = false;
30 | 
31 |     void quit() {
32 |         shouldQuit = true;
33 |         this.interrupt();
34 | 
35 |         if (mMuxerStarted) {
36 |             final MediaMuxerWrapper muxer = mWeakMuxer != null ? mWeakMuxer.get() : null;
37 |             if (muxer != null) {
38 |                 try {
39 |                     muxer.stop();
40 |                 } catch (final Exception e) {
41 |                     Log.e("AudioSenderThread", "failed stopping muxer", e);
42 |                 }
43 |             }
44 |         }
45 |     }
46 | 
47 |     @Override
48 |     public void run() {
49 |         final MediaMuxerWrapper muxer = mWeakMuxer != null ?
mWeakMuxer.get() : null; 50 | boolean isMuxerEnable = muxer != null; 51 | Log.w("AudioSenderThread", "muxer enable:"+isMuxerEnable); 52 | 53 | while (!shouldQuit) { 54 | int eobIndex = dstAudioEncoder.dequeueOutputBuffer(eInfo, WAIT_TIME); 55 | switch (eobIndex) { 56 | case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: 57 | Log.d("","AudioSenderThread,MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED"); 58 | break; 59 | case MediaCodec.INFO_TRY_AGAIN_LATER: 60 | // Log.d("","AudioSenderThread,MediaCodec.INFO_TRY_AGAIN_LATER"); 61 | break; 62 | case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: 63 | Log.d("","AudioSenderThread,MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:" + 64 | dstAudioEncoder.getOutputFormat().toString()); 65 | if (isMuxerEnable) { 66 | //addTrack 67 | final MediaFormat format = dstAudioEncoder.getOutputFormat(); // API >= 16 68 | mTrackIndex = muxer.addTrack(format); 69 | muxer.start(); 70 | mMuxerStarted = true; 71 | } 72 | break; 73 | default: 74 | Log.d("","AudioSenderThread,MediaCode,eobIndex=" + eobIndex); 75 | if (startTime == 0) { 76 | startTime = eInfo.presentationTimeUs / 1000; 77 | } 78 | /** 79 | * we send audio SpecificConfig already in INFO_OUTPUT_FORMAT_CHANGED 80 | * so we ignore MediaCodec.BUFFER_FLAG_CODEC_CONFIG 81 | */ 82 | if (eInfo.flags != MediaCodec.BUFFER_FLAG_CODEC_CONFIG && eInfo.size != 0) { 83 | ByteBuffer realData = dstAudioEncoder.getOutputBuffers()[eobIndex]; 84 | realData.position(eInfo.offset); 85 | realData.limit(eInfo.offset + eInfo.size); 86 | if (isMuxerEnable && mMuxerStarted) { 87 | eInfo.presentationTimeUs = getPTSUs(); 88 | muxer.writeSampleData(mTrackIndex, realData, eInfo); 89 | prevOutputPTSUs = eInfo.presentationTimeUs; 90 | } 91 | } 92 | dstAudioEncoder.releaseOutputBuffer(eobIndex, false); 93 | break; 94 | } 95 | } 96 | eInfo = null; 97 | } 98 | 99 | protected WeakReference mWeakMuxer; 100 | protected int mTrackIndex; 101 | protected boolean mMuxerStarted = false; 102 | /** 103 | * previous presentationTimeUs for writing 104 | */ 105 | private long prevOutputPTSUs = 0; 106 | /** 107 | * get next encoding presentationTimeUs 108 | * @return 109 | */ 110 | protected long getPTSUs() { 111 | long result = System.nanoTime() / 1000L; 112 | // presentationTimeUs should be monotonic 113 | // otherwise muxer fail to write 114 | if (result < prevOutputPTSUs) 115 | result = (prevOutputPTSUs - result) + result; 116 | return result; 117 | } 118 | } 119 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/core/listener/IVideoChange.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.core.listener; 2 | 3 | /** 4 | * Created by lake on 22/09/16. 5 | * Librestreaming project. 
6 | */ 7 | public interface IVideoChange { 8 | void onVideoSizeChanged(int width, int height); 9 | 10 | class VideoChangeRunable implements Runnable { 11 | IVideoChange videoChangeListener; 12 | int w, h; 13 | 14 | public VideoChangeRunable(IVideoChange videoChangeListener, int w, int h) { 15 | this.videoChangeListener = videoChangeListener; 16 | this.w = w; 17 | this.h = h; 18 | } 19 | 20 | @Override 21 | public void run() { 22 | if (videoChangeListener != null) { 23 | videoChangeListener.onVideoSizeChanged(w, h); 24 | } 25 | } 26 | } 27 | } -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/core/video/IVideoCore.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.core.video; 2 | 3 | import android.graphics.SurfaceTexture; 4 | 5 | import com.icechn.videorecorder.core.listener.IVideoChange; 6 | import com.icechn.videorecorder.encoder.MediaMuxerWrapper; 7 | import com.icechn.videorecorder.model.RecordConfig; 8 | 9 | /** 10 | * Created by lake on 16-5-25. 11 | */ 12 | public interface IVideoCore { 13 | int OVERWATCH_TEXTURE_ID = 10; 14 | boolean prepare(RecordConfig resConfig); 15 | 16 | void updateCamTexture(SurfaceTexture camTex); 17 | 18 | void startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight); 19 | 20 | void updatePreview(int visualWidth, int visualHeight); 21 | 22 | void stopPreview(boolean releaseTexture); 23 | 24 | boolean startRecording(MediaMuxerWrapper muxer); 25 | 26 | boolean stopRecording(); 27 | 28 | boolean destroy(); 29 | 30 | void setCurrentCamera(int cameraIndex); 31 | 32 | void setVideoChangeListener(IVideoChange listener); 33 | } 34 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/core/video/VideoSenderThread.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.core.video; 2 | 3 | import android.media.MediaCodec; 4 | import android.media.MediaFormat; 5 | import android.util.Log; 6 | 7 | import com.icechn.videorecorder.encoder.MediaMuxerWrapper; 8 | 9 | import java.lang.ref.WeakReference; 10 | import java.nio.ByteBuffer; 11 | 12 | /** 13 | * Created by lakeinchina on 26/05/16. 14 | */ 15 | public class VideoSenderThread extends Thread { 16 | private static final long WAIT_TIME = 5000; 17 | private MediaCodec.BufferInfo eInfo; 18 | private long startTime = 0; 19 | private MediaCodec dstVideoEncoder; 20 | private final Object syncDstVideoEncoder = new Object(); 21 | 22 | VideoSenderThread(String name, MediaCodec encoder, MediaMuxerWrapper muxer) { 23 | super(name); 24 | eInfo = new MediaCodec.BufferInfo(); 25 | startTime = 0; 26 | dstVideoEncoder = encoder; 27 | mWeakMuxer = new WeakReference(muxer); 28 | } 29 | 30 | public void updateMediaCodec(MediaCodec encoder) { 31 | synchronized (syncDstVideoEncoder) { 32 | dstVideoEncoder = encoder; 33 | } 34 | } 35 | 36 | private boolean shouldQuit = false; 37 | 38 | void quit() { 39 | shouldQuit = true; 40 | this.interrupt(); 41 | if (mMuxerStarted) { 42 | final MediaMuxerWrapper muxer = mWeakMuxer != null ? mWeakMuxer.get() : null; 43 | if (muxer != null) { 44 | try { 45 | muxer.stop(); 46 | } catch (final Exception e) { 47 | Log.e("VideoSenderThread", "failed stopping muxer", e); 48 | } 49 | } 50 | } 51 | } 52 | 53 | @Override 54 | public void run() { 55 | final MediaMuxerWrapper muxer = mWeakMuxer != null ? 
mWeakMuxer.get() : null;
56 | boolean isMuxerEnable = muxer != null;
57 | Log.w("VideoSenderThread", "muxer enable:"+isMuxerEnable);
58 | while (!shouldQuit) {
59 | synchronized (syncDstVideoEncoder) {
60 | int eobIndex = MediaCodec.INFO_TRY_AGAIN_LATER;
61 | try {
62 | eobIndex = dstVideoEncoder.dequeueOutputBuffer(eInfo, WAIT_TIME);
63 | } catch (Exception ignored) {
64 | }
65 | switch (eobIndex) {
66 | case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
67 | Log.d("VideoSenderThread", "MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED");
68 | break;
69 | case MediaCodec.INFO_TRY_AGAIN_LATER:
70 | // Log.d("VideoSenderThread", "MediaCodec.INFO_TRY_AGAIN_LATER");
71 | break;
72 | case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
73 | Log.d("VideoSenderThread", "MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:" +
74 | dstVideoEncoder.getOutputFormat().toString());
75 | if (isMuxerEnable) {
76 | // register the video track; the MediaFormat already carries SPS/PPS (csd-0/csd-1)
77 | final MediaFormat format = dstVideoEncoder.getOutputFormat(); // API >= 16
78 | mTrackIndex = muxer.addTrack(format);
79 | muxer.start();
80 | mMuxerStarted = true;
81 | }
82 | break;
83 | default:
84 | Log.d("VideoSenderThread", "MediaCodec, eobIndex=" + eobIndex);
85 | if (startTime == 0) {
86 | startTime = eInfo.presentationTimeUs / 1000;
87 | }
88 | /**
89 | * SPS/PPS were already delivered to the muxer in INFO_OUTPUT_FORMAT_CHANGED,
90 | * so buffers flagged MediaCodec.BUFFER_FLAG_CODEC_CONFIG are skipped here
91 | */
92 | if ((eInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0 && eInfo.size != 0) {
93 | ByteBuffer realData = dstVideoEncoder.getOutputBuffers()[eobIndex];
94 | realData.position(eInfo.offset + 4); // skip the leading 4-byte Annex-B start code (00 00 00 01)
95 | realData.limit(eInfo.offset + eInfo.size);
96 | if (isMuxerEnable && mMuxerStarted) {
97 | eInfo.presentationTimeUs = getPTSUs();
98 | muxer.writeSampleData(mTrackIndex, realData, eInfo);
99 | prevOutputPTSUs = eInfo.presentationTimeUs;
100 | }
101 | }
102 | dstVideoEncoder.releaseOutputBuffer(eobIndex, false);
103 | break;
104 | }
105 | }
106 | try {
107 | sleep(5);
108 | } catch (InterruptedException ignored) {
109 | }
110 | }
111 | eInfo = null;
112 | }
113 | 
114 | protected WeakReference<MediaMuxerWrapper> mWeakMuxer;
115 | protected int mTrackIndex;
116 | protected boolean mMuxerStarted = false;
117 | /**
118 | * previous presentationTimeUs passed to writeSampleData
119 | */
120 | private long prevOutputPTSUs = 0;
121 | /**
122 | * get the next presentationTimeUs for encoding
123 | * @return a monotonically non-decreasing timestamp in microseconds
124 | */
125 | protected long getPTSUs() {
126 | long result = System.nanoTime() / 1000L;
127 | // presentationTimeUs must be monotonic,
128 | // otherwise the muxer fails to write
129 | if (result < prevOutputPTSUs)
130 | result = prevOutputPTSUs;
131 | return result;
132 | }
133 | }
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/demo/App.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.demo;
2 | 
3 | import android.app.Application;
4 | 
5 | /**
6 | * Created by ICE on 2017/10/12.
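 *
 * Application subclass kept as a process-wide singleton so that other
 * components can obtain an application Context through App.getApp().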
7 | */
8 | 
9 | public class App extends Application {
10 | @Override
11 | public void onCreate() {
12 | super.onCreate();
13 | _app = this;
14 | }
15 | public static Application getApp() {
16 | return _app;
17 | }
18 | private static Application _app;
19 | }
20 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/demo/MainActivity.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.demo;
2 | 
3 | import android.Manifest;
4 | import android.content.Intent;
5 | import android.content.pm.PackageManager;
6 | import android.os.Bundle;
7 | import android.support.annotation.Nullable;
8 | import android.support.design.widget.Snackbar;
9 | import android.support.v4.app.ActivityCompat;
10 | import android.support.v7.app.AppCompatActivity;
11 | import android.view.View;
12 | import android.widget.CheckBox;
13 | import android.widget.CompoundButton;
14 | import android.widget.CompoundButton.OnCheckedChangeListener;
15 | 
16 | import com.icechn.videorecorder.R;
17 | import com.icechn.videorecorder.ui.RecordingActivity;
18 | import com.icechn.videorecorder.ui.RecordingActivity2;
19 | 
20 | 
21 | public class MainActivity extends AppCompatActivity {
22 | private static final int REQUEST_STREAM = 1;
23 | private static final String[] PERMISSIONS_STREAM = {
24 | Manifest.permission.CAMERA,
25 | Manifest.permission.RECORD_AUDIO,
26 | Manifest.permission.WRITE_EXTERNAL_STORAGE
27 | };
28 | 
29 | boolean authorized = false;
30 | boolean continuedRecord = false;
31 | 
32 | @Override
33 | protected void onCreate(@Nullable Bundle savedInstanceState) {
34 | super.onCreate(savedInstanceState);
35 | setContentView(R.layout.activity_main);
36 | continuedRecord = ((CheckBox)findViewById(R.id.ck_duandian)).isChecked(); // checkbox toggles continued (segmented) recording via RecordingActivity2
37 | ((CheckBox)findViewById(R.id.ck_duandian)).setOnCheckedChangeListener(new OnCheckedChangeListener() {
38 | @Override
39 | public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
40 | continuedRecord = isChecked;
41 | }
42 | });
43 | findViewById(R.id.btn_record).setOnClickListener(new View.OnClickListener() {
44 | @Override
45 | public void onClick(View v) {
46 | if (authorized) {
47 | start(false);
48 | } else {
49 | Snackbar.make(MainActivity.this.getWindow().getDecorView().getRootView(), "Recording needs camera, microphone and storage permissions!", Snackbar.LENGTH_LONG)
50 | .setAction("auth", new View.OnClickListener() {
51 | @Override
52 | public void onClick(View v) {
53 | verifyPermissions();
54 | }
55 | }).show();
56 | }
57 | }
58 | });
59 | findViewById(R.id.btn_record_squqre).setOnClickListener(new View.OnClickListener() {
60 | @Override
61 | public void onClick(View v) {
62 | if (authorized) {
63 | start(true);
64 | } else {
65 | Snackbar.make(MainActivity.this.getWindow().getDecorView().getRootView(), "Recording needs camera, microphone and storage permissions!", Snackbar.LENGTH_LONG)
66 | .setAction("auth", new View.OnClickListener() {
67 | @Override
68 | public void onClick(View v) {
69 | verifyPermissions();
70 | }
71 | }).show();
72 | }
73 | }
74 | });
75 | verifyPermissions();
76 | }
77 | 
78 | private void start(boolean isSquare) {
79 | Intent intent;
80 | if (continuedRecord) {
81 | intent = new Intent(MainActivity.this, RecordingActivity2.class);
82 | } else {
83 | intent = new Intent(MainActivity.this, RecordingActivity.class);
84 | }
85 | 
86 | intent.putExtra(RecordingActivity.IS_SQUARE, isSquare);
87 | startActivity(intent);
88 | }
89 | 
90 | public void verifyPermissions() {
91 | int
CAMERA_permission = ActivityCompat.checkSelfPermission(MainActivity.this, Manifest.permission.CAMERA); 92 | int RECORD_AUDIO_permission = ActivityCompat.checkSelfPermission(MainActivity.this, Manifest.permission.RECORD_AUDIO); 93 | int WRITE_EXTERNAL_STORAGE_permission = ActivityCompat.checkSelfPermission(MainActivity.this, Manifest.permission.WRITE_EXTERNAL_STORAGE); 94 | if (CAMERA_permission != PackageManager.PERMISSION_GRANTED || 95 | RECORD_AUDIO_permission != PackageManager.PERMISSION_GRANTED || 96 | WRITE_EXTERNAL_STORAGE_permission != PackageManager.PERMISSION_GRANTED) { 97 | ActivityCompat.requestPermissions( 98 | MainActivity.this, 99 | PERMISSIONS_STREAM, 100 | REQUEST_STREAM 101 | ); 102 | authorized = false; 103 | } else { 104 | authorized = true; 105 | } 106 | } 107 | 108 | @Override 109 | public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) { 110 | super.onRequestPermissionsResult(requestCode, permissions, grantResults); 111 | if (requestCode == REQUEST_STREAM) { 112 | if (grantResults[0] == PackageManager.PERMISSION_GRANTED && 113 | grantResults[1] == PackageManager.PERMISSION_GRANTED && 114 | grantResults[2] == PackageManager.PERMISSION_GRANTED) { 115 | authorized = true; 116 | } 117 | } 118 | } 119 | } -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/encoder/MediaMuxerWrapper.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.encoder; 2 | 3 | import android.media.MediaCodec; 4 | import android.media.MediaFormat; 5 | import android.media.MediaMuxer; 6 | import android.util.Log; 7 | 8 | import com.icechn.videorecorder.BuildConfig; 9 | 10 | import java.io.IOException; 11 | import java.nio.ByteBuffer; 12 | 13 | public class MediaMuxerWrapper { 14 | private static final boolean DEBUG = BuildConfig.DEBUG; 15 | private static final String TAG = "MediaMuxerWrapper"; 16 | 17 | private final MediaMuxer mMediaMuxer; 18 | private int mEncoderCount = 0; 19 | private int mStartedCount = 0; 20 | private boolean mIsStarted = false; 21 | 22 | public MediaMuxerWrapper(String outputPath) throws IOException { 23 | mMediaMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); 24 | mEncoderCount = 0; 25 | mStartedCount = 0; 26 | mIsStarted = false; 27 | } 28 | public void setTrackCount(int count) { 29 | mEncoderCount = count; 30 | } 31 | 32 | //********************************************************************** 33 | //********************************************************************** 34 | 35 | /** 36 | * request start recording from encoder 37 | * @return true when muxer is ready to write 38 | */ 39 | public synchronized boolean start() { 40 | if (DEBUG) { 41 | Log.v(TAG, "start:"); 42 | } 43 | mStartedCount++; 44 | if ((mEncoderCount > 0) && (mStartedCount == mEncoderCount)) { 45 | mMediaMuxer.start(); 46 | mIsStarted = true; 47 | notifyAll(); 48 | if (DEBUG) { 49 | Log.v(TAG, "MediaMuxer started:"); 50 | } 51 | } 52 | return mIsStarted; 53 | } 54 | 55 | /** 56 | * request stop recording from encoder when encoder received EOS 57 | */ 58 | public synchronized void stop() { 59 | if (DEBUG) { 60 | Log.v(TAG, "stop:mStartedCount=" + mStartedCount); 61 | } 62 | if (!mIsStarted) { 63 | Log.d("","not started"); 64 | return ; 65 | } 66 | mStartedCount--; 67 | if ((mEncoderCount > 0) && (mStartedCount <= 0)) { 68 | mMediaMuxer.stop(); 69 | mMediaMuxer.release(); 70 | mIsStarted = 
false; 71 | if (DEBUG) { 72 | Log.v(TAG, "MediaMuxer stopped:"); 73 | } 74 | } 75 | } 76 | 77 | /** 78 | * assign encoder to muxer 79 | * @param format 80 | * @return minus value indicate error 81 | */ 82 | public synchronized int addTrack(final MediaFormat format) { 83 | if (mIsStarted) 84 | throw new IllegalStateException("muxer already started"); 85 | final int trackIx = mMediaMuxer.addTrack(format); 86 | if (DEBUG) { 87 | Log.i(TAG, "addTrack:trackNum=" + mEncoderCount + ",trackIx=" + trackIx + ",format=" + format); 88 | } 89 | return trackIx; 90 | } 91 | 92 | /** 93 | * write encoded data to muxer 94 | * @param trackIndex 95 | * @param byteBuf 96 | * @param bufferInfo 97 | */ 98 | public synchronized void writeSampleData(final int trackIndex, final ByteBuffer byteBuf, final MediaCodec.BufferInfo bufferInfo) { 99 | if (!mIsStarted) { 100 | return ; 101 | } 102 | if (mStartedCount > 0) { 103 | mMediaMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo); 104 | } 105 | } 106 | 107 | } 108 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/gpuimage/GPUImageColorInvertFilter.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.filter.gpuimage; 2 | 3 | /** 4 | * Created by ICE on 2018/2/2. 5 | */ 6 | 7 | public class GPUImageColorInvertFilter extends GPUImageFilter { 8 | public static final String COLOR_INVERT_FRAGMENT_SHADER = "" + 9 | "varying highp vec2 textureCoordinate;\n" + 10 | "\n" + 11 | "uniform sampler2D inputImageTexture;\n" + 12 | "\n" + 13 | "void main()\n" + 14 | "{\n" + 15 | " lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n" + 16 | " \n" + 17 | " gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);\n" + 18 | "}"; 19 | 20 | public GPUImageColorInvertFilter() { 21 | super(NO_FILTER_VERTEX_SHADER, COLOR_INVERT_FRAGMENT_SHADER); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/gpuimage/GPUImageCompatibleFilter.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.filter.gpuimage; 2 | 3 | import android.opengl.GLES20; 4 | 5 | 6 | import com.icechn.videorecorder.filter.hardvideofilter.BaseHardVideoFilter; 7 | import com.icechn.videorecorder.model.MediaConfig; 8 | import com.icechn.videorecorder.tools.GLESTools; 9 | 10 | import java.nio.ByteBuffer; 11 | import java.nio.ByteOrder; 12 | import java.nio.FloatBuffer; 13 | 14 | /** 15 | * Created by ICE on 2018/2/2. 
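 *
 * Adapter that lets a GPUImage-style filter run inside the hard video
 * filter pipeline: it rebuilds GPUImage vertex/texture-coordinate buffers
 * whenever the direction flag changes and redirects the wrapped filter's
 * output into the pipeline's target framebuffer. A wiring sketch; exactly
 * how the filter is attached to a recorder client is illustrative here:
 * <pre>{@code
 * BaseHardVideoFilter filter =
 *         new GPUImageCompatibleFilter<>(new GPUImageColorInvertFilter());
 * }</pre>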
16 | */
17 | 
18 | public class GPUImageCompatibleFilter<T extends GPUImageFilter> extends BaseHardVideoFilter {
19 | private T innerGPUImageFilter;
20 | 
21 | private FloatBuffer innerShapeBuffer;
22 | private FloatBuffer innerTextureBuffer;
23 | 
24 | public GPUImageCompatibleFilter(T filter) {
25 | innerGPUImageFilter = filter;
26 | }
27 | 
28 | public T getGPUImageFilter() {
29 | return innerGPUImageFilter;
30 | }
31 | 
32 | @Override
33 | public void onInit(int VWidth, int VHeight) {
34 | super.onInit(VWidth, VHeight);
35 | innerGPUImageFilter.init();
36 | innerGPUImageFilter.onOutputSizeChanged(VWidth, VHeight);
37 | }
38 | 
39 | @Override
40 | public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) {
41 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer);
42 | innerGPUImageFilter.onDraw(cameraTexture, innerShapeBuffer, innerTextureBuffer);
43 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
44 | }
45 | 
46 | @Override
47 | public void onDestroy() {
48 | super.onDestroy();
49 | innerGPUImageFilter.destroy();
50 | }
51 | 
52 | @Override
53 | public void onDirectionUpdate(int _directionFlag) {
54 | if (directionFlag != _directionFlag) {
55 | directionFlag = _directionFlag; // store the new flag first, otherwise stale coordinates are built below
56 | innerShapeBuffer = getGPUImageCompatShapeVerticesBuffer(); innerTextureBuffer = getGPUImageCompatTextureVerticesBuffer(directionFlag);
57 | }
58 | }
59 | 
60 | public static final float TEXTURE_NO_ROTATION[] = {
61 | 1.0f, 1.0f,
62 | 0.0f, 1.0f,
63 | 1.0f, 0.0f,
64 | 0.0f, 0.0f,
65 | };
66 | 
67 | public static final float TEXTURE_ROTATED_90[] = {
68 | 0.0f, 1.0f,
69 | 0.0f, 0.0f,
70 | 1.0f, 1.0f,
71 | 1.0f, 0.0f,
72 | };
73 | public static final float TEXTURE_ROTATED_180[] = {
74 | 0.0f, 0.0f,
75 | 1.0f, 0.0f,
76 | 0.0f, 1.0f,
77 | 1.0f, 1.0f,
78 | };
79 | public static final float TEXTURE_ROTATED_270[] = {
80 | 1.0f, 0.0f,
81 | 1.0f, 1.0f,
82 | 0.0f, 0.0f,
83 | 0.0f, 1.0f,
84 | };
85 | static final float CUBE[] = {
86 | -1.0f, -1.0f,
87 | 1.0f, -1.0f,
88 | -1.0f, 1.0f,
89 | 1.0f, 1.0f,
90 | };
91 | 
92 | public static FloatBuffer getGPUImageCompatShapeVerticesBuffer() {
93 | FloatBuffer result = ByteBuffer.allocateDirect(GLESTools.FLOAT_SIZE_BYTES * CUBE.length).
94 | order(ByteOrder.nativeOrder()).
95 | asFloatBuffer();
96 | result.put(CUBE);
97 | result.position(0);
98 | return result;
99 | }
100 | 
101 | public static FloatBuffer getGPUImageCompatTextureVerticesBuffer(final int directionFlag) {
102 | float[] buffer;
103 | switch (directionFlag & 0xF0) {
104 | case MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_90:
105 | buffer = TEXTURE_ROTATED_90.clone();
106 | break;
107 | case MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_180:
108 | buffer = TEXTURE_ROTATED_180.clone();
109 | break;
110 | case MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_270:
111 | buffer = TEXTURE_ROTATED_270.clone();
112 | break;
113 | default:
114 | buffer = TEXTURE_NO_ROTATION.clone();
115 | }
116 | if ((directionFlag & MediaConfig.DirectionMode.FLAG_DIRECTION_FLIP_HORIZONTAL) != 0) {
117 | buffer[0] = flip(buffer[0]);
118 | buffer[2] = flip(buffer[2]);
119 | buffer[4] = flip(buffer[4]);
120 | buffer[6] = flip(buffer[6]);
121 | }
122 | if ((directionFlag & MediaConfig.DirectionMode.FLAG_DIRECTION_FLIP_VERTICAL) != 0) {
123 | buffer[1] = flip(buffer[1]);
124 | buffer[3] = flip(buffer[3]);
125 | buffer[5] = flip(buffer[5]);
126 | buffer[7] = flip(buffer[7]);
127 | }
128 | FloatBuffer result = ByteBuffer.allocateDirect(GLESTools.FLOAT_SIZE_BYTES * buffer.length).
129 | order(ByteOrder.nativeOrder()).
130 | asFloatBuffer(); 131 | result.put(buffer); 132 | result.position(0); 133 | return result; 134 | } 135 | 136 | private static float flip(final float i) { 137 | return i == 0.0f ? 1.0f : 0.0f; 138 | } 139 | } 140 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/gpuimage/GPUImageFilter.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.filter.gpuimage; 2 | 3 | import android.content.Context; 4 | import android.content.res.AssetManager; 5 | import android.graphics.PointF; 6 | import android.opengl.GLES20; 7 | 8 | 9 | import com.icechn.videorecorder.tools.GLESTools; 10 | 11 | import java.io.InputStream; 12 | import java.nio.FloatBuffer; 13 | import java.util.LinkedList; 14 | 15 | /** 16 | * Created by ICE on 2018/2/2. 17 | */ 18 | 19 | public class GPUImageFilter { 20 | public static final String NO_FILTER_VERTEX_SHADER = "" + 21 | "attribute vec4 position;\n" + 22 | "attribute vec4 inputTextureCoordinate;\n" + 23 | " \n" + 24 | "varying vec2 textureCoordinate;\n" + 25 | " \n" + 26 | "void main()\n" + 27 | "{\n" + 28 | " gl_Position = position;\n" + 29 | " textureCoordinate = inputTextureCoordinate.xy;\n" + 30 | "}"; 31 | public static final String NO_FILTER_FRAGMENT_SHADER = "" + 32 | "varying highp vec2 textureCoordinate;\n" + 33 | " \n" + 34 | "uniform sampler2D inputImageTexture;\n" + 35 | " \n" + 36 | "void main()\n" + 37 | "{\n" + 38 | " gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n" + 39 | "}"; 40 | 41 | private final LinkedList mRunOnDraw; 42 | private final String mVertexShader; 43 | private final String mFragmentShader; 44 | protected int mGLProgId; 45 | protected int mGLAttribPosition; 46 | protected int mGLUniformTexture; 47 | protected int mGLAttribTextureCoordinate; 48 | protected int mOutputWidth; 49 | protected int mOutputHeight; 50 | private boolean mIsInitialized; 51 | 52 | public GPUImageFilter() { 53 | this(NO_FILTER_VERTEX_SHADER, NO_FILTER_FRAGMENT_SHADER); 54 | } 55 | 56 | public GPUImageFilter(final String vertexShader, final String fragmentShader) { 57 | mRunOnDraw = new LinkedList(); 58 | mVertexShader = vertexShader; 59 | mFragmentShader = fragmentShader; 60 | } 61 | 62 | public final void init() { 63 | onInit(); 64 | mIsInitialized = true; 65 | onInitialized(); 66 | } 67 | 68 | public void onInit() { 69 | mGLProgId = GLESTools.createProgram(mVertexShader, mFragmentShader); 70 | mGLAttribPosition = GLES20.glGetAttribLocation(mGLProgId, "position"); 71 | mGLUniformTexture = GLES20.glGetUniformLocation(mGLProgId, "inputImageTexture"); 72 | mGLAttribTextureCoordinate = GLES20.glGetAttribLocation(mGLProgId, 73 | "inputTextureCoordinate"); 74 | mIsInitialized = true; 75 | } 76 | 77 | public void onInitialized() { 78 | } 79 | 80 | public final void destroy() { 81 | mIsInitialized = false; 82 | GLES20.glDeleteProgram(mGLProgId); 83 | onDestroy(); 84 | } 85 | 86 | public void onDestroy() { 87 | } 88 | 89 | public void onOutputSizeChanged(final int width, final int height) { 90 | mOutputWidth = width; 91 | mOutputHeight = height; 92 | } 93 | 94 | public void onDraw(final int textureId, final FloatBuffer cubeBuffer, 95 | final FloatBuffer textureBuffer) { 96 | GLES20.glUseProgram(mGLProgId); 97 | runPendingOnDrawTasks(); 98 | if (!mIsInitialized) { 99 | return; 100 | } 101 | 102 | cubeBuffer.position(0); 103 | GLES20.glVertexAttribPointer(mGLAttribPosition, 2, GLES20.GL_FLOAT, false, 0, 
cubeBuffer); 104 | GLES20.glEnableVertexAttribArray(mGLAttribPosition); 105 | textureBuffer.position(0); 106 | GLES20.glVertexAttribPointer(mGLAttribTextureCoordinate, 2, GLES20.GL_FLOAT, false, 0, 107 | textureBuffer); 108 | GLES20.glEnableVertexAttribArray(mGLAttribTextureCoordinate); 109 | if (textureId != GLESTools.NO_TEXTURE) { 110 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 111 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); 112 | GLES20.glUniform1i(mGLUniformTexture, 0); 113 | } 114 | onDrawArraysPre(); 115 | GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); 116 | GLES20.glDisableVertexAttribArray(mGLAttribPosition); 117 | GLES20.glDisableVertexAttribArray(mGLAttribTextureCoordinate); 118 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); 119 | } 120 | 121 | protected void onDrawArraysPre() {} 122 | 123 | protected void runPendingOnDrawTasks() { 124 | while (!mRunOnDraw.isEmpty()) { 125 | mRunOnDraw.removeFirst().run(); 126 | } 127 | } 128 | 129 | public boolean isInitialized() { 130 | return mIsInitialized; 131 | } 132 | 133 | public int getOutputWidth() { 134 | return mOutputWidth; 135 | } 136 | 137 | public int getOutputHeight() { 138 | return mOutputHeight; 139 | } 140 | 141 | public int getProgram() { 142 | return mGLProgId; 143 | } 144 | 145 | public int getAttribPosition() { 146 | return mGLAttribPosition; 147 | } 148 | 149 | public int getAttribTextureCoordinate() { 150 | return mGLAttribTextureCoordinate; 151 | } 152 | 153 | public int getUniformTexture() { 154 | return mGLUniformTexture; 155 | } 156 | 157 | protected void setInteger(final int location, final int intValue) { 158 | runOnDraw(new Runnable() { 159 | @Override 160 | public void run() { 161 | GLES20.glUniform1i(location, intValue); 162 | } 163 | }); 164 | } 165 | 166 | protected void setFloat(final int location, final float floatValue) { 167 | runOnDraw(new Runnable() { 168 | @Override 169 | public void run() { 170 | GLES20.glUniform1f(location, floatValue); 171 | } 172 | }); 173 | } 174 | 175 | protected void setFloatVec2(final int location, final float[] arrayValue) { 176 | runOnDraw(new Runnable() { 177 | @Override 178 | public void run() { 179 | GLES20.glUniform2fv(location, 1, FloatBuffer.wrap(arrayValue)); 180 | } 181 | }); 182 | } 183 | 184 | protected void setFloatVec3(final int location, final float[] arrayValue) { 185 | runOnDraw(new Runnable() { 186 | @Override 187 | public void run() { 188 | GLES20.glUniform3fv(location, 1, FloatBuffer.wrap(arrayValue)); 189 | } 190 | }); 191 | } 192 | 193 | protected void setFloatVec4(final int location, final float[] arrayValue) { 194 | runOnDraw(new Runnable() { 195 | @Override 196 | public void run() { 197 | GLES20.glUniform4fv(location, 1, FloatBuffer.wrap(arrayValue)); 198 | } 199 | }); 200 | } 201 | 202 | protected void setFloatArray(final int location, final float[] arrayValue) { 203 | runOnDraw(new Runnable() { 204 | @Override 205 | public void run() { 206 | GLES20.glUniform1fv(location, arrayValue.length, FloatBuffer.wrap(arrayValue)); 207 | } 208 | }); 209 | } 210 | 211 | protected void setPoint(final int location, final PointF point) { 212 | runOnDraw(new Runnable() { 213 | 214 | @Override 215 | public void run() { 216 | float[] vec2 = new float[2]; 217 | vec2[0] = point.x; 218 | vec2[1] = point.y; 219 | GLES20.glUniform2fv(location, 1, vec2, 0); 220 | } 221 | }); 222 | } 223 | 224 | protected void setUniformMatrix3f(final int location, final float[] matrix) { 225 | runOnDraw(new Runnable() { 226 | 227 | @Override 228 | public void run() { 229 
| GLES20.glUniformMatrix3fv(location, 1, false, matrix, 0); 230 | } 231 | }); 232 | } 233 | 234 | protected void setUniformMatrix4f(final int location, final float[] matrix) { 235 | runOnDraw(new Runnable() { 236 | 237 | @Override 238 | public void run() { 239 | GLES20.glUniformMatrix4fv(location, 1, false, matrix, 0); 240 | } 241 | }); 242 | } 243 | 244 | protected void runOnDraw(final Runnable runnable) { 245 | synchronized (mRunOnDraw) { 246 | mRunOnDraw.addLast(runnable); 247 | } 248 | } 249 | 250 | public static String loadShader(String file, Context context) { 251 | try { 252 | AssetManager assetManager = context.getAssets(); 253 | InputStream ims = assetManager.open(file); 254 | 255 | String re = convertStreamToString(ims); 256 | ims.close(); 257 | return re; 258 | } catch (Exception e) { 259 | e.printStackTrace(); 260 | } 261 | 262 | return ""; 263 | } 264 | 265 | public static String convertStreamToString(InputStream is) { 266 | java.util.Scanner s = new java.util.Scanner(is).useDelimiter("\\A"); 267 | return s.hasNext() ? s.next() : ""; 268 | } 269 | } 270 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/hardvideofilter/BaseHardVideoFilter.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.filter.hardvideofilter; 2 | 3 | import com.icechn.videorecorder.core.GLHelper; 4 | import com.icechn.videorecorder.model.Size; 5 | 6 | import java.nio.FloatBuffer; 7 | import java.nio.ShortBuffer; 8 | 9 | /** 10 | * Created by lake on 16-5-31. 11 | */ 12 | public class BaseHardVideoFilter { 13 | protected int outVideoWidth; 14 | protected int outVideoHeight; 15 | protected int directionFlag=-1; 16 | protected ShortBuffer drawIndecesBuffer; 17 | 18 | public void onInit(int videoWidth, int videoHeight) { 19 | outVideoWidth = videoWidth; 20 | outVideoHeight = videoHeight; 21 | drawIndecesBuffer = GLHelper.getDrawIndecesBuffer(); 22 | } 23 | 24 | public void onDraw(final int cameraTexture, final int targetFrameBuffer, final FloatBuffer shapeBuffer, final FloatBuffer textrueBuffer) { 25 | } 26 | 27 | public void onDestroy() { 28 | 29 | } 30 | 31 | public void onDirectionUpdate(int _directionFlag) { 32 | this.directionFlag = _directionFlag; 33 | } 34 | 35 | protected int previewWidth;//横屏 36 | protected int previewHeight;//横屏 37 | protected Size previewSize; 38 | public void updatePreviewSize(int width, int height) { 39 | previewWidth = width; 40 | previewHeight = height; 41 | previewSize = new Size(width, height); 42 | } 43 | 44 | protected boolean isSquare; 45 | public void updateSquareFlag(boolean isSquare) { 46 | this.isSquare = isSquare; 47 | } 48 | 49 | protected float mCropRatio = 0; 50 | public void updateCropRatio(float cropRatio) { 51 | mCropRatio = cropRatio; 52 | } 53 | 54 | } 55 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/hardvideofilter/GaussianBlurHardFilter.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.filter.hardvideofilter; 2 | 3 | import android.content.Context; 4 | import android.opengl.GLES11Ext; 5 | import android.opengl.GLES20; 6 | 7 | import com.icechn.videorecorder.tools.GLESTools; 8 | 9 | import java.nio.FloatBuffer; 10 | 11 | /** 12 | * Created by lake on 20/02/17. 13 | * libREStreaming project. 
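 *
 * Gaussian blur whose radius is compiled into the fragment shader as a
 * GAUSSIAN_BLUR_RADIUS #define at init time, so changing the radius means
 * recreating the filter. Note that onDraw() below sets the step uniform
 * twice before its single draw call, so only the second (vertical) step
 * value actually takes effect. A construction sketch; the radius value is
 * illustrative:
 * <pre>{@code
 * BaseHardVideoFilter blur = new GaussianBlurHardFilter(context, 4);
 * }</pre>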
14 | */ 15 | 16 | public class GaussianBlurHardFilter extends BaseHardVideoFilter { 17 | private int blurRadius; 18 | protected int glProgram; 19 | protected int glTextureLoc; 20 | protected int glCamPostionLoc; 21 | protected int glCamTextureCoordLoc; 22 | protected int glStepLoc; 23 | protected int glIgnoreRect; 24 | protected String vertexShader_filter = ""; 25 | protected String fragmentshader_filter = ""; 26 | 27 | public GaussianBlurHardFilter(Context context, int blurRadius) { 28 | this.blurRadius = blurRadius; 29 | this.vertexShader_filter = GLESTools.uRes(context.getResources(), "gaussian_vertex.sh"); 30 | this.fragmentshader_filter = GLESTools.uRes(context.getResources(), "gaussian_fragment.sh"); 31 | } 32 | 33 | @Override 34 | public void onInit(int videoWidth, int videoHeight) { 35 | super.onInit(videoWidth, videoHeight); 36 | String fragShader = "#define GAUSSIAN_BLUR_RADIUS "+blurRadius+".\n"+fragmentshader_filter; 37 | glProgram = GLESTools.createProgram(vertexShader_filter, fragShader); 38 | GLES20.glUseProgram(glProgram); 39 | glTextureLoc = GLES20.glGetUniformLocation(glProgram, "uCamTexture"); 40 | glCamPostionLoc = GLES20.glGetAttribLocation(glProgram, "aCamPosition"); 41 | glCamTextureCoordLoc = GLES20.glGetAttribLocation(glProgram, "aCamTextureCoord"); 42 | glStepLoc = GLES20.glGetUniformLocation(glProgram,"step"); 43 | glIgnoreRect = GLES20.glGetUniformLocation(glProgram,"ignoreRect"); 44 | } 45 | 46 | @Override 47 | public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textureBuffer) { 48 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer); 49 | GLES20.glUseProgram(glProgram); 50 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 51 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture); 52 | GLES20.glUniform1i(glTextureLoc, 0); 53 | GLES20.glUniform2f(glStepLoc,1f/ outVideoWidth,0f); 54 | GLES20.glEnableVertexAttribArray(glCamPostionLoc); 55 | GLES20.glEnableVertexAttribArray(glCamTextureCoordLoc); 56 | shapeBuffer.position(0); 57 | GLES20.glVertexAttribPointer(glCamPostionLoc, 2, 58 | GLES20.GL_FLOAT, false, 59 | 2 * 4, shapeBuffer); 60 | textureBuffer.position(0); 61 | GLES20.glVertexAttribPointer(glCamTextureCoordLoc, 2, 62 | GLES20.GL_FLOAT, false, 63 | 2 * 4, textureBuffer); 64 | GLES20.glViewport(0, 0, outVideoWidth, outVideoHeight); 65 | GLES20.glUniform1i(glTextureLoc, 0); 66 | GLES20.glUniform2f(glStepLoc,0f,1f/ outVideoHeight); 67 | GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); 68 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 69 | GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer); 70 | GLES20.glFinish(); 71 | 72 | GLES20.glDisableVertexAttribArray(glCamPostionLoc); 73 | GLES20.glDisableVertexAttribArray(glCamTextureCoordLoc); 74 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); 75 | GLES20.glUseProgram(0); 76 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); 77 | } 78 | 79 | @Override 80 | public void onDestroy() { 81 | super.onDestroy(); 82 | GLES20.glDeleteProgram(glProgram); 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/hardvideofilter/HardVideoGroupFilter.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.filter.hardvideofilter; 2 | 3 | import android.opengl.GLES20; 4 | 5 | import com.icechn.videorecorder.tools.GLESTools; 6 | 7 | import 
java.nio.FloatBuffer; 8 | import java.util.LinkedList; 9 | import java.util.List; 10 | 11 | /** 12 | * Created by lake on 07/06/16. 13 | */ 14 | public class HardVideoGroupFilter extends BaseHardVideoFilter { 15 | private LinkedList filterWrappers; 16 | 17 | public HardVideoGroupFilter(List filters) { 18 | if (filters == null || filters.isEmpty()) { 19 | throw new IllegalArgumentException("can not create empty GroupFilter"); 20 | } 21 | filterWrappers = new LinkedList(); 22 | for (BaseHardVideoFilter filter : filters) { 23 | filterWrappers.add(new FilterWrapper(filter)); 24 | } 25 | } 26 | 27 | @Override 28 | public void onInit(int VWidth, int VHeight) { 29 | super.onInit(VWidth, VHeight); 30 | int i = 0; 31 | for (FilterWrapper wrapper : filterWrappers) { 32 | wrapper.filter.onInit(VWidth, VHeight); 33 | int[] frameBuffer = new int[1]; 34 | int[] frameBufferTexture = new int[1]; 35 | GLESTools.createFrameBuff(frameBuffer, 36 | frameBufferTexture, 37 | outVideoWidth, 38 | outVideoHeight); 39 | wrapper.frameBuffer = frameBuffer[0]; 40 | wrapper.frameBufferTexture = frameBufferTexture[0]; 41 | i++; 42 | } 43 | } 44 | 45 | 46 | @Override 47 | public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) { 48 | FilterWrapper preFilterWrapper = null; 49 | int i = 0; 50 | int texture; 51 | for (FilterWrapper wrapper : filterWrappers) { 52 | if (preFilterWrapper == null) { 53 | texture = cameraTexture; 54 | } else { 55 | texture = preFilterWrapper.frameBufferTexture; 56 | } 57 | if (i == (filterWrappers.size() - 1)) { 58 | wrapper.filter.onDraw(texture, targetFrameBuffer, shapeBuffer, textrueBuffer); 59 | } else { 60 | wrapper.filter.onDraw(texture, wrapper.frameBuffer, shapeBuffer, textrueBuffer); 61 | } 62 | preFilterWrapper = wrapper; 63 | i++; 64 | } 65 | } 66 | 67 | @Override 68 | public void onDestroy() { 69 | super.onDestroy(); 70 | for (FilterWrapper wrapper : filterWrappers) { 71 | wrapper.filter.onDestroy(); 72 | GLES20.glDeleteFramebuffers(1, new int[]{wrapper.frameBuffer}, 0); 73 | GLES20.glDeleteTextures(1, new int[]{wrapper.frameBufferTexture}, 0); 74 | } 75 | } 76 | 77 | @Override 78 | public void onDirectionUpdate(int _directionFlag) { 79 | super.onDirectionUpdate(_directionFlag); 80 | for (FilterWrapper wrapper : filterWrappers) { 81 | wrapper.filter.onDirectionUpdate(_directionFlag); 82 | } 83 | } 84 | 85 | private class FilterWrapper { 86 | BaseHardVideoFilter filter; 87 | int frameBuffer; 88 | int frameBufferTexture; 89 | 90 | FilterWrapper(BaseHardVideoFilter filter) { 91 | this.filter = filter; 92 | } 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/hardvideofilter/MirrorHardVideoFilter.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.filter.hardvideofilter; 2 | 3 | import android.content.Context; 4 | import android.opengl.GLES11Ext; 5 | import android.opengl.GLES20; 6 | 7 | 8 | import com.icechn.videorecorder.tools.GLESTools; 9 | 10 | import java.nio.FloatBuffer; 11 | 12 | /** 13 | * Created by ICE on 2018/2/2. 
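 *
 * Mirrors the camera frame using the mirror_vertex.sh and
 * mirror_fragment.sh shaders loaded from the app's assets.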
14 | */ 15 | 16 | public class MirrorHardVideoFilter extends BaseHardVideoFilter { 17 | protected int glProgram; 18 | protected int glTextureLoc; 19 | protected int glCamPostionLoc; 20 | protected int glCamTextureCoordLoc; 21 | 22 | protected String vertexShader_filter = ""; 23 | protected String fragmentshader_filter = ""; 24 | 25 | public MirrorHardVideoFilter(Context context) { 26 | super(); 27 | this.vertexShader_filter = GLESTools.uRes(context.getResources(), "mirror_vertex.sh"); 28 | this.fragmentshader_filter = GLESTools.uRes(context.getResources(), "mirror_fragment.sh"); 29 | } 30 | 31 | @Override 32 | public void onInit(int videoWidth, int videoHeight) { 33 | super.onInit(videoWidth, videoHeight); 34 | glProgram = GLESTools.createProgram(vertexShader_filter, fragmentshader_filter); 35 | GLES20.glUseProgram(glProgram); 36 | glTextureLoc = GLES20.glGetUniformLocation(glProgram, "uCamTexture"); 37 | glCamPostionLoc = GLES20.glGetAttribLocation(glProgram, "aCamPosition"); 38 | glCamTextureCoordLoc = GLES20.glGetAttribLocation(glProgram, "aCamTextureCoord"); 39 | 40 | } 41 | 42 | @Override 43 | public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textureBuffer) { 44 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer); 45 | GLES20.glUseProgram(glProgram); 46 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 47 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture); 48 | GLES20.glUniform1i(glTextureLoc, 0); 49 | GLES20.glEnableVertexAttribArray(glCamPostionLoc); 50 | GLES20.glEnableVertexAttribArray(glCamTextureCoordLoc); 51 | shapeBuffer.position(0); 52 | GLES20.glVertexAttribPointer(glCamPostionLoc, 2, 53 | GLES20.GL_FLOAT, false, 54 | 2 * 4, shapeBuffer); 55 | textureBuffer.position(0); 56 | GLES20.glVertexAttribPointer(glCamTextureCoordLoc, 2, 57 | GLES20.GL_FLOAT, false, 58 | 2 * 4, textureBuffer); 59 | 60 | GLES20.glViewport(0, 0, outVideoWidth, outVideoHeight); 61 | GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); 62 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 63 | GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer); 64 | GLES20.glFinish(); 65 | 66 | GLES20.glDisableVertexAttribArray(glCamPostionLoc); 67 | GLES20.glDisableVertexAttribArray(glCamTextureCoordLoc); 68 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); 69 | GLES20.glUseProgram(0); 70 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/hardvideofilter/OriginalHardVideoFilter.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.filter.hardvideofilter; 2 | 3 | import android.opengl.GLES11Ext; 4 | import android.opengl.GLES20; 5 | 6 | 7 | import com.icechn.videorecorder.tools.GLESTools; 8 | 9 | import java.nio.FloatBuffer; 10 | 11 | /** 12 | * Created by lake on 16-5-31. 
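 *
 * Pass-through filter and common base class: it compiles the supplied
 * vertex/fragment shaders (or identity defaults when null) and exposes
 * onPreDraw()/onAfterDraw() hooks for subclasses such as
 * SkinBlurHardVideoFilter. A custom-shader sketch; the grayscale fragment
 * source is illustrative, only the attribute/uniform names come from the
 * default shaders below:
 * <pre>{@code
 * String grayFragment = "" +
 *         "precision highp float;\n" +
 *         "varying highp vec2 vCamTextureCoord;\n" +
 *         "uniform sampler2D uCamTexture;\n" +
 *         "void main(){\n" +
 *         "    float y = dot(texture2D(uCamTexture, vCamTextureCoord).rgb, vec3(0.299, 0.587, 0.114));\n" +
 *         "    gl_FragColor = vec4(vec3(y), 1.0);\n" +
 *         "}";
 * BaseHardVideoFilter gray = new OriginalHardVideoFilter(null, grayFragment);
 * }</pre>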
13 | */ 14 | public class OriginalHardVideoFilter extends BaseHardVideoFilter { 15 | protected int glProgram; 16 | protected int glTextureLoc; 17 | protected int glCamPostionLoc; 18 | protected int glCamTextureCoordLoc; 19 | protected String vertexShader_filter = "" + 20 | "attribute vec4 aCamPosition;\n" + 21 | "attribute vec2 aCamTextureCoord;\n" + 22 | "varying vec2 vCamTextureCoord;\n" + 23 | "void main(){\n" + 24 | " gl_Position= aCamPosition;\n" + 25 | " vCamTextureCoord = aCamTextureCoord;\n" + 26 | "}"; 27 | protected String fragmentshader_filter = "" + 28 | "precision highp float;\n" + 29 | "varying highp vec2 vCamTextureCoord;\n" + 30 | "uniform sampler2D uCamTexture;\n" + 31 | "void main(){\n" + 32 | " vec4 color = texture2D(uCamTexture, vCamTextureCoord);\n" + 33 | " gl_FragColor = color;\n" + 34 | "}"; 35 | 36 | public OriginalHardVideoFilter(String vertexShaderCode, String fragmentShaderCode) { 37 | if (vertexShaderCode != null) { 38 | vertexShader_filter = vertexShaderCode; 39 | } 40 | if (fragmentShaderCode != null) { 41 | fragmentshader_filter = fragmentShaderCode; 42 | } 43 | } 44 | 45 | @Override 46 | public void onInit(int VWidth, int VHeight) { 47 | super.onInit(VWidth, VHeight); 48 | glProgram = GLESTools.createProgram(vertexShader_filter, fragmentshader_filter); 49 | GLES20.glUseProgram(glProgram); 50 | glTextureLoc = GLES20.glGetUniformLocation(glProgram, "uCamTexture"); 51 | glCamPostionLoc = GLES20.glGetAttribLocation(glProgram, "aCamPosition"); 52 | glCamTextureCoordLoc = GLES20.glGetAttribLocation(glProgram, "aCamTextureCoord"); 53 | } 54 | 55 | 56 | @Override 57 | public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) { 58 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer); 59 | GLES20.glUseProgram(glProgram); 60 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 61 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture); 62 | GLES20.glUniform1i(glTextureLoc, 0); 63 | GLES20.glEnableVertexAttribArray(glCamPostionLoc); 64 | GLES20.glEnableVertexAttribArray(glCamTextureCoordLoc); 65 | shapeBuffer.position(0); 66 | GLES20.glVertexAttribPointer(glCamPostionLoc, 2, 67 | GLES20.GL_FLOAT, false, 68 | 2 * 4, shapeBuffer); 69 | textrueBuffer.position(0); 70 | GLES20.glVertexAttribPointer(glCamTextureCoordLoc, 2, 71 | GLES20.GL_FLOAT, false, 72 | 2 * 4, textrueBuffer); 73 | onPreDraw(); 74 | GLES20.glViewport(0, 0, outVideoWidth, outVideoHeight); 75 | GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); 76 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 77 | GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer); 78 | GLES20.glFinish(); 79 | onAfterDraw(); 80 | GLES20.glDisableVertexAttribArray(glCamPostionLoc); 81 | GLES20.glDisableVertexAttribArray(glCamTextureCoordLoc); 82 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); 83 | GLES20.glUseProgram(0); 84 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); 85 | } 86 | 87 | protected void onPreDraw() { 88 | 89 | } 90 | 91 | protected void onAfterDraw() { 92 | 93 | } 94 | 95 | @Override 96 | public void onDestroy() { 97 | super.onDestroy(); 98 | GLES20.glDeleteProgram(glProgram); 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/hardvideofilter/SkinBlurHardVideoFilter.java: -------------------------------------------------------------------------------- 1 | package 
com.icechn.videorecorder.filter.hardvideofilter;
2 | 
3 | import android.content.Context;
4 | import android.opengl.GLES20;
5 | 
6 | import com.icechn.videorecorder.tools.GLESTools;
7 | 
8 | 
9 | /**
10 | * Skin-smoothing filter.
11 | * sigma = 0.1. Optimized selective Gaussian blur.
12 | */
13 | public class SkinBlurHardVideoFilter extends OriginalHardVideoFilter {
14 | private int xStepLoc;
15 | private int yStepLoc;
16 | private float stepScale;
17 | 
18 | /**
19 | * @param stepScale suggested values: 2 for 480p, 3 for 720p
20 | */
21 | public SkinBlurHardVideoFilter(Context context, int stepScale) {
22 | super(null, GLESTools.uRes(context.getResources(), "skinblur_fragment.sh"));
23 | this.stepScale = (float) stepScale;
24 | }
25 | 
26 | @Override
27 | public void onInit(int VWidth, int VHeight) {
28 | super.onInit(VWidth, VHeight);
29 | yStepLoc = GLES20.glGetUniformLocation(glProgram, "yStep");
30 | xStepLoc = GLES20.glGetUniformLocation(glProgram, "xStep");
31 | }
32 | 
33 | @Override
34 | protected void onPreDraw() {
35 | super.onPreDraw();
36 | GLES20.glUniform1f(xStepLoc, (float) (stepScale / outVideoWidth));
37 | GLES20.glUniform1f(yStepLoc, (float) (stepScale / outVideoHeight));
38 | }
39 | }
40 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/hardvideofilter/SplitHardVideoFilter.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.filter.hardvideofilter;
2 | 
3 | import android.content.Context;
4 | import android.opengl.GLES11Ext;
5 | import android.opengl.GLES20;
6 | 
7 | 
8 | import com.icechn.videorecorder.tools.GLESTools;
9 | 
10 | import java.nio.FloatBuffer;
11 | 
12 | /**
13 | * Splits the frame into an equal grid:
14 | * splitSquareBase = 1: no effect,
15 | * splitSquareBase = 2: 4 equal tiles,
16 | * splitSquareBase = 3: 9 equal tiles,
17 | * and so on.
18 | * A splitSquareBase of 2 or 3 is recommended.
19 | * Created by ICE on 2018/2/2. 20 | */ 21 | public class SplitHardVideoFilter extends BaseHardVideoFilter { 22 | protected int glProgram; 23 | protected int glTextureLoc; 24 | protected int glCamPostionLoc; 25 | protected int glCamTextureCoordLoc; 26 | 27 | protected String vertexShader_filter = "attribute vec4 aCamPosition;\n" + 28 | "attribute vec2 aCamTextureCoord;\n" + 29 | "varying vec2 vCamTextureCoord;\n" + 30 | "\n" + 31 | "void main()\n" + 32 | "{\n" + 33 | " gl_Position= aCamPosition;\n" + 34 | " vCamTextureCoord = aCamTextureCoord;\n" + 35 | "}"; 36 | protected String fragmentshader_filter = ""; 37 | 38 | private int mSplitSquareBase = 1; 39 | 40 | public SplitHardVideoFilter(Context context, int splitSquareBase) { 41 | super(); 42 | this.fragmentshader_filter = GLESTools.uRes(context.getResources(), "split_fragment.sh"); 43 | mSplitSquareBase = splitSquareBase; 44 | } 45 | 46 | @Override 47 | public void onInit(int videoWidth, int videoHeight) { 48 | super.onInit(videoWidth, videoHeight); 49 | String fragShader = "#define SPLIT_SQUARE_BASE "+mSplitSquareBase+".\n"+fragmentshader_filter; 50 | glProgram = GLESTools.createProgram(vertexShader_filter, fragShader); 51 | // glProgram = GLESTools.createProgram(vertexShader_filter, fragmentshader_filter); 52 | GLES20.glUseProgram(glProgram); 53 | glTextureLoc = GLES20.glGetUniformLocation(glProgram, "uCamTexture"); 54 | glCamPostionLoc = GLES20.glGetAttribLocation(glProgram, "aCamPosition"); 55 | glCamTextureCoordLoc = GLES20.glGetAttribLocation(glProgram, "aCamTextureCoord"); 56 | } 57 | 58 | @Override 59 | public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textureBuffer) { 60 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer); 61 | GLES20.glUseProgram(glProgram); 62 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 63 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture); 64 | GLES20.glUniform1i(glTextureLoc, 0); 65 | GLES20.glEnableVertexAttribArray(glCamPostionLoc); 66 | GLES20.glEnableVertexAttribArray(glCamTextureCoordLoc); 67 | shapeBuffer.position(0); 68 | GLES20.glVertexAttribPointer(glCamPostionLoc, 2, 69 | GLES20.GL_FLOAT, false, 70 | 2 * 4, shapeBuffer); 71 | textureBuffer.position(0); 72 | GLES20.glVertexAttribPointer(glCamTextureCoordLoc, 2, 73 | GLES20.GL_FLOAT, false, 74 | 2 * 4, textureBuffer); 75 | GLES20.glViewport(0, 0, outVideoWidth, outVideoHeight); 76 | GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); 77 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 78 | GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer); 79 | GLES20.glFinish(); 80 | 81 | GLES20.glDisableVertexAttribArray(glCamPostionLoc); 82 | GLES20.glDisableVertexAttribArray(glCamTextureCoordLoc); 83 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); 84 | GLES20.glUseProgram(0); 85 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); 86 | } 87 | } -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/hardvideofilter/SplitMirrorHardVideoFilter.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.filter.hardvideofilter; 2 | 3 | import android.content.Context; 4 | import android.opengl.GLES11Ext; 5 | import android.opengl.GLES20; 6 | 7 | 8 | import com.icechn.videorecorder.tools.GLESTools; 9 | 10 | import java.nio.FloatBuffer; 11 | 12 | /** 13 | * 4等分镜像效果 14 | * Created by ICE 
on 2018/2/2. 15 | */ 16 | public class SplitMirrorHardVideoFilter extends BaseHardVideoFilter { 17 | protected int glProgram; 18 | protected int glTextureLoc; 19 | protected int glCamPostionLoc; 20 | protected int glCamTextureCoordLoc; 21 | 22 | protected String vertexShader_filter = "attribute vec4 aCamPosition;\n" + 23 | "attribute vec2 aCamTextureCoord;\n" + 24 | "varying vec2 vCamTextureCoord;\n" + 25 | "\n" + 26 | "void main()\n" + 27 | "{\n" + 28 | " gl_Position= aCamPosition;\n" + 29 | " vCamTextureCoord = aCamTextureCoord;\n" + 30 | "}"; 31 | protected String fragmentshader_filter = ""; 32 | 33 | public SplitMirrorHardVideoFilter(Context context) { 34 | super(); 35 | this.fragmentshader_filter = GLESTools.uRes(context.getResources(), "split_mirror_fragment.sh"); 36 | } 37 | 38 | @Override 39 | public void onInit(int videoWidth, int videoHeight) { 40 | super.onInit(videoWidth, videoHeight); 41 | glProgram = GLESTools.createProgram(vertexShader_filter, fragmentshader_filter); 42 | GLES20.glUseProgram(glProgram); 43 | glTextureLoc = GLES20.glGetUniformLocation(glProgram, "uCamTexture"); 44 | glCamPostionLoc = GLES20.glGetAttribLocation(glProgram, "aCamPosition"); 45 | glCamTextureCoordLoc = GLES20.glGetAttribLocation(glProgram, "aCamTextureCoord"); 46 | } 47 | 48 | @Override 49 | public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textureBuffer) { 50 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer); 51 | GLES20.glUseProgram(glProgram); 52 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 53 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture); 54 | GLES20.glUniform1i(glTextureLoc, 0); 55 | GLES20.glEnableVertexAttribArray(glCamPostionLoc); 56 | GLES20.glEnableVertexAttribArray(glCamTextureCoordLoc); 57 | shapeBuffer.position(0); 58 | GLES20.glVertexAttribPointer(glCamPostionLoc, 2, 59 | GLES20.GL_FLOAT, false, 60 | 2 * 4, shapeBuffer); 61 | textureBuffer.position(0); 62 | GLES20.glVertexAttribPointer(glCamTextureCoordLoc, 2, 63 | GLES20.GL_FLOAT, false, 64 | 2 * 4, textureBuffer); 65 | GLES20.glViewport(0, 0, outVideoWidth, outVideoHeight); 66 | GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); 67 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 68 | GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer); 69 | GLES20.glFinish(); 70 | 71 | GLES20.glDisableVertexAttribArray(glCamPostionLoc); 72 | GLES20.glDisableVertexAttribArray(glCamTextureCoordLoc); 73 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); 74 | GLES20.glUseProgram(0); 75 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); 76 | } 77 | } -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/hardvideofilter/WhiteningHardVideoFilter.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.filter.hardvideofilter; 2 | 3 | import android.opengl.GLES11Ext; 4 | import android.opengl.GLES20; 5 | 6 | 7 | import com.icechn.videorecorder.tools.GLESTools; 8 | 9 | import java.nio.ByteBuffer; 10 | import java.nio.ByteOrder; 11 | import java.nio.FloatBuffer; 12 | 13 | /** 14 | */ 15 | public class WhiteningHardVideoFilter extends BaseHardVideoFilter { 16 | byte[] colorMap; 17 | 18 | public WhiteningHardVideoFilter() { 19 | colorMap = new byte[1024]; 20 | int cur = -1; 21 | for (int i = 0; i < 256; i++) { 22 | colorMap[++cur] = ((byte) (int) (255 * Math.pow(i / 
255.0, 0.7))); // red: gamma 0.7
23 | colorMap[++cur] = ((byte) (int) (255 * Math.pow(i / 255.0, 0.7))); // green: gamma 0.7
24 | colorMap[++cur] = ((byte) (int) (255 * Math.pow(i / 255.0, 0.65))); // blue: gamma 0.65, a slightly stronger lift
25 | colorMap[++cur] = 0; // alpha slot, unused by the shader
26 | }
27 | }
28 | 
29 | protected int glProgram;
30 | protected int glCamTextureLoc;
31 | protected int glCamPostionLoc;
32 | protected int glCamTextureCoordLoc;
33 | protected int glColorMapTextureLoc;
34 | protected static String VERTEXSHADER = "" +
35 | "attribute vec4 aCamPosition;\n" +
36 | "attribute vec2 aCamTextureCoord;\n" +
37 | "varying vec2 vCamTextureCoord;\n" +
38 | "void main(){\n" +
39 | " gl_Position= aCamPosition;\n" +
40 | " vCamTextureCoord = aCamTextureCoord;\n" +
41 | "}";
42 | protected static String FRAGMENTSHADER = "" +
43 | "precision highp float;\n" +
44 | "varying highp vec2 vCamTextureCoord;\n" +
45 | "uniform sampler2D uCamTexture;\n" +
46 | "uniform sampler2D uColorMapTexture;\n" +
47 | "void main(){\n" +
48 | " vec4 c1 = texture2D(uCamTexture, vCamTextureCoord);\n" +
49 | " float r = texture2D(uColorMapTexture, vec2(c1.r,0.0)).r;\n" +
50 | " float g = texture2D(uColorMapTexture, vec2(c1.g,0.0)).g;\n" +
51 | " float b = texture2D(uColorMapTexture, vec2(c1.b,0.0)).b;\n" +
52 | " gl_FragColor = vec4(r,g,b,1.0);\n" +
53 | "}";
54 | protected int imageTexture;
55 | 
56 | 
57 | @Override
58 | public void onInit(int VWidth, int VHeight) {
59 | super.onInit(VWidth, VHeight);
60 | int texture[] = new int[1];
61 | GLES20.glGenTextures(1, texture, 0);
62 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture[0]);
63 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
64 | GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
65 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
66 | GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
67 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
68 | GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
69 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
70 | GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
71 | ByteBuffer result = ByteBuffer.allocateDirect(colorMap.length).
72 | order(ByteOrder.nativeOrder()); 73 | result.position(0); 74 | result.put(colorMap); 75 | result.position(0); 76 | GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, 256, 1, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, result); 77 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); 78 | imageTexture = texture[0]; 79 | glProgram = GLESTools.createProgram(VERTEXSHADER, FRAGMENTSHADER); 80 | GLES20.glUseProgram(glProgram); 81 | glCamTextureLoc = GLES20.glGetUniformLocation(glProgram, "uCamTexture"); 82 | glColorMapTextureLoc = GLES20.glGetUniformLocation(glProgram, "uColorMapTexture"); 83 | glCamPostionLoc = GLES20.glGetAttribLocation(glProgram, "aCamPosition"); 84 | glCamTextureCoordLoc = GLES20.glGetAttribLocation(glProgram, "aCamTextureCoord"); 85 | 86 | } 87 | 88 | @Override 89 | public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) { 90 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer); 91 | GLES20.glUseProgram(glProgram); 92 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 93 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture); 94 | GLES20.glUniform1i(glCamTextureLoc, 0); 95 | GLES20.glActiveTexture(GLES20.GL_TEXTURE1); 96 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, imageTexture); 97 | GLES20.glUniform1i(glColorMapTextureLoc, 1); 98 | GLES20.glEnableVertexAttribArray(glCamPostionLoc); 99 | GLES20.glEnableVertexAttribArray(glCamTextureCoordLoc); 100 | shapeBuffer.position(0); 101 | GLES20.glVertexAttribPointer(glCamPostionLoc, 2, 102 | GLES20.GL_FLOAT, false, 103 | 2 * 4, shapeBuffer); 104 | textrueBuffer.position(0); 105 | GLES20.glVertexAttribPointer(glCamTextureCoordLoc, 2, 106 | GLES20.GL_FLOAT, false, 107 | 2 * 4, textrueBuffer); 108 | GLES20.glViewport(0, 0, outVideoWidth, outVideoHeight); 109 | GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); 110 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 111 | GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer); 112 | GLES20.glFinish(); 113 | GLES20.glDisableVertexAttribArray(glCamPostionLoc); 114 | GLES20.glDisableVertexAttribArray(glCamTextureCoordLoc); 115 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); 116 | GLES20.glUseProgram(0); 117 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); 118 | } 119 | 120 | @Override 121 | public void onDestroy() { 122 | super.onDestroy(); 123 | GLES20.glDeleteProgram(glProgram); 124 | GLES20.glDeleteTextures(1, new int[]{imageTexture}, 0); 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/image/BaseDrawImageFilter.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.filter.image; 2 | 3 | import android.opengl.GLES11Ext; 4 | import android.opengl.GLES20; 5 | 6 | import java.nio.FloatBuffer; 7 | 8 | import com.icechn.videorecorder.filter.hardvideofilter.BaseHardVideoFilter; 9 | import com.icechn.videorecorder.tools.GLESTools; 10 | 11 | /** 12 | * Created by ICE on 2017/11/6. 
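 *
 * Draws the incoming camera frame with the default shaders from
 * ImageDrawConstants and exposes onPreDraw()/onAfterDraw() hooks; it is
 * the base class for the image overlay (watermark) filters below.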
13 | */ 14 | 15 | public class BaseDrawImageFilter extends BaseHardVideoFilter { 16 | protected int glDefaultProgram; 17 | protected int glDefaultTextureLoc; 18 | protected int glDefaultCamPostionLoc; 19 | protected int glDefaultCamTextureCoordLoc; 20 | 21 | public BaseDrawImageFilter() { 22 | } 23 | 24 | @Override 25 | public void onInit(int videoWidth, int videoHeight) { 26 | super.onInit(videoWidth, videoHeight); 27 | glDefaultProgram = GLESTools.createProgram( 28 | ImageDrawConstants.Default_vertexShader_filter, 29 | ImageDrawConstants.Default_fragmentshader_filter); 30 | GLES20.glUseProgram(glDefaultProgram); 31 | glDefaultTextureLoc = GLES20.glGetUniformLocation(glDefaultProgram, "uCamTexture"); 32 | glDefaultCamPostionLoc = GLES20.glGetAttribLocation(glDefaultProgram, "aCamPosition"); 33 | glDefaultCamTextureCoordLoc = GLES20.glGetAttribLocation(glDefaultProgram, "aCamTextureCoord"); 34 | } 35 | 36 | 37 | @Override 38 | public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) { 39 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer); 40 | GLES20.glUseProgram(glDefaultProgram); 41 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 42 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture); 43 | GLES20.glUniform1i(glDefaultTextureLoc, 0); 44 | GLES20.glEnableVertexAttribArray(glDefaultCamPostionLoc); 45 | GLES20.glEnableVertexAttribArray(glDefaultCamTextureCoordLoc); 46 | shapeBuffer.position(0); 47 | GLES20.glVertexAttribPointer(glDefaultCamPostionLoc, 2, 48 | GLES20.GL_FLOAT, false, 49 | 2 * 4, shapeBuffer); 50 | textrueBuffer.position(0); 51 | GLES20.glVertexAttribPointer(glDefaultCamTextureCoordLoc, 2, 52 | GLES20.GL_FLOAT, false, 53 | 2 * 4, textrueBuffer); 54 | onPreDraw(); 55 | GLES20.glViewport(0, 0, outVideoWidth, outVideoHeight); 56 | GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); 57 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 58 | GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer); 59 | GLES20.glFinish(); 60 | onAfterDraw(); 61 | GLES20.glDisableVertexAttribArray(glDefaultCamPostionLoc); 62 | GLES20.glDisableVertexAttribArray(glDefaultCamTextureCoordLoc); 63 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); 64 | GLES20.glUseProgram(0); 65 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); 66 | } 67 | 68 | protected void onPreDraw() { 69 | 70 | } 71 | 72 | protected void onAfterDraw() { 73 | 74 | } 75 | 76 | @Override 77 | public void onDestroy() { 78 | super.onDestroy(); 79 | GLES20.glDeleteProgram(glDefaultProgram); 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/image/DrawMultiImageFilter.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.filter.image; 2 | 3 | import android.content.Context; 4 | import android.graphics.Rect; 5 | import android.graphics.RectF; 6 | import android.opengl.GLES11Ext; 7 | import android.opengl.GLES20; 8 | 9 | import com.icechn.videorecorder.filter.hardvideofilter.BaseHardVideoFilter; 10 | import com.icechn.videorecorder.tools.GLESTools; 11 | 12 | import java.nio.FloatBuffer; 13 | import java.util.ArrayList; 14 | 15 | /** 16 | * Created by ICE on 2017/10/12. 
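 *
 * Overlays several images on the camera frame in a single filter: each
 * pass renders into the next ImageTexture's framebuffer and the final
 * pass into the target framebuffer. Each image is described by an
 * ImageDrawData (drawable resource id plus destination Rect in output
 * pixels). A setup sketch; the resource id and coordinates are
 * illustrative:
 * <pre>{@code
 * ArrayList<DrawMultiImageFilter.ImageDrawData> infos = new ArrayList<>();
 * DrawMultiImageFilter.ImageDrawData data = new DrawMultiImageFilter.ImageDrawData();
 * data.resId = R.drawable.t;
 * data.rect = new Rect(20, 20, 276, 276);
 * infos.add(data);
 * BaseHardVideoFilter watermark = new DrawMultiImageFilter(context, infos);
 * }</pre>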
17 | */
18 | 
19 | public class DrawMultiImageFilter extends BaseHardVideoFilter {
20 | protected int glProgram;
21 | protected int glCamTextureLoc;
22 | protected int glCamPostionLoc;
23 | protected int glCamTextureCoordLoc;
24 | protected int glImageTextureLoc;
25 | protected int glImageRectLoc;
26 | protected int glImageAngelLoc;
27 | 
28 | protected Context mContext;
29 | private ArrayList<ImageDrawData> mImageInfos = new ArrayList<>();
30 | private ArrayList<ImageTexture> imageTextures = new ArrayList<>();
31 | private int mSize;
32 | 
33 | public DrawMultiImageFilter(Context context, ArrayList<ImageDrawData> imageInfos) {
34 | super();
35 | mContext = context;
36 | if (imageInfos == null || imageInfos.size() == 0) {
37 | throw new RuntimeException("imageInfos must not be empty");
38 | }
39 | this.mImageInfos.addAll(imageInfos);
40 | mSize = mImageInfos.size();
41 | }
42 | 
43 | @Override
44 | public void onInit(int videoWidth, int videoHeight) {
45 | super.onInit(videoWidth, videoHeight);
46 | glProgram = GLESTools.createProgram(GLESTools.uRes(mContext.getResources(), "drawimage_vertex.sh"),
47 | GLESTools.uRes(mContext.getResources(), "drawimage_fragment.sh"));
48 | GLES20.glUseProgram(glProgram);
49 | glCamTextureLoc = GLES20.glGetUniformLocation(glProgram, "uCamTexture");
50 | glImageTextureLoc = GLES20.glGetUniformLocation(glProgram, "uImageTexture");
51 | glCamPostionLoc = GLES20.glGetAttribLocation(glProgram, "aCamPosition");
52 | glCamTextureCoordLoc = GLES20.glGetAttribLocation(glProgram, "aCamTextureCoord");
53 | glImageRectLoc = GLES20.glGetUniformLocation(glProgram, "imageRect");
54 | glImageAngelLoc = GLES20.glGetUniformLocation(glProgram, "imageAngel");
55 | 
56 | initImageTexture();
57 | }
58 | 
59 | protected void initImageTexture() {
60 | imageTextures = new ArrayList<>();
61 | ImageTexture imageTexture;
62 | for (int i = 0; i < mSize; i++) {
63 | imageTexture = new ImageTexture(outVideoWidth, outVideoHeight);
64 | imageTexture.load(mContext, mImageInfos.get(i).resId);
65 | imageTextures.add(imageTexture);
66 | }
67 | }
68 | 
69 | @Override
70 | public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textureBuffer) {
71 | GLES20.glViewport(0, 0, outVideoWidth, outVideoHeight);
72 | GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
73 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
74 | int textureId;
75 | int frameBuffer;
76 | Rect rect;
77 | ImageTexture preImageTexture = null;
78 | for (int i = 0; i < mSize; i++) {
79 | if (preImageTexture == null) {
80 | textureId = cameraTexture;
81 | } else {
82 | textureId = preImageTexture.getTextureId();
83 | }
84 | if (i == mSize - 1) {
85 | frameBuffer = targetFrameBuffer;
86 | } else {
87 | frameBuffer = imageTextures.get(i).getFrameBuffer();
88 | }
89 | rect = mImageInfos.get(i).rect;
90 | if (rect.left == rect.right || rect.top == rect.bottom) {
91 | continue;
92 | }
93 | drawImage(convertToRectF(rect), imageTextures.get(i).getImageTextureId(), textureId, frameBuffer, shapeBuffer, textureBuffer);
94 | preImageTexture = imageTextures.get(i);
95 | }
96 | GLES20.glFinish();
97 | }
98 | 
99 | protected void drawImage(RectF rectF, int imageTextureId, int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textureBuffer) {
100 | GLES20.glEnableVertexAttribArray(glCamPostionLoc);
101 | GLES20.glEnableVertexAttribArray(glCamTextureCoordLoc);
102 | shapeBuffer.position(0);
103 | GLES20.glVertexAttribPointer(glCamPostionLoc, 2,
104 | GLES20.GL_FLOAT, false,
105 | 2 * 4, shapeBuffer);
106 | textureBuffer.position(0);
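// Both attribute arrays are tightly packed (x, y) float pairs, hence the
// 2 * 4 byte stride used for this pointer and the one below.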
107 | GLES20.glVertexAttribPointer(glCamTextureCoordLoc, 2,
108 | GLES20.GL_FLOAT, false,
109 | 2 * 4, textureBuffer);
110 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer);
111 | GLES20.glUseProgram(glProgram);
112 | GLES20.glUniform4f(glImageRectLoc, rectF.left, rectF.top, rectF.right, rectF.bottom);
113 | // GLES20.glUniform1f(glImageAngelLoc, (float)(30.0f*Math.PI/180)); // used to update the overlay rotation angle
114 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
115 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture);
116 | GLES20.glUniform1i(glCamTextureLoc, 0);
117 | GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
118 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, imageTextureId);
119 | GLES20.glUniform1i(glImageTextureLoc, 1);
120 | GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer);
121 | GLES20.glDisableVertexAttribArray(glCamPostionLoc);
122 | GLES20.glDisableVertexAttribArray(glCamTextureCoordLoc);
123 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
124 | GLES20.glUseProgram(0);
125 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
126 | }
127 | 
128 | @Override
129 | public void onDestroy() {
130 | super.onDestroy();
131 | GLES20.glDeleteProgram(glProgram);
132 | destroyImageTexture();
133 | }
134 | 
135 | protected void destroyImageTexture() {
136 | for (ImageTexture imageTexture : imageTextures) {
137 | imageTexture.destroy();
138 | }
139 | }
140 | 
141 | private RectF convertToRectF(Rect iconRect) {
142 | RectF iconRectF = new RectF();
143 | iconRectF.top = iconRect.top / (float) outVideoHeight;
144 | iconRectF.bottom = iconRect.bottom / (float) outVideoHeight;
145 | iconRectF.left = iconRect.left / (float) outVideoWidth;
146 | iconRectF.right = iconRect.right / (float) outVideoWidth;
147 | return iconRectF;
148 | }
149 | 
150 | public static class ImageDrawData {
151 | public int resId = 0;
152 | public Rect rect;
153 | }
154 | }
155 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/image/ImageDrawConstants.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.filter.image;
2 | 
3 | /**
4 | * Created by ICE on 2017/11/6.
5 | */ 6 | 7 | public class ImageDrawConstants { 8 | public static final String Default_vertexShader_filter = "" + 9 | "attribute vec4 aCamPosition;\n" + 10 | "attribute vec2 aCamTextureCoord;\n" + 11 | "varying vec2 vCamTextureCoord;\n" + 12 | "void main(){\n" + 13 | " gl_Position= aCamPosition;\n" + 14 | " vCamTextureCoord = aCamTextureCoord;\n" + 15 | "}"; 16 | public static final String Default_fragmentshader_filter = "" + 17 | "precision highp float;\n" + 18 | "varying highp vec2 vCamTextureCoord;\n" + 19 | "uniform sampler2D uCamTexture;\n" + 20 | "void main(){\n" + 21 | " vec4 color = texture2D(uCamTexture, vCamTextureCoord);\n" + 22 | " gl_FragColor = color;\n" + 23 | "}"; 24 | 25 | } 26 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/image/ImageTexture.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.filter.image; 2 | 3 | import android.content.Context; 4 | import android.graphics.Bitmap; 5 | import android.opengl.GLES20; 6 | import android.support.annotation.IntegerRes; 7 | 8 | import com.icechn.videorecorder.tools.BitmapUtils; 9 | import com.icechn.videorecorder.tools.GLESTools; 10 | 11 | /** 12 | * Created by ICE on 2017/11/6. 13 | */ 14 | 15 | public class ImageTexture { 16 | private int imageTextureId; 17 | private int frameBufferTextureId; 18 | private int frameBuffer; 19 | private int imageSize[]; 20 | private int outWidth; 21 | private int outHeight; 22 | 23 | public ImageTexture(int outWidth, int outHeight) { 24 | imageSize = new int[2]; 25 | this.outWidth = outWidth; 26 | this.outHeight = outHeight; 27 | } 28 | 29 | public ImageTexture load(Context context, String filePath, boolean isAssetsFile) { 30 | if (isAssetsFile) { 31 | return loadBitmap(BitmapUtils.loadBitmapFromAssets(context, filePath)); 32 | } else { 33 | return loadBitmap(BitmapUtils.loadBitmapFromDisk(context, filePath)); 34 | } 35 | } 36 | 37 | public ImageTexture load(Context context, @IntegerRes int resId) { 38 | return loadBitmap(BitmapUtils.loadBitmapFromRaw(context, resId)); 39 | } 40 | 41 | public ImageTexture loadBitmap(Bitmap bitmap) { 42 | if (bitmap != null) { 43 | imageTextureId = GLESTools.loadTexture(bitmap, GLESTools.NO_TEXTURE); 44 | imageSize[0] = bitmap.getWidth(); 45 | imageSize[1] = bitmap.getHeight(); 46 | int[] frameBufferArr = new int[1]; 47 | int[] frameBufferTextureArr = new int[1]; 48 | GLESTools.createFrameBuff(frameBufferArr, 49 | frameBufferTextureArr, 50 | outWidth, 51 | outHeight); 52 | frameBuffer = frameBufferArr[0]; 53 | frameBufferTextureId = frameBufferTextureArr[0]; 54 | bitmap.recycle(); 55 | } 56 | return this; 57 | } 58 | 59 | public void setImageTextureId(int imageTextureId) { 60 | this.imageTextureId = imageTextureId; 61 | } 62 | 63 | public int getImageTextureId() { 64 | return imageTextureId; 65 | } 66 | public int getTextureId() { 67 | return frameBufferTextureId; 68 | } 69 | public int getFrameBuffer() { 70 | return frameBuffer; 71 | } 72 | 73 | public int getImageWidth() { 74 | return imageSize[0]; 75 | } 76 | 77 | public int getImageHeight() { 78 | return imageSize[1]; 79 | } 80 | 81 | public float getImageRatio() { 82 | return 1.0f * imageSize[0] / imageSize[1]; 83 | } 84 | 85 | public void destroy() { 86 | GLES20.glDeleteTextures(2, new int[]{imageTextureId, frameBufferTextureId}, 0); 87 | GLES20.glDeleteFramebuffers(1, new int[]{frameBuffer}, 0); 88 | } 89 | } 90 | 
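/*
 * Usage sketch (illustrative only, not a file from this repo): how a filter
 * typically uses ImageTexture's two texture roles. Assumes a current GL
 * context and an existing drawable resource, as in the demo.
 *
 *   ImageTexture overlay = new ImageTexture(outVideoWidth, outVideoHeight)
 *           .load(context, R.drawable.t);
 *   // sample the decoded image in a shader pass:
 *   GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, overlay.getImageTextureId());
 *   // or render an intermediate pass into its framebuffer and chain it,
 *   // reading overlay.getTextureId() in the next pass:
 *   GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, overlay.getFrameBuffer());
 *   // ...
 *   overlay.destroy(); // frees both textures and the framebuffer
 */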
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/softaudiofilter/BaseSoftAudioFilter.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.filter.softaudiofilter;
2 | 
3 | /**
4 | * Created by lake on 14/06/16.
5 | * Librestreaming project.
6 | */
7 | public class BaseSoftAudioFilter {
8 | protected int SIZE;
9 | protected int SIZE_HALF;
10 | 
11 | public void onInit(int size) {
12 | SIZE = size;
13 | SIZE_HALF = size/2;
14 | }
15 | 
16 | /**
17 | * Process one PCM audio frame, either in place or into targetBuff.
18 | * @param orignBuff the original PCM frame
19 | * @param targetBuff the optional output frame
20 | * @param presentationTimeMs presentation time of the frame, in milliseconds
21 | * @param sequenceNum sequence number of the frame
22 | * @return false to use orignBuff, true to use targetBuff
23 | */
24 | public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
25 | return false;
26 | }
27 | 
28 | public void onDestroy() {
29 | 
30 | }
31 | }
32 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/filter/softaudiofilter/SetVolumeAudioFilter.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.filter.softaudiofilter;
2 | 
3 | /**
4 | * Created by lake on 14/06/16.
5 | * Librestreaming project.
6 | */
7 | public class SetVolumeAudioFilter extends BaseSoftAudioFilter {
8 | private float volumeScale=1.0f;
9 | 
10 | public SetVolumeAudioFilter() {
11 | }
12 | 
13 | /**
14 | * @param scale volume scale factor, 0.0 and above (1.0 leaves the volume unchanged)
15 | */
16 | public void setVolumeScale(float scale) {
17 | volumeScale = scale;
18 | }
19 | 
20 | @Override
21 | public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
22 | // scale each 16-bit little-endian PCM sample in place
23 | for (int i = 0; i < SIZE; i += 2) {
24 | short origin = (short) (((orignBuff[i + 1] << 8) | orignBuff[i] & 0xff));
25 | origin = (short) (origin * volumeScale);
26 | orignBuff[i + 1] = (byte) (origin >> 8);
27 | orignBuff[i] = (byte) (origin);
28 | }
29 | return false;
30 | }
31 | }
32 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/model/AudioBuff.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 | 
3 | public class AudioBuff {
4 | public boolean isReadyToFill;
5 | public int audioFormat = -1;
6 | public byte[] buff;
7 | 
8 | public AudioBuff(int audioFormat, int size) {
9 | isReadyToFill = true;
10 | this.audioFormat = audioFormat;
11 | buff = new byte[size];
12 | }
13 | }
14 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/model/MediaCodecGLWapper.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 | 
3 | public class MediaCodecGLWapper extends ScreenGLWapper {
4 | }
5 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/model/MediaConfig.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 | 
3 | public class MediaConfig {
4 | 
5 | public static final int Rending_Model_OpenGLES = MediaMakerConfig.RENDERING_MODE_OPENGLES;
6 | 
7 | public static class DirectionMode {
8 | public static final int FLAG_DIRECTION_FLIP_HORIZONTAL =
MediaMakerConfig.FLAG_DIRECTION_FLIP_HORIZONTAL; 9 | public static final int FLAG_DIRECTION_FLIP_VERTICAL = MediaMakerConfig.FLAG_DIRECTION_FLIP_VERTICAL; 10 | public static final int FLAG_DIRECTION_ROATATION_0 = MediaMakerConfig.FLAG_DIRECTION_ROATATION_0; 11 | public static final int FLAG_DIRECTION_ROATATION_90 = MediaMakerConfig.FLAG_DIRECTION_ROATATION_90; 12 | public static final int FLAG_DIRECTION_ROATATION_180 = MediaMakerConfig.FLAG_DIRECTION_ROATATION_180; 13 | public static final int FLAG_DIRECTION_ROATATION_270 = MediaMakerConfig.FLAG_DIRECTION_ROATATION_270; 14 | } 15 | 16 | } 17 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/model/MediaMakerConfig.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.model; 2 | 3 | import android.util.Log; 4 | 5 | import java.lang.reflect.Field; 6 | import java.lang.reflect.Modifier; 7 | 8 | public class MediaMakerConfig { 9 | 10 | public static final int RENDERING_MODE_OPENGLES = 2; 11 | /** 12 | * same with jni 13 | */ 14 | public static final int FLAG_DIRECTION_FLIP_HORIZONTAL = 0x01; 15 | public static final int FLAG_DIRECTION_FLIP_VERTICAL = 0x02; 16 | public static final int FLAG_DIRECTION_ROATATION_0 = 0x10; 17 | public static final int FLAG_DIRECTION_ROATATION_90 = 0x20; 18 | public static final int FLAG_DIRECTION_ROATATION_180 = 0x40; 19 | public static final int FLAG_DIRECTION_ROATATION_270 = 0x80; 20 | 21 | public boolean done; 22 | public boolean printDetailMsg; 23 | public int renderingMode; 24 | public int frontCameraDirectionMode; 25 | public int backCameraDirectionMode; 26 | public boolean isPortrait; 27 | public int previewVideoWidth; 28 | public int previewVideoHeight; 29 | public int videoWidth; 30 | public int videoHeight; 31 | public int videoFPS; 32 | public int videoGOP; 33 | public float cropRatio; 34 | public int previewColorFormat; 35 | public int previewBufferSize; 36 | public int mediacodecAVCColorFormat; 37 | public int mediacdoecAVCBitRate; 38 | public int videoBufferQueueNum; 39 | public int audioBufferQueueNum; 40 | public int audioRecoderFormat; 41 | public int audioRecoderSampleRate; 42 | public int audioRecoderChannelConfig; 43 | public int audioRecoderSliceSize; 44 | public int audioRecoderSource; 45 | public int audioRecoderBufferSize; 46 | public int previewMaxFps; 47 | public int previewMinFps; 48 | public int mediacodecAVCFrameRate; 49 | public int mediacodecAVCIFrameInterval; 50 | public int mediacodecAVCProfile; 51 | public int mediacodecAVClevel; 52 | 53 | public int mediacodecAACProfile; 54 | public int mediacodecAACSampleRate; 55 | public int mediacodecAACChannelCount; 56 | public int mediacodecAACBitRate; 57 | public int mediacodecAACMaxInputSize; 58 | 59 | //face detect 60 | public boolean isFaceDetectEnable = false; 61 | public boolean isSquare = false; 62 | 63 | public boolean saveVideoEnable = false; 64 | public String saveVideoPath; 65 | 66 | public MediaMakerConfig() { 67 | done = false; 68 | printDetailMsg = false; 69 | videoWidth = -1; 70 | videoHeight = -1; 71 | videoFPS=-1; 72 | videoGOP=1; 73 | previewColorFormat = -1; 74 | mediacodecAVCColorFormat = -1; 75 | mediacdoecAVCBitRate = -1; 76 | videoBufferQueueNum = -1; 77 | audioBufferQueueNum = -1; 78 | mediacodecAVCFrameRate = -1; 79 | mediacodecAVCIFrameInterval = -1; 80 | mediacodecAVCProfile = -1; 81 | mediacodecAVClevel = -1; 82 | mediacodecAACProfile = -1; 83 | mediacodecAACSampleRate = 
-1;
84 | mediacodecAACChannelCount = -1;
85 | mediacodecAACBitRate = -1;
86 | mediacodecAACMaxInputSize = -1;
87 | }
88 | 
89 | public void dump() {
90 | Log.e("",this.toString());
91 | }
92 | 
93 | @Override
94 | public String toString() {
95 | StringBuilder sb = new StringBuilder();
96 | sb.append("ResParameter:");
97 | Field[] fields = this.getClass().getDeclaredFields();
98 | for (Field field : fields) {
99 | if (Modifier.isStatic(field.getModifiers())) {
100 | continue;
101 | }
102 | field.setAccessible(true);
103 | try {
104 | sb.append(field.getName());
105 | sb.append('=');
106 | sb.append(field.get(this));
107 | sb.append(';');
108 | } catch (IllegalAccessException e) {
109 | }
110 | }
111 | return sb.toString();
112 | }
113 | }
114 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/model/OffScreenGLWapper.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 | 
3 | import android.opengl.EGLConfig;
4 | import android.opengl.EGLContext;
5 | import android.opengl.EGLDisplay;
6 | import android.opengl.EGLSurface;
7 | 
8 | public class OffScreenGLWapper{
9 | public EGLConfig eglConfig;
10 | public EGLDisplay eglDisplay;
11 | public EGLSurface eglSurface;
12 | public EGLContext eglContext;
13 | 
14 | public int cam2dProgram;
15 | public int cam2dTextureMatrix;
16 | public int cam2dTextureLoc;
17 | public int cam2dPostionLoc;
18 | public int cam2dTextureCoordLoc;
19 | 
20 | public int camProgram;
21 | public int camTextureLoc;
22 | public int camPostionLoc;
23 | public int camTextureCoordLoc;
24 | 
25 | }
26 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/model/RecordConfig.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 | 
3 | 
4 | import android.hardware.Camera;
5 | 
6 | public class RecordConfig {
7 | private Size targetVideoSize;
8 | private int videoBufferQueueNum;
9 | private int bitRate;
10 | private int renderingMode;
11 | private int defaultCamera;
12 | private int frontCameraDirectionMode;
13 | private int backCameraDirectionMode;
14 | private int videoFPS;
15 | private int videoGOP;
16 | private boolean printDetailMsg;
17 | 
18 | 
19 | private RecordConfig() {
20 | }
21 | 
22 | public static RecordConfig obtain() {
23 | RecordConfig res = new RecordConfig();
24 | res.setRenderingMode(MediaConfig.Rending_Model_OpenGLES);
25 | res.setTargetVideoSize(new Size(640, 480));
26 | res.setVideoFPS(25);
27 | res.setVideoGOP(1);
28 | res.setVideoBufferQueueNum(5);
29 | res.setBitRate(2000000);
30 | res.setPrintDetailMsg(false);
31 | res.setDefaultCamera(Camera.CameraInfo.CAMERA_FACING_BACK);
32 | res.setBackCameraDirectionMode(MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0);
33 | res.setFrontCameraDirectionMode(MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0);
34 | return res;
35 | }
36 | 
37 | 
38 | /**
39 | * set the default camera to start streaming with
40 | */
41 | public void setDefaultCamera(int defaultCamera) {
42 | this.defaultCamera = defaultCamera;
43 | }
44 | 
45 | /**
46 | * set front camera rotation & flip
47 | * @param frontCameraDirectionMode {@link MediaConfig.DirectionMode}
48 | */
49 | public void setFrontCameraDirectionMode(int frontCameraDirectionMode) {
50 | this.frontCameraDirectionMode = frontCameraDirectionMode;
51 | }
52 | /**
53 | * set back camera rotation &
flip 54 | * @param backCameraDirectionMode {@link MediaConfig.DirectionMode} 55 | */ 56 | public void setBackCameraDirectionMode(int backCameraDirectionMode) { 57 | this.backCameraDirectionMode = backCameraDirectionMode; 58 | } 59 | 60 | /** 61 | * set renderingMode when using soft mode
62 | * not used in hard mode
63 | * @param renderingMode {@link MediaConfig#Rending_Model_OpenGLES}
64 | */
65 | public void setRenderingMode(int renderingMode) {
66 | this.renderingMode = renderingMode;
67 | }
68 | 
69 | /**
70 | * currently unused
71 | * @param printDetailMsg whether to log verbose debug messages
72 | */
73 | public void setPrintDetailMsg(boolean printDetailMsg) {
74 | this.printDetailMsg = printDetailMsg;
75 | }
76 | 
77 | /**
78 | * set the target video size.
79 | * the real video size may differ from it, depending on the device.
80 | * @param videoSize the desired output resolution
81 | */
82 | public void setTargetVideoSize(Size videoSize) {
83 | targetVideoSize = videoSize;
84 | }
85 | 
86 | /**
87 | * set video buffer number for soft mode.
88 | * a larger number gives smoother video but uses more memory.
89 | * @param num buffer queue length
90 | */
91 | public void setVideoBufferQueueNum(int num) {
92 | videoBufferQueueNum = num;
93 | }
94 | 
95 | /**
96 | * set video bitrate
97 | * @param bitRate target bitrate in bits per second
98 | */
99 | public void setBitRate(int bitRate) {
100 | this.bitRate = bitRate;
101 | }
102 | 
103 | public int getVideoFPS() {
104 | return videoFPS;
105 | }
106 | 
107 | public void setVideoFPS(int videoFPS) {
108 | this.videoFPS = videoFPS;
109 | }
110 | 
111 | public int getVideoGOP(){
112 | return videoGOP;
113 | }
114 | 
115 | public void setVideoGOP(int videoGOP){
116 | this.videoGOP = videoGOP;
117 | }
118 | 
119 | public int getVideoBufferQueueNum() {
120 | return videoBufferQueueNum;
121 | }
122 | 
123 | public int getBitRate() {
124 | return bitRate;
125 | }
126 | 
127 | public Size getTargetVideoSize() {
128 | return targetVideoSize;
129 | }
130 | 
131 | public int getDefaultCamera() {
132 | return defaultCamera;
133 | }
134 | 
135 | public int getBackCameraDirectionMode() {
136 | return backCameraDirectionMode;
137 | }
138 | 
139 | public int getFrontCameraDirectionMode() {
140 | return frontCameraDirectionMode;
141 | }
142 | 
143 | public int getRenderingMode() {
144 | return renderingMode;
145 | }
146 | 
147 | public boolean isPrintDetailMsg() {
148 | return printDetailMsg;
149 | }
150 | 
151 | private boolean square = false;
152 | public void setSquare(boolean enable) {
153 | this.square = enable;
154 | }
155 | public boolean isSquare() {
156 | return this.square;
157 | }
158 | 
159 | public boolean isSaveVideoEnable() {
160 | return true;
161 | }
162 | 
163 | private String saveVideoPath = null;
164 | public void setSaveVideoPath(String path) {
165 | this.saveVideoPath = path;
166 | }
167 | public String getSaveVideoPath() {
168 | return this.saveVideoPath;
169 | }
170 | 
171 | }
172 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/model/ScreenGLWapper.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 | 
3 | import android.opengl.EGLConfig;
4 | import android.opengl.EGLContext;
5 | import android.opengl.EGLDisplay;
6 | import android.opengl.EGLSurface;
7 | 
8 | public class ScreenGLWapper {
9 | public EGLDisplay eglDisplay;
10 | public EGLConfig eglConfig;
11 | public EGLSurface eglSurface;
12 | public EGLContext eglContext;
13 | 
14 | public int drawProgram;
15 | public int drawTextureLoc;
16 | public int drawPostionLoc;
17 | public int drawTextureCoordLoc;
18 | }
19 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/model/Size.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 | 
3 | public final class Size {
4 | /**
5 | * Create a new immutable Size instance.
6 | *
7 | * @param width The width of the size, in pixels
8 | * @param height The height of the size, in pixels
9 | */
10 | public Size(int width, int height) {
11 | mWidth = width;
12 | mHeight = height;
13 | }
14 | 
15 | /**
16 | * Get the width of the size (in pixels).
17 | *
18 | * @return width
19 | */
20 | public int getWidth() {
21 | return mWidth;
22 | }
23 | 
24 | /**
25 | * Get the height of the size (in pixels).
26 | *
27 | * @return height
28 | */
29 | public int getHeight() {
30 | return mHeight;
31 | }
32 | 
33 | /**
34 | * Check if this size is equal to another size.
35 | *

36 | * Two sizes are equal if and only if both their widths and heights are
37 | * equal.
38 | *
39 | *
40 | * A size object is never equal to any other type of object.
41 | *
42 | *
43 | * @return {@code true} if the objects were equal, {@code false} otherwise
44 | */
45 | @Override
46 | public boolean equals(final Object obj) {
47 | if (obj == null) {
48 | return false;
49 | }
50 | if (this == obj) {
51 | return true;
52 | }
53 | if (obj instanceof Size) {
54 | Size other = (Size) obj;
55 | return mWidth == other.mWidth && mHeight == other.mHeight;
56 | }
57 | return false;
58 | }
59 | 
60 | /**
61 | * Return the size represented as a string with the format {@code "WxH"}
62 | *
63 | * @return string representation of the size
64 | */
65 | @Override
66 | public String toString() {
67 | return mWidth + "x" + mHeight;
68 | }
69 | 
70 | private static NumberFormatException invalidSize(String s) {
71 | throw new NumberFormatException("Invalid Size: \"" + s + "\"");
72 | }
73 | 
74 | /**
75 | * {@inheritDoc}
76 | */
77 | @Override
78 | public int hashCode() {
79 | // assuming most sizes are <2^16, doing a rotate will give us perfect hashing
80 | return mHeight ^ ((mWidth << (Integer.SIZE / 2)) | (mWidth >>> (Integer.SIZE / 2)));
81 | }
82 | 
83 | private final int mWidth;
84 | private final int mHeight;
85 | }
86 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/tools/BitmapUtils.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.tools;
2 | 
3 | import android.content.Context;
4 | import android.graphics.Bitmap;
5 | import android.graphics.BitmapFactory;
6 | import android.graphics.ImageFormat;
7 | import android.graphics.Rect;
8 | import android.graphics.YuvImage;
9 | import android.os.Environment;
10 | import android.util.Log;
11 | 
12 | import java.io.BufferedOutputStream;
13 | import java.io.ByteArrayOutputStream;
14 | import java.io.File;
15 | import java.io.FileInputStream;
16 | import java.io.FileOutputStream;
17 | import java.io.IOException;
18 | import java.io.InputStream;
19 | 
20 | public class BitmapUtils {
21 | public static Bitmap loadBitmapFromAssets(Context context, String filePath) {
22 | InputStream inputStream = null;
23 | try {
24 | inputStream = context.getResources().getAssets().open(filePath);
25 | } catch (IOException e) {
26 | e.printStackTrace();
27 | }
28 | if (inputStream == null) return null;
29 | BitmapFactory.Options options = new BitmapFactory.Options();
30 | options.inScaled = false;
31 | Bitmap bitmap = BitmapFactory.decodeStream(inputStream, null, options);
32 | return bitmap;
33 | }
34 | public static Bitmap loadBitmapFromDisk(Context context, String filePath) {
35 | InputStream inputStream = null;
36 | try {
37 | inputStream = new FileInputStream(filePath);
38 | } catch (IOException e) {
39 | e.printStackTrace();
40 | }
41 | if (inputStream == null) return null;
42 | BitmapFactory.Options options = new BitmapFactory.Options();
43 | options.inScaled = false;
44 | Bitmap bitmap = BitmapFactory.decodeStream(inputStream, null, options);
45 | return bitmap;
46 | }
47 | 
48 | public static Bitmap loadBitmapFromRaw(Context context, int resourceId) {
49 | BitmapFactory.Options options = new BitmapFactory.Options();
50 | options.inScaled = false;
51 | Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
52 | return bitmap;
53 | }
54 | 
55 | public static void saveBitmap(byte[] buffer, int width, int height) {
56 | try {
57 | // convert the NV21 YUV frame to JPEG via image.compressToJpeg()
58 | YuvImage image = new YuvImage(buffer, ImageFormat.NV21, width,
59 | height, null);
60 | if (image != null) {
61 | ByteArrayOutputStream stream = new ByteArrayOutputStream();
62 | image.compressToJpeg(new Rect(0, 0, width, height), 80, stream);
63 | Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
64 | saveBitmap(bmp);
65 | }
66 | } catch (Exception ex) {
67 | Log.e("Sys", "Error:" + ex.getMessage());
68 | }
69 | }
70 | 
71 | // save the bitmap as a JPEG file on external storage
72 | private static void saveBitmap(Bitmap b){
73 | String path = Environment.getExternalStorageDirectory()+ "/Omoshiroi/photo/";
74 | File folder=new File(path);
75 | if(!folder.exists()&&!folder.mkdirs()){
76 | Log.i("SaveBitmap", "save pic fail");
77 | return;
78 | }
79 | long dataTake = System.currentTimeMillis();
80 | final String jpegName=path+ dataTake +".jpg";
81 | try {
82 | FileOutputStream fout = new FileOutputStream(jpegName);
83 | BufferedOutputStream bos = new BufferedOutputStream(fout);
84 | b.compress(Bitmap.CompressFormat.JPEG, 100, bos);
85 | bos.flush();
86 | bos.close();
87 | } catch (IOException e) {
88 | e.printStackTrace();
89 | }
90 | Log.i("SaveBitmap", "save pic success:"+jpegName);
91 | 
92 | }
93 | }
94 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/tools/ByteArrayTools.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.tools;
2 | 
3 | public class ByteArrayTools {
4 | public static void intToByteArrayFull(byte[] dst, int pos, int integer) {
5 | dst[pos] = (byte) ((integer >> 24) & 0xFF);
6 | dst[pos + 1] = (byte) ((integer >> 16) & 0xFF);
7 | dst[pos + 2] = (byte) ((integer >> 8) & 0xFF);
8 | dst[pos + 3] = (byte) ((integer) & 0xFF);
9 | }
10 | 
11 | public static void intToByteArrayTwoByte(byte[] dst, int pos, int integer) {
12 | dst[pos] = (byte) ((integer >> 8) & 0xFF);
13 | dst[pos + 1] = (byte) ((integer) & 0xFF);
14 | }
15 | }
16 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/tools/GLESTools.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.tools;
2 | 
3 | import android.content.res.Resources;
4 | import android.graphics.Bitmap;
5 | import android.opengl.GLES20;
6 | import android.opengl.GLUtils;
7 | import android.util.Log;
8 | 
9 | import java.io.BufferedReader;
10 | import java.io.InputStream;
11 | import java.io.InputStreamReader;
12 | 
13 | public class GLESTools {
14 | public static int FLOAT_SIZE_BYTES = 4;
15 | public static int SHORT_SIZE_BYTES = 2;
16 | 
17 | public static String readTextFile(Resources res, int resId) {
18 | InputStream inputStream = res.openRawResource(resId);
19 | BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
20 | String line;
21 | StringBuilder result = new StringBuilder();
22 | try {
23 | while ((line = br.readLine()) != null) {
24 | result.append(line);
25 | result.append("\n");
26 | }
27 | } catch (Exception e) {
28 | e.printStackTrace();
29 | return null;
30 | }
31 | return result.toString();
32 | }
33 | 
34 | public static int createProgram(Resources res, int vertexShaderResId, int fragmentShaderResId) {
35 | String vertexShaderCode = readTextFile(res, vertexShaderResId);
36 | String fragmentShaderCode = readTextFile(res, fragmentShaderResId);
37 | return
createProgram(vertexShaderCode, fragmentShaderCode);
38 | }
39 | 
40 | public static int createProgram(String vertexShaderCode, String fragmentShaderCode) {
41 | if (vertexShaderCode == null || fragmentShaderCode == null) {
42 | throw new RuntimeException("invalid shader code");
43 | }
44 | int vertexShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
45 | int fragmentShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
46 | 
47 | GLES20.glShaderSource(vertexShader, vertexShaderCode);
48 | GLES20.glShaderSource(fragmentShader, fragmentShaderCode);
49 | int[] status = new int[1];
50 | GLES20.glCompileShader(vertexShader);
51 | GLES20.glGetShaderiv(vertexShader, GLES20.GL_COMPILE_STATUS, status, 0);
52 | if (GLES20.GL_FALSE == status[0]) {
53 | throw new RuntimeException("vertex shader compile failed: " + GLES20.glGetShaderInfoLog(vertexShader));
54 | }
55 | GLES20.glCompileShader(fragmentShader);
56 | GLES20.glGetShaderiv(fragmentShader, GLES20.GL_COMPILE_STATUS, status, 0);
57 | if (GLES20.GL_FALSE == status[0]) {
58 | throw new RuntimeException("fragment shader compile failed: " + GLES20.glGetShaderInfoLog(fragmentShader));
59 | }
60 | int program = GLES20.glCreateProgram();
61 | GLES20.glAttachShader(program, vertexShader);
62 | GLES20.glAttachShader(program, fragmentShader);
63 | GLES20.glLinkProgram(program);
64 | GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
65 | if (GLES20.GL_FALSE == status[0]) {
66 | throw new RuntimeException("link program failed: " + GLES20.glGetProgramInfoLog(program));
67 | }
68 | return program;
69 | }
70 | 
71 | public static void checkGlError(String op) {
72 | int error = GLES20.glGetError();
73 | if (error != GLES20.GL_NO_ERROR) {
74 | String msg = op + ": glError 0x" + Integer.toHexString(error);
75 | Log.d("",msg);
76 | throw new RuntimeException(msg);
77 | }
78 | }
79 | 
80 | public static final int NO_TEXTURE = -1;
81 | 
82 | public static int loadTexture(final Bitmap image, final int reUseTexture) {
83 | int[] texture = new int[1];
84 | if (reUseTexture == NO_TEXTURE) {
85 | GLES20.glGenTextures(1, texture, 0);
86 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture[0]);
87 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
88 | GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
89 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
90 | GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
91 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
92 | GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
93 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
94 | GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
95 | GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, image, 0);
96 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
97 | } else {
98 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, reUseTexture);
99 | GLUtils.texSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, image);
100 | texture[0] = reUseTexture;
101 | }
102 | return texture[0];
103 | }
104 | 
105 | public static void createFrameBuff(int[] frameBuffer, int[] frameBufferTex, int width, int height) {
106 | GLES20.glGenFramebuffers(1, frameBuffer, 0);
107 | GLES20.glGenTextures(1, frameBufferTex, 0);
108 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameBufferTex[0]);
109 | GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
110 | GLESTools.checkGlError("createCamFrameBuff");
111 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
112 | GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
113 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
114 | GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
115 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
116 | GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
117 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
118 | GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
119 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer[0]);
120 | GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, frameBufferTex[0], 0);
121 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
122 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
123 | GLESTools.checkGlError("createCamFrameBuff");
124 | }
125 | 
126 | // load a text file from assets by path
127 | public static String uRes(Resources mRes, String path) {
128 | StringBuilder result = new StringBuilder();
129 | try {
130 | InputStream is = mRes.getAssets().open(path);
131 | int ch;
132 | byte[] buffer = new byte[1024];
133 | while (-1 != (ch = is.read(buffer))) {
134 | result.append(new String(buffer, 0, ch));
135 | }
136 | } catch (Exception e) {
137 | return null;
138 | }
139 | return result.toString().replaceAll("\\r\\n", "\n");
140 | }
141 | 
142 | }
143 | 
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/tools/TimeHandler.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.tools;
2 | 
3 | import android.os.Handler;
4 | import android.os.Looper;
5 | import android.os.Message;
6 | 
7 | public class TimeHandler
8 | extends Handler {
9 | public static final int WHAT_233=0;
10 | private long delayTimeInMils;
11 | private boolean freeNow;
12 | private Task task;
13 | private boolean shouldContinue;
14 | 
15 | public TimeHandler(Looper looper, Task task) {
16 | super(looper);
17 | this.task = task;
18 | freeNow=true;
19 | shouldContinue=false;
20 | }
21 | 
22 | public void clearMsg() {
23 | while (hasMessages(WHAT_233)) {
24 | removeMessages(WHAT_233);
25 | }
26 | shouldContinue = false;
27 | freeNow = true;
28 | }
29 | 
30 | public void sendSingleMsg(long timeDelayed) {
31 | clearMsg();
32 | freeNow = false;
33 | shouldContinue = false;
34 | sendEmptyMessageDelayed(WHAT_233, timeDelayed);
35 | }
36 | 
37 | public void sendLoopMsg(long timeDelayed, long timeDelayedInLoop) {
38 | clearMsg();
39 | freeNow = false;
40 | delayTimeInMils = timeDelayedInLoop;
41 | shouldContinue = true;
42 | sendEmptyMessageDelayed(WHAT_233, timeDelayed);
43 | }
44 | 
45 | public void handleMessage(Message paramMessage) {
46 | if (task != null) {
47 | task.run();
48 | }
49 | if (shouldContinue) {
50 | sendEmptyMessageDelayed(WHAT_233, delayTimeInMils);
51 | }
52 | }
53 | 
54 | public boolean isFreeNow() {
55 | return freeNow;
56 | }
57 | 
58 | public interface Task {
59 | void run();
60 | }
61 | }
-------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/tools/VideoSplicer.java: --------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.tools;
2 | 
3 | import android.media.MediaCodec;
4 | import android.media.MediaCodec.BufferInfo;
5 | import android.media.MediaExtractor;
6 | import android.media.MediaFormat;
7 | import android.media.MediaMuxer;
8 | import android.util.Log;
9 | 
10 | import java.io.IOException;
11 | import java.nio.ByteBuffer;
12 | import java.util.ArrayList;
13 | import java.util.Iterator;
14 | 
15 | /**
16 | * Created by ICE on 2018/2/6.
17 | */
18 | 
19 | //@TargetApi(18)
20 | public class VideoSplicer {
21 | 
22 | private final String TAG = "VideoSplicer";
23 | private ArrayList<String> mVideoList;
24 | private String mOutFilename;
25 | 
26 | private MediaMuxer mMuxer;
27 | private ByteBuffer mReadBuf;
28 | private int mOutAudioTrackIndex;
29 | private int mOutVideoTrackIndex;
30 | private MediaFormat mAudioFormat;
31 | private MediaFormat mVideoFormat;
32 | 
33 | public VideoSplicer(ArrayList<String> videoList, String outFilename) {
34 | mVideoList = videoList;
35 | this.mOutFilename = outFilename;
36 | mReadBuf = ByteBuffer.allocate(1048576);
37 | }
38 | 
39 | public boolean joinVideo() {
40 | boolean getAudioFormat = false;
41 | boolean getVideoFormat = false;
42 | Iterator<String> videoIterator = mVideoList.iterator();
43 | 
44 | //--------step 1: read the track formats with MediaExtractor; MediaMuxer needs them to create the output file
45 | while (videoIterator.hasNext()) {
46 | String videoPath = videoIterator.next();
47 | MediaExtractor extractor = new MediaExtractor();
48 | 
49 | try {
50 | extractor.setDataSource(videoPath);
51 | } catch (Exception ex) {
52 | ex.printStackTrace();
53 | }
54 | 
55 | int trackIndex;
56 | if (!getVideoFormat) {
57 | trackIndex = this.selectTrack(extractor, "video/");
58 | if (trackIndex < 0) {
59 | Log.e(TAG, "No video track found in " + videoPath);
60 | } else {
61 | extractor.selectTrack(trackIndex);
62 | mVideoFormat = extractor.getTrackFormat(trackIndex);
63 | getVideoFormat = true;
64 | }
65 | }
66 | 
67 | if (!getAudioFormat) {
68 | trackIndex = this.selectTrack(extractor, "audio/");
69 | if (trackIndex < 0) {
70 | Log.e(TAG, "No audio track found in " + videoPath);
71 | } else {
72 | extractor.selectTrack(trackIndex);
73 | mAudioFormat = extractor.getTrackFormat(trackIndex);
74 | getAudioFormat = true;
75 | }
76 | }
77 | 
78 | extractor.release();
79 | if (getVideoFormat && getAudioFormat) {
80 | break;
81 | }
82 | }
83 | 
84 | try {
85 | mMuxer = new MediaMuxer(this.mOutFilename, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
86 | } catch (IOException e) {
87 | e.printStackTrace();
88 | }
89 | if (getVideoFormat) {
90 | mOutVideoTrackIndex = mMuxer.addTrack(mVideoFormat);
91 | }
92 | if (getAudioFormat) {
93 | mOutAudioTrackIndex = mMuxer.addTrack(mAudioFormat);
94 | }
95 | mMuxer.start();
96 | //--------step 1 end---------------------------//
97 | 
98 | 
99 | //--------step 2: walk the files; MediaExtractor reads each sample, MediaMuxer writes it, and the sample info is recorded
100 | long ptsOffset = 0L;
101 | Iterator<String> pathIterator = mVideoList.iterator();
102 | while (pathIterator.hasNext()) {
103 | String videoPath = pathIterator.next();
104 | boolean hasVideo = true;
105 | boolean hasAudio = true;
106 | MediaExtractor videoExtractor = new MediaExtractor();
107 | 
108 | try {
109 | videoExtractor.setDataSource(videoPath);
110 | } catch (Exception e) {
111 | e.printStackTrace();
112 | }
113 | 
114 | int inVideoTrackIndex = this.selectTrack(videoExtractor, "video/");
115 | if (inVideoTrackIndex < 0) {
116 | hasVideo = false;
117 | }
118 | 
119 | videoExtractor.selectTrack(inVideoTrackIndex);
120 | MediaExtractor audioExtractor = new MediaExtractor();
121 | 
122 | try {
123 | audioExtractor.setDataSource(videoPath);
124 | } catch (Exception e) {
125 | e.printStackTrace();
126 | }
127 | 
128 | int inAudioTrackIndex = this.selectTrack(audioExtractor, "audio/");
129 | if (inAudioTrackIndex < 0) {
130 | hasAudio = false;
131 | }
132 | 
133 | audioExtractor.selectTrack(inAudioTrackIndex);
134 | boolean bMediaDone = false;
135 | long presentationTimeUs = 0L;
136 | long audioPts = 0L;
137 | long videoPts = 0L;
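/*
 * Interleaving strategy for the copy loop below: audio samples are written
 * until the audio pts runs more than 50 ms (50000 us) ahead of the video
 * pts, then one video sample is written, so the muxed tracks stay roughly
 * in presentation order.
 */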
138 | 
139 | while (!bMediaDone) {
140 | if (!hasVideo && !hasAudio) {
141 | break;
142 | }
143 | 
144 | int outTrackIndex;
145 | MediaExtractor extractor;
146 | int currenttrackIndex;
147 | if ((!hasVideo || audioPts - videoPts <= 50000L) && hasAudio) {
148 | currenttrackIndex = inAudioTrackIndex;
149 | outTrackIndex = mOutAudioTrackIndex;
150 | extractor = audioExtractor;
151 | } else {
152 | currenttrackIndex = inVideoTrackIndex;
153 | outTrackIndex = mOutVideoTrackIndex;
154 | extractor = videoExtractor;
155 | }
156 | 
157 | mReadBuf.rewind();
158 | int chunkSize = extractor.readSampleData(mReadBuf, 0); // read one sample
159 | if (chunkSize < 0) {
160 | if (currenttrackIndex == inVideoTrackIndex) {
161 | hasVideo = false;
162 | } else if (currenttrackIndex == inAudioTrackIndex) {
163 | hasAudio = false;
164 | }
165 | } else {
166 | if (extractor.getSampleTrackIndex() != currenttrackIndex) {
167 | Log.e(TAG, "WEIRD: got sample from track " + extractor.getSampleTrackIndex() + ", expected " + currenttrackIndex);
168 | }
169 | 
170 | presentationTimeUs = extractor.getSampleTime(); // pts of this sample
171 | if (currenttrackIndex == inVideoTrackIndex) {
172 | videoPts = presentationTimeUs;
173 | } else {
174 | audioPts = presentationTimeUs;
175 | }
176 | 
177 | BufferInfo info = new BufferInfo();
178 | info.offset = 0;
179 | info.size = chunkSize;
180 | info.presentationTimeUs = ptsOffset + presentationTimeUs; // rebase the pts
181 | if ((extractor.getSampleFlags() & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
182 | info.flags = MediaCodec.BUFFER_FLAG_KEY_FRAME;
183 | }
184 | 
185 | mReadBuf.rewind();
186 | Log.i(TAG, String.format("write sample track %d, size %d, pts %d flag %d", outTrackIndex, info.size, info.presentationTimeUs, info.flags));
187 | mMuxer.writeSampleData(outTrackIndex, mReadBuf, info); // write into the output file
188 | extractor.advance();
189 | }
190 | }
191 | 
192 | // remember the last pts of this file as the pts offset for the next one
193 | ptsOffset += videoPts > audioPts ? videoPts : audioPts;
194 | ptsOffset += 10000L; // leave 10 ms between the last frame of one file and the first of the next; an estimate, but good enough
195 | 
196 | Log.i(TAG, "finish one file, ptsOffset " + ptsOffset);
197 | 
198 | videoExtractor.release();
199 | audioExtractor.release();
200 | }
201 | 
202 | if (mMuxer != null) {
203 | try {
204 | mMuxer.stop();
205 | mMuxer.release();
206 | } catch (Exception e) {
207 | Log.e(TAG, "Muxer close error.
No data was written"); 208 | } 209 | 210 | mMuxer = null; 211 | } 212 | 213 | Log.i(TAG, "video join finished"); 214 | return true; 215 | } 216 | 217 | private int selectTrack(MediaExtractor extractor, String mimePrefix) { 218 | int numTracks = extractor.getTrackCount(); 219 | 220 | for (int i = 0; i < numTracks; ++i) { 221 | MediaFormat format = extractor.getTrackFormat(i); 222 | String mime = format.getString("mime"); 223 | if (mime.startsWith(mimePrefix)) { 224 | return i; 225 | } 226 | } 227 | 228 | return -1; 229 | } 230 | } 231 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/ui/AspectTextureView.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.ui; 2 | 3 | import android.content.Context; 4 | import android.util.AttributeSet; 5 | import android.view.TextureView; 6 | import android.view.View; 7 | 8 | public class AspectTextureView extends TextureView { 9 | public static final int MODE_FITXY = 0; 10 | public static final int MODE_INSIDE = 1; 11 | public static final int MODE_OUTSIDE = 2; 12 | private double targetAspect = -1; 13 | private int aspectMode = MODE_OUTSIDE; 14 | 15 | public AspectTextureView(Context context) { 16 | super(context); 17 | } 18 | 19 | public AspectTextureView(Context context, AttributeSet attrs) { 20 | super(context, attrs); 21 | } 22 | 23 | public AspectTextureView(Context context, AttributeSet attrs, int defStyleAttr) { 24 | super(context, attrs, defStyleAttr); 25 | } 26 | 27 | /** 28 | * @param mode {@link #MODE_FITXY},{@link #MODE_INSIDE},{@link #MODE_OUTSIDE} 29 | * @param aspectRatio width/height 30 | */ 31 | public void setAspectRatio(int mode, double aspectRatio) { 32 | if (mode != MODE_INSIDE && mode != MODE_OUTSIDE && mode != MODE_FITXY) { 33 | throw new IllegalArgumentException("illegal mode"); 34 | } 35 | if (aspectRatio < 0) { 36 | throw new IllegalArgumentException("illegal aspect ratio"); 37 | } 38 | if (targetAspect != aspectRatio || aspectMode != mode) { 39 | targetAspect = aspectRatio; 40 | aspectMode = mode; 41 | requestLayout(); 42 | } 43 | } 44 | 45 | @Override 46 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { 47 | if (targetAspect > 0) { 48 | int initialWidth = MeasureSpec.getSize(widthMeasureSpec); 49 | int initialHeight = MeasureSpec.getSize(heightMeasureSpec); 50 | 51 | double viewAspectRatio = (double) initialWidth / initialHeight; 52 | double aspectDiff = targetAspect / viewAspectRatio - 1; 53 | 54 | if (Math.abs(aspectDiff) > 0.01 && aspectMode != MODE_FITXY) { 55 | if (aspectMode == MODE_INSIDE) { 56 | if (aspectDiff > 0) { 57 | initialHeight = (int) (initialWidth / targetAspect); 58 | } else { 59 | initialWidth = (int) (initialHeight * targetAspect); 60 | } 61 | } else if (aspectMode == MODE_OUTSIDE) { 62 | if (aspectDiff > 0) { 63 | initialWidth = (int) (initialHeight * targetAspect); 64 | } else { 65 | initialHeight = (int) (initialWidth / targetAspect); 66 | } 67 | } 68 | widthMeasureSpec = MeasureSpec.makeMeasureSpec(initialWidth, MeasureSpec.EXACTLY); 69 | heightMeasureSpec = MeasureSpec.makeMeasureSpec(initialHeight, MeasureSpec.EXACTLY); 70 | } 71 | } 72 | super.onMeasure(widthMeasureSpec, heightMeasureSpec); 73 | } 74 | 75 | @Override 76 | public void layout(int l, int t, int r, int b) { 77 | View p = (View) getParent(); 78 | if (p != null) { 79 | int pw = p.getMeasuredWidth(); 80 | int ph = p.getMeasuredHeight(); 81 | int w = getMeasuredWidth(); 
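// Center this view inside its parent: recompute the top/left offsets from
// the measured sizes, then shift right/bottom by the same offsets.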
82 | int h = getMeasuredHeight(); 83 | t = (ph - h) / 2; 84 | l = (pw - w) / 2; 85 | r += l; 86 | b += t; 87 | } 88 | super.layout(l, t, r, b); 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /app/src/main/java/com/icechn/videorecorder/ui/RecordingActivity.java: -------------------------------------------------------------------------------- 1 | package com.icechn.videorecorder.ui; 2 | 3 | import android.content.Intent; 4 | import android.content.res.Configuration; 5 | import android.graphics.Rect; 6 | import android.graphics.SurfaceTexture; 7 | import android.hardware.Camera; 8 | import android.os.Bundle; 9 | import android.os.Environment; 10 | import android.os.Handler; 11 | import android.support.v7.app.AppCompatActivity; 12 | import android.util.Log; 13 | import android.view.TextureView; 14 | import android.view.View; 15 | import android.widget.Button; 16 | import android.widget.Toast; 17 | 18 | import com.icechn.videorecorder.R; 19 | import com.icechn.videorecorder.client.RecorderClient; 20 | import com.icechn.videorecorder.core.listener.IVideoChange; 21 | import com.icechn.videorecorder.filter.image.DrawMultiImageFilter; 22 | import com.icechn.videorecorder.filter.image.DrawMultiImageFilter.ImageDrawData; 23 | import com.icechn.videorecorder.filter.softaudiofilter.SetVolumeAudioFilter; 24 | import com.icechn.videorecorder.model.MediaConfig; 25 | import com.icechn.videorecorder.model.RecordConfig; 26 | import com.icechn.videorecorder.model.Size; 27 | 28 | import java.util.ArrayList; 29 | 30 | 31 | public class RecordingActivity extends AppCompatActivity implements 32 | TextureView.SurfaceTextureListener, View.OnClickListener, IVideoChange { 33 | public static final String IS_SQUARE = "is_square"; 34 | protected RecorderClient mRecorderClient; 35 | protected AspectTextureView mTextureView; 36 | protected Handler mainHander; 37 | protected Button btn_toggle; 38 | protected boolean started; 39 | protected String mSaveVideoPath = null; 40 | protected boolean mIsSquare = false; 41 | RecordConfig recordConfig; 42 | 43 | @Override 44 | protected void onCreate(Bundle savedInstanceState) { 45 | Intent i = getIntent(); 46 | mIsSquare = i.getBooleanExtra(IS_SQUARE, false); 47 | mSaveVideoPath = Environment.getExternalStorageDirectory().getPath() + "/live_save_video" + System.currentTimeMillis() + ".mp4"; 48 | started = false; 49 | super.onCreate(savedInstanceState); 50 | setContentView(R.layout.activity_streaming); 51 | mTextureView = (AspectTextureView) findViewById(R.id.preview_textureview); 52 | mTextureView.setKeepScreenOn(true); 53 | mTextureView.setSurfaceTextureListener(this); 54 | 55 | btn_toggle = (Button) findViewById(R.id.btn_toggle); 56 | btn_toggle.setOnClickListener(this); 57 | 58 | findViewById(R.id.btn_swap).setOnClickListener(this); 59 | findViewById(R.id.btn_flash).setOnClickListener(this); 60 | 61 | prepareStreamingClient(); 62 | onSetFilters(); 63 | } 64 | 65 | @Override 66 | protected void onResume() { 67 | super.onResume(); 68 | } 69 | 70 | @Override 71 | protected void onPause() { 72 | super.onPause(); 73 | } 74 | 75 | @Override 76 | protected void onDestroy() { 77 | if (mainHander != null) { 78 | mainHander.removeCallbacksAndMessages(null); 79 | } 80 | if (started) { 81 | mRecorderClient.stopRecording(); 82 | } 83 | if (mRecorderClient != null) { 84 | mRecorderClient.destroy(); 85 | } 86 | super.onDestroy(); 87 | } 88 | 89 | private void prepareStreamingClient() { 90 | mRecorderClient = new RecorderClient(); 91 | 92 | 
recordConfig = RecordConfig.obtain(); 93 | if (mIsSquare) { 94 | recordConfig.setTargetVideoSize(new Size(480, 480)); 95 | } else { 96 | recordConfig.setTargetVideoSize(new Size(640, 480)); 97 | } 98 | recordConfig.setSquare(true); 99 | recordConfig.setBitRate(750 * 1024); 100 | recordConfig.setVideoFPS(20); 101 | recordConfig.setVideoGOP(1); 102 | recordConfig.setRenderingMode(MediaConfig.Rending_Model_OpenGLES); 103 | //camera 104 | recordConfig.setDefaultCamera(Camera.CameraInfo.CAMERA_FACING_FRONT); 105 | int frontDirection, backDirection; 106 | Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); 107 | Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_FRONT, cameraInfo); 108 | frontDirection = cameraInfo.orientation; 109 | Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_BACK, cameraInfo); 110 | backDirection = cameraInfo.orientation; 111 | if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) { 112 | recordConfig.setFrontCameraDirectionMode((frontDirection == 90 ? MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_270 : MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_90) | MediaConfig.DirectionMode.FLAG_DIRECTION_FLIP_HORIZONTAL); 113 | recordConfig.setBackCameraDirectionMode((backDirection == 90 ? MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_90 : MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_270)); 114 | } else { 115 | recordConfig.setBackCameraDirectionMode((backDirection == 90 ? MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0 : MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_180)); 116 | recordConfig.setFrontCameraDirectionMode((frontDirection == 90 ? MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_180 : MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0) | MediaConfig.DirectionMode.FLAG_DIRECTION_FLIP_HORIZONTAL); 117 | } 118 | //save video 119 | recordConfig.setSaveVideoPath(mSaveVideoPath); 120 | 121 | if (!mRecorderClient.prepare(this, recordConfig)) { 122 | mRecorderClient = null; 123 | Log.e("RecordingActivity", "prepare,failed!!"); 124 | Toast.makeText(this, "StreamingClient prepare failed", Toast.LENGTH_LONG).show(); 125 | finish(); 126 | return; 127 | } 128 | 129 | //resize textureview 130 | Size s = mRecorderClient.getVideoSize(); 131 | mTextureView.setAspectRatio(AspectTextureView.MODE_INSIDE, ((double) s.getWidth()) / s.getHeight()); 132 | 133 | mRecorderClient.setVideoChangeListener(this); 134 | 135 | mRecorderClient.setSoftAudioFilter(new SetVolumeAudioFilter()); 136 | } 137 | 138 | protected void onSetFilters() { 139 | ArrayList infos = new ArrayList<>(); 140 | ImageDrawData data = new ImageDrawData(); 141 | data.resId = R.drawable.t; 142 | data.rect = new Rect(100, 100, 238, 151); 143 | infos.add(data); 144 | mRecorderClient.setHardVideoFilter(new DrawMultiImageFilter(this, infos)); 145 | } 146 | 147 | @Override 148 | public void onVideoSizeChanged(int width, int height) { 149 | mTextureView.setAspectRatio(AspectTextureView.MODE_INSIDE, ((double) width) / height); 150 | } 151 | 152 | @Override 153 | public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { 154 | if (mRecorderClient != null) { 155 | mRecorderClient.startPreview(surface, width, height); 156 | } 157 | } 158 | 159 | @Override 160 | public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { 161 | if (mRecorderClient != null) { 162 | mRecorderClient.updatePreview(width, height); 163 | } 164 | } 165 | 166 | @Override 167 | public boolean 
onSurfaceTextureDestroyed(SurfaceTexture surface) { 168 | if (mRecorderClient != null) { 169 | mRecorderClient.stopPreview(true); 170 | } 171 | return false; 172 | } 173 | 174 | @Override 175 | public void onSurfaceTextureUpdated(SurfaceTexture surface) { 176 | 177 | } 178 | 179 | @Override 180 | public void onClick(View v) { 181 | switch (v.getId()) { 182 | case R.id.btn_toggle: 183 | if (!started) { 184 | btn_toggle.setText("stop"); 185 | mRecorderClient.startRecording(); 186 | } else { 187 | btn_toggle.setText("start"); 188 | mRecorderClient.stopRecording(); 189 | Toast.makeText(RecordingActivity.this, "视频文件已保存至"+ mSaveVideoPath, Toast.LENGTH_SHORT).show(); 190 | } 191 | started = !started; 192 | break; 193 | case R.id.btn_swap: 194 | mRecorderClient.swapCamera(); 195 | findViewById(R.id.btn_flash).setVisibility(mRecorderClient.isFrontCamera() ? View.GONE : View.VISIBLE); 196 | break; 197 | case R.id.btn_flash: 198 | mRecorderClient.toggleFlashLight(); 199 | break; 200 | } 201 | } 202 | 203 | 204 | } 205 | -------------------------------------------------------------------------------- /app/src/main/res/drawable-xhdpi/nose_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/drawable-xhdpi/nose_0.png -------------------------------------------------------------------------------- /app/src/main/res/drawable-xhdpi/t.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/drawable-xhdpi/t.png -------------------------------------------------------------------------------- /app/src/main/res/drawable-xhdpi/teeth_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/drawable-xhdpi/teeth_0.png -------------------------------------------------------------------------------- /app/src/main/res/layout/activity_main.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | 7 | 13 | 18 |