├── .gitignore ├── .idea └── vcs.xml ├── LICENSE ├── README.md ├── build.gradle ├── demo ├── build.gradle └── src │ ├── androidTest │ └── java │ │ └── us │ │ └── pinguo │ │ └── svideoDemo │ │ ├── ApplicationTest.java │ │ ├── SurfaceTest.java │ │ ├── Test.java │ │ └── TimeStapTest.java │ ├── main │ ├── AndroidManifest.xml │ ├── java │ │ └── us │ │ │ └── pinguo │ │ │ └── svideoDemo │ │ │ ├── MainActivity.java │ │ │ ├── MyApplication.java │ │ │ ├── PreviewActivity.java │ │ │ ├── PreviewActivity2.java │ │ │ ├── SegYuvRecordActivity.java │ │ │ ├── TextureRecordActivity.java │ │ │ ├── YuvRecordActivity.java │ │ │ ├── mvp │ │ │ ├── Presenter.java │ │ │ └── ViewController.java │ │ │ ├── record │ │ │ ├── CameraPresenter.java │ │ │ └── IRecordView.java │ │ │ ├── texturerecord │ │ │ ├── RecordHelper.java │ │ │ ├── RenderThread.java │ │ │ ├── RenderThreadHandler.java │ │ │ └── gles │ │ │ │ ├── EglCore.java │ │ │ │ ├── EglSurface.java │ │ │ │ ├── GLRendering.java │ │ │ │ └── glUtils.java │ │ │ └── ui │ │ │ ├── BottomMenuView.java │ │ │ ├── BottomSegMenuView.java │ │ │ ├── IBottomMenuView.java │ │ │ ├── SVideoTouchController.java │ │ │ ├── SegProgressBar.java │ │ │ └── VideoProgressLayout.java │ └── res │ │ ├── drawable │ │ ├── ic_check_white_24dp.xml │ │ ├── ic_delete.xml │ │ ├── ic_movie_filter.xml │ │ ├── ic_switch_camera.xml │ │ ├── shutter_drawable.xml │ │ └── svideo_progress_drawable.xml │ │ ├── layout │ │ ├── activity_main.xml │ │ ├── activity_preview.xml │ │ ├── activity_segyuvrecord.xml │ │ ├── activity_texture_record.xml │ │ ├── activity_yuvrecord.xml │ │ ├── layout_bottom.xml │ │ └── layout_seg_bottom.xml │ │ ├── mipmap-hdpi │ │ └── ic_launcher.png │ │ ├── mipmap-mdpi │ │ └── ic_launcher.png │ │ ├── mipmap-xhdpi │ │ └── ic_launcher.png │ │ ├── mipmap-xxhdpi │ │ └── ic_launcher.png │ │ ├── mipmap-xxxhdpi │ │ └── ic_launcher.png │ │ ├── values-w820dp │ │ └── dimens.xml │ │ └── values │ │ ├── colors.xml │ │ ├── dimens.xml │ │ ├── strings.xml │ │ └── styles.xml │ └── test │ └── java │ └── us │ └── pinguo │ └── svideo │ └── ExampleUnitTest.java ├── gradle.properties ├── library ├── CMakeLists.txt ├── build.gradle └── src │ ├── androidTest │ └── java │ │ └── us │ │ └── pinguo │ │ └── svideo │ │ ├── ApplicationTest.java │ │ └── VideoAdapterTest.java │ ├── main │ ├── AndroidManifest.xml │ ├── cpp │ │ └── NSVUtil.cpp │ ├── java │ │ └── us │ │ │ └── pinguo │ │ │ └── svideo │ │ │ ├── bean │ │ │ └── VideoInfo.java │ │ │ ├── encoder │ │ │ ├── MediaAudioEncoder.java │ │ │ ├── MediaAudioEncoderApii16.java │ │ │ ├── MediaEncoder.java │ │ │ ├── MediaEncoderApi16.java │ │ │ ├── OnRecordProgressListener.java │ │ │ ├── VideoEncoderApi16.java │ │ │ ├── VideoEncoderApi21.java │ │ │ ├── VideoEncoderApi21Async.java │ │ │ ├── VideoEncoderFromBuffer.java │ │ │ ├── VideoMediaEncoderApi21Thread.java │ │ │ ├── VideoMediaEncoderThread.java │ │ │ ├── VideoMediaEncoderThreadApi16.java │ │ │ ├── VideoSurfaceEncoder.java │ │ │ ├── VideoSurfaceEncoderApi21.java │ │ │ ├── VideoSurfaceEncoderAsyncApi21.java │ │ │ └── VideoSurfaceEncoderController.java │ │ │ ├── interfaces │ │ │ ├── ICameraProxyForRecord.java │ │ │ ├── IReporter.java │ │ │ ├── ISVideoRecorder.java │ │ │ ├── IVideoPathGenerator.java │ │ │ ├── OnRecordListener.java │ │ │ ├── OnSurfaceCreatedCallback.java │ │ │ ├── PreviewDataCallback.java │ │ │ ├── PreviewSurfaceListener.java │ │ │ ├── SimpleRecordListener.java │ │ │ └── SurfaceCreatedCallback.java │ │ │ ├── recorder │ │ │ ├── OnRecordFailListener.java │ │ │ ├── RecordCancelException.java │ │ │ ├── 
RecordFailException.java │ │ │ ├── SAbsVideoRecorder.java │ │ │ ├── SMediaCodecRecorder.java │ │ │ ├── SSegmentRecorder.java │ │ │ ├── SSurfaceRecorder.java │ │ │ └── SVideoMediaRecorder.java │ │ │ └── utils │ │ │ ├── DateVideoNameGenerator.java │ │ │ ├── NSVUtil.java │ │ │ ├── RL.java │ │ │ ├── RecordSemaphore.java │ │ │ ├── SVideoUtil.java │ │ │ ├── SegVideoNameGenerator.java │ │ │ ├── TimeOutThread.java │ │ │ └── gles │ │ │ ├── Drawable2d.java │ │ │ ├── EglCore.java │ │ │ ├── EglRecordEnv.java │ │ │ ├── EglSurfaceBase.java │ │ │ ├── FlatShadedProgram.java │ │ │ ├── FullFrameRect.java │ │ │ ├── GeneratedTexture.java │ │ │ ├── GlUtil.java │ │ │ ├── OffscreenSurface.java │ │ │ ├── Sprite2d.java │ │ │ ├── Texture2dProgram.java │ │ │ ├── TextureMovieEncoder2.java │ │ │ ├── VideoEncoderCore.java │ │ │ └── WindowSurface.java │ ├── jniLibs │ │ ├── arm64-v8a │ │ │ └── libNSVUtil.so │ │ └── armeabi-v7a │ │ │ └── libNSVUtil.so │ └── res │ │ └── values │ │ └── strings.xml │ └── test │ └── java │ └── us │ └── pinguo │ └── svideo │ └── ExampleUnitTest.java ├── publish.sh ├── publish_init.sh ├── readme ├── ISVideoRecorder.png ├── demo1.png └── demo2.png ├── releaseNote ├── 1.5.3-SNAPSHOT.txt ├── 1.5.3-SNAP_SHOT[D[D[D[[3~[3~[[3~[3~[3~[3~[3~[C[C[CSHOT.txt └── 1.5.3.txt └── settings.gradle /.gitignore: -------------------------------------------------------------------------------- 1 | # Built application files 2 | *.apk 3 | *.ap_ 4 | 5 | # Files for the ART/Dalvik VM 6 | *.dex 7 | 8 | # Java class files 9 | *.class 10 | 11 | # Generated files 12 | bin/ 13 | gen/ 14 | out/ 15 | 16 | # Gradle files 17 | .gradle/ 18 | build/ 19 | 20 | # Local configuration file (sdk path, etc) 21 | local.properties 22 | 23 | # Proguard folder generated by Eclipse 24 | proguard/ 25 | 26 | # Log Files 27 | *.log 28 | 29 | # Android Studio Navigation editor temp files 30 | .navigation/ 31 | 32 | # Android Studio captures folder 33 | captures/ 34 | 35 | # Intellij 36 | *.iml 37 | .idea/workspace.xml 38 | 39 | # Keystore files 40 | *.jks 41 | .idea/ 42 | gradle/ 43 | gradlew 44 | gradlew.bat 45 | -------------------------------------------------------------------------------- /.idea/vcs.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /README.md: --------------------------------------------------------------------------------

# SVideoRecorder

From [Camera360](https://play.google.com/store/apps/details?id=vStudio.Android.Camera360)

[SVideoRecorder](https://github.com/yellowcath/SVideoRecorder) records video with Android's native MediaCodec. Compared with the many libraries that record through FFmpeg, its advantages are:

- **Small footprint**: the compiled aar is only 187 KB, while a single FFmpeg .so is 7-8 MB, and even a trimmed build is still roughly half that size
- **Fast**: encoding one 720P frame on a Huawei P9 takes

  FFmpeg: 50-60 ms

  MediaCodec (YUV): 20-25 ms

  MediaCodec (Surface): 10-15 ms

- **Low CPU usage**: recording with FFmpeg visibly stutters on low-end devices, while recording with MediaCodec has almost no impact

The drawback is that only Android 4.3+ is supported (Android 4.1 and 4.2 already ship MediaCodec, but it is not officially guaranteed to work there).

## Permissions

SVideoRecorder requires the following permission:

> <uses-permission android:name="android.permission.RECORD_AUDIO" />

## Gradle

Add the maven repository to the root build.gradle:
``` groovy
allprojects {
    repositories {
        ...
        maven { url 'https://www.jitpack.io' }
    }
}
```

Add the dependency:
``` groovy
dependencies {
    implementation 'com.github.yellowcath:SVideoRecorder:2.0.4'
}
```

-------------------
[TOC]

## Features
1. Record the camera's raw preview frames (YUV)

2. Record a Surface, so you can apply filters, stickers and other effects to the raw preview data yourself and record the result directly

3. Segmented recording

4. Rolling back segments while recording in segments

## Usage
The main class diagram:
![ISVideoRecorder class diagram](https://github.com/yellowcath/SVideoRecorder/raw/develop-git/readme/ISVideoRecorder.png)

SMediaCodecRecorder: records from YUV frames fed to it

SSurfaceRecorder: provides a Surface and records whatever is drawn onto it

SSegmentRecorder: wraps either of the two recorders above and adds segmented recording

### Initialization
``` java
//Implement ICameraProxyForRecord to provide the preview parameters
ICameraProxyForRecord cameraProxyForRecord = new ICameraProxyForRecord() {
    @Override
    public void addSurfaceDataListener(PreviewSurfaceListener listener, SurfaceCreatedCallback callback) {
        //Called by SSurfaceRecorder
        RecordHelper.setPreviewSurfaceListener(listener, callback);
    }
    @Override
    public void removeSurfaceDataListener(PreviewSurfaceListener listener) {
        //Called by SSurfaceRecorder
        RecordHelper.setPreviewSurfaceListener(null, null);
    }
    @Override
    public void addPreviewDataCallback(PreviewDataCallback callback) {
        //Called by SMediaCodecRecorder
        mCallback = callback;
    }
    @Override
    public void removePreviewDataCallback(PreviewDataCallback callback) {
        //Called by SMediaCodecRecorder
        mCallback = null;
    }
    @Override
    public int getPreviewWidth() {
        return mPreviewSize.width;
    }
    @Override
    public int getPreviewHeight() {
        return mPreviewSize.height;
    }
    @Override
    public int getVideoRotation() {
        return mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK ? 90 : 270;
    }
};
mRecorder = new SMediaCodecRecorder(this, cameraProxyForRecord);
mRecorder.addRecordListener(this);
```
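`addRecordListener(this)` expects an `OnRecordListener`. As a minimal illustration (the class name `LoggingRecordListener` is arbitrary; the callback set matches `us.pinguo.svideo.interfaces.OnRecordListener` as used in the demo's `PreviewActivity2`), a listener that only logs the result could look like this:
``` java
import us.pinguo.svideo.bean.VideoInfo;
import us.pinguo.svideo.interfaces.OnRecordListener;
import android.util.Log;

//Minimal sketch of a record listener; only the success and failure callbacks do real work here
public class LoggingRecordListener implements OnRecordListener {
    @Override
    public void onRecordSuccess(VideoInfo videoInfo) {
        //Recording finished and the file is ready; VideoInfo carries the output path
        Log.i("SVideo", "saved to " + videoInfo.getVideoPath());
    }

    @Override
    public void onRecordFail(Throwable t) {
        //Recording failed or was cancelled
        Log.e("SVideo", "record failed", t);
    }

    @Override
    public void onRecordStart() { }

    @Override
    public void onRecordStop() { }

    @Override
    public void onRecordPause() { }

    @Override
    public void onRecordResume() { }
}
```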
### Frame data sources
SMediaCodecRecorder
``` java
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    if (mCallback != null) {
        long timeUs = System.nanoTime() / 1000;
        mCallback.onPreviewData(data, timeUs);
    }
}
```
SSurfaceRecorder
> The demo provides two approaches (see RecordHelper.java):

> 1. drawBlitFrameBuffer: copies the preview image directly into the MediaCodec Surface. It requires GLES 3.0, and some older devices may not support it well.

> 2. drawBlit2X: draws the preview image a second time onto the MediaCodec Surface; for performance reasons this needs an FBO.

### Calling the recorder
``` java
//Start recording
mRecorder.startRecord();
//Stop recording; onRecordSuccess is called back once the file is ready
mRecorder.stopRecord();
//Pause recording, SSegmentRecorder only
mRecorder.pauseRecord();
//Resume recording, SSegmentRecorder only
mRecorder.resumeRecord();
//Cancel recording; onRecordFail() is called back
mRecorder.cancelRecord();
```

## Demo

1. Record a plain YUV video

2. Record a YUV video in segments

3. Record a video with effects in segments (Surface)

![demo1](https://github.com/yellowcath/SVideoRecorder/raw/develop-git/readme/demo1.png)
![demo2](https://github.com/yellowcath/SVideoRecorder/raw/develop-git/readme/demo2.png)

-------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 | 3 | buildscript { 4 | repositories { 5 | jcenter() 6 | google() 7 | maven { url 'https://www.jitpack.io' } 8 | } 9 | dependencies { 10 | classpath 'com.android.tools.build:gradle:3.0.0' 11 | classpath 'com.github.dcendents:android-maven-gradle-plugin:2.0' 12 | // NOTE: Do not place your application dependencies here; they belong 13 | // in the individual module build.gradle files 14 | } 15 | } 16 | 17 | allprojects { 18 | repositories { 19 | maven { url 'https://www.jitpack.io' } 20 | jcenter() 21 | google() 22 | } 23 | } 24 | 25 | task clean(type: Delete) { 26 | delete rootProject.buildDir 27 | } 28 | -------------------------------------------------------------------------------- /demo/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'com.android.application' 2 | 3 | android { 4 | compileSdkVersion 26 5 | buildToolsVersion "26.0.2" 6 | sourceSets { 7 | main { 8 | jniLibs.srcDirs = ['libs'] 9 | } 10 | } 11 | defaultConfig { 12 | applicationId "us.pinguo.svideo" 13 | minSdkVersion 18 14 | targetSdkVersion 23 15 | versionCode 1 16 | versionName "1.0" 17 | vectorDrawables.useSupportLibrary true 18 | } 19 | buildTypes { 20 | release { 21 | minifyEnabled false 22 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' 23 | } 24 | } 25 | } 26 | 27 | dependencies { 28 | implementation fileTree(include: ['*.jar'], dir: 'libs') 29 | testCompile 'junit:junit:4.12' 30 | implementation 'com.android.support:support-v4:26.1.0' 31 | implementation project(':library') 32 | // implementation 'com.github.yellowcath:SVideoRecorder:2.0.1' 33 | implementation 'com.android.support:appcompat-v7:26.1.0' 34 | implementation 'pub.devrel:easypermissions:1.3.0' 35 | } 36 | 37 | 38 | -------------------------------------------------------------------------------- /demo/src/androidTest/java/us/pinguo/svideoDemo/ApplicationTest.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo; 2 | 3 | import android.annotation.TargetApi; 4 | import android.app.Application; 5 | import android.media.MediaCodec; 6 | import android.media.MediaExtractor; 7 | import android.media.MediaFormat; 8 | import android.os.Build; 9 | import android.test.ApplicationTestCase; 10 | import android.util.Log; 11 | 12 | import java.io.File; 13 | import java.io.FileInputStream; 14 | import java.io.IOException; 15 | import java.nio.ByteBuffer; 16 | 17 | /** 18 | * Testing Fundamentals 19 | */ 20 | public class ApplicationTest extends ApplicationTestCase { 21 | private static final String TAG = "hwLog"; 22 | 23 | public ApplicationTest() { 24 | super(Application.class); 25 | } 26 | 27 | @TargetApi(Build.VERSION_CODES.JELLY_BEAN) 28 | public void test() { 29 | /** 用来解码 */ 30 | MediaCodec mMediaCodec = null; 31 | /** 用来读取音频文件 */ 32 | MediaExtractor extractor; 33 | MediaFormat format = null; 34 | String mime = null; 35 | int sampleRate = 0, channels = 0, bitrate = 0; 36 | long presentationTimeUs = 0, duration = 0; 37 | String uri = "/mnt/sdcard2/C360VID_20160524_141411.mp4"; 38 | extractor = new MediaExtractor(); 39 | // 根据路径获取源文件 40 | try { 41 | extractor.setDataSource(new FileInputStream(new File(uri)).getFD()); 42 | } catch (Exception e) { 43 | Log.e(TAG, " 设置文件路径错误" + e.getMessage()); 44 | } 45 | try { 46 | // 音频文件信息 47 | format = extractor.getTrackFormat(1); 48 | mime = format.getString(MediaFormat.KEY_MIME); 49 | sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE); 50 | // 
声道个数:单声道或双声道 51 | channels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT); 52 | // if duration is 0, we are probably playing a live stream 53 | duration = format.getLong(MediaFormat.KEY_DURATION); 54 | // System.out.println("歌曲总时间秒:"+duration/1000000); 55 | bitrate = format.getInteger(MediaFormat.KEY_BIT_RATE); 56 | } catch (Exception e) { 57 | Log.e(TAG, "音频文件信息读取出错:" + e.getMessage()); 58 | // 不要退出,下面进行判断 59 | } 60 | Log.d(TAG, "Track info: mime:" + mime + " 采样率sampleRate:" + sampleRate + " channels:" + channels + " bitrate:" 61 | + bitrate + " duration:" + duration); 62 | if (format == null || !mime.startsWith("video/")) { 63 | Log.e(TAG, "不是视频文件 end !"); 64 | return; 65 | } 66 | // 实例化一个指定类型的解码器,提供数据输出 67 | // Instantiate an encoder supporting output data of the given mime type 68 | try { 69 | mMediaCodec = MediaCodec.createDecoderByType(mime); 70 | } catch (IOException e) { 71 | e.printStackTrace(); 72 | } 73 | 74 | if (mMediaCodec == null) { 75 | Log.e(TAG, "创建解码器失败!"); 76 | return; 77 | } 78 | mMediaCodec.configure(format, null, null, 0); 79 | 80 | mMediaCodec.start(); 81 | // 用来存放目标文件的数据 82 | ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers(); 83 | // 解码后的数据 84 | ByteBuffer[] outputBuffers = mMediaCodec.getOutputBuffers(); 85 | extractor.selectTrack(1); 86 | // ==========开始解码============= 87 | boolean sawInputEOS = false; 88 | boolean sawOutputEOS = false; 89 | final long kTimeOutUs = 10; 90 | MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); 91 | while (!sawOutputEOS) { 92 | try { 93 | if (!sawInputEOS) { 94 | int inputBufIndex = mMediaCodec.dequeueInputBuffer(kTimeOutUs); 95 | if (inputBufIndex >= 0) { 96 | ByteBuffer dstBuf = inputBuffers[inputBufIndex]; 97 | 98 | int sampleSize = extractor.readSampleData(dstBuf, 0); 99 | if (sampleSize < 0) { 100 | Log.d(TAG, "saw input EOS. Stopping playback"); 101 | sawInputEOS = true; 102 | sampleSize = 0; 103 | } else { 104 | presentationTimeUs = extractor.getSampleTime(); 105 | Log.i(TAG, "presentationTimeUs:" + presentationTimeUs); 106 | } 107 | 108 | mMediaCodec.queueInputBuffer(inputBufIndex, 0, sampleSize, presentationTimeUs, 109 | sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0); 110 | 111 | if (!sawInputEOS) { 112 | extractor.advance(); 113 | } 114 | 115 | } else { 116 | Log.e(TAG, "inputBufIndex " + inputBufIndex); 117 | } 118 | } // !sawInputEOS 119 | 120 | // decode to PCM and push it to the AudioTrack player 121 | int res = mMediaCodec.dequeueOutputBuffer(info, kTimeOutUs); 122 | 123 | if (res >= 0) { 124 | int outputBufIndex = res; 125 | ByteBuffer buf = outputBuffers[outputBufIndex]; 126 | final byte[] chunk = new byte[info.size]; 127 | buf.get(chunk); 128 | buf.clear(); 129 | if (chunk.length > 0) { 130 | 131 | // chunk解码后的音频流 132 | // TODO:处理... 
133 | } 134 | mMediaCodec.releaseOutputBuffer(outputBufIndex, false); 135 | if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 136 | Log.d(TAG, "saw output EOS."); 137 | sawOutputEOS = true; 138 | } 139 | 140 | } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { 141 | outputBuffers = mMediaCodec.getOutputBuffers(); 142 | Log.w(TAG, "[AudioDecoder]output buffers have changed."); 143 | } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 144 | MediaFormat oformat = mMediaCodec.getOutputFormat(); 145 | Log.w(TAG, "[AudioDecoder]output format has changed to " + oformat); 146 | } else { 147 | Log.w(TAG, "[AudioDecoder] dequeueOutputBuffer returned " + res); 148 | } 149 | 150 | } catch (RuntimeException e) { 151 | Log.e(TAG, "[decodeMP3] error:" + e.getMessage()); 152 | } 153 | } 154 | // ================================================================================= 155 | if (mMediaCodec != null) { 156 | mMediaCodec.stop(); 157 | mMediaCodec.release(); 158 | mMediaCodec = null; 159 | } 160 | if (extractor != null) { 161 | extractor.release(); 162 | extractor = null; 163 | } 164 | // clear source and the other globals 165 | duration = 0; 166 | mime = null; 167 | sampleRate = 0; 168 | channels = 0; 169 | bitrate = 0; 170 | presentationTimeUs = 0; 171 | duration = 0; 172 | } 173 | } -------------------------------------------------------------------------------- /demo/src/androidTest/java/us/pinguo/svideoDemo/SurfaceTest.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo; 2 | 3 | import android.annotation.TargetApi; 4 | import android.media.MediaCodec; 5 | import android.media.MediaCodecInfo; 6 | import android.media.MediaFormat; 7 | import android.opengl.GLES20; 8 | import android.os.Build; 9 | import android.view.Surface; 10 | import us.pinguo.svideo.utils.gles.EglRecordEnv; 11 | 12 | import java.io.IOException; 13 | 14 | /** 15 | * Created by huangwei on 2016/5/16. 
16 | */ 17 | public class SurfaceTest extends ApplicationTest { 18 | public static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video 19 | private MediaCodec mMediaCodec = null; 20 | private int mWidth = 480; 21 | private int mHeight = 640; 22 | private Surface mInputSurface; 23 | 24 | public void test() { 25 | initInThread(); 26 | { 27 | EglRecordEnv eglRecordEnv = new EglRecordEnv(mInputSurface, null, false); 28 | eglRecordEnv.makeCurrent(); 29 | GLES20.glClearColor(1f, 0f, 0f, 1f); 30 | eglRecordEnv.swapBuffers(); 31 | eglRecordEnv.release(true); 32 | } 33 | { 34 | mMediaCodec.stop(); 35 | Surface surface = mMediaCodec.createInputSurface(); 36 | mMediaCodec.start(); 37 | EglRecordEnv eglRecordEnv = new EglRecordEnv(surface, null, false); 38 | eglRecordEnv.makeCurrent(); 39 | GLES20.glClearColor(1f, 0f, 0f, 1f); 40 | eglRecordEnv.swapBuffers(); 41 | eglRecordEnv.release(true); 42 | } 43 | System.out.println("asdasd"); 44 | } 45 | 46 | @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2) 47 | public void initInThread() { 48 | MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, 49 | this.mWidth, this.mHeight); 50 | mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 1000000); 51 | mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 24); 52 | mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, 53 | MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); 54 | mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 55 | 5); 56 | 57 | try { 58 | mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE); 59 | mMediaCodec.configure(mediaFormat, null, null, 60 | MediaCodec.CONFIGURE_FLAG_ENCODE); 61 | mInputSurface = mMediaCodec.createInputSurface(); 62 | mMediaCodec.start(); 63 | } catch (IOException e) { 64 | } finally { 65 | } 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /demo/src/androidTest/java/us/pinguo/svideoDemo/Test.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo; 2 | 3 | /** 4 | * Created by huangwei on 2016/5/25. 5 | */ 6 | public class Test extends ApplicationTest { 7 | public void test() { 8 | try { 9 | throw new RuntimeException("asdasd"); 10 | } catch (Exception e) { 11 | e.printStackTrace(); 12 | } 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /demo/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 7 | 8 | 9 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/MainActivity.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo; 2 | 3 | import android.Manifest; 4 | import android.content.Intent; 5 | import android.os.Bundle; 6 | import android.support.annotation.Nullable; 7 | import android.support.v7.app.AppCompatActivity; 8 | import android.view.View; 9 | import pub.devrel.easypermissions.EasyPermissions; 10 | 11 | /** 12 | * Created by huangwei on 2018/8/16 0016. 
13 | */ 14 | public class MainActivity extends AppCompatActivity implements View.OnClickListener { 15 | @Override 16 | protected void onCreate(@Nullable Bundle savedInstanceState) { 17 | super.onCreate(savedInstanceState); 18 | setContentView(R.layout.activity_main); 19 | 20 | findViewById(R.id.texture_record).setOnClickListener(this); 21 | findViewById(R.id.yuv_record).setOnClickListener(this); 22 | findViewById(R.id.yuv_record_subsection).setOnClickListener(this); 23 | 24 | requestPermission(); 25 | } 26 | 27 | private void requestPermission(){ 28 | String[] perms = {Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO,Manifest.permission.WRITE_EXTERNAL_STORAGE}; 29 | if (EasyPermissions.hasPermissions(this, perms)) { 30 | // Already have permission, do the thing 31 | // ... 32 | } else { 33 | // Do not have permissions, request them now 34 | EasyPermissions.requestPermissions(this, "This Demo needs Camera & Audio & Write SDCard Permissions", 35 | 123, perms); 36 | } 37 | } 38 | @Override 39 | public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) { 40 | super.onRequestPermissionsResult(requestCode, permissions, grantResults); 41 | 42 | // Forward results to EasyPermissions 43 | EasyPermissions.onRequestPermissionsResult(requestCode, permissions, grantResults, this); 44 | } 45 | @Override 46 | public void onClick(View v) { 47 | Intent intent = new Intent(); 48 | if (v.getId() == R.id.texture_record) { 49 | intent.setClass(this, TextureRecordActivity.class); 50 | } else if (v.getId() == R.id.yuv_record) { 51 | intent.setClass(this, YuvRecordActivity.class); 52 | } else if (v.getId() == R.id.yuv_record_subsection) { 53 | intent.setClass(this, SegYuvRecordActivity.class); 54 | } 55 | startActivity(intent); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/MyApplication.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo; 2 | 3 | import android.app.Application; 4 | import android.content.Context; 5 | import us.pinguo.svideo.utils.RL; 6 | 7 | /** 8 | * Created by huangwei on 2016/5/16. 9 | */ 10 | public class MyApplication extends Application { 11 | private static Context sAppContext; 12 | public static Context getAppContext(){ 13 | return sAppContext; 14 | } 15 | 16 | @Override 17 | public void onCreate() { 18 | super.onCreate(); 19 | RL.setLogEnable(true); 20 | sAppContext = getApplicationContext(); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/PreviewActivity.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo; 2 | 3 | import android.annotation.TargetApi; 4 | import android.media.MediaExtractor; 5 | import android.media.MediaFormat; 6 | import android.media.MediaMetadataRetriever; 7 | import android.media.MediaPlayer; 8 | import android.net.Uri; 9 | import android.os.Bundle; 10 | import android.support.annotation.Nullable; 11 | import android.support.v7.app.AppCompatActivity; 12 | import android.view.MenuItem; 13 | import android.widget.SeekBar; 14 | import android.widget.TextView; 15 | import android.widget.VideoView; 16 | import us.pinguo.svideoDemo.R; 17 | 18 | /** 19 | * Created by Bhuvnesh on 08-03-2017. 
20 | */ 21 | @TargetApi(16) 22 | public class PreviewActivity extends AppCompatActivity { 23 | private VideoView videoView; 24 | private SeekBar seekBar; 25 | private int stopPosition; 26 | private static final String POSITION = "position"; 27 | static final String FILEPATH = "filepath"; 28 | 29 | @Override 30 | protected void onCreate(@Nullable Bundle savedInstanceState) { 31 | super.onCreate(savedInstanceState); 32 | setContentView(R.layout.activity_preview); 33 | getSupportActionBar().setDisplayHomeAsUpEnabled(true); 34 | getSupportActionBar().setDisplayShowHomeEnabled(true); 35 | videoView = (VideoView) findViewById(R.id.videoView); 36 | seekBar = (SeekBar) findViewById(R.id.seekBar); 37 | 38 | TextView tvInstruction = (TextView) findViewById(R.id.tvInstruction); 39 | String filePath = getIntent().getStringExtra(FILEPATH); 40 | 41 | String videoInfo = ""; 42 | try { 43 | MediaMetadataRetriever retriever = new MediaMetadataRetriever(); 44 | retriever.setDataSource(filePath); 45 | int bitrate = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE)); 46 | retriever.release(); 47 | MediaExtractor extractor = new MediaExtractor(); 48 | extractor.setDataSource(filePath); 49 | MediaFormat format = extractor.getTrackFormat(selectTrack(extractor, false)); 50 | int frameRate = format.containsKey(MediaFormat.KEY_FRAME_RATE) ? format.getInteger(MediaFormat.KEY_FRAME_RATE) : -1; 51 | int width = format.getInteger(MediaFormat.KEY_WIDTH); 52 | int height = format.getInteger(MediaFormat.KEY_HEIGHT); 53 | int rotation = format.containsKey(MediaFormat.KEY_ROTATION) ? format.getInteger(MediaFormat.KEY_ROTATION) : -1; 54 | long duration = format.containsKey(MediaFormat.KEY_DURATION) ? format.getLong(MediaFormat.KEY_DURATION) : -1; 55 | videoInfo = String.format("size:%dX%d,framerate:%d,rotation:%d,bitrate:%d,duration:%.1fs", width, height, frameRate, rotation, bitrate, 56 | duration / 1000f / 1000f); 57 | extractor.release(); 58 | } catch (Exception e) { 59 | e.printStackTrace(); 60 | } 61 | tvInstruction.setText("Video stored at path " + filePath + "\n" + videoInfo); 62 | videoView.setVideoURI(Uri.parse(filePath)); 63 | videoView.start(); 64 | 65 | 66 | videoView.setOnPreparedListener(new MediaPlayer.OnPreparedListener() { 67 | 68 | @Override 69 | public void onPrepared(MediaPlayer mp) { 70 | mp.setLooping(true); 71 | seekBar.setMax(videoView.getDuration()); 72 | seekBar.postDelayed(onEverySecond, 1000); 73 | } 74 | }); 75 | seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() { 76 | 77 | @Override 78 | public void onStopTrackingTouch(SeekBar seekBar) { 79 | } 80 | 81 | @Override 82 | public void onStartTrackingTouch(SeekBar seekBar) { 83 | } 84 | 85 | @Override 86 | public void onProgressChanged(SeekBar seekBar, int progress, 87 | boolean fromUser) { 88 | 89 | if (fromUser) { 90 | // this is when actually seekbar has been seeked to a new position 91 | videoView.seekTo(progress); 92 | } 93 | } 94 | }); 95 | 96 | 97 | } 98 | 99 | @Override 100 | protected void onPause() { 101 | super.onPause(); 102 | stopPosition = videoView.getCurrentPosition(); //stopPosition is an int 103 | videoView.pause(); 104 | } 105 | 106 | @Override 107 | protected void onResume() { 108 | super.onResume(); 109 | videoView.seekTo(stopPosition); 110 | videoView.start(); 111 | } 112 | 113 | private Runnable onEverySecond = new Runnable() { 114 | 115 | @Override 116 | public void run() { 117 | 118 | if (seekBar != null) { 119 | seekBar.setProgress(videoView.getCurrentPosition()); 
120 | } 121 | 122 | if (videoView.isPlaying()) { 123 | seekBar.postDelayed(onEverySecond, 1000); 124 | } 125 | 126 | } 127 | }; 128 | 129 | @Override 130 | public boolean onOptionsItemSelected(MenuItem item) { 131 | // handle arrow click here 132 | if (item.getItemId() == android.R.id.home) { 133 | finish(); // close this activity and return to preview activity (if there is any) 134 | } 135 | 136 | return super.onOptionsItemSelected(item); 137 | } 138 | 139 | public static int selectTrack(MediaExtractor extractor, boolean audio) { 140 | int numTracks = extractor.getTrackCount(); 141 | for (int i = 0; i < numTracks; i++) { 142 | MediaFormat format = extractor.getTrackFormat(i); 143 | String mime = format.getString(MediaFormat.KEY_MIME); 144 | if (audio) { 145 | if (mime.startsWith("audio/")) { 146 | return i; 147 | } 148 | } else { 149 | if (mime.startsWith("video/")) { 150 | return i; 151 | } 152 | } 153 | } 154 | return -5; 155 | } 156 | } 157 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/PreviewActivity2.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo; 2 | 3 | import android.annotation.TargetApi; 4 | import android.media.MediaExtractor; 5 | import android.media.MediaFormat; 6 | import android.media.MediaMetadataRetriever; 7 | import android.media.MediaPlayer; 8 | import android.net.Uri; 9 | import android.os.Bundle; 10 | import android.support.annotation.Nullable; 11 | import android.support.v7.app.AppCompatActivity; 12 | import android.view.MenuItem; 13 | import android.widget.SeekBar; 14 | import android.widget.TextView; 15 | import android.widget.VideoView; 16 | import us.pinguo.svideo.bean.VideoInfo; 17 | import us.pinguo.svideo.interfaces.OnRecordListener; 18 | 19 | /** 20 | * Created by Bhuvnesh on 08-03-2017. 
21 | */ 22 | @TargetApi(16) 23 | public class PreviewActivity2 extends AppCompatActivity { 24 | private VideoView videoView; 25 | private SeekBar seekBar; 26 | private int stopPosition; 27 | private static final String POSITION = "position"; 28 | static final String FILEPATH = "filepath"; 29 | 30 | @Override 31 | protected void onCreate(@Nullable Bundle savedInstanceState) { 32 | super.onCreate(savedInstanceState); 33 | setContentView(R.layout.activity_preview); 34 | getSupportActionBar().setDisplayHomeAsUpEnabled(true); 35 | getSupportActionBar().setDisplayShowHomeEnabled(true); 36 | videoView = (VideoView) findViewById(R.id.videoView); 37 | seekBar = (SeekBar) findViewById(R.id.seekBar); 38 | 39 | final TextView tvInstruction = (TextView) findViewById(R.id.tvInstruction); 40 | 41 | TextureRecordActivity.mRecorder.waitRecordSuccess(new OnRecordListener() { 42 | @Override 43 | public void onRecordSuccess(VideoInfo videoInfo) { 44 | videoView.setVideoURI(Uri.parse(videoInfo.getVideoPath())); 45 | videoView.start(); 46 | String filePath = videoInfo.getVideoPath(); 47 | 48 | String videoInfoStr = ""; 49 | try { 50 | MediaMetadataRetriever retriever = new MediaMetadataRetriever(); 51 | retriever.setDataSource(filePath); 52 | int bitrate = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE)); 53 | retriever.release(); 54 | int frameRate = videoInfo.getFrameRate(); 55 | int width = videoInfo.getVideoWidth(); 56 | int height = videoInfo.getVideoHeight(); 57 | int rotation = videoInfo.getVideoRotation(); 58 | long duration = videoInfo.getDuration(); 59 | videoInfoStr = String.format("size:%dX%d,framerate:%d,rotation:%d,bitrate:%d,duration:%.1fs", width, height, frameRate, rotation, bitrate, 60 | duration / 1000f); 61 | } catch (Exception e) { 62 | e.printStackTrace(); 63 | } 64 | tvInstruction.setText("Video stored at path " + filePath + "\n" + videoInfoStr); 65 | } 66 | 67 | @Override 68 | public void onRecordStart() { 69 | 70 | } 71 | 72 | @Override 73 | public void onRecordFail(Throwable t) { 74 | 75 | } 76 | 77 | @Override 78 | public void onRecordStop() { 79 | 80 | } 81 | 82 | @Override 83 | public void onRecordPause() { 84 | 85 | } 86 | 87 | @Override 88 | public void onRecordResume() { 89 | 90 | } 91 | }); 92 | 93 | videoView.setOnPreparedListener(new MediaPlayer.OnPreparedListener() { 94 | 95 | @Override 96 | public void onPrepared(MediaPlayer mp) { 97 | mp.setLooping(true); 98 | seekBar.setMax(videoView.getDuration()); 99 | seekBar.postDelayed(onEverySecond, 1000); 100 | } 101 | }); 102 | seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() { 103 | 104 | @Override 105 | public void onStopTrackingTouch(SeekBar seekBar) { 106 | } 107 | 108 | @Override 109 | public void onStartTrackingTouch(SeekBar seekBar) { 110 | } 111 | 112 | @Override 113 | public void onProgressChanged(SeekBar seekBar, int progress, 114 | boolean fromUser) { 115 | 116 | if (fromUser) { 117 | // this is when actually seekbar has been seeked to a new position 118 | videoView.seekTo(progress); 119 | } 120 | } 121 | }); 122 | 123 | 124 | } 125 | 126 | @Override 127 | protected void onPause() { 128 | super.onPause(); 129 | stopPosition = videoView.getCurrentPosition(); //stopPosition is an int 130 | videoView.pause(); 131 | } 132 | 133 | @Override 134 | protected void onResume() { 135 | super.onResume(); 136 | videoView.seekTo(stopPosition); 137 | videoView.start(); 138 | } 139 | 140 | private Runnable onEverySecond = new Runnable() { 141 | 142 | @Override 143 | public 
void run() { 144 | 145 | if (seekBar != null) { 146 | seekBar.setProgress(videoView.getCurrentPosition()); 147 | } 148 | 149 | if (videoView.isPlaying()) { 150 | seekBar.postDelayed(onEverySecond, 1000); 151 | } 152 | 153 | } 154 | }; 155 | 156 | @Override 157 | public boolean onOptionsItemSelected(MenuItem item) { 158 | // handle arrow click here 159 | if (item.getItemId() == android.R.id.home) { 160 | finish(); // close this activity and return to preview activity (if there is any) 161 | } 162 | 163 | return super.onOptionsItemSelected(item); 164 | } 165 | 166 | public static int selectTrack(MediaExtractor extractor, boolean audio) { 167 | int numTracks = extractor.getTrackCount(); 168 | for (int i = 0; i < numTracks; i++) { 169 | MediaFormat format = extractor.getTrackFormat(i); 170 | String mime = format.getString(MediaFormat.KEY_MIME); 171 | if (audio) { 172 | if (mime.startsWith("audio/")) { 173 | return i; 174 | } 175 | } else { 176 | if (mime.startsWith("video/")) { 177 | return i; 178 | } 179 | } 180 | } 181 | return -5; 182 | } 183 | } 184 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/TextureRecordActivity.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo; 2 | 3 | import android.app.Activity; 4 | import android.content.Intent; 5 | import android.os.Bundle; 6 | import android.view.SurfaceView; 7 | import android.view.View; 8 | import android.widget.ImageView; 9 | import us.pinguo.svideo.bean.VideoInfo; 10 | import us.pinguo.svideo.interfaces.ICameraProxyForRecord; 11 | import us.pinguo.svideo.interfaces.ISVideoRecorder; 12 | import us.pinguo.svideo.interfaces.OnRecordListener; 13 | import us.pinguo.svideo.interfaces.PreviewDataCallback; 14 | import us.pinguo.svideo.interfaces.PreviewSurfaceListener; 15 | import us.pinguo.svideo.interfaces.SurfaceCreatedCallback; 16 | import us.pinguo.svideo.recorder.SSegmentRecorder; 17 | import us.pinguo.svideo.recorder.SSurfaceRecorder; 18 | import us.pinguo.svideo.utils.RL; 19 | import us.pinguo.svideoDemo.record.CameraPresenter; 20 | import us.pinguo.svideoDemo.record.IRecordView; 21 | import us.pinguo.svideoDemo.texturerecord.RecordHelper; 22 | import us.pinguo.svideoDemo.ui.BottomSegMenuView; 23 | import us.pinguo.svideoDemo.ui.IBottomMenuView; 24 | 25 | /** 26 | * Created by huangwei on 2016/1/25. 
27 | */ 28 | public class TextureRecordActivity extends Activity implements IRecordView, View.OnClickListener, OnRecordListener, IBottomMenuView { 29 | 30 | private SurfaceView mSurfaceView; 31 | static SSegmentRecorder mRecorder; 32 | private CameraPresenter mCameraPresenter; 33 | private BottomSegMenuView mBottomMenuView; 34 | private ImageView mFilterImg; 35 | 36 | @Override 37 | protected void onCreate(Bundle savedInstanceState) { 38 | super.onCreate(savedInstanceState); 39 | setContentView(R.layout.activity_texture_record); 40 | 41 | mCameraPresenter = new CameraPresenter(); 42 | mCameraPresenter.attachView(this); 43 | 44 | mFilterImg = findViewById(R.id.movie_filter); 45 | mFilterImg.setOnClickListener(this); 46 | mSurfaceView = (SurfaceView) findViewById(R.id.surfaceview); 47 | mSurfaceView.getHolder().addCallback(mCameraPresenter); 48 | 49 | SSurfaceRecorder recorder = initVideoRecorder(); 50 | mRecorder = new SSegmentRecorder(getApplicationContext(), recorder); 51 | mRecorder.addRecordListener(this); 52 | 53 | mBottomMenuView = findViewById(R.id.record_bottom_layout); 54 | mBottomMenuView.setBottomViewCallBack(this); 55 | mBottomMenuView.enableSVideoTouch(true); 56 | mBottomMenuView.enableVideoProgressLayout(); 57 | } 58 | 59 | private SSurfaceRecorder initVideoRecorder() { 60 | SSurfaceRecorder.MEDIACODEC_API21_ENABLE = false; 61 | SSurfaceRecorder.MEDIACODEC_API21_ASYNC_ENABLE = true; 62 | ICameraProxyForRecord cameraProxyForRecord = new ICameraProxyForRecord() { 63 | 64 | 65 | @Override 66 | public void addSurfaceDataListener(PreviewSurfaceListener previewSurfaceListener, SurfaceCreatedCallback surfaceCreatedCallback) { 67 | RecordHelper.setPreviewSurfaceListener(previewSurfaceListener, surfaceCreatedCallback); 68 | } 69 | 70 | @Override 71 | public void removeSurfaceDataListener(PreviewSurfaceListener previewSurfaceListener) { 72 | RecordHelper.setPreviewSurfaceListener(null, null); 73 | } 74 | 75 | @Override 76 | public void addPreviewDataCallback(PreviewDataCallback callback) { 77 | RecordHelper.setPreviewDataCallback(callback); 78 | } 79 | 80 | @Override 81 | public void removePreviewDataCallback(PreviewDataCallback callback) { 82 | RecordHelper.setPreviewDataCallback(null); 83 | } 84 | 85 | @Override 86 | public int getVideoRotation() { 87 | return 0; 88 | } 89 | 90 | @Override 91 | public int getPreviewWidth() { 92 | return mCameraPresenter.getPreviewSize().height; 93 | } 94 | 95 | @Override 96 | public int getPreviewHeight() { 97 | return mCameraPresenter.getPreviewSize().width; 98 | } 99 | }; 100 | return new SSurfaceRecorder(getApplicationContext(), cameraProxyForRecord); 101 | } 102 | 103 | @Override 104 | public void onClick(View v) { 105 | if (v == mFilterImg) { 106 | mFilterImg.setSelected(!mFilterImg.isSelected()); 107 | mFilterImg.setColorFilter(mFilterImg.isSelected() ? 
0xFFFF0000 : 0xFFFFFFFF); 108 | mCameraPresenter.enableFilter(mFilterImg.isSelected()); 109 | } 110 | } 111 | 112 | @Override 113 | public void onRecordSuccess(VideoInfo videoInfo) { 114 | 115 | } 116 | 117 | @Override 118 | public void onRecordStart() { 119 | 120 | } 121 | 122 | @Override 123 | public void onRecordFail(Throwable t) { 124 | 125 | } 126 | 127 | @Override 128 | public void onRecordStop() { 129 | Intent intent = new Intent(this, PreviewActivity2.class); 130 | // intent.putExtra(PreviewActivity.FILEPATH, videoInfo.getVideoPath()); 131 | startActivity(intent); 132 | } 133 | 134 | @Override 135 | public void onRecordPause() { 136 | RL.i("onRecordPause"); 137 | } 138 | 139 | @Override 140 | public void onRecordResume() { 141 | RL.i("onRecordResume"); 142 | } 143 | 144 | @Override 145 | public ISVideoRecorder requestRecordListener() { 146 | return mRecorder; 147 | } 148 | 149 | @Override 150 | protected void onPause() { 151 | super.onPause(); 152 | mBottomMenuView.onPause(); 153 | } 154 | 155 | @Override 156 | protected void onResume() { 157 | super.onResume(); 158 | mBottomMenuView.onResume(); 159 | } 160 | 161 | @Override 162 | protected void onDestroy() { 163 | super.onDestroy(); 164 | mCameraPresenter.detachView(); 165 | mCameraPresenter = null; 166 | } 167 | 168 | @Override 169 | public void onBackPressed() { 170 | super.onBackPressed(); 171 | mRecorder.cancelRecord(); 172 | } 173 | } 174 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/mvp/Presenter.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.mvp; 2 | 3 | public interface Presenter { 4 | 5 | public void attachView(ViewController controller); 6 | 7 | public void detachView(); 8 | 9 | } 10 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/mvp/ViewController.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.mvp; 2 | 3 | public interface ViewController { 4 | } 5 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/record/CameraPresenter.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.record; 2 | 3 | import android.graphics.SurfaceTexture; 4 | import android.hardware.Camera; 5 | import android.view.SurfaceHolder; 6 | import us.pinguo.svideo.utils.RL; 7 | import us.pinguo.svideoDemo.mvp.Presenter; 8 | import us.pinguo.svideoDemo.mvp.ViewController; 9 | import us.pinguo.svideoDemo.texturerecord.RecordHelper; 10 | import us.pinguo.svideoDemo.texturerecord.RenderThread; 11 | import us.pinguo.svideoDemo.texturerecord.RenderThreadHandler; 12 | 13 | import java.io.IOException; 14 | import java.util.List; 15 | 16 | /** 17 | * Created by huangwei on 2016/7/15. 
18 | */ 19 | public class CameraPresenter implements Presenter, SurfaceHolder.Callback, RenderThread.OnSurfaceTextureUpdatedListener { 20 | 21 | public final static int CAMERA_FACING = Camera.CameraInfo.CAMERA_FACING_BACK; 22 | private Camera mCamera; 23 | private Camera.Size mPreviewSize; 24 | private RenderThread mRenderThread; 25 | 26 | @Override 27 | public void attachView(ViewController controller) { 28 | startRenderThread(); 29 | openCamera(); 30 | setupPreviewSize(); 31 | } 32 | 33 | @Override 34 | public void detachView() { 35 | closeCamera(); 36 | } 37 | 38 | private void startRenderThread() { 39 | mRenderThread = new RenderThread(); 40 | mRenderThread.setName("Rendering thread"); 41 | mRenderThread.setCameraRotation(CAMERA_FACING == Camera.CameraInfo.CAMERA_FACING_BACK?270:90); 42 | mRenderThread.start(); 43 | } 44 | 45 | public Camera.Size getPreviewSize() { 46 | return mPreviewSize; 47 | } 48 | 49 | private void openCamera() { 50 | if (mCamera != null) { 51 | return; 52 | } 53 | Camera.CameraInfo info = new Camera.CameraInfo(); 54 | for (int k = 0; k < Camera.getNumberOfCameras(); k++) { 55 | Camera.getCameraInfo(k, info); 56 | if (info.facing == CAMERA_FACING) { 57 | mCamera = Camera.open(k); 58 | break; 59 | } 60 | } 61 | if (mCamera == null) { 62 | throw new RuntimeException("Can't open frontal camera"); 63 | } 64 | Camera.Parameters parameters = mCamera.getParameters(); 65 | parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE); 66 | mCamera.setParameters(parameters); 67 | } 68 | 69 | private void closeCamera() { 70 | if (mCamera != null) { 71 | mCamera.release(); 72 | mCamera = null; 73 | } 74 | } 75 | 76 | private void setupPreviewSize() { 77 | Camera.Parameters parameters = mCamera.getParameters(); 78 | mPreviewSize = getProperPreviewSize(parameters); 79 | parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height); 80 | RecordHelper.setPreviewSize(mPreviewSize.width, mPreviewSize.height); 81 | mRenderThread.setCameraImageSize(mPreviewSize.width, mPreviewSize.height); 82 | } 83 | 84 | private void startPreview(SurfaceTexture texture) { 85 | try { 86 | mCamera.setPreviewTexture(texture); 87 | } catch (IOException e) { 88 | e.printStackTrace(); 89 | } 90 | mCamera.startPreview(); 91 | } 92 | 93 | private void stopPreview() { 94 | if (mCamera != null) { 95 | mCamera.stopPreview(); 96 | } 97 | } 98 | 99 | @Override 100 | public void surfaceCreated(SurfaceHolder holder) { 101 | //pass this surfaceHolder to the renderer 102 | mRenderThread.waitUntilHandlerReady(); 103 | 104 | mRenderThread.setOnSurfaceTextureListener(this); 105 | 106 | RenderThreadHandler handler = mRenderThread.getHandler(); 107 | if (handler != null) { 108 | handler.sendSurfaceCreated(holder); 109 | } 110 | } 111 | 112 | @Override 113 | public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { 114 | RenderThreadHandler handler = mRenderThread.getHandler(); 115 | if (handler != null) { 116 | handler.sendSurfaceChanged(holder, width, height); 117 | } 118 | } 119 | 120 | @Override 121 | public void surfaceDestroyed(SurfaceHolder holder) { 122 | RenderThreadHandler handler = mRenderThread.getHandler(); 123 | if (handler != null) { 124 | handler.sendSurfaceDestroyed(holder); 125 | } 126 | stopPreview(); 127 | } 128 | 129 | @Override 130 | public void onSurfaceTextureUpdated(SurfaceTexture texture) { 131 | startPreview(texture); 132 | } 133 | 134 | private Camera.Size getProperPreviewSize(Camera.Parameters parameters) { 135 | int min = 640 * 480; 136 | int max 
= 720 * 1280; 137 | List sizeList = parameters.getSupportedPreviewSizes(); 138 | Camera.Size previewSize = null; 139 | for (int i = 0; i < sizeList.size(); i++) { 140 | Camera.Size size = sizeList.get(i); 141 | int value = size.width * size.height; 142 | if (value >= min && value <= max) { 143 | previewSize = size; 144 | } 145 | } 146 | if (previewSize == null) { 147 | previewSize = sizeList.get(sizeList.size() / 2); 148 | } 149 | RL.i("getProperPreviewSize:" + previewSize.width + "X" + previewSize.height); 150 | return previewSize; 151 | } 152 | 153 | public void enableFilter(boolean enable) { 154 | mRenderThread.enableFilter(enable); 155 | } 156 | } 157 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/record/IRecordView.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.record; 2 | 3 | import us.pinguo.svideoDemo.mvp.ViewController; 4 | 5 | /** 6 | * Created by huangwei on 2016/7/15. 7 | */ 8 | public interface IRecordView extends ViewController { 9 | } 10 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/texturerecord/RenderThread.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.texturerecord; 2 | 3 | import android.graphics.SurfaceTexture; 4 | import android.opengl.GLES20; 5 | import android.os.Looper; 6 | import android.util.DisplayMetrics; 7 | import android.util.Log; 8 | import android.view.SurfaceHolder; 9 | import us.pinguo.svideoDemo.MyApplication; 10 | import us.pinguo.svideoDemo.texturerecord.gles.EglCore; 11 | import us.pinguo.svideoDemo.texturerecord.gles.EglSurface; 12 | import us.pinguo.svideoDemo.texturerecord.gles.GLRendering; 13 | import us.pinguo.svideoDemo.texturerecord.gles.glUtils; 14 | 15 | public class RenderThread extends Thread implements SurfaceTexture.OnFrameAvailableListener { 16 | private final static String TAG = "RenderThread"; 17 | private SurfaceHolder mSurfaceHolder; 18 | 19 | private RenderThreadHandler mHandler; 20 | private final Object mWaitReadyLock = new Object(); 21 | private final Object mLockObject = new Object(); 22 | boolean mReady = false; 23 | 24 | //rendering variables 25 | EglSurface mEGLSurface; 26 | EglCore mEGLCore; 27 | GLRendering mRenderer; 28 | int mTextureName = -1; 29 | private SurfaceTexture mSurfaceTexture; 30 | 31 | private OnSurfaceTextureUpdatedListener mSTListener = null; 32 | 33 | private int mCameraWidth, mCameraHeight, mCameraRotation; 34 | 35 | 36 | public RenderThread() { 37 | } 38 | 39 | @Override 40 | public void run() { 41 | Looper.prepare(); 42 | mHandler = new RenderThreadHandler(this); 43 | 44 | synchronized (mWaitReadyLock) { 45 | mReady = true; 46 | mWaitReadyLock.notify(); 47 | } 48 | 49 | mEGLCore = new EglCore(); 50 | 51 | Looper.loop(); 52 | 53 | mHandler = null; 54 | synchronized (mWaitReadyLock) { 55 | mReady = false; 56 | } 57 | } 58 | 59 | //waits until the looper handler is ready 60 | public void waitUntilHandlerReady() { 61 | synchronized (mWaitReadyLock) { 62 | while (!mReady) { 63 | try { 64 | mWaitReadyLock.wait(); 65 | } catch (Exception e) { 66 | 67 | } 68 | } 69 | } 70 | } 71 | 72 | public void shutdown() { 73 | Looper.myLooper().quit(); 74 | } 75 | 76 | public RenderThreadHandler getHandler() { 77 | return mHandler; 78 | } 79 | 80 | // these calls handle the SurfaceView Surface, not the SurfaceTexture coming 
from the camera 81 | public void surfaceCreated(SurfaceHolder surfaceHolder) { 82 | mSurfaceHolder = surfaceHolder; 83 | mEGLSurface = new EglSurface(mEGLCore); 84 | mEGLSurface.createWindowForSurface(mSurfaceHolder.getSurface()); 85 | mEGLSurface.makeCurrent(); 86 | 87 | mRenderer = new GLRendering(); 88 | 89 | //we have the context, so create the SurfaceTexture now 90 | mTextureName = 0; 91 | int texture[] = new int[1]; 92 | GLES20.glGenTextures(1, texture, 0); 93 | glUtils.checkGLError("gGenTexture"); 94 | mTextureName = texture[0]; 95 | mSurfaceTexture = new SurfaceTexture(mTextureName); 96 | 97 | mSurfaceTexture.setOnFrameAvailableListener(this); 98 | resetSurfaceTextureToListener(); 99 | } 100 | 101 | public void surfaceChanged(SurfaceHolder surfaceHolder, int w, int h) { 102 | GLES20.glViewport(0, 0, w, h); 103 | Log.d(TAG, "Viewport changed to: " + w + "x" + h); 104 | } 105 | 106 | public void surfaceDestroyed(SurfaceHolder surfaceHolder) { 107 | if (mEGLSurface != null) { 108 | mEGLSurface.releaseEglSurface(); 109 | mEGLSurface = null; 110 | } 111 | mEGLCore.makeNothingCurrent(); 112 | mRenderer = null; 113 | } 114 | 115 | @Override 116 | public void onFrameAvailable(SurfaceTexture surfaceTexture) { 117 | //setup and draw, using the last available pose if there's one 118 | 119 | if (mRenderer == null) { 120 | return; 121 | } 122 | if (surfaceTexture != mSurfaceTexture) { 123 | Log.i(TAG, "Unexpected surface texture"); 124 | return; 125 | } 126 | 127 | synchronized (mLockObject) { 128 | surfaceTexture.updateTexImage(); 129 | } 130 | displayPreview(); 131 | } 132 | 133 | private void displayPreview() { 134 | if (mRenderer == null) { 135 | return; 136 | } 137 | 138 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); 139 | glUtils.checkGLError("Clear target"); 140 | drawPreview(); 141 | boolean swapped = mEGLSurface.swapBuffers(); 142 | if (!swapped) { 143 | Log.e(TAG, "shutting down renderThread"); 144 | } 145 | 146 | DisplayMetrics displayMetrics = MyApplication.getAppContext().getResources().getDisplayMetrics(); 147 | RecordHelper.setRenderThread(this); 148 | RecordHelper.recordTexture(displayMetrics.widthPixels, displayMetrics.heightPixels); 149 | } 150 | 151 | public void drawPreview() { 152 | //the image appears upside down in landscape mode. 
153 | int rotation = mCameraRotation; 154 | if (rotation != 90) { 155 | rotation = 180 - mCameraRotation; 156 | } 157 | mRenderer.drawBackground(mTextureName, rotation); 158 | } 159 | 160 | public void setOnSurfaceTextureListener(OnSurfaceTextureUpdatedListener listener) { 161 | mSTListener = listener; 162 | } 163 | 164 | public void resetSurfaceTextureToListener() { 165 | if (mSTListener != null) { 166 | mSTListener.onSurfaceTextureUpdated(mSurfaceTexture); 167 | } 168 | } 169 | 170 | public void setCameraRotation(int rotation) { 171 | mCameraRotation = rotation; 172 | } 173 | 174 | public void setCameraImageSize(int width, int height) { 175 | mCameraWidth = width; 176 | mCameraHeight = height; 177 | } 178 | 179 | public void enableFilter(boolean enable) { 180 | mRenderer.setEnableFilter(enable); 181 | } 182 | 183 | public interface OnSurfaceTextureUpdatedListener { 184 | void onSurfaceTextureUpdated(SurfaceTexture texture); 185 | } 186 | 187 | } -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/texturerecord/RenderThreadHandler.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.texturerecord; 2 | 3 | import android.os.Handler; 4 | import android.os.Message; 5 | import android.util.Log; 6 | import android.view.SurfaceHolder; 7 | 8 | import java.lang.ref.WeakReference; 9 | 10 | public class RenderThreadHandler extends Handler { 11 | final static private String TAG = "RenderThreadHandler"; 12 | private final int SET_SURFACE_TEXTURE_TO_LISTENER_MSG = 0; 13 | private final int SHUTDOWN_MSG = 2; 14 | 15 | private final int CAMERA_SIZE_MSG = 5; 16 | private final int CAMERA_ROTATION = 6; 17 | 18 | private final int SURFACE_CREATED_MSG = 10; 19 | private final int SURFACE_CHANGED_MSG = 11; 20 | private final int SURFACE_DESTROYED_MSG = 12; 21 | 22 | 23 | private WeakReference mRenderThreadWeakReference; 24 | public RenderThreadHandler(RenderThread thread) { 25 | mRenderThreadWeakReference = new WeakReference(thread); 26 | } 27 | 28 | public void sendCameraImageSize(int wid, int hei) { 29 | sendMessage(obtainMessage(CAMERA_SIZE_MSG, wid, hei)); 30 | } 31 | public void sendCameraRotation(int rotation) { 32 | sendMessage(obtainMessage(CAMERA_ROTATION, rotation, 0)); 33 | } 34 | 35 | //messages that get called from the main thread 36 | public void sendSurfaceCreated(SurfaceHolder holder) { 37 | sendMessage(obtainMessage(SURFACE_CREATED_MSG, holder)); 38 | } 39 | public void sendSurfaceChanged(SurfaceHolder holder, int wid, int hei) { 40 | sendMessage(obtainMessage(SURFACE_CHANGED_MSG, wid, hei, holder)); 41 | } 42 | public void sendSurfaceDestroyed(SurfaceHolder holder) { 43 | sendMessage(obtainMessage(SURFACE_DESTROYED_MSG, holder)); 44 | } 45 | public void resetSurfaceTextureToListener() { 46 | sendMessage(obtainMessage(SET_SURFACE_TEXTURE_TO_LISTENER_MSG)); 47 | } 48 | public void shutdown() {sendMessage(obtainMessage(SHUTDOWN_MSG));} 49 | 50 | @Override 51 | public void handleMessage(Message message) { 52 | int mess = message.what; 53 | RenderThread thread = mRenderThreadWeakReference.get(); 54 | if (thread == null) { 55 | Log.w(TAG, "CameraThreadHandler: thread is null"); 56 | return; 57 | } 58 | 59 | switch (mess) { 60 | case SHUTDOWN_MSG: 61 | thread.shutdown(); 62 | break; 63 | case SURFACE_CREATED_MSG: 64 | thread.surfaceCreated((SurfaceHolder) message.obj); 65 | break; 66 | case SURFACE_CHANGED_MSG: 67 | thread.surfaceChanged((SurfaceHolder) 
message.obj, message.arg1, message.arg2); 68 | break; 69 | case SURFACE_DESTROYED_MSG: 70 | thread.surfaceDestroyed((SurfaceHolder) message.obj); 71 | break; 72 | case SET_SURFACE_TEXTURE_TO_LISTENER_MSG: 73 | thread.resetSurfaceTextureToListener(); 74 | break; 75 | case CAMERA_SIZE_MSG: 76 | thread.setCameraImageSize(message.arg1, message.arg2); 77 | break; 78 | case CAMERA_ROTATION: 79 | thread.setCameraRotation(message.arg1); 80 | // Log.d(TAG, "Camera_Rotation: " + message.arg1); 81 | break; 82 | default: 83 | throw new RuntimeException("unknown message id: " + mess); 84 | } 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/texturerecord/gles/EglCore.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.texturerecord.gles; 2 | 3 | // Utility functions to manage an EGL setup. Some bits of code are from Google's Grafika project, 4 | // released under Apache License v2.0. 5 | 6 | import android.view.Surface; 7 | 8 | import javax.microedition.khronos.egl.EGL10; 9 | import javax.microedition.khronos.egl.EGLConfig; 10 | import javax.microedition.khronos.egl.EGLContext; 11 | import javax.microedition.khronos.egl.EGLDisplay; 12 | import javax.microedition.khronos.egl.EGLSurface; 13 | 14 | 15 | public class EglCore { 16 | private static final String TAG = "EGL core"; 17 | 18 | private static final int EGL_OPENGL_ES2_BIT = 4; 19 | private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098; 20 | 21 | private EGLDisplay mEGLDisplay = EGL10.EGL_NO_DISPLAY; 22 | private EGLContext mEGLContext = EGL10.EGL_NO_CONTEXT; 23 | private EGLConfig mEGLConfig = null; 24 | 25 | private EGL10 mEGL; 26 | 27 | public EglCore() { 28 | this(null); 29 | } 30 | 31 | public EglCore(EGLContext sharedContext) { 32 | if (mEGLDisplay != EGL10.EGL_NO_DISPLAY) { 33 | throw new RuntimeException("EGL already set up"); 34 | } 35 | 36 | mEGL = (EGL10)EGLContext.getEGL(); 37 | 38 | mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); 39 | if (mEGLDisplay == EGL10.EGL_NO_DISPLAY) { 40 | throw new RuntimeException("Error getting EGLDisplay"); 41 | } 42 | 43 | if(!mEGL.eglInitialize(mEGLDisplay, null)) { 44 | mEGLDisplay = null; 45 | throw new RuntimeException(("EGL failed to initialise")); 46 | } 47 | 48 | if (mEGLContext == EGL10.EGL_NO_CONTEXT) { 49 | int[] attribList = { 50 | EGL10.EGL_RED_SIZE, 8, 51 | EGL10.EGL_GREEN_SIZE, 8, 52 | EGL10.EGL_BLUE_SIZE, 8, 53 | EGL10.EGL_ALPHA_SIZE, 8, 54 | EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, 55 | EGL10.EGL_NONE, 56 | }; 57 | 58 | EGLConfig[] eglConfigs = new EGLConfig[1]; 59 | int[] configCount = new int[1]; 60 | if (!mEGL.eglChooseConfig(mEGLDisplay, attribList, eglConfigs, eglConfigs.length, 61 | configCount)) { 62 | throw new RuntimeException("Error choosing EGL configuration"); 63 | } 64 | int[] attribCtxt = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE }; 65 | mEGLContext = mEGL.eglCreateContext(mEGLDisplay, eglConfigs[0], EGL10.EGL_NO_CONTEXT, attribCtxt); 66 | mEGLConfig = eglConfigs[0]; 67 | checkEGLError("Creating context"); 68 | } 69 | } 70 | 71 | public void release() { 72 | if (mEGLDisplay != EGL10.EGL_NO_DISPLAY) { 73 | mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, 74 | EGL10.EGL_NO_CONTEXT); 75 | mEGL.eglDestroyContext(mEGLDisplay, mEGLContext); 76 | mEGL.eglTerminate(mEGLDisplay); 77 | 78 | mEGLDisplay = EGL10.EGL_NO_DISPLAY; 79 | mEGLContext = EGL10.EGL_NO_CONTEXT; 80 | 
mEGLConfig = null; 81 | } 82 | } 83 | 84 | public EGLSurface createWindowSurface(Surface surface) { 85 | // Create a window surface, and attach it to the Surface we received. 86 | int[] surfaceAttribs = { 87 | EGL10.EGL_NONE 88 | }; 89 | EGLSurface eglSurface = mEGL.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface, 90 | surfaceAttribs); 91 | checkEGLError("eglCreateWindowSurface for Surface"); 92 | if (eglSurface == null) { 93 | throw new RuntimeException("surface was null"); 94 | } 95 | return eglSurface; 96 | } 97 | public void releaseSurface(EGLSurface eglSurface) { 98 | mEGL.eglDestroySurface(mEGLDisplay, eglSurface); 99 | } 100 | 101 | public void makeCurrent(EGLSurface eglSurface) { 102 | if (mEGLDisplay == EGL10.EGL_NO_DISPLAY) { 103 | // called makeCurrent() before create? 104 | throw new RuntimeException("surface to be made current without a display"); 105 | } 106 | if (!mEGL.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) { 107 | throw new RuntimeException("eglMakeCurrent failed"); 108 | } 109 | } 110 | 111 | public void makeNothingCurrent() { 112 | if (!mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, 113 | EGL10.EGL_NO_CONTEXT)) { 114 | throw new RuntimeException("eglMakeCurrent failed"); 115 | } 116 | } 117 | 118 | public boolean swapBuffers(EGLSurface eglSurface) { 119 | return mEGL.eglSwapBuffers(mEGLDisplay, eglSurface); 120 | } 121 | 122 | private void checkEGLError(String msg) { 123 | int err; 124 | if ((err = mEGL.eglGetError()) != EGL10.EGL_SUCCESS) { 125 | throw new RuntimeException(msg + ": EGL Error: 0x"+ Integer.toHexString(err)); 126 | } 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/texturerecord/gles/EglSurface.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.texturerecord.gles; 2 | 3 | // Utility functions to manage an EGLSurface setup. Some bits of code are from Google's Grafika project, 4 | // released under Apache License v2.0. 5 | 6 | 7 | import android.util.Log; 8 | import android.view.Surface; 9 | 10 | import javax.microedition.khronos.egl.EGL10; 11 | import javax.microedition.khronos.egl.EGLSurface; 12 | 13 | public class EglSurface { 14 | private static final String TAG = "EglSurface"; 15 | 16 | private EglCore mEglCore = null; 17 | private EGLSurface mEGLSurface = EGL10.EGL_NO_SURFACE; 18 | 19 | public EglSurface(EglCore eglCore) { 20 | mEglCore = eglCore; 21 | } 22 | 23 | public void createWindowForSurface(Surface surface) { 24 | if (mEGLSurface != EGL10.EGL_NO_SURFACE) { 25 | throw new IllegalStateException("EGLSurface already created"); 26 | } 27 | mEGLSurface = mEglCore.createWindowSurface(surface); 28 | } 29 | 30 | /** 31 | * Release the EGL surface. 
32 | */ 33 | public void releaseEglSurface() { 34 | mEglCore.releaseSurface(mEGLSurface); 35 | mEGLSurface = EGL10.EGL_NO_SURFACE; 36 | } 37 | 38 | public void makeCurrent() { 39 | mEglCore.makeCurrent(mEGLSurface); 40 | } 41 | 42 | public boolean swapBuffers() { 43 | boolean result = mEglCore.swapBuffers(mEGLSurface); 44 | if (!result) { 45 | Log.d(TAG, "swapBuffers() failed"); 46 | } 47 | return result; 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/texturerecord/gles/GLRendering.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.texturerecord.gles; 2 | 3 | import android.hardware.Camera; 4 | import android.opengl.GLES11Ext; 5 | import android.opengl.GLES20; 6 | import android.opengl.Matrix; 7 | import us.pinguo.svideoDemo.record.CameraPresenter; 8 | 9 | import java.nio.FloatBuffer; 10 | import java.nio.ShortBuffer; 11 | 12 | public class GLRendering { 13 | //region GL constants 14 | private static final int kPositionAttribute = 1; 15 | private static final int kTex0CoordAttribute = 2; 16 | 17 | private int _warper; 18 | private int[] _warpUniforms; 19 | private int _warpVBO, _warpIBO; 20 | private boolean enableFilter; 21 | 22 | public GLRendering() { 23 | initialise(); 24 | } 25 | 26 | public void drawBackground(int textureName, int rotation) { 27 | GLES20.glUseProgram(_warper); 28 | float[] rotmat = new float[16]; 29 | Matrix.setIdentityM(rotmat, 0); 30 | 31 | if (CameraPresenter.CAMERA_FACING == Camera.CameraInfo.CAMERA_FACING_BACK) { 32 | Matrix.orthoM(rotmat, 0, -1.0f, 1.0f, 1.0f, -1.0f, -50.0f, 100.0f); 33 | Matrix.rotateM(rotmat, 0, rotation + 180, 0, 0, 1); 34 | } else { 35 | Matrix.rotateM(rotmat, 0, rotation, 0, 0, 1); 36 | } 37 | 38 | GLES20.glUniformMatrix4fv(_warpUniforms[0], 1, false, rotmat, 0); 39 | GLES20.glUniform1i(_warpUniforms[3], enableFilter ? 
1 : 0); 40 | 41 | GLES20.glActiveTexture(GLES20.GL_TEXTURE2); 42 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureName); 43 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); 44 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); 45 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); 46 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); 47 | glUtils.checkGLError("texture parameters"); 48 | 49 | GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, _warpVBO); 50 | GLES20.glEnableVertexAttribArray(kPositionAttribute); 51 | GLES20.glVertexAttribPointer(kPositionAttribute, 3, GLES20.GL_FLOAT, false, 52 | 5 * glUtils.BYTES_PER_FLOAT, 0); 53 | GLES20.glEnableVertexAttribArray(kTex0CoordAttribute); 54 | GLES20.glVertexAttribPointer(kTex0CoordAttribute, 2, GLES20.GL_FLOAT, false, 55 | 5 * glUtils.BYTES_PER_FLOAT, 3 * glUtils.BYTES_PER_FLOAT); 56 | GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, _warpIBO); 57 | glUtils.checkGLError("VBO setup"); 58 | 59 | GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_SHORT, 0); 60 | glUtils.checkGLError("Drawing background"); 61 | 62 | GLES20.glDisableVertexAttribArray(kTex0CoordAttribute); 63 | GLES20.glDisableVertexAttribArray(kPositionAttribute); 64 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); 65 | GLES20.glUseProgram(0); 66 | GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0); 67 | GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0); 68 | } 69 | 70 | private void initialise() { 71 | //initialise the programs 72 | { 73 | final String basicWarpVertex = 74 | "precision highp float;\n" + 75 | "attribute vec4 position;\n" + 76 | "attribute vec2 textureCoord;\n" + 77 | "uniform mat4 matrix;\n" + 78 | "uniform mat4 textureMatrix;\n" + 79 | "varying highp vec2 texCoord;\n" + 80 | "void main() {\n" + 81 | " texCoord = (textureMatrix * vec4(textureCoord.x, textureCoord.y, 0.0, 1.0)).xy;\n" + 82 | " gl_Position = matrix * position; \n" + 83 | "}"; 84 | 85 | final String basicWarpFragment = 86 | "#extension GL_OES_EGL_image_external : require\n" + 87 | "uniform samplerExternalOES texture;\n" + 88 | "varying highp vec2 texCoord;\n" + 89 | "uniform int enableFilter;\n" + 90 | "void main() {\n" + 91 | " gl_FragColor = texture2D(texture, texCoord);\n" + 92 | " if(enableFilter>0)\n" + 93 | " gl_FragColor.r=1.0;\n" + 94 | "}"; 95 | 96 | String[] attributes = {"position", "textureCoord"}; 97 | int[] attribLoc = {kPositionAttribute, kTex0CoordAttribute}; 98 | String[] uniforms = {"matrix", "texture", "textureMatrix", "enableFilter"}; 99 | 100 | _warpUniforms = new int[uniforms.length]; 101 | 102 | _warper = glUtils.createProgram(basicWarpVertex, basicWarpFragment, 103 | attributes, attribLoc, uniforms, _warpUniforms); 104 | if (_warper <= 0) { 105 | throw new RuntimeException("Error creating warp program"); 106 | } 107 | GLES20.glUseProgram(_warper); 108 | GLES20.glUniformMatrix4fv(_warpUniforms[0], 1, false, glUtils.IDENTITY_MATRIX, 0); 109 | GLES20.glUniform1i(_warpUniforms[1], 2); //set the texture unit 110 | GLES20.glUniformMatrix4fv(_warpUniforms[2], 1, false, glUtils.IDENTITY_MATRIX, 0); 111 | glUtils.checkGLError("Creating program - setting uniform"); 112 | 113 | int[] tmp = new int[2]; 114 | GLES20.glGenBuffers(2, tmp, 0); 115 | _warpVBO = tmp[0]; 116 | _warpIBO = tmp[1]; 117 | float[] vertex = { 
118 | -1.0f, -1.0f, 1.0f, 0.0f, 0.0f, 119 | 1.0f, -1.0f, 1.0f, 1.0f, 0.0f, 120 | 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 121 | -1.0f, 1.0f, 1.0f, 0.0f, 1.0f 122 | }; 123 | FloatBuffer ver = glUtils.createFloatBuffer(vertex); 124 | GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, _warpVBO); 125 | GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, ver.capacity() * glUtils.BYTES_PER_FLOAT, 126 | ver, GLES20.GL_DYNAMIC_DRAW); 127 | glUtils.checkGLError("Creating array buffer"); 128 | GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0); 129 | 130 | short triangles[] = {2, 1, 0, 0, 3, 2}; 131 | ShortBuffer tri = glUtils.createShortBuffer(triangles); 132 | GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, _warpIBO); 133 | glUtils.checkGLError("Creating element buffer"); 134 | GLES20.glBufferData(GLES20.GL_ELEMENT_ARRAY_BUFFER, tri.capacity() * glUtils.BYTES_PER_SHORT, 135 | tri, GLES20.GL_STATIC_DRAW); 136 | glUtils.checkGLError("Creating element buffer"); 137 | GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0); 138 | } 139 | GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0); 140 | GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0); 141 | 142 | GLES20.glLineWidth(3); 143 | GLES20.glBlendEquation(GLES20.GL_FUNC_ADD); 144 | GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA); 145 | 146 | } 147 | 148 | public void setEnableFilter(boolean enableFilter) { 149 | this.enableFilter = enableFilter; 150 | } 151 | } 152 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/texturerecord/gles/glUtils.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.texturerecord.gles; 2 | 3 | import android.opengl.GLES20; 4 | import android.opengl.Matrix; 5 | import android.util.Log; 6 | 7 | import java.nio.ByteBuffer; 8 | import java.nio.ByteOrder; 9 | import java.nio.FloatBuffer; 10 | import java.nio.ShortBuffer; 11 | 12 | public class glUtils { 13 | private static final String TAG = "GL utils"; 14 | private glUtils() {} 15 | 16 | public static final float[] IDENTITY_MATRIX; 17 | static { 18 | IDENTITY_MATRIX = new float[16]; 19 | Matrix.setIdentityM(IDENTITY_MATRIX, 0); 20 | } 21 | public static final int BYTES_PER_FLOAT = 4; 22 | // public static final int BYTES_PER_INT = 4; 23 | public static final short BYTES_PER_SHORT = 2; 24 | 25 | public static void checkGLError(String msg) { 26 | int error = GLES20.glGetError(); 27 | if (error != GLES20.GL_NO_ERROR) { 28 | String str = msg + ": glError 0x" + Integer.toHexString(error); 29 | Log.e(TAG, str); 30 | int values[] = new int[2]; 31 | GLES20.glGetIntegerv(GLES20.GL_ARRAY_BUFFER_BINDING, values, 0); 32 | GLES20.glGetIntegerv(GLES20.GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING, values, 1); 33 | Log.e(TAG, "Current bound array buffer: " + values[0]); 34 | Log.e(TAG, "Current bound vertex attrib: "+ values[1]); 35 | throw new RuntimeException(msg); 36 | } 37 | } 38 | 39 | public static FloatBuffer createFloatBuffer(int size) { 40 | ByteBuffer buffer = ByteBuffer.allocateDirect(size * BYTES_PER_FLOAT); 41 | buffer.order(ByteOrder.nativeOrder()); 42 | return buffer.asFloatBuffer(); 43 | } 44 | /*public static IntBuffer createIntBuffer(int size) { 45 | ByteBuffer buffer = ByteBuffer.allocateDirect(size * BYTES_PER_INT); 46 | buffer.order(ByteOrder.nativeOrder()); 47 | return buffer.asIntBuffer(); 48 | }*/ 49 | public static FloatBuffer createFloatBuffer(float[] coords) { 50 | FloatBuffer fb = createFloatBuffer(coords.length); 51 | fb.put(coords); 
52 | fb.position(0); 53 | return fb; 54 | } 55 | /*public static IntBuffer createIntBuffer(int[] data) { 56 | IntBuffer ib = createIntBuffer(data.length); 57 | ib.put(data); 58 | ib.position(0); 59 | return ib; 60 | }*/ 61 | public static ShortBuffer createShortBuffer(short[] data) { 62 | ShortBuffer sb = ByteBuffer.allocateDirect(data.length * BYTES_PER_SHORT) 63 | .order(ByteOrder.nativeOrder()) 64 | .asShortBuffer(); 65 | sb.put(data).position(0); 66 | return sb; 67 | } 68 | 69 | public static int createProgram(String vertSrc, String fragSrc, 70 | String[] attributeNames, int[] attributeBinding, 71 | String[] uniformNames, int[] uniformBinding) { 72 | 73 | int program = GLES20.glCreateProgram(); 74 | 75 | int status = 1; 76 | int[] vertSh = new int[1]; 77 | int[] fragSh = new int[1]; 78 | status *= compileShader(GLES20.GL_VERTEX_SHADER, vertSrc, vertSh); 79 | status *= compileShader(GLES20.GL_FRAGMENT_SHADER, fragSrc, fragSh); 80 | checkGLError("Compiling shaders"); 81 | 82 | GLES20.glAttachShader(program, vertSh[0]); 83 | checkGLError("Attach shader"); 84 | GLES20.glAttachShader(program, fragSh[0]); 85 | checkGLError("Attach shader fragment"); 86 | 87 | //Bind attributes 88 | for (int i = 0; i < attributeNames.length; i++) { 89 | GLES20.glBindAttribLocation(program, attributeBinding[i], attributeNames[i]); 90 | } 91 | checkGLError("Binding attributes"); 92 | 93 | //Link the program before querying uniform locations 94 | //(lines 88-97 reconstructed as an assumption; the original text was lost to markup stripping in this dump) 95 | status *= linkProgram(program); 96 | 97 | if (status > 0) { 98 | for (int i=0; i< uniformNames.length; i++) { 99 | // if (uniformsLocations.at(i).first.length()) { 100 | int loc = GLES20.glGetUniformLocation(program, 101 | uniformNames[i]); 102 | checkGLError("glGetUniformLocation - " + uniformNames[i]); 103 | if (loc < 0) Log.e(TAG, "Bad uniform " + uniformNames[i]); 104 | uniformBinding[i] = loc; 105 | } 106 | } else { 107 | GLES20.glDeleteProgram(program); 108 | program = 0; 109 | } 110 | 111 | if (vertSh[0] > 0) { 112 | GLES20.glDeleteShader(vertSh[0]); 113 | GLES20.glDetachShader(program, vertSh[0]); 114 | } 115 | if (fragSh[0] > 0) { 116 | GLES20.glDeleteShader(fragSh[0]); 117 | GLES20.glDetachShader(program, fragSh[0]); 118 | } 119 | checkGLError("Shaders deleted"); 120 | return program; 121 | } 122 | 123 | private static int compileShader(int target, String source, int[] output) { 124 | output[0] = GLES20.glCreateShader(target); 125 | 126 | // const GLchar *str = src.c_str(); 127 | GLES20.glShaderSource(output[0], source); 128 | GLES20.glCompileShader(output[0]); 129 | checkGLError("Compile shader"); 130 | int[] status = new int[1]; 131 | GLES20.glGetShaderiv(output[0], GLES20.GL_COMPILE_STATUS, status, 0); 132 | if(status[0] == 0){ 133 | Log.e(TAG, "Failed to compile shader: " + GLES20.glGetShaderInfoLog(output[0])); 134 | GLES20.glDeleteShader(output[0]); 135 | } 136 | return status[0]; 137 | } 138 | 139 | private static int linkProgram(int program) { 140 | int[] status = new int[1]; 141 | GLES20.glLinkProgram(program); 142 | GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0); 143 | if (status[0] != GLES20.GL_TRUE) { 144 | Log.e(TAG, "Error linking program: " + GLES20.glGetProgramInfoLog(program)); 145 | return 0; 146 | } 147 | return 1; 148 | } 149 | 150 | private static int validateProgram(int program) { 151 | int[] status = new int[1]; 152 | GLES20.glValidateProgram(program); 153 | 154 | GLES20.glGetProgramiv(program, GLES20.GL_VALIDATE_STATUS, status, 0); 155 | if (status[0] != GLES20.GL_TRUE) { 156 | Log.e(TAG, "Error validating program: " + GLES20.glGetProgramInfoLog(program)); 157 | return 0; 158 | } 159 | return 1; 160 | } 161 | } 162 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/ui/BottomMenuView.java:
-------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.ui; 2 | 3 | import android.content.Context; 4 | import android.util.AttributeSet; 5 | import android.view.View; 6 | import android.widget.ImageView; 7 | import android.widget.RelativeLayout; 8 | import us.pinguo.svideoDemo.R; 9 | 10 | /** 11 | * Created by huangwei on 2016/7/15. 12 | */ 13 | public class BottomMenuView extends RelativeLayout implements View.OnClickListener { 14 | 15 | private boolean mEnableSVideoTouch = false; 16 | private VideoProgressLayout mSVideoProgressBar; 17 | 18 | private SVideoTouchController mSVideoTouchListener; 19 | private IBottomMenuView mBottomViewCallBack; 20 | private ImageView mShutterBtn; 21 | private ImageView mSaveBtn; 22 | 23 | public BottomMenuView(Context context) { 24 | super(context); 25 | } 26 | 27 | public BottomMenuView(Context context, AttributeSet attrs) { 28 | super(context, attrs); 29 | } 30 | 31 | public BottomMenuView(Context context, AttributeSet attrs, int defStyleAttr) { 32 | super(context, attrs, defStyleAttr); 33 | } 34 | 35 | @Override 36 | protected void onFinishInflate() { 37 | super.onFinishInflate(); 38 | mShutterBtn = (ImageView) findViewById(R.id.shutter_btn); 39 | mSVideoProgressBar = (VideoProgressLayout) findViewById(R.id.video_progress_layout); 40 | mSaveBtn = findViewById(R.id.video_save); 41 | mSaveBtn.setVisibility(View.GONE); 42 | mSaveBtn.setOnClickListener(this); 43 | } 44 | /** 45 | * 设置底部bar回调 46 | * 47 | * @param callback 48 | */ 49 | public void setBottomViewCallBack(IBottomMenuView callback) { 50 | mBottomViewCallBack = callback; 51 | if (mBottomViewCallBack != null && mSVideoTouchListener != null) { 52 | mSVideoTouchListener.setSVideoRecorder(mBottomViewCallBack.requestRecordListener()); 53 | } 54 | } 55 | 56 | public void enableVideoProgressLayout() { 57 | mSVideoProgressBar.setVisibility(VISIBLE); 58 | } 59 | 60 | public void enableSVideoTouch(boolean enable) { 61 | if (mEnableSVideoTouch == enable) { 62 | return; 63 | } 64 | mEnableSVideoTouch = enable; 65 | if (enable) { 66 | if (mSVideoProgressBar != null && mEnableSVideoTouch && mSVideoTouchListener == null) { 67 | mSVideoTouchListener = new SVideoTouchController(mSVideoProgressBar, mShutterBtn,null,null); 68 | if (mBottomViewCallBack != null) { 69 | mSVideoTouchListener.setSVideoRecorder(mBottomViewCallBack.requestRecordListener()); 70 | } 71 | mShutterBtn.setOnTouchListener(mSVideoTouchListener); 72 | } 73 | } else { 74 | mSVideoTouchListener = null; 75 | mShutterBtn.setOnTouchListener(null); 76 | } 77 | } 78 | 79 | public void onResume() { 80 | if (mSVideoTouchListener != null) { 81 | mSVideoTouchListener.setForceRecordFalse(); 82 | } 83 | } 84 | 85 | public void onPause() { 86 | if (mSVideoTouchListener != null) { 87 | mSVideoTouchListener.onPause(); 88 | } 89 | } 90 | 91 | @Override 92 | public void onClick(View v) { 93 | if(v== mSaveBtn){ 94 | mSVideoTouchListener.stopRecord(); 95 | mSaveBtn.setVisibility(View.GONE); 96 | } 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/ui/BottomSegMenuView.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.ui; 2 | 3 | import android.content.Context; 4 | import android.util.AttributeSet; 5 | import android.view.View; 6 | import android.widget.ImageView; 7 | import android.widget.RelativeLayout; 8 | import us.pinguo.svideoDemo.R; 9 | 10 | /** 
11 | * Created by huangwei on 2016/7/15. 12 | */ 13 | public class BottomSegMenuView extends RelativeLayout implements View.OnClickListener { 14 | 15 | private boolean mEnableSVideoTouch = false; 16 | private VideoProgressLayout mSVideoProgressBar; 17 | 18 | private SVideoTouchController mSVideoTouchListener; 19 | private IBottomMenuView mBottomViewCallBack; 20 | private ImageView mShutterBtn; 21 | private ImageView mSaveBtn; 22 | private ImageView mDeleteBtn; 23 | 24 | public BottomSegMenuView(Context context) { 25 | super(context); 26 | } 27 | 28 | public BottomSegMenuView(Context context, AttributeSet attrs) { 29 | super(context, attrs); 30 | } 31 | 32 | public BottomSegMenuView(Context context, AttributeSet attrs, int defStyleAttr) { 33 | super(context, attrs, defStyleAttr); 34 | } 35 | 36 | @Override 37 | protected void onFinishInflate() { 38 | super.onFinishInflate(); 39 | mShutterBtn = (ImageView) findViewById(R.id.shutter_btn); 40 | mSVideoProgressBar = (VideoProgressLayout) findViewById(R.id.video_progress_layout); 41 | mSaveBtn = findViewById(R.id.video_save); 42 | mDeleteBtn = findViewById(R.id.video_delete); 43 | mSaveBtn.setVisibility(View.GONE); 44 | mDeleteBtn.setVisibility(View.GONE); 45 | mSaveBtn.setOnClickListener(this); 46 | mDeleteBtn.setOnClickListener(this); 47 | } 48 | 49 | /** 50 | * 设置底部bar回调 51 | * 52 | * @param callback 53 | */ 54 | public void setBottomViewCallBack(IBottomMenuView callback) { 55 | mBottomViewCallBack = callback; 56 | if (mBottomViewCallBack != null && mSVideoTouchListener != null) { 57 | mSVideoTouchListener.setSVideoRecorder(mBottomViewCallBack.requestRecordListener()); 58 | } 59 | } 60 | 61 | public void enableVideoProgressLayout() { 62 | mSVideoProgressBar.setVisibility(VISIBLE); 63 | } 64 | 65 | public void enableSVideoTouch(boolean enable) { 66 | if (mEnableSVideoTouch == enable) { 67 | return; 68 | } 69 | mEnableSVideoTouch = enable; 70 | if (enable) { 71 | if (mSVideoProgressBar != null && mEnableSVideoTouch && mSVideoTouchListener == null) { 72 | mSVideoTouchListener = new SVideoTouchController(mSVideoProgressBar, mShutterBtn, mSaveBtn,mDeleteBtn); 73 | if (mBottomViewCallBack != null) { 74 | mSVideoTouchListener.setSVideoRecorder(mBottomViewCallBack.requestRecordListener()); 75 | } 76 | mShutterBtn.setOnTouchListener(mSVideoTouchListener); 77 | } 78 | } else { 79 | mSVideoTouchListener = null; 80 | mShutterBtn.setOnTouchListener(null); 81 | } 82 | } 83 | 84 | public void onResume() { 85 | if (mSVideoTouchListener != null) { 86 | mSVideoTouchListener.setForceRecordFalse(); 87 | } 88 | } 89 | 90 | public void onPause() { 91 | if (mSVideoTouchListener != null) { 92 | mSVideoTouchListener.onPause(); 93 | } 94 | } 95 | 96 | @Override 97 | public void onClick(View v) { 98 | if (v == mSaveBtn) { 99 | mSVideoTouchListener.stopRecord(); 100 | mSaveBtn.setVisibility(View.GONE); 101 | } else if (v == mDeleteBtn) { 102 | mSVideoTouchListener.deleteLastSegment(); 103 | } 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/ui/IBottomMenuView.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.ui; 2 | 3 | import us.pinguo.svideo.interfaces.ISVideoRecorder; 4 | 5 | public interface IBottomMenuView { 6 | ISVideoRecorder requestRecordListener(); 7 | } 8 | -------------------------------------------------------------------------------- 
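The IBottomMenuView interface above is the only hook the bottom-menu views use to pull an ISVideoRecorder from their host. As a rough, hypothetical wiring sketch (not code from this repository: the activity class, the R.id.bottom_menu id, the layout choice and the mRecorder field are all assumptions), a host Activity might connect BottomMenuView like this:

``` java
package us.pinguo.svideoDemo;

import android.app.Activity;
import android.os.Bundle;
import us.pinguo.svideo.interfaces.ISVideoRecorder;
import us.pinguo.svideoDemo.ui.BottomMenuView;
import us.pinguo.svideoDemo.ui.IBottomMenuView;

// Hypothetical example activity; only the BottomMenuView/IBottomMenuView calls
// shown in the files above are taken from the repo, everything else is assumed.
public class RecordWiringExampleActivity extends Activity implements IBottomMenuView {

    private ISVideoRecorder mRecorder;      // assigned wherever the recorder is actually created
    private BottomMenuView mBottomMenuView; // the bottom bar inflated from the activity layout

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_yuvrecord);                      // assumed layout
        mBottomMenuView = (BottomMenuView) findViewById(R.id.bottom_menu); // assumed view id
        mBottomMenuView.setBottomViewCallBack(this);   // hand the view its IBottomMenuView callback
        mBottomMenuView.enableVideoProgressLayout();   // show the progress layout
        mBottomMenuView.enableSVideoTouch(true);       // attach the SVideoTouchController to the shutter
    }

    @Override
    public ISVideoRecorder requestRecordListener() {
        return mRecorder; // the touch controller drives this recorder
    }

    @Override
    protected void onResume() {
        super.onResume();
        mBottomMenuView.onResume();
    }

    @Override
    protected void onPause() {
        mBottomMenuView.onPause();
        super.onPause();
    }
}
```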
/demo/src/main/java/us/pinguo/svideoDemo/ui/SegProgressBar.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.ui; 2 | 3 | import android.content.Context; 4 | import android.graphics.Canvas; 5 | import android.graphics.Paint; 6 | import android.util.AttributeSet; 7 | import android.widget.ProgressBar; 8 | 9 | import java.util.LinkedList; 10 | import java.util.List; 11 | 12 | /** 13 | * Created by huangwei on 2018/8/17 0017. 14 | */ 15 | public class SegProgressBar extends ProgressBar { 16 | 17 | private List mMarkList = new LinkedList<>(); 18 | private int mMarkColor; 19 | private int mMarkWidth; 20 | private Paint mPaint = new Paint(); 21 | 22 | public SegProgressBar(Context context) { 23 | super(context); 24 | } 25 | 26 | public SegProgressBar(Context context, AttributeSet attrs) { 27 | super(context, attrs); 28 | } 29 | 30 | public SegProgressBar(Context context, AttributeSet attrs, int defStyleAttr) { 31 | super(context, attrs, defStyleAttr); 32 | } 33 | 34 | @Override 35 | protected void onFinishInflate() { 36 | super.onFinishInflate(); 37 | mMarkColor = 0xFFFFFFFF; 38 | mMarkWidth = (int) (getResources().getDisplayMetrics().density * 2); 39 | mPaint.setColor(mMarkColor); 40 | mPaint.setStyle(Paint.Style.FILL); 41 | } 42 | 43 | public void addMark(float markProgress) { 44 | mMarkList.add(markProgress); 45 | invalidate(); 46 | } 47 | 48 | public void removeLastMark() { 49 | if (mMarkList.size() > 0) { 50 | mMarkList.remove(mMarkList.size() - 1); 51 | invalidate(); 52 | } 53 | } 54 | 55 | public void setMarkColor(int markColor) { 56 | mMarkColor = markColor; 57 | mPaint.setColor(mMarkColor); 58 | } 59 | 60 | public void setMarkWidth(int markWidth) { 61 | mMarkWidth = markWidth; 62 | } 63 | 64 | @Override 65 | protected synchronized void onDraw(Canvas canvas) { 66 | super.onDraw(canvas); 67 | int left = getPaddingLeft(); 68 | int right = getWidth() - getPaddingRight(); 69 | int top = getPaddingTop(); 70 | int bottom = getHeight() - getPaddingBottom(); 71 | 72 | for (float progress : mMarkList) { 73 | int markPos = (int) (left + (right - left) * progress); 74 | canvas.drawRect(markPos - mMarkWidth / 2, top, markPos + mMarkWidth / 2, bottom, mPaint); 75 | } 76 | } 77 | 78 | public void clearAllMarks() { 79 | mMarkList.clear(); 80 | invalidate(); 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /demo/src/main/java/us/pinguo/svideoDemo/ui/VideoProgressLayout.java: -------------------------------------------------------------------------------- 1 | package us.pinguo.svideoDemo.ui; 2 | 3 | import android.animation.Animator; 4 | import android.animation.AnimatorListenerAdapter; 5 | import android.animation.AnimatorSet; 6 | import android.animation.ObjectAnimator; 7 | import android.content.Context; 8 | import android.graphics.Color; 9 | import android.util.AttributeSet; 10 | import android.view.View; 11 | import android.widget.FrameLayout; 12 | import us.pinguo.svideo.utils.RL; 13 | import us.pinguo.svideoDemo.R; 14 | 15 | 16 | public class VideoProgressLayout extends FrameLayout { 17 | SegProgressBar mVideoProgressBar; 18 | View mVideoEndPointView; 19 | View mVideoProgressMinView; 20 | 21 | private int maxProgress; 22 | 23 | float ratio; 24 | 25 | private int minLength; 26 | 27 | private boolean isTransform; 28 | 29 | private AnimatorSet mAnimatorSet; 30 | 31 | public VideoProgressLayout(Context context) { 32 | super(context); 33 | } 34 | 35 | public 
VideoProgressLayout(Context context, AttributeSet attrs) { 36 | super(context, attrs); 37 | } 38 | 39 | public VideoProgressLayout(Context context, AttributeSet attrs, int defStyleAttr) { 40 | super(context, attrs, defStyleAttr); 41 | } 42 | 43 | @Override 44 | protected void onFinishInflate() { 45 | super.onFinishInflate(); 46 | mVideoProgressBar = findViewById(R.id.svideo_progress); 47 | mVideoEndPointView = findViewById(R.id.video_end_point); 48 | mVideoProgressMinView = findViewById(R.id.svideo_progress_min); 49 | } 50 | 51 | public void setMax(int max) { 52 | maxProgress = max; 53 | mVideoProgressBar.setMax(max); 54 | 55 | int screenWidth = getResources().getDisplayMetrics().widthPixels; 56 | ratio = screenWidth * 1.0f / maxProgress; 57 | 58 | //Start the blinking animation for the end-point view (twinkle, twinkle) 59 | animate(mVideoEndPointView); 60 | } 61 | 62 | public void setProgress(float progress) { 63 | float mProgress = progress * maxProgress; 64 | float translationX = ratio * mProgress; 65 | mVideoProgressBar.setProgress((int) mProgress); 66 | //The extra 4px keeps the end point 4px ahead of the progress bar 67 | mVideoEndPointView.setTranslationX(translationX + 4); 68 | if (!isTransform && translationX > minLength) { 69 | mVideoProgressMinView.setBackgroundColor(0xFFFFD400); 70 | isTransform = true; 71 | } 72 | } 73 | 74 | public void setProgressMinViewLeftMargin(int leftMargin) { 75 | MarginLayoutParams lp = (MarginLayoutParams) mVideoProgressMinView.getLayoutParams(); 76 | lp.leftMargin = leftMargin - lp.width / 2; 77 | mVideoProgressMinView.setLayoutParams(lp); 78 | minLength = lp.leftMargin; 79 | } 80 | 81 | public void stopProgress() { 82 | //Change the color back 83 | mVideoProgressMinView.setBackgroundColor(Color.WHITE); 84 | //Reset the state 85 | isTransform = false; 86 | } 87 | 88 | // private void animate(final View view) { 89 | // Animation alphaAnimation = AnimationUtils.loadAnimation(getContext(), R.anim.video_progress_anim); 90 | // view.startAnimation(alphaAnimation); 91 | // } 92 | 93 | private void animate(final View view) { 94 | mAnimatorSet = new AnimatorSet(); 95 | ObjectAnimator alphaEnter = ObjectAnimator.ofFloat(view, "alpha", 0.0f, 1.0f); 96 | alphaEnter.setDuration(300); 97 | ObjectAnimator alphaOut = ObjectAnimator.ofFloat(view, "alpha", 1.0f, 0.0f); 98 | alphaOut.setDuration(300); 99 | mAnimatorSet.play(alphaEnter).after(alphaOut); 100 | mAnimatorSet.addListener(new AnimatorListenerAdapter() { 101 | @Override 102 | public void onAnimationEnd(Animator animation) { 103 | view.clearAnimation(); 104 | mAnimatorSet.start(); 105 | } 106 | }); 107 | mAnimatorSet.start(); 108 | } 109 | 110 | @Override 111 | protected void onDetachedFromWindow() { 112 | if (mAnimatorSet != null) { 113 | mAnimatorSet.removeAllListeners(); 114 | mAnimatorSet.cancel(); 115 | mAnimatorSet = null; 116 | RL.i("Cleared blink animation callbacks"); 117 | } 118 | super.onDetachedFromWindow(); 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /demo/src/main/res/drawable/ic_check_white_24dp.xml: -------------------------------------------------------------------------------- 1 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /demo/src/main/res/drawable/ic_delete.xml: -------------------------------------------------------------------------------- 1 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /demo/src/main/res/drawable/ic_movie_filter.xml: -------------------------------------------------------------------------------- 1 | 4 | 5 | 6 |
-------------------------------------------------------------------------------- /demo/src/main/res/drawable/ic_switch_camera.xml: -------------------------------------------------------------------------------- 1 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /demo/src/main/res/drawable/shutter_drawable.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 9 | 10 | 12 | 13 | 14 | 15 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /demo/src/main/res/drawable/svideo_progress_drawable.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /demo/src/main/res/layout/activity_main.xml: -------------------------------------------------------------------------------- 1 | 2 | 8 |