mEffects = new ArrayList<>();
18 |
19 | public EffectsManager() {
20 |
21 |
22 | }
23 |
24 |
25 | public void addEffect(VideoEffect effect) {
26 | if (effect == null) return;
27 | if (mEffects.contains(effect)) return;
28 | mEffects.add(effect);
29 | }
30 |
31 | public void removeEffect(VideoEffect effect) {
32 | if (effect == null) return;
33 | if (mEffects.contains(effect)) mEffects.remove(effect);
34 | }
35 |
36 | @Override
37 | public void onCameraStarted(Size size) {
38 | for (VideoEffect videoEffect : mEffects) {
39 | videoEffect.prepare(size);
40 | }
41 | }
42 |
43 | @Override
44 | public void onCameraStopped() {
45 | for (VideoEffect videoEffect : mEffects) {
46 | videoEffect.destroy();
47 | }
48 | }
49 |
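   |     // Effects are applied in insertion order: each effect receives the previous effect's
   |     // output texture and returns the texture id holding its own result.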
50 | @Override
51 | public int onDrawTexture(int FBOin, int texIn) {
52 | int textureId = texIn;
53 | for (VideoEffect videoEffect : mEffects) {
54 | textureId = videoEffect.applyEffect(FBOin, textureId);
55 | }
56 | return textureId;
57 | }
58 |
59 | @Override
60 | public void onSizeChanged(Size size) {
61 | for (VideoEffect videoEffect : mEffects) {
62 | videoEffect.prepare(size);
63 | }
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/effects/VideoEffect.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.effects;
2 |
3 | import com.erlei.videorecorder.camera.Size;
4 |
5 | public interface VideoEffect {
6 |
7 | void prepare(Size size);
8 |
9 |     int applyEffect(int fbo, int textureIdIn);
10 |
11 | void destroy();
12 | }
13 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/encoder/AudioEncoder.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.encoder;
2 |
3 | import android.media.AudioFormat;
4 | import android.media.MediaCodec;
5 | import android.media.MediaCodecInfo;
6 | import android.media.MediaFormat;
7 | import android.os.Looper;
8 | import android.os.Message;
9 | import android.support.annotation.IntRange;
10 |
11 | import com.erlei.videorecorder.util.LogUtil;
12 |
13 | import java.io.IOException;
14 |
15 | public class AudioEncoder extends MediaEncoder {
16 | private static final String TAG = "AudioEncoder";
17 | private static final String MIME_TYPE = "audio/mp4a-latm";
18 |     private static final int DEFAULT_SAMPLE_RATE = 44100; // 44.1 kHz is the only sample rate guaranteed to be available on all devices.
19 | private static final int DEFAULT_BIT_RATE = 64000;
20 | private static final int DEFAULT_NUMBER_OF_CHANNELS = 1;
21 | private int mSampleRate;
22 | private int mBitRate;
23 | private int mChannelCount;
24 |
25 |
26 | public AudioEncoder(MediaEncoderCallBack callBack) {
27 | this(callBack, DEFAULT_SAMPLE_RATE, DEFAULT_BIT_RATE, DEFAULT_NUMBER_OF_CHANNELS);
28 | }
29 |
30 | public AudioEncoder(MediaEncoderCallBack callBack, int sampleRate, int bitRate, @IntRange(from = 1, to = 2) int channelCount) {
31 | super(callBack, TAG);
32 | mSampleRate = sampleRate;
33 | mBitRate = bitRate;
34 | mChannelCount = channelCount;
35 | }
36 |
37 | @Override
38 | protected long getSafePTSUs(long presentationTimeUs) {
39 | // return getJitterFreePTS(presentationTimeUs,3552);
40 |         long result = System.nanoTime() / 1000L;
41 |         // never allow the presentation time to go backwards relative to the previous output
42 |         if (result < mPrevOutputPTSUs) result = mPrevOutputPTSUs;
43 | return result;
44 | }
45 | long startPTS = 0;
46 | long totalSamplesNum = 0;
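   |     // Derives a jitter-free PTS from the number of samples encoded so far rather than the wall clock,
   |     // resetting the base timestamp when the incoming PTS drifts by more than two buffer durations.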
47 | private long getJitterFreePTS(long bufferPts, long bufferSamplesNum) {
48 | long correctedPts = 0;
49 | long bufferDuration = (1000000 * bufferSamplesNum) / (mSampleRate);
50 | bufferPts -= bufferDuration; // accounts for the delay of acquiring the audio buffer
51 | if (totalSamplesNum == 0) {
52 | // reset
53 | startPTS = bufferPts;
54 | totalSamplesNum = 0;
55 | }
56 | correctedPts = startPTS + (1000000 * totalSamplesNum) / (mSampleRate);
57 | if(bufferPts - correctedPts >= 2*bufferDuration) {
58 | // reset
59 | startPTS = bufferPts;
60 | totalSamplesNum = 0;
61 | correctedPts = startPTS;
62 | }
63 | totalSamplesNum += bufferSamplesNum;
64 | return correctedPts;
65 | }
66 |
67 | @Override
68 | protected synchronized MediaEncoderHandler initHandler(Looper looper, MediaEncoder encoder) {
69 | return new AudioEncoderHandler(looper, encoder);
70 | }
71 |
72 |
73 | @Override
74 | protected MediaCodec createEncoder() throws IOException {
75 | LogUtil.logd(TAG, "createEncoder");
76 | final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, mSampleRate, mChannelCount);
77 | audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
78 | audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, mChannelCount == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
79 | audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
80 | audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, mChannelCount);
81 | LogUtil.loge(TAG, "format: " + audioFormat);
82 | MediaCodec encoder = MediaCodec.createEncoderByType(MIME_TYPE);
83 | encoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
84 | encoder.start();
85 | LogUtil.logd(TAG, "createEncoder finishing");
86 | return encoder;
87 | }
88 |
89 | private class AudioEncoderHandler extends MediaEncoderHandler {
90 | AudioEncoderHandler(Looper looper, MediaEncoder encoder) {
91 | super(looper, encoder);
92 | }
93 |
94 | @Override
95 | protected void handleMessage(MediaEncoder encoder, Message msg) {
96 |
97 | }
98 | }
99 | }
100 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/encoder/MediaEncoderCallBack.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.encoder;
2 |
3 | import android.media.MediaCodec;
4 | import android.media.MediaFormat;
5 |
6 | import java.nio.ByteBuffer;
7 |
8 | public interface MediaEncoderCallBack {
9 |
10 | String getOutPutPath();
11 |
12 | void onPrepared(MediaEncoder mediaEncoder);
13 | void onStopped(MediaEncoder mediaEncoder);
14 |
15 | void sendEncodedData(int mediaTrack, ByteBuffer encodedData, MediaCodec.BufferInfo bufferInfo);
16 |
17 | int addMediaTrack(MediaEncoder encoder, MediaFormat format);
18 | }
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/encoder/MuxerCallback.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.encoder;
2 |
3 | import android.support.annotation.Nullable;
4 |
5 | public interface MuxerCallback {
6 |
7 | void onPrepared();
8 |
9 | void onMuxerStarted(String output);
10 |
11 |     void onMuxerStopped(String outputPath);
12 | }
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/encoder/VideoEncoder.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.encoder;
2 |
3 | import android.media.MediaCodec;
4 | import android.media.MediaCodecInfo;
5 | import android.media.MediaFormat;
6 | import android.os.Looper;
7 | import android.os.Message;
8 | import android.view.Surface;
9 |
10 | import com.erlei.videorecorder.camera.Size;
11 | import com.erlei.videorecorder.util.LogUtil;
12 |
13 | import java.io.IOException;
14 | import java.util.Locale;
15 |
16 | public class VideoEncoder extends MediaEncoder {
17 |
18 | private static final String MIME_TYPE = "video/avc";
19 | private static final String TAG = "VideoEncoder";
20 | private static final int I_FRAME_INTERVAL = 10;
21 | private static final int FRAME_RATE = 30;
22 | private static final float BPP = 0.25f;
23 | private final Size mVideoSize;
24 | private final int mBitRate;
25 | private final int mIFrameInterval;
26 | private Surface mInputSurface;
27 |
28 |     public VideoEncoder(MediaEncoderCallBack callBack, Size size, int bitRate, int iFrameInterval) {
29 | super(callBack, TAG);
30 | mVideoSize = size;
31 | mIFrameInterval = iFrameInterval;
32 | mBitRate = bitRate <= 0 ? calcBitRate() : bitRate;
33 | }
34 |
35 |
36 | @Override
37 | protected synchronized MediaEncoderHandler initHandler(Looper looper, MediaEncoder encoder) {
38 |         return new VideoEncoderHandler(looper, this);
39 | }
40 |
41 |
42 | @Override
43 | protected MediaCodec createEncoder() throws IOException {
44 | LogUtil.logd(TAG, "createEncoder");
45 | MediaFormat videoFormat = MediaFormat.createVideoFormat(MIME_TYPE, mVideoSize.getWidth(), mVideoSize.getHeight());
46 | videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
47 | videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
48 | videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
49 | videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, mIFrameInterval);
50 | LogUtil.logd(TAG, "format: " + videoFormat);
51 |
52 |
53 | MediaCodec encoder = MediaCodec.createEncoderByType(MIME_TYPE);
54 | encoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
55 | mInputSurface = encoder.createInputSurface();
56 | encoder.start();
57 | LogUtil.logd(TAG, "createEncoder finishing");
58 | return encoder;
59 | }
60 |
61 | @Override
62 | protected void signalEndOfInputStream() {
63 | mEncoder.signalEndOfInputStream();
64 | }
65 |
66 | public Surface getInputSurface() {
67 | return mInputSurface;
68 | }
69 |
70 | private int calcBitRate() {
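   |         // Rough estimate: bits-per-pixel (BPP = 0.25) * frame rate * width * height.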
71 | final int bitrate = (int) (BPP * FRAME_RATE * mVideoSize.getWidth() * mVideoSize.getHeight());
72 | LogUtil.logd(TAG, String.format(Locale.getDefault(), "bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
73 | return bitrate;
74 | }
75 |
76 | private class VideoEncoderHandler extends MediaEncoderHandler {
77 |
78 | VideoEncoderHandler(Looper looper, VideoEncoder videoEncoder) {
79 | super(looper, videoEncoder);
80 | }
81 |
82 | @Override
83 | protected void handleMessage(MediaEncoder encoder, Message msg) {
84 |
85 | }
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/encoder1/MediaMuxerWrapper.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.encoder1;
2 | /*
3 | * AudioVideoRecordingSample
4 |  * Sample project to capture audio and video from internal mic/camera and save as MPEG4 file.
5 | *
6 | * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
7 | *
8 | * File name: MediaMuxerWrapper.java
9 | *
10 | * Licensed under the Apache License, Version 2.0 (the "License");
11 | * you may not use this file except in compliance with the License.
12 | * You may obtain a copy of the License at
13 | *
14 | * http://www.apache.org/licenses/LICENSE-2.0
15 | *
16 | * Unless required by applicable law or agreed to in writing, software
17 | * distributed under the License is distributed on an "AS IS" BASIS,
18 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 | * See the License for the specific language governing permissions and
20 | * limitations under the License.
21 | *
22 | * All files in the folder are under this Apache License, Version 2.0.
23 | */
24 |
25 | import android.media.MediaCodec;
26 | import android.media.MediaFormat;
27 | import android.media.MediaMuxer;
28 | import android.text.TextUtils;
29 |
30 | import com.erlei.videorecorder.recorder.VideoRecorderHandler;
31 | import com.erlei.videorecorder.util.LogUtil;
32 |
33 | import java.io.IOException;
34 | import java.nio.ByteBuffer;
35 | import java.text.SimpleDateFormat;
36 | import java.util.GregorianCalendar;
37 | import java.util.Locale;
38 |
39 | public class MediaMuxerWrapper {
40 | private static final boolean DEBUG = LogUtil.LOG_ENABLE; // TODO set false on release
41 | private static final String TAG = LogUtil.TAG;
42 |
43 | private static final SimpleDateFormat mDateTimeFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss", Locale.US);
44 | private final VideoRecorderHandler mViewHandler;
45 |
46 | private String mOutputPath;
47 | private final MediaMuxer mMediaMuxer; // API >= 18
48 |     private int mEncoderCount, mStartedCount;
49 | private boolean mIsStarted;
50 | private MediaEncoder mVideoEncoder, mAudioEncoder;
51 |
52 | /**
53 | * Constructor
54 | *
55 | * @param output output file
56 |      * @param viewHandler handler that receives the muxer started/stopped callbacks
57 | * @throws IOException
58 | */
59 | public MediaMuxerWrapper(String output, VideoRecorderHandler viewHandler) throws IOException {
60 | mViewHandler = viewHandler;
61 |         if (TextUtils.isEmpty(output)) throw new IllegalArgumentException("output must not be null");
62 | mMediaMuxer = new MediaMuxer(mOutputPath = output, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
63 |         mEncoderCount = mStartedCount = 0;
64 | mIsStarted = false;
65 | }
66 |
67 | public String getOutputPath() {
68 | return mOutputPath;
69 | }
70 |
71 | public void prepare() throws IOException {
72 | if (mVideoEncoder != null)
73 | mVideoEncoder.prepare();
74 | if (mAudioEncoder != null)
75 | mAudioEncoder.prepare();
76 | }
77 |
78 | public void startRecording() {
79 | if (mVideoEncoder != null)
80 | mVideoEncoder.startRecording();
81 | if (mAudioEncoder != null)
82 | mAudioEncoder.startRecording();
83 | }
84 |
85 | public void stopRecording() {
86 | if (mVideoEncoder != null)
87 | mVideoEncoder.stopRecording();
88 | mVideoEncoder = null;
89 | if (mAudioEncoder != null)
90 | mAudioEncoder.stopRecording();
91 | mAudioEncoder = null;
92 | }
93 |
94 | public synchronized boolean isStarted() {
95 | return mIsStarted;
96 | }
97 |
98 | //**********************************************************************
99 | //**********************************************************************
100 |
101 | /**
102 |      * Assigns an encoder to this class. This is called from the encoder.
103 | *
104 | * @param encoder instance of MediaVideoEncoder or MediaAudioEncoder
105 | */
106 | /*package*/ void addEncoder(final MediaEncoder encoder) {
107 | if (encoder instanceof MediaVideoEncoder) {
108 | if (mVideoEncoder != null)
109 | throw new IllegalArgumentException("Video encoder already added.");
110 | mVideoEncoder = encoder;
111 | } else if (encoder instanceof MediaAudioEncoder) {
112 | if (mAudioEncoder != null)
113 |                 throw new IllegalArgumentException("Audio encoder already added.");
114 | mAudioEncoder = encoder;
115 | } else
116 | throw new IllegalArgumentException("unsupported encoder");
117 | mEncoderCount = (mVideoEncoder != null ? 1 : 0) + (mAudioEncoder != null ? 1 : 0);
118 | }
119 |
120 | /**
121 | * request start recording from encoder
122 | *
123 | * @return true when muxer is ready to write
124 | */
125 | /*package*/
126 | synchronized boolean start() {
127 | if (DEBUG) LogUtil.logd(TAG, "start:");
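    |         // The muxer can only start once every registered encoder has called start().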
128 |         mStartedCount++;
129 |         if ((mEncoderCount > 0) && (mStartedCount == mEncoderCount)) {
130 | mMediaMuxer.start();
131 | mIsStarted = true;
132 | notifyAll();
133 | mViewHandler.onMuxerStarted(mOutputPath);
134 | if (DEBUG) LogUtil.logd(TAG, "MediaMuxer started:");
135 | }
136 | return mIsStarted;
137 | }
138 |
139 | /**
140 | * request stop recording from encoder when encoder received EOS
141 | */
142 | /*package*/
143 | synchronized void stop() {
144 |         if (DEBUG) LogUtil.logd(TAG, "stop:mStartedCount=" + mStartedCount);
145 |         mStartedCount--;
146 |         if ((mEncoderCount > 0) && (mStartedCount <= 0)) {
147 | try {
148 | mMediaMuxer.stop();
149 | } catch (Exception e) {
150 | LogUtil.loge(TAG, "MediaMuxer stopped: error" + e);
151 | e.printStackTrace();
152 | } finally {
153 | if (mViewHandler != null) {
154 | mViewHandler.onMuxerStopped(getOutputPath());
155 | }
156 | try {
157 | mMediaMuxer.release();
158 | } catch (Exception e) {
159 | e.printStackTrace();
160 | }
161 | }
162 | mIsStarted = false;
163 | if (DEBUG) LogUtil.logd(TAG, "MediaMuxer stopped:");
164 | }
165 | }
166 |
167 | /**
168 |      * Adds a track for the given encoder output format to the muxer.
169 |      *
170 |      * @param format the encoder's output MediaFormat
171 |      * @return the assigned track index; a negative value indicates an error
172 | */
173 | /*package*/
174 | synchronized int addTrack(final MediaFormat format) {
175 | if (mIsStarted)
176 | throw new IllegalStateException("muxer already started");
177 | final int trackIx = mMediaMuxer.addTrack(format);
178 | if (DEBUG)
179 | LogUtil.logi(TAG, "addTrack:trackNum=" + mEncoderCount + ",trackIx=" + trackIx + ",format=" + format);
180 | return trackIx;
181 | }
182 |
183 | /**
184 | * write encoded data to muxer
185 | *
186 |      * @param trackIndex track index returned by addTrack
187 |      * @param byteBuf    buffer holding the encoded sample data
188 |      * @param bufferInfo metadata for the sample (offset, size, flags, presentation time)
189 | */
190 | /*package*/
191 | synchronized void writeSampleData(final int trackIndex, final ByteBuffer byteBuf, final MediaCodec.BufferInfo bufferInfo) {
192 |         if (mStartedCount > 0)
193 | mMediaMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
194 | }
195 |
196 | //**********************************************************************
197 | //**********************************************************************
198 |
199 | /**
200 | * get current date and time as String
201 | *
202 |      * @return the current date and time formatted as "yyyy-MM-dd-HH-mm-ss"
203 | */
204 | private static final String getDateTimeString() {
205 | final GregorianCalendar now = new GregorianCalendar();
206 | return mDateTimeFormat.format(now.getTime());
207 | }
208 |
209 | }
210 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/gles/CoordinateTransform.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.gles;
2 |
3 | import android.content.Context;
4 | import android.content.res.Configuration;
5 | import android.opengl.Matrix;
6 | import android.support.annotation.IntDef;
7 | import android.view.WindowManager;
8 |
9 | import com.erlei.videorecorder.recorder.CameraController;
10 | import com.erlei.videorecorder.util.LogUtil;
11 |
12 | import java.lang.annotation.Retention;
13 | import java.lang.annotation.RetentionPolicy;
14 | import java.util.Arrays;
15 |
16 | import static android.view.Surface.ROTATION_270;
17 | import static android.view.Surface.ROTATION_90;
18 |
19 |
20 | @SuppressWarnings({"WeakerAccess", "unused"})
21 | public abstract class CoordinateTransform {
22 | protected final CameraController mCameraController;
23 | protected static final String TAG = LogUtil.TAG;
24 |
25 | protected float sVertexCoords[] = {
26 | -1.0f, -1.0f, // 0 bottom left
27 | 1.0f, -1.0f, // 1 bottom right
28 | -1.0f, 1.0f, // 2 top left
29 | 1.0f, 1.0f, // 3 top right
30 | };
31 |
32 | protected float TEXTURE_ROTATED_0[] = {
33 | 0.0f, 0.0f, // bottom left
34 | 1.0f, 0.0f, // bottom right
35 | 0.0f, 1.0f, // top left
36 | 1.0f, 1.0f, // top right
37 | };
38 | protected float TEXTURE_ROTATED_90[] = {
39 | 1.0f, 0.0f, // bottom right
40 | 1.0f, 1.0f, // top right
41 | 0.0f, 0.0f, // bottom left
42 | 0.0f, 1.0f, // top left
43 | };
44 | protected float TEXTURE_ROTATED_180[] = {
45 | 1.0f, 1.0f, // top right
46 | 0.0f, 1.0f, // top left
47 | 1.0f, 0.0f, // bottom right
48 | 0.0f, 0.0f, // bottom left
49 | };
50 | protected float TEXTURE_ROTATED_270[] = {
51 | 0.0f, 1.0f, // top left
52 | 0.0f, 0.0f, // bottom left
53 | 1.0f, 1.0f, // top right
54 | 1.0f, 0.0f, // bottom right
55 | };
56 |
57 | protected float sTexCoord2D[] = {
58 | 0.0f, 1.0f,
59 | 1.0f, 1.0f,
60 | 0.0f, 0.0f,
61 | 1.0f, 0.0f,
62 | };
63 |
64 | @android.support.annotation.Size(max = 16, min = 16)
65 | public abstract float[] getMVPMatrixOES();
66 |
67 | @android.support.annotation.Size(max = 16, min = 16)
68 | public float[] getMVPMatrix2D(){
69 | float[] floats = new float[16];
70 | Matrix.setIdentityM(floats,0);
71 | return floats;
72 | }
73 |
74 | @android.support.annotation.Size(max = 16, min = 16)
75 | public float[] getTextureMatrix2D(){
76 | float[] floats = new float[16];
77 | Matrix.setIdentityM(floats,0);
78 | return floats;
79 | }
80 |
81 | @android.support.annotation.Size(max = 16, min = 16)
82 | public float[] getTextureMatrixOES(){
83 | float[] floats = new float[16];
84 | Matrix.setIdentityM(floats,0);
85 | return floats;
86 | }
87 |
88 | @android.support.annotation.Size(max = 8, min = 8)
89 | public abstract float[] getOESTextureCoordinate();
90 |
91 | @android.support.annotation.Size(max = 8, min = 8)
92 | public abstract float[] get2DTextureCoordinate();
93 |
94 | @android.support.annotation.Size(max = 8, min = 8)
95 | public abstract float[] getVertexCoordinate();
96 |
97 | public static final int CENTER_INSIDE = 0, CENTER_CROP = 1, FIT_XY = 2;
98 | public static final int FLIP_NONE = 0, FLIP_HORIZONTAL = 1, FLIP_VERTICAL = 2;
99 |
100 | @IntDef({CENTER_CROP, CENTER_INSIDE, FIT_XY})
101 | @Retention(RetentionPolicy.SOURCE)
102 | public @interface ScaleType {
103 | }
104 |
105 | @IntDef({FLIP_NONE, FLIP_HORIZONTAL, FLIP_VERTICAL})
106 | @Retention(RetentionPolicy.SOURCE)
107 | public @interface FlipType {
108 | }
109 |
110 | public CoordinateTransform(CameraController controller) {
111 | this.mCameraController = controller;
112 | }
113 |
114 | /**
115 |      * @return true if the current display rotation is landscape
116 | */
117 | public boolean isLandscape() {
118 | WindowManager windowManager = (WindowManager) mCameraController.getContext().getSystemService(Context.WINDOW_SERVICE);
119 | if (windowManager != null && windowManager.getDefaultDisplay() != null) {
120 | int rotation = windowManager.getDefaultDisplay().getRotation();
121 | LogUtil.logd(TAG, "rotation=" + rotation);
122 | return rotation == ROTATION_90 || rotation == ROTATION_270;
123 | }
124 | return mCameraController.getContext().getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE;
125 | }
126 |
127 | /**
128 |      * @param coordinate the coordinate array to flip
129 |      * @return the flipped coordinates
130 | */
131 | public float[] getFlip(float[] coordinate, @FlipType int flipType) {
132 | LogUtil.logd(TAG, "coordinate=" + Arrays.toString(coordinate));
133 | float[] dest = null;
134 | switch (flipType) {
135 | case FLIP_HORIZONTAL:
136 | dest = new float[]{
137 | coordinate[0], flip(coordinate[1]),
138 | coordinate[2], flip(coordinate[3]),
139 | coordinate[4], flip(coordinate[5]),
140 | coordinate[6], flip(coordinate[7]),
141 | };
142 | break;
143 | case FLIP_VERTICAL:
144 | dest = new float[]{
145 | flip(coordinate[0]), coordinate[1],
146 | flip(coordinate[2]), coordinate[3],
147 | flip(coordinate[4]), coordinate[5],
148 | flip(coordinate[6]), coordinate[7],
149 | };
150 | break;
151 | case FLIP_NONE:
152 | break;
153 | }
154 | LogUtil.logd(TAG, "coordinate=" + Arrays.toString(dest));
155 | return dest == null ? coordinate : dest;
156 | }
157 |
158 | private static float flip(final float i) {
159 | return i == 0.0f ? 1.0f : 0.0f;
160 | }
161 |
162 | /**
163 |      * When the MVP matrix is not used, this can be combined with getFlip to implement the preview.
164 | */
165 | public float[] getRotate() {
166 | if (mCameraController.isFront()) {
167 | switch (mCameraController.getDisplayOrientation()) {
168 | case 0:
169 | return TEXTURE_ROTATED_180;
170 | case 90:
171 | return TEXTURE_ROTATED_270;
172 | case 180:
173 | return TEXTURE_ROTATED_0;
174 | case 270:
175 | return TEXTURE_ROTATED_90;
176 | }
177 | } else {
178 | switch (mCameraController.getDisplayOrientation()) {
179 | case 0:
180 | return TEXTURE_ROTATED_0;
181 | case 90:
182 | return TEXTURE_ROTATED_90;
183 | case 180:
184 | return TEXTURE_ROTATED_180;
185 | case 270:
186 | return TEXTURE_ROTATED_270;
187 | }
188 |
189 | }
190 | return TEXTURE_ROTATED_90;
191 | }
192 |
193 | /**
194 |      * @param scale  the scale factor
195 |      * @param coords the coordinate array
196 |      * @return the scaled coordinates
197 | */
198 | public float[] setScale(float scale, float[] coords) {
199 | for (int i = 0; i < coords.length; i++) {
200 | coords[i] = ((coords[i] - 0.5f) * scale) + 0.5f;
201 | }
202 | return coords;
203 | }
204 |
205 |
206 | /**
207 |      * @param m     the matrix
208 |      * @param angle the rotation angle in degrees
209 |      * @return the rotated matrix
210 | */
211 | public static float[] rotate(float[] m, float angle) {
212 | Matrix.rotateM(m, 0, angle, 0, 0, 1);
213 | return m;
214 | }
215 |
216 | /**
217 |      * @param m the matrix
218 |      * @param x whether to mirror along the x axis
219 |      * @param y whether to mirror along the y axis
220 |      * @return the mirrored matrix
221 | */
222 | public static float[] flip(float[] m, boolean x, boolean y) {
223 | if (x || y) {
224 | Matrix.scaleM(m, 0, x ? -1 : 1, y ? -1 : 1, 1);
225 | }
226 | return m;
227 | }
228 |
229 | }
230 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/gles/DefaultCoordinateTransform.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.gles;
2 |
3 |
4 | import android.opengl.Matrix;
5 |
6 | import com.erlei.videorecorder.camera.Size;
7 | import com.erlei.videorecorder.recorder.CameraController;
8 | import com.erlei.videorecorder.util.LogUtil;
9 |
10 | @SuppressWarnings({"WeakerAccess", "unused"})
11 | public class DefaultCoordinateTransform extends CoordinateTransform {
12 |
13 | private float[] mVertexCoordinate;
14 | private float[] mTextureCoordinate;
15 |
16 | public DefaultCoordinateTransform(CameraController view) {
17 | super(view);
18 | mTextureCoordinate = TEXTURE_ROTATED_0;
19 | mVertexCoordinate = sVertexCoords;
20 | }
21 |
22 | @Override
23 | public float[] getMVPMatrixOES() {
24 | Size cameraSize = mCameraController.getCameraSize();
25 | Size surfaceSize = mCameraController.getSurfaceSize();
26 |
27 |         // I forget exactly why this was written this way T-T. Without checking the orientation the portrait preview is distorted, while landscape is fine.
28 |         // It is probably because the camera's natural orientation is landscape. A matrix would also work, but the simple, blunt size swap below is enough.
29 | if (!isLandscape()) cameraSize = new Size(cameraSize.getHeight(), cameraSize.getWidth());
30 |
31 | float cameraWidth = cameraSize.getWidth();
32 | float cameraHeight = cameraSize.getHeight();
33 | LogUtil.logd(TAG, "cameraSize = " + cameraWidth + "x" + cameraHeight);
34 | float surfaceWidth = surfaceSize.getWidth();
35 | float surfaceHeight = surfaceSize.getHeight();
36 | LogUtil.logd(TAG, "surfaceSize = " + surfaceWidth + "x" + surfaceHeight);
37 | float cameraAspectRatio = cameraWidth / cameraHeight;
38 | float surfaceAspectRatio = surfaceWidth / surfaceHeight;
39 | LogUtil.logd(TAG, "cameraAspectRatio = " + cameraAspectRatio + "\t\t surfaceAspectRatio = " + surfaceAspectRatio);
40 |         // model matrix
41 | float[] modelMatrix = new float[16];
42 | Matrix.setIdentityM(modelMatrix, 0);
43 |
44 |         // mTexture.getTransformMatrix(mTexMatrix) already handles the texture matrix, so the texture orientation does not need to be handled here.
45 |         // We only need to fix the texture distortion by restoring the texture to its original aspect ratio (cameraSize).
46 |         // 1. restore the texture aspect ratio
47 | Matrix.scaleM(modelMatrix, 0, cameraWidth / surfaceWidth, cameraHeight / surfaceHeight, 0f);
48 | LogUtil.logd(TAG, "scalex = " + (cameraWidth / surfaceWidth) + "\t\t scaley = " + (cameraHeight / surfaceHeight));
49 |
50 |         // 2. CENTER_CROP (see ImageView CENTER_CROP)
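   |         // Pick the scale that makes the camera frame completely fill the surface, cropping the overflow.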
51 | float scale;
52 | float dx = 0, dy = 0;
53 | if (cameraWidth * surfaceHeight > surfaceWidth * cameraHeight) {
54 | scale = surfaceHeight / cameraHeight;
55 | dx = (surfaceWidth - cameraWidth * scale) * 0.5f;
56 | } else {
57 | scale = surfaceWidth / cameraWidth;
58 | dy = (surfaceHeight - cameraHeight * scale) * 0.5f;
59 | }
60 | LogUtil.logd(TAG, "scale = " + scale + "\t\t dx = " + (dx / cameraWidth) + "\t\t dy = " + (dy / cameraHeight));
61 | Matrix.scaleM(modelMatrix, 0, scale, scale, 0f);
62 |         // TODO: 2018/5/22 Ideally a translation should be applied here, although the result looks fine without it.
63 |         // The algorithm is wrong and needs extensive re-testing: the translation works at high resolutions but misbehaves at low resolutions.
64 | //Matrix.translateM(modelMatrix, 0, dx / cameraWidth, dy / cameraHeight, 0f);
65 | return modelMatrix;
66 |
67 | }
68 |
69 |
70 | /**
71 |      * @return the texture coordinates
72 | */
73 | @Override
74 | public float[] getOESTextureCoordinate() {
75 | return mTextureCoordinate;
76 | }
77 |
78 | @Override
79 | public float[] get2DTextureCoordinate() {
80 | return mTextureCoordinate;
81 | }
82 |
83 | @Override
84 | public float[] getVertexCoordinate() {
85 | return mVertexCoordinate;
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/gles/Drawable2d.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.erlei.videorecorder.gles;
18 |
19 | import java.nio.FloatBuffer;
20 |
21 | /**
22 | * Base class for stuff we like to draw.
23 | */
24 | public class Drawable2d {
25 | private static final int SIZEOF_FLOAT = 4;
26 |
27 | /**
28 | * Simple equilateral triangle (1.0 per side). Centered on (0,0).
29 | */
30 | private static final float TRIANGLE_COORDS[] = {
31 | 0.0f, 0.577350269f, // 0 top
32 | -0.5f, -0.288675135f, // 1 bottom left
33 | 0.5f, -0.288675135f // 2 bottom right
34 | };
35 | private static final float TRIANGLE_TEX_COORDS[] = {
36 | 0.5f, 0.0f, // 0 top center
37 | 0.0f, 1.0f, // 1 bottom left
38 | 1.0f, 1.0f, // 2 bottom right
39 | };
40 | private static final FloatBuffer TRIANGLE_BUF =
41 | GLUtil.createFloatBuffer(TRIANGLE_COORDS);
42 | private static final FloatBuffer TRIANGLE_TEX_BUF =
43 | GLUtil.createFloatBuffer(TRIANGLE_TEX_COORDS);
44 |
45 | /**
46 | * Simple square, specified as a triangle strip. The square is centered on (0,0) and has
47 | * a size of 1x1.
48 | *
49 | * Triangles are 0-1-2 and 2-1-3 (counter-clockwise winding).
50 | */
51 | private static final float RECTANGLE_COORDS[] = {
52 | -0.5f, -0.5f, // 0 bottom left
53 | 0.5f, -0.5f, // 1 bottom right
54 | -0.5f, 0.5f, // 2 top left
55 | 0.5f, 0.5f, // 3 top right
56 | };
57 | private static final float RECTANGLE_TEX_COORDS[] = {
58 | 0.0f, 1.0f, // 0 bottom left
59 | 1.0f, 1.0f, // 1 bottom right
60 | 0.0f, 0.0f, // 2 top left
61 | 1.0f, 0.0f // 3 top right
62 | };
63 | private static final FloatBuffer RECTANGLE_BUF =
64 | GLUtil.createFloatBuffer(RECTANGLE_COORDS);
65 | private static final FloatBuffer RECTANGLE_TEX_BUF =
66 | GLUtil.createFloatBuffer(RECTANGLE_TEX_COORDS);
67 |
68 | /**
69 | * A "full" square, extending from -1 to +1 in both dimensions. When the model/view/projection
70 | * matrix is identity, this will exactly cover the viewport.
71 | *
72 | * The texture coordinates are Y-inverted relative to RECTANGLE. (This seems to work out
73 | * right with external textures from SurfaceTexture.)
74 | */
75 | private static final float FULL_RECTANGLE_COORDS[] = {
76 | -1.0f, -1.0f, // 0 bottom left
77 | 1.0f, -1.0f, // 1 bottom right
78 | -1.0f, 1.0f, // 2 top left
79 | 1.0f, 1.0f, // 3 top right
80 | };
81 | private static final float FULL_RECTANGLE_TEX_COORDS[] = {
82 | 0.0f, 0.0f, // 0 bottom left
83 | 1.0f, 0.0f, // 1 bottom right
84 | 0.0f, 1.0f, // 2 top left
85 | 1.0f, 1.0f // 3 top right
86 | };
87 | private static final FloatBuffer FULL_RECTANGLE_BUF =
88 | GLUtil.createFloatBuffer(FULL_RECTANGLE_COORDS);
89 | private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
90 | GLUtil.createFloatBuffer(FULL_RECTANGLE_TEX_COORDS);
91 |
92 |
93 | private FloatBuffer mVertexArray;
94 | private FloatBuffer mTexCoordArray;
95 | private int mVertexCount;
96 | private int mCoordsPerVertex;
97 | private int mVertexStride;
98 | private int mTexCoordStride;
99 | private Prefab mPrefab;
100 |
101 | /**
102 | * Enum values for constructor.
103 | */
104 | public enum Prefab {
105 | TRIANGLE, RECTANGLE, FULL_RECTANGLE
106 | }
107 |
108 | /**
109 | * Prepares a drawable from a "pre-fabricated" shape definition.
110 | *
111 | * Does no EGL/GL operations, so this can be done at any time.
112 | */
113 | public Drawable2d(Prefab shape) {
114 | switch (shape) {
115 | case TRIANGLE:
116 | mVertexArray = TRIANGLE_BUF;
117 | mTexCoordArray = TRIANGLE_TEX_BUF;
118 | mCoordsPerVertex = 2;
119 | mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT;
120 | mVertexCount = TRIANGLE_COORDS.length / mCoordsPerVertex;
121 | break;
122 | case RECTANGLE:
123 | mVertexArray = RECTANGLE_BUF;
124 | mTexCoordArray = RECTANGLE_TEX_BUF;
125 | mCoordsPerVertex = 2;
126 | mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT;
127 | mVertexCount = RECTANGLE_COORDS.length / mCoordsPerVertex;
128 | break;
129 | case FULL_RECTANGLE:
130 | mVertexArray = FULL_RECTANGLE_BUF;
131 | mTexCoordArray = FULL_RECTANGLE_TEX_BUF;
132 | mCoordsPerVertex = 2;
133 | mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT;
134 | mVertexCount = FULL_RECTANGLE_COORDS.length / mCoordsPerVertex;
135 | break;
136 | default:
137 | throw new RuntimeException("Unknown shape " + shape);
138 | }
139 | mTexCoordStride = 2 * SIZEOF_FLOAT;
140 | mPrefab = shape;
141 | }
142 |
143 | /**
144 | * Returns the array of vertices.
145 | *
146 | * To avoid allocations, this returns internal state. The caller must not modify it.
147 | */
148 | public FloatBuffer getVertexArray() {
149 | return mVertexArray;
150 | }
151 |
152 | /**
153 | * Returns the array of texture coordinates.
154 | *
155 | * To avoid allocations, this returns internal state. The caller must not modify it.
156 | */
157 | public FloatBuffer getTexCoordArray() {
158 | return mTexCoordArray;
159 | }
160 |
161 | /**
162 | * Returns the number of vertices stored in the vertex array.
163 | */
164 | public int getVertexCount() {
165 | return mVertexCount;
166 | }
167 |
168 | /**
169 | * Returns the width, in bytes, of the data for each vertex.
170 | */
171 | public int getVertexStride() {
172 | return mVertexStride;
173 | }
174 |
175 | /**
176 | * Returns the width, in bytes, of the data for each texture coordinate.
177 | */
178 | public int getTexCoordStride() {
179 | return mTexCoordStride;
180 | }
181 |
182 | /**
183 | * Returns the number of position coordinates per vertex. This will be 2 or 3.
184 | */
185 | public int getCoordsPerVertex() {
186 | return mCoordsPerVertex;
187 | }
188 |
189 | @Override
190 | public String toString() {
191 | if (mPrefab != null) {
192 | return "[Drawable2d: " + mPrefab + "]";
193 | } else {
194 | return "[Drawable2d: ...]";
195 | }
196 | }
197 | }
198 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/gles/EglSurfaceBase.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2013 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.erlei.videorecorder.gles;
18 |
19 | import android.graphics.Bitmap;
20 | import android.opengl.EGL14;
21 | import android.opengl.EGLSurface;
22 | import android.opengl.GLES20;
23 | import android.util.Log;
24 |
25 | import com.erlei.videorecorder.util.LogUtil;
26 |
27 | import java.io.BufferedOutputStream;
28 | import java.io.File;
29 | import java.io.FileOutputStream;
30 | import java.io.IOException;
31 | import java.nio.ByteBuffer;
32 | import java.nio.ByteOrder;
33 |
34 | /**
35 | * Common base class for EGL surfaces.
36 | *
37 | * There can be multiple surfaces associated with a single context.
38 | */
39 | public class EglSurfaceBase {
40 | protected static final String TAG = LogUtil.TAG;
41 |
42 | // EglCore object we're associated with. It may be associated with multiple surfaces.
43 | protected EglCore mEglCore;
44 |
45 | private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
46 | private int mWidth = -1;
47 | private int mHeight = -1;
48 |
49 | protected EglSurfaceBase(EglCore eglCore) {
50 | mEglCore = eglCore;
51 | }
52 |
53 | /**
54 | * Creates a window surface.
55 | *
56 | * @param surface May be a Surface or SurfaceTexture.
57 | */
58 | public void createWindowSurface(Object surface) {
59 | if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
60 | throw new IllegalStateException("surface already created");
61 | }
62 | mEGLSurface = mEglCore.createWindowSurface(surface);
63 |
64 | // Don't cache width/height here, because the size of the underlying surface can change
65 | // out from under us (see e.g. HardwareScalerActivity).
66 | //mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
67 | //mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
68 | }
69 |
70 | /**
71 | * Creates an off-screen surface.
72 | */
73 | public void createOffscreenSurface(int width, int height) {
74 | if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
75 | throw new IllegalStateException("surface already created");
76 | }
77 | mEGLSurface = mEglCore.createOffscreenSurface(width, height);
78 | mWidth = width;
79 | mHeight = height;
80 | }
81 |
82 | /**
83 | * Returns the surface's width, in pixels.
84 | *
85 | * If this is called on a window surface, and the underlying surface is in the process
86 | * of changing size, we may not see the new size right away (e.g. in the "surfaceChanged"
87 | * callback). The size should match after the next buffer swap.
88 | */
89 | public int getWidth() {
90 | if (mWidth < 0) {
91 | return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
92 | } else {
93 | return mWidth;
94 | }
95 | }
96 |
97 | /**
98 | * Returns the surface's height, in pixels.
99 | */
100 | public int getHeight() {
101 | if (mHeight < 0) {
102 | return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
103 | } else {
104 | return mHeight;
105 | }
106 | }
107 |
108 | /**
109 | * Release the EGL surface.
110 | */
111 | public void releaseEglSurface() {
112 | mEglCore.releaseSurface(mEGLSurface);
113 | mEGLSurface = EGL14.EGL_NO_SURFACE;
114 | mWidth = mHeight = -1;
115 | }
116 |
117 | /**
118 | * Makes our EGL context and surface current.
119 | */
120 | public void makeCurrent() {
121 | mEglCore.makeCurrent(mEGLSurface);
122 | }
123 |
124 | /**
125 | * Makes our EGL context and surface current for drawing, using the supplied surface
126 | * for reading.
127 | */
128 | public void makeCurrentReadFrom(EglSurfaceBase readSurface) {
129 | mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface);
130 | }
131 |
132 | /**
133 | * Calls eglSwapBuffers. Use this to "publish" the current frame.
134 | *
135 | * @return false on failure
136 | */
137 | public boolean swapBuffers() {
138 | boolean result = mEglCore.swapBuffers(mEGLSurface);
139 | if (!result) {
140 | Log.d(TAG, "WARNING: swapBuffers() failed");
141 | }
142 | return result;
143 | }
144 |
145 | /**
146 | * Sends the presentation time stamp to EGL.
147 | *
148 | * @param nsecs Timestamp, in nanoseconds.
149 | */
150 | public void setPresentationTime(long nsecs) {
151 | mEglCore.setPresentationTime(mEGLSurface, nsecs);
152 | }
153 |
154 | /**
155 | * Saves the EGL surface to a file.
156 | *
157 | * Expects that this object's EGL surface is current.
158 | */
159 | public void saveFrame(File file) throws IOException {
160 | if (!mEglCore.isCurrent(mEGLSurface)) {
161 | throw new RuntimeException("Expected EGL context/surface is not current");
162 | }
163 |
164 | // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
165 | // data (i.e. a byte of red, followed by a byte of green...). While the Bitmap
166 | // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
167 | // Bitmap "copy pixels" method wants the same format GL provides.
168 | //
169 | // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
170 | // here often.
171 | //
172 | // Making this even more interesting is the upside-down nature of GL, which means
173 | // our output will look upside down relative to what appears on screen if the
174 | // typical GL conventions are used.
175 |
176 | String filename = file.toString();
177 |
178 | int width = getWidth();
179 | int height = getHeight();
180 | ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
181 | buf.order(ByteOrder.LITTLE_ENDIAN);
182 | GLES20.glReadPixels(0, 0, width, height,
183 | GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
184 | GLUtil.checkGlError("glReadPixels");
185 | buf.rewind();
186 |
187 | BufferedOutputStream bos = null;
188 | try {
189 | bos = new BufferedOutputStream(new FileOutputStream(filename));
190 | Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
191 | bmp.copyPixelsFromBuffer(buf);
192 | bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
193 | bmp.recycle();
194 | } finally {
195 | if (bos != null) bos.close();
196 | }
197 | Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
198 | }
199 |
200 | public void release() {
201 | releaseEglSurface();
202 | }
203 | }
204 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/gles/FlatShadedProgram.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.erlei.videorecorder.gles;
18 |
19 | import android.opengl.GLES20;
20 | import android.util.Log;
21 |
22 | import com.erlei.videorecorder.util.LogUtil;
23 |
24 | import java.nio.FloatBuffer;
25 |
26 | /**
27 | * GL program and supporting functions for flat-shaded rendering.
28 | */
29 | public class FlatShadedProgram {
30 | private static final String TAG = LogUtil.TAG;
31 |
32 | private static final String VERTEX_SHADER =
33 | "uniform mat4 uMVPMatrix;" +
34 | "attribute vec4 aPosition;" +
35 | "void main() {" +
36 | " gl_Position = uMVPMatrix * aPosition;" +
37 | "}";
38 |
39 | private static final String FRAGMENT_SHADER =
40 | "precision mediump float;" +
41 | "uniform vec4 uColor;" +
42 | "void main() {" +
43 | " gl_FragColor = uColor;" +
44 | "}";
45 |
46 | // Handles to the GL program and various components of it.
47 | private int mProgramHandle = -1;
48 | private int muColorLoc = -1;
49 | private int muMVPMatrixLoc = -1;
50 | private int maPositionLoc = -1;
51 |
52 |
53 | /**
54 | * Prepares the program in the current EGL context.
55 | */
56 | public FlatShadedProgram() {
57 | mProgramHandle = GLUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
58 | if (mProgramHandle == 0) {
59 | throw new RuntimeException("Unable to create program");
60 | }
61 | Log.d(TAG, "Created program " + mProgramHandle);
62 |
63 | // get locations of attributes and uniforms
64 |
65 | maPositionLoc = GLES20.glGetAttribLocation(mProgramHandle, "aPosition");
66 | GLUtil.checkLocation(maPositionLoc, "aPosition");
67 | muMVPMatrixLoc = GLES20.glGetUniformLocation(mProgramHandle, "uMVPMatrix");
68 | GLUtil.checkLocation(muMVPMatrixLoc, "uMVPMatrix");
69 | muColorLoc = GLES20.glGetUniformLocation(mProgramHandle, "uColor");
70 | GLUtil.checkLocation(muColorLoc, "uColor");
71 | }
72 |
73 | /**
74 | * Releases the program.
75 | */
76 | public void release() {
77 | GLES20.glDeleteProgram(mProgramHandle);
78 | mProgramHandle = -1;
79 | }
80 |
81 | /**
82 | * Issues the draw call. Does the full setup on every call.
83 | *
84 | * @param mvpMatrix The 4x4 projection matrix.
85 | * @param color A 4-element color vector.
86 | * @param vertexBuffer Buffer with vertex data.
87 | * @param firstVertex Index of first vertex to use in vertexBuffer.
88 | * @param vertexCount Number of vertices in vertexBuffer.
89 | * @param coordsPerVertex The number of coordinates per vertex (e.g. x,y is 2).
90 | * @param vertexStride Width, in bytes, of the data for each vertex (often vertexCount *
91 | * sizeof(float)).
92 | */
93 | public void draw(float[] mvpMatrix, float[] color, FloatBuffer vertexBuffer,
94 | int firstVertex, int vertexCount, int coordsPerVertex, int vertexStride) {
95 | GLUtil.checkGlError("draw start");
96 |
97 | // Select the program.
98 | GLES20.glUseProgram(mProgramHandle);
99 | GLUtil.checkGlError("glUseProgram");
100 |
101 | // Copy the model / view / projection matrix over.
102 | GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mvpMatrix, 0);
103 | GLUtil.checkGlError("glUniformMatrix4fv");
104 |
105 | // Copy the color vector in.
106 | GLES20.glUniform4fv(muColorLoc, 1, color, 0);
107 | GLUtil.checkGlError("glUniform4fv ");
108 |
109 | // Enable the "aPosition" vertex attribute.
110 | GLES20.glEnableVertexAttribArray(maPositionLoc);
111 | GLUtil.checkGlError("glEnableVertexAttribArray");
112 |
113 | // Connect vertexBuffer to "aPosition".
114 | GLES20.glVertexAttribPointer(maPositionLoc, coordsPerVertex,
115 | GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
116 | GLUtil.checkGlError("glVertexAttribPointer");
117 |
118 | // Draw the rect.
119 | GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, firstVertex, vertexCount);
120 | GLUtil.checkGlError("glDrawArrays");
121 |
122 | // Done -- disable vertex array and program.
123 | GLES20.glDisableVertexAttribArray(maPositionLoc);
124 | GLES20.glUseProgram(0);
125 | }
126 | }
127 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/gles/GLUtil.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.gles;
2 |
3 | import android.graphics.Bitmap;
4 | import android.opengl.GLES20;
5 | import android.opengl.GLES30;
6 | import android.opengl.GLUtils;
7 | import android.opengl.Matrix;
8 | import android.util.Log;
9 |
10 | import com.erlei.videorecorder.util.LogUtil;
11 |
12 | import java.nio.ByteBuffer;
13 | import java.nio.ByteOrder;
14 | import java.nio.FloatBuffer;
15 |
16 | public class GLUtil {
17 | public static final int NO_TEXTURE = -1;
18 | private static final String TAG = "GLUtil";
19 | private static final int SIZEOF_FLOAT = 4;
20 | /** Identity matrix for general use. Don't modify or life will get weird. */
21 | public static final float[] IDENTITY_MATRIX;
22 | public static int GL_VERSION = -1;
23 |
24 | static {
25 | IDENTITY_MATRIX = new float[16];
26 | Matrix.setIdentityM(IDENTITY_MATRIX, 0);
27 | System.loadLibrary("glutil-lib");
28 | }
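   |     // Uploads a Bitmap into a 2D texture: creates a new texture when usedTexId == NO_TEXTURE,
   |     // otherwise updates the existing texture in place.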
29 | public static int loadTexture(final Bitmap img, final int usedTexId, final boolean recycle) {
30 | int textures[] = new int[1];
31 | if (usedTexId == NO_TEXTURE) {
32 | GLES20.glGenTextures(1, textures, 0);
33 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
34 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
35 | GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
36 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
37 | GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
38 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
39 | GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
40 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
41 | GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
42 |
43 | GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img, 0);
44 | } else {
45 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
46 | GLUtils.texSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, img);
47 | textures[0] = usedTexId;
48 | }
49 | if (recycle) {
50 | img.recycle();
51 | }
52 | return textures[0];
53 | }
54 | public static void checkGlError(String op) {
55 | int error = GLES20.glGetError();
56 | if (error != GLES20.GL_NO_ERROR) {
57 | String msg = op + ": glError 0x" + Integer.toHexString(error);
58 | LogUtil.logd(msg);
59 | throw new RuntimeException(msg);
60 | }
61 | }
62 | public static void checkLocation(int location, String label) {
63 | if (location < 0) {
64 | throw new RuntimeException("Unable to locate '" + label + "' in program");
65 | }
66 | }
67 | /**
68 | * Creates a new program from the supplied vertex and fragment shaders.
69 | *
70 | * @return A handle to the program, or 0 on failure.
71 | */
72 | public static int createProgram(String vertexSource, String fragmentSource) {
73 | int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
74 | if (vertexShader == 0) {
75 | return 0;
76 | }
77 | int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
78 | if (pixelShader == 0) {
79 | return 0;
80 | }
81 |
82 | int program = GLES20.glCreateProgram();
83 | checkGlError("glCreateProgram");
84 | if (program == 0) {
85 | Log.e(TAG, "Could not create program");
86 | }
87 | GLES20.glAttachShader(program, vertexShader);
88 | checkGlError("glAttachShader");
89 | GLES20.glAttachShader(program, pixelShader);
90 | checkGlError("glAttachShader");
91 | GLES20.glLinkProgram(program);
92 | int[] linkStatus = new int[1];
93 | GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
94 | if (linkStatus[0] != GLES20.GL_TRUE) {
95 | Log.e(TAG, "Could not link program: ");
96 | Log.e(TAG, GLES20.glGetProgramInfoLog(program));
97 | GLES20.glDeleteProgram(program);
98 | program = 0;
99 | }
100 | return program;
101 | }
102 |
103 | /**
104 | * Compiles the provided shader source.
105 | *
106 | * @return A handle to the shader, or 0 on failure.
107 | */
108 | public static int loadShader(int shaderType, String source) {
109 | int shader = GLES20.glCreateShader(shaderType);
110 | checkGlError("glCreateShader type=" + shaderType);
111 | GLES20.glShaderSource(shader, source);
112 | GLES20.glCompileShader(shader);
113 | int[] compiled = new int[1];
114 | GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
115 | if (compiled[0] == 0) {
116 | Log.e(TAG, "Could not compile shader " + shaderType + ":");
117 | Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
118 | GLES20.glDeleteShader(shader);
119 | shader = 0;
120 | }
121 | return shader;
122 | }
123 |
124 | /**
125 | * Creates a texture from raw data.
126 | *
127 | * @param data Image data, in a "direct" ByteBuffer.
128 | * @param width Texture width, in pixels (not bytes).
129 | * @param height Texture height, in pixels.
130 | * @param format Image data format (use constant appropriate for glTexImage2D(), e.g. GL_RGBA).
131 | * @return Handle to texture.
132 | */
133 | public static int createImageTexture(ByteBuffer data, int width, int height, int format) {
134 | int[] textureHandles = new int[1];
135 | int textureHandle;
136 |
137 | GLES20.glGenTextures(1, textureHandles, 0);
138 | textureHandle = textureHandles[0];
139 | checkGlError("glGenTextures");
140 |
141 | // Bind the texture handle to the 2D texture target.
142 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);
143 |
144 | // Configure min/mag filtering, i.e. what scaling method do we use if what we're rendering
145 | // is smaller or larger than the source image.
146 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
147 | GLES20.GL_LINEAR);
148 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
149 | GLES20.GL_LINEAR);
150 | checkGlError("loadImageTexture");
151 |
152 | // Load the data from the buffer into the texture handle.
153 | GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, format,
154 | width, height, /*border*/ 0, format, GLES20.GL_UNSIGNED_BYTE, data);
155 | checkGlError("loadImageTexture");
156 |
157 | return textureHandle;
158 | }
159 |
160 | /**
161 | * Allocates a direct float buffer, and populates it with the float array data.
162 | */
163 | public static FloatBuffer createFloatBuffer(float[] coords) {
164 | // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
165 | ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * SIZEOF_FLOAT);
166 | bb.order(ByteOrder.nativeOrder());
167 | FloatBuffer fb = bb.asFloatBuffer();
168 | fb.put(coords);
169 | fb.position(0);
170 | return fb;
171 | }
172 |
173 | /**
174 | * Writes GL version info to the log.
175 | */
176 | public static void logVersionInfo() {
177 | Log.i(TAG, "vendor : " + GLES20.glGetString(GLES20.GL_VENDOR));
178 | Log.i(TAG, "renderer: " + GLES20.glGetString(GLES20.GL_RENDERER));
179 | Log.i(TAG, "version : " + GLES20.glGetString(GLES20.GL_VERSION));
180 |
181 | if (false) {
182 | int[] values = new int[1];
183 | GLES30.glGetIntegerv(GLES30.GL_MAJOR_VERSION, values, 0);
184 | int majorVersion = values[0];
185 | GLES30.glGetIntegerv(GLES30.GL_MINOR_VERSION, values, 0);
186 | int minorVersion = values[0];
187 | if (GLES30.glGetError() == GLES30.GL_NO_ERROR) {
188 | Log.i(TAG, "iversion: " + majorVersion + "." + minorVersion);
189 | }
190 | }
191 | }
192 | public static native void glReadPixels(
193 | int x,
194 | int y,
195 | int width,
196 | int height,
197 | int format,
198 | int type,
199 | int offset
200 | );
201 | }
202 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/gles/OffscreenSurface.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2013 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.erlei.videorecorder.gles;
18 |
19 | /**
20 | * Off-screen EGL surface (pbuffer).
21 | *
22 | * It's good practice to explicitly release() the surface, preferably from a "finally" block.
23 | */
24 | public class OffscreenSurface extends EglSurfaceBase {
25 | /**
26 | * Creates an off-screen surface with the specified width and height.
27 | */
28 | public OffscreenSurface(EglCore eglCore, int width, int height) {
29 | super(eglCore);
30 | createOffscreenSurface(width, height);
31 | }
32 | }
33 |
34 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/gles/ScaledDrawable2d.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.erlei.videorecorder.gles;
17 |
18 | import com.erlei.videorecorder.util.LogUtil;
19 |
20 | import java.nio.ByteBuffer;
21 | import java.nio.ByteOrder;
22 | import java.nio.FloatBuffer;
23 |
24 | /**
25 | * Tweaked version of Drawable2d that rescales the texture coordinates to provide a
26 | * "zoom" effect.
27 | */
28 | public class ScaledDrawable2d extends Drawable2d {
29 | private static final String TAG = LogUtil.TAG;
30 |
31 | private static final int SIZEOF_FLOAT = 4;
32 |
33 | private FloatBuffer mTweakedTexCoordArray;
34 | private float mScale = 1.0f;
35 | private boolean mRecalculate;
36 |
37 |
38 | /**
39 | * Trivial constructor.
40 | */
41 | public ScaledDrawable2d(Prefab shape) {
42 | super(shape);
43 | mRecalculate = true;
44 | }
45 |
46 | /**
47 | * Set the scale factor.
48 | */
49 | public void setScale(float scale) {
50 | if (scale < 0.0f || scale > 1.0f) {
51 | throw new RuntimeException("invalid scale " + scale);
52 | }
53 | mScale = scale;
54 | mRecalculate = true;
55 | }
56 |
57 | /**
58 | * Returns the array of texture coordinates. The first time this is called, we generate
59 | * a modified version of the array from the parent class.
60 | *
61 | * To avoid allocations, this returns internal state. The caller must not modify it.
62 | */
63 | @Override
64 | public FloatBuffer getTexCoordArray() {
65 | if (mRecalculate) {
66 | //Log.v(TAG, "Scaling to " + mScale);
67 | FloatBuffer parentBuf = super.getTexCoordArray();
68 | int count = parentBuf.capacity();
69 |
70 | if (mTweakedTexCoordArray == null) {
71 | ByteBuffer bb = ByteBuffer.allocateDirect(count * SIZEOF_FLOAT);
72 | bb.order(ByteOrder.nativeOrder());
73 | mTweakedTexCoordArray = bb.asFloatBuffer();
74 | }
75 |
76 | // Texture coordinates range from 0.0 to 1.0, inclusive. We do a simple scale
77 | // here, but we could get much fancier if we wanted to (say) zoom in and pan
78 | // around.
79 | FloatBuffer fb = mTweakedTexCoordArray;
80 | float scale = mScale;
81 | for (int i = 0; i < count; i++) {
82 | float fl = parentBuf.get(i);
83 | fl = ((fl - 0.5f) * scale) + 0.5f;
84 | fb.put(i, fl);
85 | }
86 |
87 | mRecalculate = false;
88 | }
89 |
90 | return mTweakedTexCoordArray;
91 | }
92 | }
93 |
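
A minimal usage sketch (assumption, not repository code): Drawable2d.Prefab.FULL_RECTANGLE is assumed to be a prefab exposed by the Drawable2d base class. setScale() only marks the coordinates dirty; the next getTexCoordArray() call rescales them around the 0.5/0.5 centre.

    // Hedged sketch: runs on the GL thread that owns the drawable.
    ScaledDrawable2d rect = new ScaledDrawable2d(Drawable2d.Prefab.FULL_RECTANGLE);
    rect.setScale(0.5f);                                // sample the centre 50% of the texture (a 2x zoom)
    FloatBuffer zoomedCoords = rect.getTexCoordArray(); // recalculated lazily on the first call after setScale()
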
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/gles/ShaderProgram.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.gles;
2 |
3 | import android.graphics.PointF;
4 | import android.opengl.GLES20;
5 |
6 | import java.nio.FloatBuffer;
7 |
8 | /**
9 | * Created by lll on 2018/8/7 .
10 | * Email : lllemail@foxmail.com
11 |  * Describe : Shader program wrapper
12 | */
13 | public class ShaderProgram {
14 |
15 | protected int mProgram;
16 |
17 | public ShaderProgram() {
18 | this("");
19 | }
20 |
21 | public ShaderProgram(String vertex) {
22 | this(vertex, "fragment");
23 | }
24 |
25 | public ShaderProgram(String vertex, String fragment) {
26 | mProgram = GLUtil.createProgram(vertex, fragment);
27 | }
28 |
29 | public void use() {
30 | use(true);
31 | }
32 |
33 | public void use(boolean use) {
34 | GLES20.glUseProgram(use ? mProgram : 0);
35 | }
36 |
37 |
38 | public void setInteger(final int location, final int intValue) {
39 | GLES20.glUniform1i(location, intValue);
40 | }
41 |
42 | public void setFloat(final int location, final float floatValue) {
43 | GLES20.glUniform1f(location, floatValue);
44 | }
45 |
46 | public void setFloatVec2(final int location, final float[] arrayValue) {
47 | GLES20.glUniform2fv(location, 1, FloatBuffer.wrap(arrayValue));
48 | }
49 |
50 | public void setFloatVec3(final int location, final float[] arrayValue) {
51 | GLES20.glUniform3fv(location, 1, FloatBuffer.wrap(arrayValue));
52 | }
53 |
54 | public void setFloatVec4(final int location, final float[] arrayValue) {
55 | GLES20.glUniform4fv(location, 1, FloatBuffer.wrap(arrayValue));
56 | }
57 |
58 | public void setFloatArray(final int location, final float[] arrayValue) {
59 | GLES20.glUniform1fv(location, arrayValue.length, FloatBuffer.wrap(arrayValue));
60 | }
61 |
62 | public void setPoint(final int location, final PointF point) {
63 | float[] vec2 = new float[2];
64 | vec2[0] = point.x;
65 | vec2[1] = point.y;
66 | GLES20.glUniform2fv(location, 1, vec2, 0);
67 | }
68 |
69 | public void setUniformMatrix3fv(final int location, final float[] matrix) {
70 | GLES20.glUniformMatrix3fv(location, 1, false, matrix, 0);
71 | }
72 |
73 | public void setUniformMatrix4fv(final int location, final float[] matrix) {
74 | GLES20.glUniformMatrix4fv(location, 1, false, matrix, 0);
75 | }
76 |
77 | public void setInteger(String name, final int intValue) {
78 | int uniformLocation = getUniformLocation(name);
79 | setInteger(uniformLocation, intValue);
80 | }
81 |
82 | public void setFloat(final String name, final float floatValue) {
83 | int uniformLocation = getUniformLocation(name);
84 | setFloat(uniformLocation, floatValue);
85 | }
86 |
87 | public void setFloatVec2(final String name, final float[] arrayValue) {
88 | int uniformLocation = getUniformLocation(name);
89 | setFloatVec2(uniformLocation, arrayValue);
90 | }
91 |
92 | public void setFloatVec3(final String name, final float[] arrayValue) {
93 | int uniformLocation = getUniformLocation(name);
94 | setFloatVec3(uniformLocation, arrayValue);
95 | }
96 |
97 | public void setFloatVec4(final String name, final float[] arrayValue) {
98 | int uniformLocation = getUniformLocation(name);
99 | setFloatVec4(uniformLocation, arrayValue);
100 | }
101 |
102 | public void setFloatArray(final String name, final float[] arrayValue) {
103 | int uniformLocation = getUniformLocation(name);
104 | setFloatArray(uniformLocation, arrayValue);
105 | }
106 |
107 | public void setPoint(final String name, final PointF point) {
108 | int uniformLocation = getUniformLocation(name);
109 | setPoint(uniformLocation, point);
110 | }
111 |
112 | public void setUniformMatrix3fv(final String name, final float[] matrix) {
113 | int uniformLocation = getUniformLocation(name);
114 | setUniformMatrix3fv(uniformLocation, matrix);
115 | }
116 |
117 | public void setUniformMatrix4fv(final String name, final float[] matrix) {
118 | int uniformLocation = getUniformLocation(name);
119 | setUniformMatrix4fv(uniformLocation, matrix);
120 | }
121 |
122 | public int getUniformLocation(int program, String name) {
123 | int location = GLES20.glGetUniformLocation(program, name);
124 |         GLUtil.checkLocation(location, "glGetUniformLocation " + name);
125 | return location;
126 | }
127 |
128 | public int getUniformLocation(String name) {
129 | return getUniformLocation(mProgram, name);
130 | }
131 | }
132 |
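
A minimal usage sketch (assumption): vertexSrc/fragmentSrc stand in for real GLSL source strings and "uTexture"/"uMvpMatrix" are hypothetical uniform names. The string-based setters resolve the uniform location by name on every call, so hot paths may prefer the int-location overloads.

    // Hedged sketch: must run on a thread with a current GL context.
    ShaderProgram program = new ShaderProgram(vertexSrc, fragmentSrc);
    program.use();                                                    // glUseProgram(mProgram)
    program.setInteger("uTexture", 0);                                // sample from texture unit 0
    program.setUniformMatrix4fv("uMvpMatrix", GLUtil.IDENTITY_MATRIX);
    // ... issue draw calls ...
    program.use(false);                                               // glUseProgram(0)
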
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/gles/Sprite2d.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.erlei.videorecorder.gles;
18 |
19 | import android.opengl.Matrix;
20 | import android.util.Log;
21 |
22 | import com.erlei.videorecorder.util.LogUtil;
23 |
24 | /**
25 | * Base class for a 2d object. Includes position, scale, rotation, and flat-shaded color.
26 | */
27 | public class Sprite2d {
28 | private static final String TAG = LogUtil.TAG;
29 |
30 | private Drawable2d mDrawable;
31 | private float mColor[];
32 | private int mTextureId;
33 | private float mAngle;
34 | private float mScaleX, mScaleY;
35 | private float mPosX, mPosY;
36 |
37 | private float[] mModelViewMatrix;
38 | private boolean mMatrixReady;
39 |
40 | private float[] mScratchMatrix = new float[16];
41 |
42 | public Sprite2d(Drawable2d drawable) {
43 | mDrawable = drawable;
44 | mColor = new float[4];
45 | mColor[3] = 1.0f;
46 | mTextureId = -1;
47 |
48 | mModelViewMatrix = new float[16];
49 | mMatrixReady = false;
50 | }
51 |
52 | /**
53 | * Re-computes mModelViewMatrix, based on the current values for rotation, scale, and
54 | * translation.
55 | */
56 | private void recomputeMatrix() {
57 | float[] modelView = mModelViewMatrix;
58 |
59 | Matrix.setIdentityM(modelView, 0);
60 | Matrix.translateM(modelView, 0, mPosX, mPosY, 0.0f);
61 | if (mAngle != 0.0f) {
62 | Matrix.rotateM(modelView, 0, mAngle, 0.0f, 0.0f, 1.0f);
63 | }
64 | Matrix.scaleM(modelView, 0, mScaleX, mScaleY, 1.0f);
65 | mMatrixReady = true;
66 | }
67 |
68 | /**
69 | * Returns the sprite scale along the X axis.
70 | */
71 | public float getScaleX() {
72 | return mScaleX;
73 | }
74 |
75 | /**
76 | * Returns the sprite scale along the Y axis.
77 | */
78 | public float getScaleY() {
79 | return mScaleY;
80 | }
81 |
82 | /**
83 | * Sets the sprite scale (size).
84 | */
85 | public void setScale(float scaleX, float scaleY) {
86 | mScaleX = scaleX;
87 | mScaleY = scaleY;
88 | mMatrixReady = false;
89 | }
90 |
91 | /**
92 | * Gets the sprite rotation angle, in degrees.
93 | */
94 | public float getRotation() {
95 | return mAngle;
96 | }
97 |
98 | /**
99 | * Sets the sprite rotation angle, in degrees. Sprite will rotate counter-clockwise.
100 | */
101 | public void setRotation(float angle) {
102 | // Normalize. We're not expecting it to be way off, so just iterate.
103 | while (angle >= 360.0f) {
104 | angle -= 360.0f;
105 | }
106 | while (angle <= -360.0f) {
107 | angle += 360.0f;
108 | }
109 | mAngle = angle;
110 | mMatrixReady = false;
111 | }
112 |
113 | /**
114 | * Returns the position on the X axis.
115 | */
116 | public float getPositionX() {
117 | return mPosX;
118 | }
119 |
120 | /**
121 | * Returns the position on the Y axis.
122 | */
123 | public float getPositionY() {
124 | return mPosY;
125 | }
126 |
127 | /**
128 | * Sets the sprite position.
129 | */
130 | public void setPosition(float posX, float posY) {
131 | mPosX = posX;
132 | mPosY = posY;
133 | mMatrixReady = false;
134 | }
135 |
136 | /**
137 | * Returns the model-view matrix.
138 | *
139 | * To avoid allocations, this returns internal state. The caller must not modify it.
140 | */
141 | public float[] getModelViewMatrix() {
142 | if (!mMatrixReady) {
143 | recomputeMatrix();
144 | }
145 | return mModelViewMatrix;
146 | }
147 |
148 | /**
149 | * Sets color to use for flat-shaded rendering. Has no effect on textured rendering.
150 | */
151 | public void setColor(float red, float green, float blue) {
152 | mColor[0] = red;
153 | mColor[1] = green;
154 | mColor[2] = blue;
155 | }
156 |
157 | /**
158 | * Sets texture to use for textured rendering. Has no effect on flat-shaded rendering.
159 | */
160 | public void setTexture(int textureId) {
161 | mTextureId = textureId;
162 | }
163 |
164 | /**
165 | * Returns the color.
166 | *
167 | * To avoid allocations, this returns internal state. The caller must not modify it.
168 | */
169 | public float[] getColor() {
170 | return mColor;
171 | }
172 |
173 | /**
174 | * Draws the rectangle with the supplied program and projection matrix.
175 | */
176 | public void draw(FlatShadedProgram program, float[] projectionMatrix) {
177 | // Compute model/view/projection matrix.
178 | Matrix.multiplyMM(mScratchMatrix, 0, projectionMatrix, 0, getModelViewMatrix(), 0);
179 |
180 | program.draw(mScratchMatrix, mColor, mDrawable.getVertexArray(), 0,
181 | mDrawable.getVertexCount(), mDrawable.getCoordsPerVertex(),
182 | mDrawable.getVertexStride());
183 | }
184 |
185 | /**
186 | * Draws the rectangle with the supplied program and projection matrix.
187 | */
188 | public void draw(Texture2dProgram program, float[] projectionMatrix) {
189 | // Compute model/view/projection matrix.
190 | Matrix.multiplyMM(mScratchMatrix, 0, projectionMatrix, 0, getModelViewMatrix(), 0);
191 |
192 | program.draw(mScratchMatrix, mDrawable.getVertexArray(), 0,
193 | mDrawable.getVertexCount(), mDrawable.getCoordsPerVertex(),
194 | mDrawable.getVertexStride(), GLUtil.IDENTITY_MATRIX, mDrawable.getTexCoordArray(),
195 | mTextureId, mDrawable.getTexCoordStride());
196 | }
197 |
198 | @Override
199 | public String toString() {
200 | return "[Sprite2d pos=" + mPosX + "," + mPosY +
201 | " scale=" + mScaleX + "," + mScaleY + " angle=" + mAngle +
202 | " color={" + mColor[0] + "," + mColor[1] + "," + mColor[2] +
203 | "} drawable=" + mDrawable + "]";
204 | }
205 | }
206 |
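
A usage sketch (assumption): the Drawable2d(Prefab) constructor, cameraTextureId, texture2dProgram, projectionMatrix and the surface dimensions are placeholders for objects created elsewhere on the GL thread.

    // Hedged sketch: position and scale are in the same units as the projection matrix.
    Sprite2d sprite = new Sprite2d(new Drawable2d(Drawable2d.Prefab.RECTANGLE));
    sprite.setTexture(cameraTextureId);                        // external OES texture from the camera
    sprite.setPosition(surfaceWidth / 2f, surfaceHeight / 2f); // centre of the viewport
    sprite.setScale(surfaceWidth, surfaceHeight);              // fill the viewport
    sprite.draw(texture2dProgram, projectionMatrix);           // MVP = projection * model-view
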
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/gles/WindowSurface.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2013 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.erlei.videorecorder.gles;
18 |
19 | import android.graphics.SurfaceTexture;
20 | import android.view.Surface;
21 |
22 | /**
23 | * Recordable EGL window surface.
24 | *
25 | * It's good practice to explicitly release() the surface, preferably from a "finally" block.
26 | */
27 | public class WindowSurface extends EglSurfaceBase {
28 | private Object mSurface;
29 | private boolean mReleaseSurface;
30 |
31 | /**
32 | * Associates an EGL surface with the native window surface.
33 | *
34 | * Set releaseSurface to true if you want the Surface to be released when release() is
35 | * called. This is convenient, but can interfere with framework classes that expect to
36 | * manage the Surface themselves (e.g. if you release a SurfaceView's Surface, the
37 | * surfaceDestroyed() callback won't fire).
38 | */
39 | public WindowSurface(EglCore eglCore, Object surface, boolean releaseSurface) {
40 | super(eglCore);
41 | createWindowSurface(surface);
42 | mSurface = surface;
43 | mReleaseSurface = releaseSurface;
44 | }
45 |
46 | /**
47 | * Associates an EGL surface with the SurfaceTexture.
48 | */
49 | public WindowSurface(EglCore eglCore, SurfaceTexture surfaceTexture) {
50 | super(eglCore);
51 | createWindowSurface(surfaceTexture);
52 | }
53 |
54 | public WindowSurface(OffscreenSurface windowSurface) {
55 | super(windowSurface.mEglCore);
56 |
57 | }
58 |
59 |
60 | /**
61 | * Releases any resources associated with the EGL surface (and, if configured to do so,
62 | * with the Surface as well).
63 | *
64 | * Does not require that the surface's EGL context be current.
65 | */
66 | public void release() {
67 | super.release();
68 | if (mSurface != null) {
69 | if (mReleaseSurface) {
70 | if (mSurface instanceof Surface) {
71 | ((Surface) mSurface).release();
72 | }
73 | }
74 | mSurface = null;
75 | }
76 | }
77 |
78 | /**
79 | * Recreate the EGLSurface, using the new EglBase. The caller should have already
80 | * freed the old EGLSurface with releaseEglSurface().
81 | *
82 | * This is useful when we want to update the EGLSurface associated with a Surface.
83 | * For example, if we want to share with a different EGLContext, which can only
84 | * be done by tearing down and recreating the context. (That's handled by the caller;
85 | * this just creates a new EGLSurface for the Surface we were handed earlier.)
86 | *
87 | * If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a
88 | * context somewhere, the create call will fail with complaints from the Surface
89 | * about already being connected.
90 | */
91 | public void recreate(EglCore newEglCore) {
92 | if (mSurface == null) {
93 | throw new RuntimeException("not yet implemented for SurfaceTexture");
94 | }
95 | mEglCore = newEglCore; // switch to new context
96 | createWindowSurface(mSurface); // create new surface
97 | }
98 | }
99 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/recorder/AudioCapture.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.recorder;
2 |
3 | import android.media.AudioFormat;
4 | import android.media.AudioRecord;
5 | import android.media.MediaRecorder;
6 | import android.os.Handler;
7 | import android.os.HandlerThread;
8 | import android.os.Message;
9 |
10 | import com.erlei.videorecorder.encoder.AudioEncoder;
11 | import com.erlei.videorecorder.encoder.MediaEncoder;
12 | import com.erlei.videorecorder.util.LogUtil;
13 |
14 | import java.nio.ByteBuffer;
15 |
16 | public class AudioCapture extends HandlerThread {
17 |
18 | private static final String TAG = "AudioCapture";
19 |
20 |
21 | private static final int MSG_READ_SAMPLES = 1;
22 | private static final int MSG_STOP_CAPTURE = 2;
23 |
24 |
25 | private static final int SAMPLES_PER_FRAME = 1024; // AAC, bytes/frame/channel
26 | private static final int FRAMES = 25; // AAC, frame/buffer/sec
27 | private static final int SAMPLE_RATE = 44100; // 44.1[KHz] is only setting guaranteed to be available on all devices.
28 | private final Handler mHandler;
29 | private int mSampleRate = SAMPLE_RATE;
30 | private int mFrames = FRAMES;
31 | private int mChannelConfig;
32 | private final AudioEncoder mAudioEncoder;
33 | private ByteBuffer mByteBuffer;
34 | private AudioRecord mAudioRecord;
35 | private int mMinBufferSize;
36 |
37 |
38 | public AudioCapture(AudioEncoder audioEncoder, int frames) {
39 | this(audioEncoder, SAMPLE_RATE, frames, 1);
40 | }
41 |
42 | public AudioCapture(AudioEncoder encoder, int sampleRate, int frames, int channelCount) {
43 | super(TAG);
44 | mAudioEncoder = encoder;
45 | mSampleRate = sampleRate;
46 | mFrames = frames;
47 | mChannelConfig = channelCount == 2 ? AudioFormat.CHANNEL_IN_STEREO : AudioFormat.CHANNEL_IN_MONO;
48 | start();
49 | mHandler = new Handler(getLooper()) {
50 | @Override
51 | public void handleMessage(Message msg) {
52 | super.handleMessage(msg);
53 | switch (msg.what) {
54 | case MSG_READ_SAMPLES:
55 | readSamples();
56 | break;
57 |
58 | case MSG_STOP_CAPTURE:
59 | handleStopCapture();
60 | break;
61 | }
62 | }
63 | };
64 | }
65 |
66 | private static final int[] AUDIO_SOURCES = new int[]{
67 | MediaRecorder.AudioSource.MIC,
68 | MediaRecorder.AudioSource.DEFAULT,
69 | MediaRecorder.AudioSource.CAMCORDER,
70 | MediaRecorder.AudioSource.VOICE_COMMUNICATION,
71 | MediaRecorder.AudioSource.VOICE_RECOGNITION,
72 | };
73 |
74 | private int mFrameCount;
75 |
76 | @Override
77 | protected void onLooperPrepared() {
78 | super.onLooperPrepared();
79 | initAudioRecord();
80 | }
81 |
82 | @Override
83 | public void run() {
84 | android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
85 | super.run();
86 | }
87 |
88 | private void initAudioRecord() {
89 | mMinBufferSize = AudioRecord.getMinBufferSize(
90 | mSampleRate,
91 | mChannelConfig,
92 | AudioFormat.ENCODING_PCM_16BIT);
93 |
94 | mAudioRecord = null;
95 | for (final int source : AUDIO_SOURCES) {
96 | try {
97 | mAudioRecord = new AudioRecord(
98 | source,
99 | mSampleRate,
100 | mChannelConfig,
101 | AudioFormat.ENCODING_PCM_16BIT,
102 | mMinBufferSize * mFrames / 2);
103 |
104 | if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
105 | mAudioRecord.release();
106 | mAudioRecord = null;
107 | }
108 |
109 | } catch (final Exception e) {
110 | e.printStackTrace();
111 | mAudioRecord = null;
112 |                 LogUtil.loge(TAG, "new AudioRecord with source " + source + " error " + e);
113 | }
114 | if (mAudioRecord != null) break;
115 | }
116 | if (mAudioRecord != null) {
117 | LogUtil.logd(TAG, "mAudioRecord.startRecording();");
118 | mByteBuffer = ByteBuffer.allocateDirect(mMinBufferSize / 2);
119 | mAudioRecord.startRecording();
120 | // int framePeriod = mSampleRate * (1000 / mFrames) / 1000;
121 | int framePeriod = 160;
122 |             LogUtil.loge(TAG, "setPositionNotificationPeriod " + framePeriod);
123 | mAudioRecord.setPositionNotificationPeriod(framePeriod);
124 | mAudioRecord.setRecordPositionUpdateListener(new AudioRecord.OnRecordPositionUpdateListener() {
125 | @Override
126 | public void onMarkerReached(AudioRecord audioRecord) {
127 | LogUtil.logd(TAG, "onMarkerReached");
128 | }
129 | private long last;
130 | @Override
131 | public void onPeriodicNotification(AudioRecord audioRecord) {
132 | long l = System.currentTimeMillis();
133 | LogUtil.logd(TAG, "onPeriodicNotification "+(l - last));
134 | mHandler.sendEmptyMessage(MSG_READ_SAMPLES);
135 | last = l;
136 |
137 | }
138 | }, mHandler);
139 | mHandler.sendEmptyMessage(MSG_READ_SAMPLES);
140 | }
141 |
142 | }
143 |
144 | private void readSamples() {
145 | MediaEncoder.MediaEncoderHandler handler = mAudioEncoder.getHandler();
146 | if (handler == null) return;
147 | mByteBuffer.clear();
148 | int readBytes = mAudioRecord.read(mByteBuffer, mMinBufferSize / 2);
149 | if (readBytes > 0) {
150 | // set audio data to encoder
151 | mByteBuffer.position(readBytes);
152 | mByteBuffer.flip();
153 | LogUtil.logd(TAG, "AudioCapture send count " + ++mFrameCount + "\t\t readBytes = " + readBytes + "\t ");
154 | handler.encode(mByteBuffer, readBytes, System.nanoTime() / 1000L);
155 | handler.frameAvailableSoon();
156 | }
157 | }
158 |
159 |
160 | public void stopCapture() {
161 | mHandler.sendEmptyMessage(MSG_STOP_CAPTURE);
162 | }
163 |
164 |
165 | private void handleStopCapture() {
166 | MediaEncoder.MediaEncoderHandler handler = mAudioEncoder.getHandler();
167 | if (handler == null) return;
168 | handler.encode(null, 0, System.nanoTime() / 1000L);
169 | try {
170 | mAudioRecord.stop();
171 | } catch (Exception e) {
172 | e.printStackTrace();
173 | } finally {
174 | mAudioRecord.release();
175 | }
176 | quitSafely();
177 | LogUtil.loge(TAG, "quit");
178 | }
179 |
180 | }
181 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/recorder/AudioCaptureThread.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.recorder;
2 |
3 | import android.media.AudioFormat;
4 | import android.media.AudioRecord;
5 | import android.media.AudioTimestamp;
6 | import android.media.MediaRecorder;
7 |
8 | import com.erlei.videorecorder.encoder.AudioEncoder;
9 | import com.erlei.videorecorder.encoder.MediaEncoder;
10 | import com.erlei.videorecorder.util.LogUtil;
11 |
12 | import java.nio.ByteBuffer;
13 |
14 | public class AudioCaptureThread extends Thread {
15 |
16 | private static final String TAG = "AudioCaptureThread";
17 |
18 | private static final int SAMPLES_PER_FRAME = 1024; // AAC, bytes/frame/channel
19 | private static final int FRAMES = 25; // AAC, frame/buffer/sec
20 | private static final int SAMPLE_RATE = 44100; // 44.1[KHz] is only setting guaranteed to be available on all devices.
21 | private int mSampleRate = SAMPLE_RATE;
22 | private int mFrames = FRAMES;
23 | private int mChannelConfig;
24 | private final AudioEncoder mAudioEncoder;
25 | private volatile boolean mCapture = true;
26 |
27 |
28 | public AudioCaptureThread(AudioEncoder audioEncoder, int frames) {
29 | this(audioEncoder, SAMPLE_RATE, frames, 1);
30 | }
31 |
32 | public AudioCaptureThread(AudioEncoder encoder, int sampleRate, int frames, int channelCount) {
33 | mAudioEncoder = encoder;
34 | mSampleRate = sampleRate;
35 | mFrames = frames;
36 | mChannelConfig = channelCount == 2 ? AudioFormat.CHANNEL_IN_STEREO : AudioFormat.CHANNEL_IN_MONO;
37 | }
38 |
39 | private static final int[] AUDIO_SOURCES = new int[]{
40 | MediaRecorder.AudioSource.MIC,
41 | MediaRecorder.AudioSource.DEFAULT,
42 | MediaRecorder.AudioSource.CAMCORDER,
43 | MediaRecorder.AudioSource.VOICE_COMMUNICATION,
44 | MediaRecorder.AudioSource.VOICE_RECOGNITION,
45 | };
46 |
47 | private int mFrameCount;
48 |
49 | @Override
50 | public void run() {
51 | android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
52 | try {
53 | final int minBufferSize = AudioRecord.getMinBufferSize(
54 | mSampleRate,
55 | mChannelConfig,
56 | AudioFormat.ENCODING_PCM_16BIT);
57 |
58 | AudioRecord audioRecord = null;
59 | for (final int source : AUDIO_SOURCES) {
60 | try {
61 | audioRecord = new AudioRecord(
62 | source,
63 | mSampleRate,
64 | mChannelConfig,
65 | AudioFormat.ENCODING_PCM_16BIT,
66 | minBufferSize * 2);
67 |
68 | if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
69 | audioRecord.release();
70 | audioRecord = null;
71 | }
72 |
73 | } catch (final Exception e) {
74 | e.printStackTrace();
75 | audioRecord = null;
76 |                     LogUtil.loge(TAG, "new AudioRecord with source " + source + " error " + e);
77 | }
78 | if (audioRecord != null) break;
79 | }
80 | if (audioRecord != null) {
81 | try {
82 | if (!isInterrupted() && mCapture) {
83 | LogUtil.logd(TAG, "AudioThread:startRecord audio recording");
84 | final ByteBuffer buf = ByteBuffer.allocateDirect(minBufferSize / 2);
85 | int readBytes;
86 | audioRecord.startRecording();
87 | try {
88 | MediaEncoder.MediaEncoderHandler handler = mAudioEncoder.getHandler();
89 | long lastMillis = System.currentTimeMillis();
90 | while (!isInterrupted() && mCapture) {
91 | // read audio data from internal mic
92 | buf.clear();
93 | readBytes = audioRecord.read(buf, minBufferSize / 2);
94 | if (readBytes > 0) {
95 | // set audio data to encoder
96 | buf.position(readBytes);
97 | buf.flip();
98 | LogUtil.logd(TAG, "AudioCapture send count " + ++mFrameCount + "\t\t readBytes = " + readBytes + "\t " + (System.currentTimeMillis() - lastMillis) + "ms");
99 | handler.encode(buf, readBytes, System.nanoTime() / 1000L);
100 | lastMillis = System.currentTimeMillis();
101 | handler.frameAvailableSoon();
102 | }
103 | }
104 | handler.encode(null, 0, System.nanoTime() / 1000L);
105 | } finally {
106 | audioRecord.stop();
107 | }
108 | }
109 | } finally {
110 | audioRecord.release();
111 | }
112 | } else {
113 | LogUtil.loge(TAG, "failed to initialize AudioRecord");
114 | }
115 | } catch (final Exception e) {
116 | LogUtil.loge(TAG, "AudioThread#run" + e);
117 | }
118 | LogUtil.loge(TAG, "AudioThread:finished");
119 | }
120 |
121 | public void setCapture(boolean capture) {
122 | mCapture = capture;
123 | }
124 |
125 | public boolean isCapture() {
126 | return mCapture;
127 | }
128 | }
129 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/recorder/CameraController.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.recorder;
2 |
3 | import android.content.Context;
4 | import android.graphics.Rect;
5 | import android.graphics.SurfaceTexture;
6 | import android.support.annotation.IntRange;
7 | import android.view.MotionEvent;
8 |
9 | import com.erlei.videorecorder.camera.Camera;
10 | import com.erlei.videorecorder.camera.FpsRange;
11 | import com.erlei.videorecorder.camera.Size;
12 | import com.erlei.videorecorder.camera.annotations.Antibanding;
13 | import com.erlei.videorecorder.camera.annotations.ColorEffect;
14 | import com.erlei.videorecorder.camera.annotations.Facing;
15 | import com.erlei.videorecorder.camera.annotations.FlashModel;
16 | import com.erlei.videorecorder.camera.annotations.FocusModel;
17 | import com.erlei.videorecorder.camera.annotations.SceneModel;
18 | import com.erlei.videorecorder.camera.annotations.WhiteBalance;
19 |
20 | import java.util.List;
21 |
22 | public interface CameraController {
23 |
24 |
25 | void setCameraBuilder(Camera.CameraBuilder cameraBuilder);
26 |
27 |     List<Size> getSupportedPreviewSizes();
28 |
29 | Camera.CameraBuilder getCameraBuilder();
30 |
31 | void setPreviewFpsRange(FpsRange fpsRange);
32 |
33 | void setZoomOnTouch(MotionEvent event);
34 |
35 | /**
36 |      * @return whether the camera is open
37 | */
38 | boolean isOpen();
39 |
40 | int getCameraOrientation();
41 |
42 | int getDisplayOrientation();
43 |
44 | void setFocusAreaOnTouch(MotionEvent event);
45 |
46 | void setMeteringAreaOnTouch(MotionEvent event);
47 |
48 | android.hardware.Camera.Parameters getCameraParameters();
49 |
50 | Camera getCamera();
51 |
52 | void setCameraParameters(android.hardware.Camera.Parameters parameters);
53 |
54 | Context getContext();
55 |
56 | Size getCameraSize();
57 |
58 | Size getSurfaceSize();
59 |
60 | boolean openCamera(SurfaceTexture texture);
61 |
62 | void closeCamera();
63 |
64 | /**
65 |      * @return true if the current camera is the front-facing camera
66 | */
67 | boolean isFront();
68 |
69 | /**
70 |      * Sets the zoom level, if supported by the device.
71 |      * The minimum is 0 and the maximum is getMaxZoom(); values larger than the maximum are clamped to the maximum.
72 |      *
73 |      * @param zoom zoom level
74 | */
75 | void setZoom(@IntRange(from = 0, to = Integer.MAX_VALUE) int zoom);
76 |
77 | /**
78 |      * Smoothly zooms to the given level, if supported by the device.
79 |      * The minimum is 0 and the maximum is getMaxZoom(); values larger than the maximum are clamped to the maximum.
80 |      *
81 |      * @param zoom zoom level
82 | */
83 | void startSmoothZoom(@IntRange(from = 0, to = Integer.MAX_VALUE) int zoom);
84 |
85 | /**
86 |      * @param sceneModels scene modes
87 | * @see android.hardware.Camera.Parameters#SCENE_MODE_ACTION
88 | * @see android.hardware.Camera.Parameters#SCENE_MODE_AUTO
89 | * @see android.hardware.Camera.Parameters#SCENE_MODE_BARCODE
90 | * @see android.hardware.Camera.Parameters#SCENE_MODE_BEACH
91 | * @see android.hardware.Camera.Parameters#SCENE_MODE_CANDLELIGHT
92 | * @see android.hardware.Camera.Parameters#SCENE_MODE_FIREWORKS
93 | * @see android.hardware.Camera.Parameters#SCENE_MODE_HDR
94 | * @see android.hardware.Camera.Parameters#SCENE_MODE_LANDSCAPE
95 | * @see android.hardware.Camera.Parameters#SCENE_MODE_NIGHT
96 | * @see android.hardware.Camera.Parameters#SCENE_MODE_NIGHT_PORTRAIT
97 | * @see android.hardware.Camera.Parameters#SCENE_MODE_PARTY
98 | * @see android.hardware.Camera.Parameters#SCENE_MODE_SNOW
99 | * @see android.hardware.Camera.Parameters#SCENE_MODE_PORTRAIT
100 | * @see android.hardware.Camera.Parameters#SCENE_MODE_SPORTS
101 | * @see android.hardware.Camera.Parameters#SCENE_MODE_THEATRE
102 | * @see android.hardware.Camera.Parameters#SCENE_MODE_SUNSET
103 | * @see android.hardware.Camera.Parameters#SCENE_MODE_STEADYPHOTO
104 | */
105 | void setSceneMode(@SceneModel String... sceneModels);
106 |
107 | /**
108 |      * @param colorEffects color effects to apply
109 | * @see android.hardware.Camera.Parameters#EFFECT_SEPIA
110 | * @see android.hardware.Camera.Parameters#EFFECT_AQUA
111 | * @see android.hardware.Camera.Parameters#EFFECT_BLACKBOARD
112 | * @see android.hardware.Camera.Parameters#EFFECT_MONO
113 | * @see android.hardware.Camera.Parameters#EFFECT_NEGATIVE
114 | * @see android.hardware.Camera.Parameters#EFFECT_NONE
115 | * @see android.hardware.Camera.Parameters#EFFECT_POSTERIZE
116 | * @see android.hardware.Camera.Parameters#EFFECT_SOLARIZE
117 | * @see android.hardware.Camera.Parameters#EFFECT_WHITEBOARD
118 | */
119 | void setColorEffects(@ColorEffect String... colorEffects);
120 |
121 | /**
122 |      * @param focusModels focus modes
123 | * @see android.hardware.Camera.Parameters#FOCUS_MODE_AUTO
124 | * @see android.hardware.Camera.Parameters#FOCUS_MODE_CONTINUOUS_VIDEO
125 | * @see android.hardware.Camera.Parameters#FOCUS_MODE_CONTINUOUS_PICTURE
126 | * @see android.hardware.Camera.Parameters#FOCUS_MODE_EDOF
127 | * @see android.hardware.Camera.Parameters#FOCUS_MODE_FIXED
128 | * @see android.hardware.Camera.Parameters#FOCUS_MODE_INFINITY
129 | * @see android.hardware.Camera.Parameters#FOCUS_MODE_MACRO
130 | */
131 | void setFocusMode(@FocusModel String... focusModels);
132 |
133 | /**
134 |      * Sets the anti-banding parameter (removes the banding in the camera exposure caused by 50Hz/60Hz light flicker).
135 |      *
136 |      * @param antibanding anti-banding value
137 | * @see android.hardware.Camera.Parameters#ANTIBANDING_50HZ
138 | * @see android.hardware.Camera.Parameters#ANTIBANDING_60HZ
139 | * @see android.hardware.Camera.Parameters#ANTIBANDING_AUTO
140 | * @see android.hardware.Camera.Parameters#ANTIBANDING_OFF
141 | */
142 | void setAntibanding(@Antibanding String... antibanding);
143 |
144 | /**
145 |      * @param flashModels flash modes
146 | * @see android.hardware.Camera.Parameters#FLASH_MODE_AUTO
147 | * @see android.hardware.Camera.Parameters#FLASH_MODE_OFF
148 | * @see android.hardware.Camera.Parameters#FLASH_MODE_ON
149 | * @see android.hardware.Camera.Parameters#FLASH_MODE_RED_EYE
150 | * @see android.hardware.Camera.Parameters#FLASH_MODE_TORCH
151 | */
152 | void setFlashMode(@FlashModel String... flashModels);
153 |
154 | /**
155 |      * Whether the camera is used in recording mode.
156 |      *
157 |      * @param recording recording hint
158 | * @see android.hardware.Camera.Parameters#setRecordingHint(boolean)
159 | */
160 | void setRecordingHint(boolean recording);
161 |
162 | /**
163 |      * Sets which camera facing to use.
164 |      *
165 |      * @param facing camera facing
166 | * @see android.hardware.Camera.CameraInfo#CAMERA_FACING_BACK
167 | * @see android.hardware.Camera.CameraInfo#CAMERA_FACING_FRONT
168 | */
169 | void setFacing(@Facing int facing);
170 |
171 | /**
172 |      * Sets the white balance.
173 |      *
174 |      * @param whiteBalance white balance mode
175 | */
176 | void setWhiteBalance(@WhiteBalance String... whiteBalance);
177 |
178 | /**
179 |      * Sets the exposure compensation.
180 |      *
181 |      * @param compensation exposure compensation value
182 |      *
183 | */
184 | void setExposureCompensation(int compensation);
185 |
186 | /**
187 |      * Sets the metering areas.
188 |      *
189 |      * @param rect metering areas
190 | */
191 | void setMeteringAreas(Rect... rect);
192 |
193 | /**
194 |      * @return the supported preview FPS ranges
195 | */
196 |     List<FpsRange> getSupportedPreviewFpsRange();
197 |
198 | /**
199 |      * Sets the focus areas.
200 |      *
201 |      * @param rect focus areas
202 | */
203 | void setFocusAreas(Rect... rect);
204 |
205 | /**
206 |      * Toggles the camera facing (front/back).
207 | */
208 | void toggleFacing();
209 |
210 | /**
211 |      * Gets the modes supported by the camera; may only be called after the camera has been opened.
212 |      *
213 |      * @param modes modes
214 | */
215 |     List<String> getSupportedModes(String... modes);
216 |
217 |
218 | /**
219 |      * Sets a camera mode (key/value pair).
220 | * @param key key
221 | * @param value value
222 | */
223 | void setMode(String key, String value);
224 | }
225 |
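
A usage sketch (assumption): `recorder` stands for an existing IVideoRecorder. The string arguments reuse the android.hardware.Camera.Parameters constants listed in the Javadoc above, and the calls are only meaningful while isOpen() returns true.

    CameraController controller = recorder.getCameraController();
    if (controller.isOpen()) {
        controller.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_TORCH);
        controller.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
        controller.setZoom(2);        // values above getMaxZoom() are clamped
        controller.toggleFacing();    // switch between front and back camera
    }
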
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/recorder/DefaultCameraPreview.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.recorder;
2 |
3 | import android.content.Context;
4 | import android.graphics.Rect;
5 | import android.view.SurfaceView;
6 | import android.view.TextureView;
7 |
8 | import com.erlei.videorecorder.camera.Size;
9 | import com.erlei.videorecorder.gles.EglCore;
10 |
11 | public class DefaultCameraPreview implements ICameraPreview {
12 |
13 | protected SurfaceView mSurfaceView;
14 | protected TextureView mTextureView;
15 | protected Context mContext;
16 |
17 | public DefaultCameraPreview(SurfaceView surfaceView) {
18 | mSurfaceView = surfaceView;
19 | mContext = surfaceView.getContext();
20 | }
21 |
22 | public DefaultCameraPreview(TextureView textureView) {
23 | mTextureView = textureView;
24 | mContext = textureView.getContext();
25 | }
26 |
27 |
28 | @Override
29 | public Size getSurfaceSize() {
30 | if (mSurfaceView != null) {
31 | Rect surfaceFrame = mSurfaceView.getHolder().getSurfaceFrame();
32 | return new Size(surfaceFrame.width(), surfaceFrame.height());
33 | } else if (mTextureView != null) {
34 | return new Size(mTextureView.getWidth(), mTextureView.getHeight());
35 | }
36 | return new Size(0, 0);
37 | }
38 |
39 | @Override
40 | public Context getContext() {
41 | return mContext;
42 | }
43 |
44 | @Override
45 | public Object getSurface(EglCore eglCore) {
46 | if (mSurfaceView != null) {
47 | return mSurfaceView.getHolder().getSurface();
48 | } else if (mTextureView != null) {
49 | return mTextureView.getSurfaceTexture();
50 | }
51 | return null;
52 | }
53 |
54 | }
55 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/recorder/ICameraPreview.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.recorder;
2 |
3 | import android.content.Context;
4 |
5 | import com.erlei.videorecorder.camera.Size;
6 | import com.erlei.videorecorder.gles.EglCore;
7 |
8 | public interface ICameraPreview {
9 |
10 | Size getSurfaceSize();
11 |
12 | Context getContext();
13 |
14 |     /**
15 |      * @param eglCore the EglCore that will own the EGL surface, if one needs to be created
16 |      * @return the render target for the camera preview; it must be either a Surface or a
17 |      *         SurfaceTexture, otherwise the consumer rejects it with
18 |      *         new RuntimeException("invalid surface: " + surface)
19 |      *
20 |      */
21 | Object getSurface(EglCore eglCore);
22 | }
23 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/recorder/IVideoRecorder.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.recorder;
2 |
3 | import android.graphics.Bitmap;
4 | import android.graphics.SurfaceTexture;
5 |
6 | import java.io.File;
7 |
8 | public interface IVideoRecorder {
9 |
10 | void startPreview();
11 |
12 | void startRecord();
13 |
14 | void stopRecord();
15 |
16 | CameraController getCameraController();
17 |
18 | /**
19 |      * @return whether recording is enabled
20 | */
21 | boolean isRecordEnable();
22 |
23 | /**
24 |      * @return whether the muxer is running
25 | */
26 | boolean isMuxerRunning();
27 |
28 | void onSizeChanged(int width, int height);
29 |
30 | void stopPreview();
31 |
32 | void release();
33 |
34 | File getOutputFile();
35 |
36 | /**
37 |      * @return the SurfaceTexture used for the camera preview
38 | */
39 | SurfaceTexture getPreviewTexture();
40 |
41 |
42 | /**
43 |      * Takes a picture.
44 | */
45 | void takePicture(TakePictureCallback callback);
46 |
47 |
48 | interface TakePictureCallback {
49 |
50 | /**
51 |          * @param picture the saved picture file
52 |          *                Called on the UI thread.
53 | */
54 | void onPictureTaken(File picture);
55 |
56 | /**
57 | * @param bitmap bitmap
58 |          * @return the File the picture should be saved to; return null to skip saving to a file,
59 |          *         in which case onPictureTaken(File) will not be called. Runs on a background thread.
60 | */
61 | File onPictureTaken(Bitmap bitmap);
62 |
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/recorder/OffscreenCameraPreview.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.recorder;
2 |
3 | import android.content.Context;
4 |
5 | import com.erlei.videorecorder.camera.Size;
6 | import com.erlei.videorecorder.gles.EglCore;
7 | import com.erlei.videorecorder.gles.OffscreenSurface;
8 |
9 | public class OffscreenCameraPreview implements ICameraPreview {
10 |
11 | private final Context mContext;
12 | private final int mWidth;
13 | private final int mHeight;
14 | private OffscreenSurface mOffscreenSurface;
15 |
16 | public OffscreenCameraPreview(Context context, int width, int height) {
17 | mContext = context;
18 | mWidth = width;
19 | mHeight = height;
20 | }
21 |
22 | @Override
23 | public Size getSurfaceSize() {
24 | return new Size(mWidth, mHeight);
25 | }
26 |
27 | @Override
28 | public Context getContext() {
29 | return mContext;
30 | }
31 |
32 | /**
33 |      * @param eglCore the EglCore used to create the off-screen pbuffer surface
34 |      * @return the render target; this implementation lazily creates and returns an
35 |      *         OffscreenSurface of the configured width and height (see the
36 |      *         Surface/SurfaceTexture note on ICameraPreview#getSurface)
37 |      *
38 | */
39 | @Override
40 | public synchronized Object getSurface(EglCore eglCore) {
41 | if (mOffscreenSurface == null)
42 | mOffscreenSurface = new OffscreenSurface(eglCore, mWidth, mHeight);
43 | return mOffscreenSurface;
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/recorder/OnDrawTextureListener.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.recorder;
2 |
3 | import com.erlei.videorecorder.camera.Size;
4 |
5 | public interface OnDrawTextureListener {
6 |
7 | /**
8 | * @param size - the size of the frame
9 | */
10 | void onCameraStarted(Size size);
11 |
12 |
13 | void onCameraStopped();
14 |
15 |
16 | /**
17 |      * @param FBOin a framebuffer object that texIn is attached to
18 |      * @param texIn  the texture id holding the camera preview frame
19 |      * @return a texture id to use for rendering; if the returned texture id is <= 0, texIn is used instead
20 | */
21 | int onDrawTexture(int FBOin, int texIn);
22 |
23 | /**
24 | * @param size surfaceSize
25 | */
26 | void onSizeChanged(Size size);
27 | }
28 |
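
A minimal pass-through implementation sketched from the contract above: returning a value <= 0 from onDrawTexture tells the recorder to keep using texIn unchanged (NoOpDrawListener is a hypothetical name).

    import com.erlei.videorecorder.camera.Size;
    import com.erlei.videorecorder.recorder.OnDrawTextureListener;

    public class NoOpDrawListener implements OnDrawTextureListener {
        @Override public void onCameraStarted(Size size) { /* allocate GL resources here */ }
        @Override public void onCameraStopped()          { /* release GL resources here  */ }
        @Override public void onSizeChanged(Size size)   { }
        @Override public int onDrawTexture(int FBOin, int texIn) {
            return -1; // <= 0: the recorder falls back to texIn
        }
    }
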
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/recorder/VideoRecorderHandler.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.recorder;
2 |
3 | import android.os.Handler;
4 | import android.os.Message;
5 |
6 | import com.erlei.videorecorder.encoder.MuxerCallback;
7 |
8 | public class VideoRecorderHandler extends Handler implements MuxerCallback {
9 |
10 | protected static final int MSG_UPDATE_FPS = 1;
11 | protected static final int MSG_MEDIA_MUXER_STOPPED = 2;
12 | protected static final int MSG_MEDIA_MUXER_START = 3;
13 | protected static final int MSG_MEDIA_CAPTURE_START = 4;
14 | protected static final int MSG_MEDIA_CAPTURE_STOPPED = 5;
15 |
16 |
17 | @Override
18 | public void handleMessage(Message msg) {
19 | super.handleMessage(msg);
20 | switch (msg.what) {
21 | case MSG_UPDATE_FPS:
22 | handleUpdateFPS((float) msg.obj);
23 | break;
24 | case MSG_MEDIA_MUXER_STOPPED:
25 | handleVideoMuxerStopped((String) msg.obj);
26 | break;
27 | case MSG_MEDIA_MUXER_START:
28 | handleVideoMuxerStarted((String) msg.obj);
29 | break;
30 | case MSG_MEDIA_CAPTURE_START:
31 | handleMediaCaptureStarted((String) msg.obj);
32 | break;
33 | case MSG_MEDIA_CAPTURE_STOPPED:
34 | handleMediaCaptureStopped((String) msg.obj);
35 | break;
36 | }
37 | }
38 |
39 | /**
40 |      * Audio/video capture has stopped.
41 |      *
42 |      * @param output path of the video file recorded in this session
43 | * @see VideoRecorderHandler#handleMediaCaptureStarted(java.lang.String)
44 | */
45 | protected void handleMediaCaptureStopped(String output) {
46 |
47 | }
48 |
49 | /**
50 |      * Audio/video capture has started.
51 |      *
52 |      * @param output path of the video file recorded in this session
53 | * @see VideoRecorderHandler#handleMediaCaptureStopped(java.lang.String)
54 | */
55 | protected void handleMediaCaptureStarted(String output) {
56 |
57 | }
58 |
59 | /**
60 |      * Muxing of the encoded audio/video data has started.
61 |      *
62 |      * @param output path of the video file recorded in this session
63 |      *
64 |      * Note: this method and handleVideoMuxerStopped are not always paired. If stopRecord is
65 |      * called immediately after startRecord, handleVideoMuxerStarted may never be invoked:
66 |      * muxing cannot begin right after startRecord because the muxer can only be started once
67 |      * its tracks have been added, so stopping right after starting means the
68 |      * VideoRecorderHandler.MSG_MEDIA_MUXER_START message is never received and
69 |      * handleVideoMuxerStarted() is never called.
70 | * @see VideoRecorderHandler#handleVideoMuxerStopped(java.lang.String)
71 | */
72 | protected void handleVideoMuxerStarted(String output) {
73 |
74 | }
75 |
76 |
77 | /**
78 |      * Muxing of the encoded audio/video data has stopped.
79 |      *
80 |      * @param output path of the video file recorded in this session
81 |      *
82 |      * Note: this method and handleVideoMuxerStarted are not always paired. If stopRecord is
83 |      * called immediately after startRecord, handleVideoMuxerStarted may never be invoked:
84 |      * muxing cannot begin right after startRecord because the muxer can only be started once
85 |      * its tracks have been added, so stopping right after starting means the
86 |      * VideoRecorderHandler.MSG_MEDIA_MUXER_START message is never received and
87 |      * handleVideoMuxerStarted() is never called.
88 | * @see VideoRecorderHandler#handleVideoMuxerStarted(java.lang.String)
89 | */
90 | protected void handleVideoMuxerStopped(String output) {
91 |
92 | }
93 |
94 | protected void handleUpdateFPS(float obj) {
95 |
96 | }
97 |
98 |
99 | protected void updateFPS(float fps) {
100 | sendMessage(obtainMessage(MSG_UPDATE_FPS, fps));
101 | }
102 |
103 | @Override
104 | public void onPrepared() {
105 |
106 | }
107 |
108 | @Override
109 | public void onMuxerStarted(String output) {
110 | sendMessage(obtainMessage(MSG_MEDIA_MUXER_START, output));
111 | }
112 |
113 | @Override
114 | public void onMuxerStopped(String output) {
115 | sendMessage(obtainMessage(MSG_MEDIA_MUXER_STOPPED, output));
116 | }
117 |
118 | public void onCaptureStarted(String output) {
119 | sendMessage(obtainMessage(MSG_MEDIA_CAPTURE_START, output));
120 | }
121 |
122 | public void onCaptureStopped(String output) {
123 | sendMessage(obtainMessage(MSG_MEDIA_CAPTURE_STOPPED, output));
124 | }
125 | }
126 |
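
A usage sketch: subclass the handler on the main thread so the handleXxx callbacks run on the UI thread; only the overrides you need are required, and (as noted above) handleVideoMuxerStarted may be skipped if recording is stopped immediately after it is started.

    VideoRecorderHandler handler = new VideoRecorderHandler() {
        @Override protected void handleVideoMuxerStarted(String output) {
            // muxer tracks added; recording has effectively begun
        }
        @Override protected void handleVideoMuxerStopped(String output) {
            // output now points to the finished file
        }
        @Override protected void handleUpdateFPS(float fps) {
            // e.g. display the preview frame rate
        }
    };
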
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/util/Config.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.util;
2 |
3 | /**
4 | * Created by lll on 2018/2/9.
5 |  * Project configuration
6 | */
7 | @SuppressWarnings({"WeakerAccess"})
8 | public class Config {
9 | public static final String TAG = "VideoRecorder";
10 | public static final boolean DEBUG = true;
11 | }
12 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/util/FPSCounterFactory.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.util;
2 |
3 | import java.util.ArrayList;
4 |
5 | public class FPSCounterFactory {
6 |
7 |
8 | public static FPSCounter getDefaultFPSCounter() {
9 | return new FPSCounter2();
10 | }
11 |
12 | public static abstract class FPSCounter {
13 | public abstract float getFPS();
14 | }
15 |
16 | /**
17 |      * Real-time calculation:
18 |      * derives the FPS from the time elapsed since the previous frame.
19 | */
20 | public static class FPSCounter2 extends FPSCounter {
21 | private long lastFrame = System.nanoTime();
22 | private float FPS = 0;
23 |
24 | public float getFPS() {
25 | checkFPS();
26 | return FPS;
27 | }
28 |
29 | public void checkFPS() {
30 | long time = (System.nanoTime() - lastFrame);
31 | FPS = 1 / (time / 1000000000.0f);
32 | lastFrame = System.nanoTime();
33 | }
34 |
35 | }
36 |
37 | /**
38 |      * Accurate sampling:
39 |      * samples the last N frames and averages them.
40 | */
41 | public static class FPSCounter1 extends FPSCounter {
42 |         ArrayList<Long> lst = new ArrayList<>();
43 | private long msPerFrame = 1;
44 | private long l;
45 | static final int frame = 30;
46 |
47 | private void update() {
48 | long currentTime = System.nanoTime();
49 | msPerFrame = currentTime - l;
50 | l = currentTime;
51 | synchronized (this) {
52 | lst.add(msPerFrame);
53 | if (lst.size() > frame) {
54 | lst.remove(0);
55 | }
56 | }
57 | }
58 |
59 | @Override
60 | public float getFPS() {
61 | update();
62 | long sum = 0;
63 | for (Long aLong : lst) {
64 | sum += aLong;
65 | }
66 | return (float) (1e9 * lst.size() / sum);
67 | }
68 | }
69 | }
70 |
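
A usage sketch: call getFPS() once per rendered frame; FPSCounter2 derives the rate from the interval since the previous call, while FPSCounter1 averages the last 30 intervals.

    FPSCounterFactory.FPSCounter counter = FPSCounterFactory.getDefaultFPSCounter();
    // inside the per-frame draw callback:
    float fps = counter.getFPS();
    LogUtil.logd("fps = " + fps);
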
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/util/LogUtil.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.util;
2 |
3 | import android.util.Log;
4 |
5 | public class LogUtil {
6 | public static String TAG = Config.TAG;
7 | public static final boolean LOG_ENABLE = Config.DEBUG;
8 |
9 | public static void logd(String msg) {
10 | if (LOG_ENABLE) Log.d(TAG, msg);
11 | }
12 |
13 | public static void logd(String tag, String msg) {
14 | if (LOG_ENABLE) Log.d(TAG + "-" + tag, msg);
15 | }
16 |
17 | public static void loge(String msg) {
18 | Log.e(TAG, msg);
19 | }
20 |
21 | public static void loge(String tag, String msg) {
22 | Log.e(TAG + "-" + tag, msg);
23 | }
24 |
25 | public static void logi(String msg) {
26 | Log.i(TAG, msg);
27 | }
28 |
29 | public static void logi(String tag, String msg) {
30 | Log.i(TAG + "-" + tag, msg);
31 | }
32 |
33 | public static void logw(String msg) {
34 | Log.w(TAG, msg);
35 | }
36 |
37 | public static void logw(String tag, String msg) {
38 | Log.w(TAG + "-" + tag, msg);
39 | }
40 |
41 | public static void logv(String msg) {
42 | Log.v(TAG, msg);
43 | }
44 |
45 | public static void logv(String tag, String msg) {
46 | Log.v(TAG + "-" + tag, msg);
47 | }
48 |
49 | }
50 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/util/RecordGestureDetector.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.util;
2 |
3 |
4 | import android.os.Handler;
5 | import android.os.Message;
6 | import android.view.MotionEvent;
7 | import android.view.View;
8 |
9 | import java.lang.ref.WeakReference;
10 |
11 | /**
12 |  * Gesture detector: tap to take a picture, long-press to record.
13 | */
14 | public class RecordGestureDetector {
15 |
16 | private static final int LONG_PRESS = 1;
17 | private static final long LONG_PRESS_TIMEOUT = 500;
18 | private final GestureHandler mHandler;
19 | private final OnGestureListener mListener;
20 | private MotionEvent mCurrentDownEvent;
21 | private boolean mInLongPress;
22 |
23 | public interface OnGestureListener {
24 |
25 | void onLongPressDown(View view, MotionEvent e);
26 |
27 | void onLongPressUp(View view, MotionEvent e);
28 |
29 | void onSingleTap(View view, MotionEvent e);
30 |
31 | }
32 |
33 |
34 | public static class SimpleOnGestureListener implements RecordGestureDetector.OnGestureListener {
35 |
36 | @Override
37 | public void onLongPressDown(View view, MotionEvent e) {
38 |
39 | }
40 |
41 | @Override
42 | public void onLongPressUp(View view, MotionEvent e) {
43 |
44 | }
45 |
46 | @Override
47 | public void onSingleTap(View view, MotionEvent e) {
48 |
49 | }
50 | }
51 |
52 | private static class GestureHandler extends Handler {
53 |
54 |         private WeakReference<RecordGestureDetector> mWeakReference;
55 |
56 | GestureHandler(RecordGestureDetector gestureDetector) {
57 | super();
58 | mWeakReference = new WeakReference<>(gestureDetector);
59 | }
60 |
61 | GestureHandler(RecordGestureDetector gestureDetector, Handler handler) {
62 | super(handler.getLooper());
63 | mWeakReference = new WeakReference<>(gestureDetector);
64 | }
65 |
66 | @Override
67 | public void handleMessage(Message msg) {
68 | switch (msg.what) {
69 | case LONG_PRESS:
70 | if (mWeakReference.get() != null) {
71 | mWeakReference.get().dispatchLongPress((View) msg.obj);
72 | }
73 | break;
74 | default:
75 | throw new RuntimeException("Unknown message " + msg); //never
76 | }
77 | }
78 | }
79 |
80 | public RecordGestureDetector(RecordGestureDetector.OnGestureListener listener) {
81 | this(listener, null);
82 | }
83 |
84 |
85 | public RecordGestureDetector(RecordGestureDetector.OnGestureListener listener, Handler handler) {
86 | if (handler != null) {
87 | mHandler = new GestureHandler(this, handler);
88 | } else {
89 | mHandler = new GestureHandler(this);
90 | }
91 | mListener = listener;
92 |
93 | if (mListener == null) {
94 | throw new NullPointerException("OnGestureListener must not be null");
95 | }
96 | }
97 |
98 |
99 | public boolean onTouchEvent(View v, MotionEvent ev) {
100 | final int action = ev.getAction();
101 | switch (action & MotionEvent.ACTION_MASK) {
102 | case MotionEvent.ACTION_DOWN:
103 | mCurrentDownEvent = MotionEvent.obtain(ev);
104 | mHandler.removeMessages(LONG_PRESS);
105 | mHandler.sendMessageAtTime(Message.obtain(mHandler, LONG_PRESS, v), mCurrentDownEvent.getDownTime() + LONG_PRESS_TIMEOUT);
106 | break;
107 | case MotionEvent.ACTION_UP:
108 | if (mInLongPress) {
109 | mInLongPress = false;
110 | mListener.onLongPressUp(v, ev);
111 | } else {
112 | mListener.onSingleTap(v, ev);
113 | }
114 | mHandler.removeMessages(LONG_PRESS);
115 | break;
116 | case MotionEvent.ACTION_CANCEL:
117 | cancel();
118 | break;
119 | }
120 | return false;
121 | }
122 |
123 | private void cancel() {
124 | mHandler.removeMessages(LONG_PRESS);
125 | }
126 |
127 |
128 | private void dispatchLongPress(View view) {
129 | mInLongPress = true;
130 | mListener.onLongPressDown(view, mCurrentDownEvent);
131 | }
132 | }
133 |
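
A usage sketch (assumption): captureButton is a placeholder view. The detector always returns false from onTouchEvent, so the listener returns true itself to keep receiving the rest of the gesture.

    final RecordGestureDetector detector = new RecordGestureDetector(
            new RecordGestureDetector.SimpleOnGestureListener() {
                @Override public void onSingleTap(View view, MotionEvent e)     { /* take a picture  */ }
                @Override public void onLongPressDown(View view, MotionEvent e) { /* start recording */ }
                @Override public void onLongPressUp(View view, MotionEvent e)   { /* stop recording  */ }
            });
    captureButton.setOnTouchListener(new View.OnTouchListener() {
        @Override public boolean onTouch(View v, MotionEvent event) {
            detector.onTouchEvent(v, event);
            return true;
        }
    });
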
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/util/SaveFrameTask.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.util;
2 |
3 | import android.graphics.Bitmap;
4 | import android.os.AsyncTask;
5 |
6 | import com.erlei.videorecorder.recorder.IVideoRecorder;
7 |
8 | import java.io.BufferedOutputStream;
9 | import java.io.File;
10 | import java.io.FileOutputStream;
11 | import java.io.IOException;
12 | import java.nio.ByteBuffer;
13 |
14 | public class SaveFrameTask extends AsyncTask<ByteBuffer, Void, File> {
15 |
16 | private int mWidth;
17 | private int mHeight;
18 | private IVideoRecorder.TakePictureCallback mPictureCallback;
19 |
20 | public SaveFrameTask(int width, int height, IVideoRecorder.TakePictureCallback pictureCallback) {
21 | mWidth = width;
22 | mHeight = height;
23 | mPictureCallback = pictureCallback;
24 | }
25 |
26 | @Override
27 | protected File doInBackground(ByteBuffer... buffers) {
28 | ByteBuffer buffer = buffers[0];
29 |
30 | reverseByteBuffer(buffer, mWidth, mHeight);
31 | Bitmap bmp = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
32 | buffer.rewind();
33 | bmp.copyPixelsFromBuffer(buffer);
34 |
35 | File file = mPictureCallback.onPictureTaken(bmp);
36 | if (file != null) {
37 | BufferedOutputStream bos = null;
38 | try {
39 | bos = new BufferedOutputStream(new FileOutputStream(file));
40 | bmp.compress(Bitmap.CompressFormat.PNG, 100, bos);
41 | bmp.recycle();
42 | } catch (Exception e) {
43 | e.printStackTrace();
44 | } finally {
45 | if (bos != null) {
46 | try {
47 | bos.close();
48 | } catch (IOException e) {
49 | e.printStackTrace();
50 | }
51 | }
52 | }
53 | }
54 | return file;
55 | }
56 |
57 | private void reverseByteBuffer(ByteBuffer buf, int width, int height) {
58 | long ts = System.currentTimeMillis();
59 | int i = 0;
60 | byte[] tmp = new byte[width * 4];
61 | while (i++ < height / 2) {
62 | buf.get(tmp);
63 | System.arraycopy(buf.array(), buf.limit() - buf.position(), buf.array(), buf.position() - width * 4, width * 4);
64 | System.arraycopy(tmp, 0, buf.array(), buf.limit() - buf.position(), width * 4);
65 | }
66 | buf.rewind();
67 | LogUtil.logd("reverseByteBuffer took " + (System.currentTimeMillis() - ts) + "ms");
68 | }
69 |
70 | @Override
71 | protected void onPostExecute(File file) {
72 | super.onPostExecute(file);
73 | if (file != null) {
74 | mPictureCallback.onPictureTaken(file);
75 | }
76 | }
77 | }
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/util/SensorAccelerometer.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.util;
2 |
3 | import android.content.Context;
4 | import android.hardware.SensorEvent;
5 | import android.hardware.SensorEventListener;
6 | import android.hardware.SensorManager;
7 |
8 | /**
9 | * Created by lll on 2018/1/20.
10 |  * Uses the accelerometer to detect the screen orientation.
11 | */
12 | public class SensorAccelerometer implements SensorEventListener {
13 |
14 | private final OrientationChangeListener mListener;
15 | private SensorManager mSensorManager;
16 |
17 | public SensorAccelerometer(Context context, OrientationChangeListener listener) {
18 | mListener = listener;
19 | try {
20 | mSensorManager = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
21 | if (mSensorManager != null) {
22 |                 android.hardware.Sensor sensor = mSensorManager.getDefaultSensor(android.hardware.Sensor.TYPE_ACCELEROMETER);
23 | mSensorManager.registerListener(this, sensor, SensorManager.SENSOR_DELAY_NORMAL);
24 | }
25 | } catch (Exception e) {
26 | e.printStackTrace();
27 | }
28 | }
29 |
30 | @Override
31 | public void onSensorChanged(SensorEvent event) {
32 | if (mListener == null || event.sensor == null) return;
33 | if (event.sensor.getType() == android.hardware.Sensor.TYPE_ACCELEROMETER) {
34 | int x = (int) event.values[0];
35 | int y = (int) event.values[1];
36 |             if (Math.abs(x) > 6) {// tilt exceeds roughly 60 degrees (10 * 1.732 / 2)
37 | if (x <= -3)
38 | mListener.onChange(0);
39 | else
40 | mListener.onChange(1);
41 | } else {
42 | if (y <= -3)
43 | mListener.onChange(2);
44 | else
45 | mListener.onChange(3);
46 | }
47 |
48 | }
49 | }
50 |
51 | @Override
52 | public void onAccuracyChanged(android.hardware.Sensor sensor, int accuracy) {
53 |
54 | }
55 |
56 | public void release() {
57 | if (mSensorManager != null) mSensorManager.unregisterListener(this);
58 |
59 | }
60 |
61 | public interface OrientationChangeListener {
62 |
63 | void onChange(int orientation);
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/java/com/erlei/videorecorder/util/TextureUtil.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.util;
2 |
3 | import android.support.annotation.IntDef;
4 |
5 | import java.lang.annotation.Retention;
6 | import java.lang.annotation.RetentionPolicy;
7 |
8 | /**
9 | * Created by lll on 2018/5/10
10 | * Email : lllemail@foxmail.com
11 |  * Describe : Texture coordinate utility class
12 | */
13 | public class TextureUtil {
14 |
15 | public static final int ROTATION_0 = 0;
16 | public static final int ROTATION_90 = 90;
17 | public static final int ROTATION_180 = 180;
18 | public static final int ROTATION_270 = 270;
19 |
20 | @Retention(RetentionPolicy.SOURCE)
21 | @IntDef({ROTATION_90, ROTATION_180, ROTATION_270, ROTATION_0})
22 | public @interface Rotation {
23 | }
24 |
25 | public static final float TEXTURE_NO_ROTATION[] = {
26 | 0.0f, 1.0f,
27 | 1.0f, 1.0f,
28 | 0.0f, 0.0f,
29 | 1.0f, 0.0f,
30 | };
31 |
32 | public static final float TEXTURE_ROTATED_90[] = {
33 | 1.0f, 1.0f,
34 | 1.0f, 0.0f,
35 | 0.0f, 1.0f,
36 | 0.0f, 0.0f,
37 | };
38 | public static final float TEXTURE_ROTATED_180[] = {
39 | 1.0f, 0.0f,
40 | 0.0f, 0.0f,
41 | 1.0f, 1.0f,
42 | 0.0f, 1.0f,
43 | };
44 | public static final float TEXTURE_ROTATED_270[] = {
45 | 0.0f, 0.0f,
46 | 0.0f, 1.0f,
47 | 1.0f, 0.0f,
48 | 1.0f, 1.0f,
49 | };
50 |
51 | public static final float CUBE[] = {
52 | -1.0f, -1.0f,
53 | 1.0f, -1.0f,
54 | -1.0f, 1.0f,
55 | 1.0f, 1.0f,
56 | };
57 |
58 | private TextureUtil() {
59 | }
60 |
61 | public static float[] getTextureCords(@Rotation final int rotation, final boolean flipHorizontal,
62 | final boolean flipVertical) {
63 | float[] rotatedTex;
64 | switch (rotation) {
65 | case ROTATION_90:
66 | rotatedTex = TEXTURE_ROTATED_90;
67 | break;
68 | case ROTATION_180:
69 | rotatedTex = TEXTURE_ROTATED_180;
70 | break;
71 | case ROTATION_270:
72 | rotatedTex = TEXTURE_ROTATED_270;
73 | break;
74 | case ROTATION_0:
75 | default:
76 | rotatedTex = TEXTURE_NO_ROTATION;
77 | break;
78 | }
79 | if (flipHorizontal) {
80 | rotatedTex = new float[]{
81 | flip(rotatedTex[0]), rotatedTex[1],
82 | flip(rotatedTex[2]), rotatedTex[3],
83 | flip(rotatedTex[4]), rotatedTex[5],
84 | flip(rotatedTex[6]), rotatedTex[7],
85 | };
86 | }
87 | if (flipVertical) {
88 | rotatedTex = new float[]{
89 | rotatedTex[0], flip(rotatedTex[1]),
90 | rotatedTex[2], flip(rotatedTex[3]),
91 | rotatedTex[4], flip(rotatedTex[5]),
92 | rotatedTex[6], flip(rotatedTex[7]),
93 | };
94 | }
95 | return rotatedTex;
96 | }
97 |
98 |
99 | private static float flip(final float i) {
100 | if (i == 0.0f) {
101 | return 1.0f;
102 | }
103 | return 0.0f;
104 | }
105 | }
106 |
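getTextureCords is typically paired with CUBE to produce the two attribute arrays a full-screen quad needs; a minimal sketch of turning them into the direct FloatBuffers GLES expects (QuadBuffers is an illustrative name, and the 90°-rotation/vertical-flip combination is just an example):

```java
import com.erlei.videorecorder.util.TextureUtil;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

// Illustrative helper: attribute buffers for a full-screen quad, rotated 90° and flipped vertically.
public class QuadBuffers {
    public final FloatBuffer position = toFloatBuffer(TextureUtil.CUBE);                    // vPosition
    public final FloatBuffer texCoord = toFloatBuffer(
            TextureUtil.getTextureCords(TextureUtil.ROTATION_90, false, true));            // vTexCoord

    private static FloatBuffer toFloatBuffer(float[] data) {
        FloatBuffer fb = ByteBuffer.allocateDirect(data.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(data);
        fb.position(0);
        return fb;
    }
}
```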
--------------------------------------------------------------------------------
/VideoRecorderCore/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | VideoRecorder
3 |
4 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | android {
4 | compileSdkVersion 26
5 | defaultConfig {
6 | applicationId "com.erlei.videorecorder"
7 | minSdkVersion 18
8 | targetSdkVersion 26
9 | versionCode 1
10 | versionName "1.0"
11 | }
12 | buildTypes {
13 | release {
14 | minifyEnabled true
15 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
16 | }
17 | debug {
18 | minifyEnabled false
19 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
20 | }
21 | }
22 | }
23 |
24 | dependencies {
25 | implementation fileTree(include: ['*.jar'], dir: 'libs')
26 | implementation 'com.android.support:appcompat-v7:26.1.0'
27 | implementation project(':MultiPartRecorder')
28 | implementation 'com.android.support:support-annotations:27.1.1'
29 | }
30 |
--------------------------------------------------------------------------------
/app/doc/相关资料.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ### Related links
4 |
5 | - Camera
6 |
7 | [Android camera development series](https://www.polarxiong.com/archives/Android%E7%9B%B8%E6%9C%BA%E5%BC%80%E5%8F%91%E7%B3%BB%E5%88%97.html)
8 |
9 | - graphics
10 |
11 | [Android图形架构](https://source.android.com/devices/graphics/architecture)
12 |
13 | - OpenGLES
14 |
15 | [learnopengl-cn](https://learnopengl-cn.github.io/)
16 |
17 | [opengl-tutorial-cn](http://www.opengl-tutorial.org/cn/)
18 |
19 | [coordinate-mapping](https://developer.android.com/guide/topics/graphics/opengl#coordinate-mapping)
20 |
21 | [Android OpenGL ES 2.0 from scratch, part 1](https://blog.piasy.com/2016/06/07/Open-gl-es-android-2-part-1/)
22 |
23 | [Android OpenGL ES 2.0 from scratch, part 2](https://blog.piasy.com/2016/06/14/Open-gl-es-android-2-part-2/)
24 |
25 | [Android OpenGL ES 2.0 from scratch, part 3](https://blog.piasy.com/2017/10/06/Open-gl-es-android-2-part-3/)
26 |
27 |
28 | [Overview of OpenGL ES and coordinate transformations, part 1](http://zhangtielei.com/posts/blog-opengl-transformations-1.html)
29 |
30 | [Overview of OpenGL ES and coordinate transformations, part 2](http://zhangtielei.com/posts/blog-opengl-transformations-2.html)
31 |
32 | [http://www.songho.ca/opengl/index.html](http://www.songho.ca/opengl/index.html)
33 |
34 | [What does surfaceTexture#getTransformMatrix mean](https://stackoverflow.com/questions/30595493/what-does-the-return-value-of-surfacetexture-gettransformmatrix-mean-who-can-ex)
35 |
36 |
37 | - PixelBufferObject
38 |
39 | [glReadPixels](https://www.khronos.org/opengl/wiki/GLAPI/glReadPixels)
40 |
41 | [Android beauty/filter effects: recording video from OpenGL using PBO](https://www.jianshu.com/p/3bc4db687546)
42 |
43 | [gl_pbo](http://www.songho.ca/opengl/gl_pbo.html)
44 |
45 | [fast-pixel-transfers-with-pixel-buffer-objects](http://roxlu.com/2014/048/fast-pixel-transfers-with-pixel-buffer-objects)
46 |
47 | [Pixel Buffer Object](https://www.khronos.org/opengl/wiki/Pixel_Buffer_Object)
48 |
49 | [Pixel Transfer](https://www.khronos.org/opengl/wiki/Pixel_Transfer#Pixel_transfer_parameters)
50 |
51 | - Audio capture
52 |
53 | [Process live mic audio quickly, set up a callback function](https://stackoverflow.com/questions/4525206/android-audiorecord-class-process-live-mic-audio-quickly-set-up-callback-func/4834651#4834651)
54 |
55 | [https://blog.csdn.net/flowingflying/article/details/39576701](https://blog.csdn.net/flowingflying/article/details/39576701)
56 |
57 | [https://github.com/Kickflip/kickflip-android-sdk/issues/9](https://github.com/Kickflip/kickflip-android-sdk/issues/9)
58 |
59 | [android-dev-audiorecord-without-blocking-or-threads](https://stackoverflow.com/questions/15804903/android-dev-audiorecord-without-blocking-or-threads)
60 |
61 | [https://issuetracker.google.com/issues/36968761](https://issuetracker.google.com/issues/36968761)
62 |
63 | - Encoding
64 |
65 | [Android MediaCodec stuff](https://bigflake.com/mediacodec/)
66 |
67 | [MediaCodec](https://developer.android.com/reference/android/media/MediaCodec)
68 |
69 | [MediaMuxer](https://developer.android.com/reference/android/media/MediaMuxer)
70 |
71 | [optimizing-mediamuxers-writing-speed](https://blog.horizon.camera/post/134263616000/optimizing-mediamuxers-writing-speed)
72 |
73 | [Android hardware video encoding analysis, part 3](https://blog.piasy.com/2017/08/08/WebRTC-Android-HW-Encode-Video/)
74 |
75 | [What I need to know: H.264](https://blog.piasy.com/2017/09/22/I-Need-Know-About-H264/)
76 |
77 | - Watermarking
78 |
79 | [how-to-embed-text-while-recording-video-in-android](https://stackoverflow.com/questions/23735464/how-to-embed-text-while-recording-video-in-android)
80 |
81 | [media-for-mobile/blob/master/effects/src/main/java/org/m4m/effects/TextOverlayEffect.java](https://github.com/INDExOS/media-for-mobile/blob/master/effects/src/main/java/org/m4m/effects/TextOverlayEffect.java)
82 |
83 | [Texture flipping](https://stackoverflow.com/questions/7811195/must-opengl-textures-be-flipped?rq=1)
84 |
85 | - Taking pictures
86 |
87 | [Simplified Windows BMP bitmap file format specification](http://www.dragonwins.com/domains/getteched/bmp/bmpfileformat.htm)
88 |
89 |
90 | [Saving a single frame](https://stackoverflow.com/questions/21634450/how-can-we-make-the-saveframe-method-in-extractmpegframestest-more-efficient)
91 |
92 | [takePicture image flipping issue](https://stackoverflow.com/questions/28350695/flipping-opengl-texture-from-bytebuffer-before-creating-the-bitmap-in-android)
93 |
94 | - Related projects
95 |
96 | [grafika](https://github.com/google/grafika)
97 |
98 | [FilterRecord](https://github.com/a483210/myExample) successfully uses PBO-based asynchronous glReadPixels; the readback cost is very small (see the sketch after this list)
99 |
100 | [android-gpuimage-plus](https://github.com/wysaid/android-gpuimage-plus)
101 |
102 | [AudioVideoRecordingSample](https://github.com/saki4510t/AudioVideoRecordingSample)
103 |
104 | [media-for-mobile](https://github.com/INDExOS/media-for-mobile)
105 |
106 | - Image processing
107 |
108 | [Kernel (image_processing)](https://en.wikipedia.org/wiki/Kernel_(image_processing))
109 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
34 |
37 |
38 |
39 |
40 |
--------------------------------------------------------------------------------
/app/src/main/java/com/erlei/videorecorder/MainActivity.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder;
2 |
3 | import android.Manifest;
4 | import android.os.Bundle;
5 | import android.support.annotation.NonNull;
6 | import android.support.v4.app.ActivityCompat;
7 | import android.support.v4.app.Fragment;
8 | import android.support.v7.app.AppCompatActivity;
9 |
10 | import com.erlei.videorecorder.fragment.MultiPartRecorderFragment;
11 |
12 | import static android.view.WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON;
13 |
14 | public class MainActivity extends AppCompatActivity {
15 |
16 | private static final int REQUEST_CAMERA = 1;
17 | private String[] mPermissions = {
18 | Manifest.permission.CAMERA,
19 | Manifest.permission.RECORD_AUDIO,
20 | Manifest.permission.WRITE_EXTERNAL_STORAGE,
21 | Manifest.permission.READ_EXTERNAL_STORAGE};
22 |
23 |
24 | @Override
25 | protected void onCreate(Bundle savedInstanceState) {
26 | super.onCreate(savedInstanceState);
27 | getWindow().addFlags(FLAG_KEEP_SCREEN_ON);
28 | setContentView(R.layout.activity_main);
29 |
30 | if (!checkSelfPermissions(mPermissions)) {
31 | ActivityCompat.requestPermissions(MainActivity.this, mPermissions, REQUEST_CAMERA);
32 | }else {
33 | setContent();
34 | }
35 |
36 | }
37 |
38 | public boolean checkSelfPermissions(String... permission) {
39 | for (String s : permission) {
40 | if (ActivityCompat.checkSelfPermission(this, s) != android.content.pm.PackageManager.PERMISSION_GRANTED) {
41 | return false;
42 | }
43 | }
44 | return true;
45 | }
46 |
47 | @Override
48 | public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
49 | super.onRequestPermissionsResult(requestCode, permissions, grantResults);
50 | if (REQUEST_CAMERA == requestCode && checkSelfPermissions(mPermissions)) {
51 | setContent();
52 | }
53 | }
54 |
55 | private void setContent() {
56 | // Fragment fragment = SurfaceViewFragment.newInstance();
57 | // Fragment fragment = CameraGLSurfaceViewFragment.newInstance();
58 | // Fragment fragment = CameraGLTextureViewFragment.newInstance();
59 | // Fragment fragment = VideoRecorderFragment.newInstance();
60 | Fragment fragment = MultiPartRecorderFragment.newInstance();
61 | getSupportFragmentManager().beginTransaction().replace(R.id.content, fragment).commitAllowingStateLoss();
62 | }
63 |
64 | }
65 |
--------------------------------------------------------------------------------
/app/src/main/java/com/erlei/videorecorder/fragment/SurfaceViewFragment.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.fragment;
2 |
3 | import android.os.Bundle;
4 | import android.support.annotation.Nullable;
5 | import android.support.v4.app.Fragment;
6 | import android.view.LayoutInflater;
7 | import android.view.SurfaceHolder;
8 | import android.view.SurfaceView;
9 | import android.view.View;
10 | import android.view.ViewGroup;
11 |
12 | import com.erlei.videorecorder.R;
13 | import com.erlei.videorecorder.camera.Camera;
14 | import com.erlei.videorecorder.camera.Size;
15 | import com.erlei.videorecorder.util.LogUtil;
16 |
17 | /**
18 | * Created by lll on 2018/1/20.
19 | * Opens a camera preview
20 | */
21 | public class SurfaceViewFragment extends Fragment implements SurfaceHolder.Callback, Camera.CameraCallback {
22 |
23 | private SurfaceView mSurfaceView;
24 | private Camera mCamera;
25 |
26 | public static SurfaceViewFragment newInstance() {
27 | return new SurfaceViewFragment();
28 | }
29 |
30 | @Nullable
31 | @Override
32 | public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
33 | return inflater.inflate(R.layout.fragment_camera_surface, container, false);
34 | }
35 |
36 | @Override
37 | public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
38 | super.onViewCreated(view, savedInstanceState);
39 | mSurfaceView = view.findViewById(R.id.SurfaceView);
40 | mSurfaceView.getHolder().addCallback(this);
41 | }
42 |
43 | @Override
44 | public void onResume() {
45 | super.onResume();
46 | }
47 |
48 | @Override
49 | public void onDestroy() {
50 | super.onDestroy();
51 | }
52 |
53 | @Override
54 | public void onPause() {
55 | super.onPause();
56 | }
57 |
58 | @Override
59 | public void surfaceCreated(SurfaceHolder holder) {
60 | mCamera = new Camera.CameraBuilder(getContext()).useDefaultConfig().setSurfaceHolder(holder).addPreviewCallback(this).build().open();
61 | }
62 |
63 | @Override
64 | public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
65 |
66 | }
67 |
68 | @Override
69 | public void surfaceDestroyed(SurfaceHolder holder) {
70 | if (mCamera != null) mCamera.close();
71 | }
72 |
73 | /**
74 | * Called when the camera preview has opened successfully; in this callback you can resize the SurfaceView based on camera.getPreviewSize() to avoid a distorted preview
75 | */
76 | @Override
77 | public void onSuccess(Camera camera) {
78 | adjustSurfaceViewSize(camera.getPreviewSize());
79 | }
80 |
81 | @Override
82 | public void onFailure(int code, String msg) {
83 | LogUtil.loge(msg);
84 | }
85 |
86 | /**
87 | * Adjust the SurfaceView's aspect ratio to avoid preview distortion
88 | *
89 | * @param previewSize the preview size
90 | */
91 | private void adjustSurfaceViewSize(Size previewSize) {
92 | int width = getResources().getDisplayMetrics().widthPixels;
93 | int height = (int) (width * ((previewSize.getWidth() * 1.0f) / previewSize.getHeight()));
94 | ViewGroup.LayoutParams lp = mSurfaceView.getLayoutParams();
95 | lp.width = width;
96 | lp.height = height;
97 | mSurfaceView.setLayoutParams(lp);
98 | }
99 |
100 |
101 | }
102 |
--------------------------------------------------------------------------------
/app/src/main/java/com/erlei/videorecorder/fragment/VideoRecorderFragment.java:
--------------------------------------------------------------------------------
1 | package com.erlei.videorecorder.fragment;
2 |
3 | import android.annotation.SuppressLint;
4 | import android.os.Bundle;
5 | import android.os.Environment;
6 | import android.support.annotation.Nullable;
7 | import android.support.v4.app.Fragment;
8 | import android.view.LayoutInflater;
9 | import android.view.SurfaceHolder;
10 | import android.view.SurfaceView;
11 | import android.view.View;
12 | import android.view.ViewGroup;
13 | import android.widget.CheckBox;
14 | import android.widget.CompoundButton;
15 | import android.widget.TextView;
16 | import android.widget.Toast;
17 |
18 | import com.erlei.videorecorder.R;
19 | import com.erlei.videorecorder.camera.Camera;
20 | import com.erlei.videorecorder.recorder.DefaultCameraPreview;
21 | import com.erlei.videorecorder.recorder.VideoRecorder;
22 | import com.erlei.videorecorder.recorder.VideoRecorderHandler;
23 | import com.erlei.videorecorder.util.LogUtil;
24 |
25 | import java.io.File;
26 | import java.util.Locale;
27 |
28 | public class VideoRecorderFragment extends Fragment implements SurfaceHolder.Callback {
29 |
30 | private SurfaceView mSurfaceView;
31 | private Camera mCamera;
32 | private VideoRecorder mRecorder;
33 | private CheckBox mBtnRecord;
34 | private TextView mTvFps;
35 |
36 |
37 | public static VideoRecorderFragment newInstance() {
38 | return new VideoRecorderFragment();
39 | }
40 |
41 | @Nullable
42 | @Override
43 | public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
44 | return inflater.inflate(R.layout.fragment_camera_surface, container, false);
45 | }
46 |
47 | @Override
48 | public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
49 | super.onViewCreated(view, savedInstanceState);
50 | mSurfaceView = view.findViewById(R.id.SurfaceView);
51 | mBtnRecord = view.findViewById(R.id.cbRecord);
52 | mTvFps = view.findViewById(R.id.tvFps);
53 | mBtnRecord.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
54 |
55 | public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
56 | if (mRecorder != null) {
57 | if (mRecorder.isRecordEnable()) {
58 | mRecorder.stopRecord();
59 | } else {
60 | mRecorder.startRecord();
61 | }
62 | }
63 | }
64 | });
65 | mSurfaceView.getHolder().addCallback(this);
66 | }
67 |
68 |
69 | @Override
70 | public void surfaceCreated(SurfaceHolder holder) {
71 | LogUtil.logd("surfaceCreated " + System.currentTimeMillis());
72 | mRecorder = new VideoRecorder.Builder(new DefaultCameraPreview(mSurfaceView))
73 | .setCallbackHandler(new CallbackHandler())
74 | .setLogFPSEnable(false)
75 | .setFrameRate(30)
76 | .setOutPutFile(new File(getContext().getExternalFilesDir(Environment.DIRECTORY_MOVIES),"1.mp4"))
77 | .setChannelCount(2)
78 | .build();
79 | }
80 |
81 | @Override
82 | public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
83 | mRecorder.onSizeChanged(width, height);
84 | }
85 |
86 | @Override
87 | public void surfaceDestroyed(SurfaceHolder holder) {
88 | mRecorder.stopPreview();
89 | }
90 |
91 | // TODO: 2018/6/26 let it leak for now~
92 | @SuppressLint("HandlerLeak")
93 | private class CallbackHandler extends VideoRecorderHandler {
94 | @Override
95 | protected void handleUpdateFPS(float fps) {
96 | mTvFps.setText(String.format(Locale.getDefault(), "%.2f", fps));
97 | }
98 |
99 | @Override
100 | protected void handleVideoMuxerStopped(@Nullable String output) {
101 | if (output != null) {
102 | Toast.makeText(getContext(), output, Toast.LENGTH_SHORT).show();
103 | } else {
104 | Toast.makeText(getContext(), "Recording failed", Toast.LENGTH_SHORT).show();
105 | }
106 | }
107 | }
108 | }
109 |
110 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |
7 |
12 |
13 |
19 |
22 |
25 |
26 |
27 |
28 |
34 |
35 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xxhdpi/ic_switch_camera.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erleizh/VideoRecorder/c954462bb24edafbb4929572f3a2e48478d4c7ce/app/src/main/res/drawable-xxhdpi/ic_switch_camera.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
11 |
16 |
21 |
26 |
31 |
36 |
41 |
46 |
51 |
56 |
61 |
66 |
71 |
76 |
81 |
86 |
91 |
96 |
101 |
106 |
111 |
116 |
121 |
126 |
131 |
136 |
141 |
146 |
151 |
156 |
161 |
166 |
171 |
172 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/selector_btn_record.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/shape_record_normal.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/shape_recording.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
8 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/fragment_camera_glsurfaceview.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
11 |
12 |
16 |
17 |
24 |
25 |
26 |
27 |
28 |
29 |
32 |
33 |
40 |
41 |
42 |
52 |
53 |
60 |
61 |
67 |
68 |
74 |
75 |
76 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/fragment_camera_gltextureview.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
11 |
12 |
16 |
17 |
24 |
25 |
26 |
27 |
28 |
29 |
32 |
33 |
40 |
41 |
42 |
52 |
53 |
60 |
61 |
67 |
68 |
74 |
75 |
76 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/fragment_camera_surface.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
7 |
11 |
12 |
19 |
20 |
28 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/fragment_multi_part_recorder.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
11 |
12 |
17 |
18 |
22 |
23 |
30 |
31 |
32 |
33 |
34 |
35 |
38 |
39 |
46 |
47 |
48 |
58 |
59 |
66 |
67 |
73 |
74 |
80 |
81 |
82 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/include_camera_controller.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
8 |
15 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erleizh/VideoRecorder/c954462bb24edafbb4929572f3a2e48478d4c7ce/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erleizh/VideoRecorder/c954462bb24edafbb4929572f3a2e48478d4c7ce/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erleizh/VideoRecorder/c954462bb24edafbb4929572f3a2e48478d4c7ce/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erleizh/VideoRecorder/c954462bb24edafbb4929572f3a2e48478d4c7ce/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erleizh/VideoRecorder/c954462bb24edafbb4929572f3a2e48478d4c7ce/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erleizh/VideoRecorder/c954462bb24edafbb4929572f3a2e48478d4c7ce/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erleizh/VideoRecorder/c954462bb24edafbb4929572f3a2e48478d4c7ce/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erleizh/VideoRecorder/c954462bb24edafbb4929572f3a2e48478d4c7ce/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erleizh/VideoRecorder/c954462bb24edafbb4929572f3a2e48478d4c7ce/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erleizh/VideoRecorder/c954462bb24edafbb4929572f3a2e48478d4c7ce/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/raw/fragment_oes_shader.glsl:
--------------------------------------------------------------------------------
1 | #extension GL_OES_EGL_image_external : require
2 | precision mediump float;
3 | uniform samplerExternalOES sTexture;
4 | varying vec2 texCoord;
5 |
6 | void main() {
7 |     gl_FragColor = texture2D(sTexture,texCoord);
8 | }
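This fragment shader samples camera frames through a samplerExternalOES, which implies an external (OES) texture backed by a SurfaceTexture on the Java side. A minimal sketch of that setup (OesTextureFactory is an illustrative name, not part of this project):

```java
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;

// Illustrative sketch: create the external (OES) texture this shader samples and
// back a SurfaceTexture with it; the SurfaceTexture is then handed to the camera preview.
final class OesTextureFactory {
    static SurfaceTexture createPreviewTexture() {
        int[] tex = new int[1];
        GLES20.glGenTextures(1, tex, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        // Each frame: SurfaceTexture#updateTexImage(), then draw with the program built from this shader.
        return new SurfaceTexture(tex[0]);
    }
}
```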
--------------------------------------------------------------------------------
/app/src/main/res/raw/fragment_shader.glsl:
--------------------------------------------------------------------------------
1 | precision mediump float;
2 | uniform sampler2D sTexture;
3 | varying vec2 texCoord;
4 |
5 | void main() {
6 | gl_FragColor = texture2D(sTexture,texCoord);
7 | }
--------------------------------------------------------------------------------
/app/src/main/res/raw/vertex_shader.glsl:
--------------------------------------------------------------------------------
1 | attribute vec2 vPosition;
2 | attribute vec2 vTexCoord;
3 | varying vec2 texCoord;
4 |
5 | void main() {
6 | texCoord = vTexCoord;
7 | gl_Position = vec4 ( vPosition.x, vPosition.y, 0.0, 1.0 );
8 | }
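The vertex shader expects a full-screen quad in vPosition and per-vertex texture coordinates in vTexCoord; a minimal sketch of wiring those attributes from the FloatBuffers built with TextureUtil (QuadDrawer is an illustrative name; the program id is assumed to come from compiling these shaders):

```java
import android.opengl.GLES20;

import java.nio.FloatBuffer;

// Illustrative sketch: feed the full-screen quad (vPosition) and texture coordinates
// (vTexCoord) into a program linked from vertex_shader.glsl plus a fragment shader.
final class QuadDrawer {
    static void draw(int program, FloatBuffer position, FloatBuffer texCoord) {
        GLES20.glUseProgram(program);
        int positionHandle = GLES20.glGetAttribLocation(program, "vPosition");
        int texCoordHandle = GLES20.glGetAttribLocation(program, "vTexCoord");
        GLES20.glEnableVertexAttribArray(positionHandle);
        GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, position);
        GLES20.glEnableVertexAttribArray(texCoordHandle);
        GLES20.glVertexAttribPointer(texCoordHandle, 2, GLES20.GL_FLOAT, false, 0, texCoord);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); // 4 vertices = two triangles
        GLES20.glDisableVertexAttribArray(positionHandle);
        GLES20.glDisableVertexAttribArray(texCoordHandle);
    }
}
```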
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | VideoRecorder
3 |
4 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/app/src/main/res/xml/file_paths.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
6 |
9 |
10 |
13 |
16 |
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 |
3 | buildscript {
4 |
5 | repositories {
6 | maven { url 'https://maven.aliyun.com/repository/public' }
7 | maven { url 'https://maven.aliyun.com/repository/google' }
8 | google()
9 | jcenter()
10 | }
11 | dependencies {
12 | classpath 'com.android.tools.build:gradle:7.2.2'
13 |
14 |
15 | // NOTE: Do not place your application dependencies here; they belong
16 | // in the individual module build.gradle files
17 | }
18 | }
19 |
20 | allprojects {
21 | repositories {
22 | maven { url 'https://maven.aliyun.com/repository/public' }
23 | maven { url 'https://maven.aliyun.com/repository/google' }
24 | google()
25 | jcenter()
26 | }
27 | }
28 |
29 | task clean(type: Delete) {
30 | delete rootProject.buildDir
31 | }
32 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 |
3 | # IDE (e.g. Android Studio) users:
4 | # Gradle settings configured through the IDE *will override*
5 | # any settings specified in this file.
6 |
7 | # For more details on how to configure your build environment visit
8 | # http://www.gradle.org/docs/current/userguide/build_environment.html
9 |
10 | # Specifies the JVM arguments used for the daemon process.
11 | # The setting is particularly useful for tweaking memory settings.
12 | org.gradle.jvmargs=-Xmx20148m
13 |
14 | # When configured, Gradle will run in incubating parallel mode.
15 | # This option should only be used with decoupled projects. More details, visit
16 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
17 | org.gradle.parallel=true
18 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erleizh/VideoRecorder/c954462bb24edafbb4929572f3a2e48478d4c7ce/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionBase=GRADLE_USER_HOME
2 | distributionPath=wrapper/dists
3 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-bin.zip
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # Attempt to set APP_HOME
46 | # Resolve links: $0 may be a link
47 | PRG="$0"
48 | # Need this for relative symlinks.
49 | while [ -h "$PRG" ] ; do
50 | ls=`ls -ld "$PRG"`
51 | link=`expr "$ls" : '.*-> \(.*\)$'`
52 | if expr "$link" : '/.*' > /dev/null; then
53 | PRG="$link"
54 | else
55 | PRG=`dirname "$PRG"`"/$link"
56 | fi
57 | done
58 | SAVED="`pwd`"
59 | cd "`dirname \"$PRG\"`/" >/dev/null
60 | APP_HOME="`pwd -P`"
61 | cd "$SAVED" >/dev/null
62 |
63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
64 |
65 | # Determine the Java command to use to start the JVM.
66 | if [ -n "$JAVA_HOME" ] ; then
67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
68 | # IBM's JDK on AIX uses strange locations for the executables
69 | JAVACMD="$JAVA_HOME/jre/sh/java"
70 | else
71 | JAVACMD="$JAVA_HOME/bin/java"
72 | fi
73 | if [ ! -x "$JAVACMD" ] ; then
74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
75 |
76 | Please set the JAVA_HOME variable in your environment to match the
77 | location of your Java installation."
78 | fi
79 | else
80 | JAVACMD="java"
81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
82 |
83 | Please set the JAVA_HOME variable in your environment to match the
84 | location of your Java installation."
85 | fi
86 |
87 | # Increase the maximum file descriptors if we can.
88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
89 | MAX_FD_LIMIT=`ulimit -H -n`
90 | if [ $? -eq 0 ] ; then
91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
92 | MAX_FD="$MAX_FD_LIMIT"
93 | fi
94 | ulimit -n $MAX_FD
95 | if [ $? -ne 0 ] ; then
96 | warn "Could not set maximum file descriptor limit: $MAX_FD"
97 | fi
98 | else
99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
100 | fi
101 | fi
102 |
103 | # For Darwin, add options to specify how the application appears in the dock
104 | if $darwin; then
105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
106 | fi
107 |
108 | # For Cygwin, switch paths to Windows format before running java
109 | if $cygwin ; then
110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
112 | JAVACMD=`cygpath --unix "$JAVACMD"`
113 |
114 | # We build the pattern for arguments to be converted via cygpath
115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
116 | SEP=""
117 | for dir in $ROOTDIRSRAW ; do
118 | ROOTDIRS="$ROOTDIRS$SEP$dir"
119 | SEP="|"
120 | done
121 | OURCYGPATTERN="(^($ROOTDIRS))"
122 | # Add a user-defined pattern to the cygpath arguments
123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
125 | fi
126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
127 | i=0
128 | for arg in "$@" ; do
129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
131 |
132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
134 | else
135 | eval `echo args$i`="\"$arg\""
136 | fi
137 | i=$((i+1))
138 | done
139 | case $i in
140 | (0) set -- ;;
141 | (1) set -- "$args0" ;;
142 | (2) set -- "$args0" "$args1" ;;
143 | (3) set -- "$args0" "$args1" "$args2" ;;
144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
150 | esac
151 | fi
152 |
153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
154 | function splitJvmOpts() {
155 | JVM_OPTS=("$@")
156 | }
157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
159 |
160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
161 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windowz variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/screenshort/440800708950629658.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erleizh/VideoRecorder/c954462bb24edafbb4929572f3a2e48478d4c7ce/screenshort/440800708950629658.png
--------------------------------------------------------------------------------
/screenshort/479649274876459714.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erleizh/VideoRecorder/c954462bb24edafbb4929572f3a2e48478d4c7ce/screenshort/479649274876459714.jpg
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app', ':VideoRecorderCore', ':MultiPartRecorder'
2 |
--------------------------------------------------------------------------------