imageInfos) {
34 | super();
35 | mContext = context;
36 | if (imageInfos == null || imageInfos.size() == 0) {
37 | throw new IllegalArgumentException("imageInfos must not be empty");
38 | }
39 | this.mImageInfos.addAll(imageInfos);
40 | mSize = mImageInfos.size();
41 | }
42 |
43 | @Override
44 | public void onInit(int videoWidth, int videoHeight) {
45 | super.onInit(videoWidth, videoHeight);
46 | glProgram = GLESTools.createProgram(GLESTools.uRes(mContext.getResources(), "drawimage_vertex.sh"),
47 | GLESTools.uRes(mContext.getResources(), "drawimage_fragment.sh"));
48 | GLES20.glUseProgram(glProgram);
49 | glCamTextureLoc = GLES20.glGetUniformLocation(glProgram, "uCamTexture");
50 | glImageTextureLoc = GLES20.glGetUniformLocation(glProgram, "uImageTexture");
51 | glCamPostionLoc = GLES20.glGetAttribLocation(glProgram, "aCamPosition");
52 | glCamTextureCoordLoc = GLES20.glGetAttribLocation(glProgram, "aCamTextureCoord");
53 | glImageRectLoc = GLES20.glGetUniformLocation(glProgram, "imageRect");
54 | glImageAngelLoc = GLES20.glGetUniformLocation(glProgram, "imageAngel");
55 |
56 | initImageTexture();
57 | }
58 |
59 | protected void initImageTexture() {
60 | imageTextures = new ArrayList<>();
61 | ImageTexture imageTexture;
62 | for (int i = 0; i < mSize; i++) {
63 | imageTexture = new ImageTexture(outVideoWidth, outVideoHeight);
64 | imageTexture.load(mContext, mImageInfos.get(i).resId);
65 | imageTextures.add(imageTexture);
66 | }
67 | }
68 |
69 | @Override
70 | public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textureBuffer) {
71 | GLES20.glViewport(0, 0, outVideoWidth, outVideoHeight);
72 | GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
73 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
74 | int textureId;
75 | int frameBuffer;
76 | Rect rect;
77 | ImageTexture preImageTexture = null;
78 | for (int i = 0; i < mSize; i++) {
79 | if (preImageTexture == null) {
80 | textureId = cameraTexture; // first pass samples the camera texture
81 | } else {
82 | textureId = preImageTexture.getTextureId(); // later passes sample the previous pass's framebuffer texture
83 | }
84 | if (i == mSize - 1) {
85 | frameBuffer = targetFrameBuffer; // last pass renders into the caller's framebuffer
86 | } else {
87 | frameBuffer = imageTextures.get(i).getFrameBuffer(); // intermediate passes render into this image's own framebuffer
88 | }
89 | rect = mImageInfos.get(i).rect;
90 | if (rect.left == rect.right || rect.top == rect.bottom) {
91 | continue;
92 | }
93 | drawImage(convertToRectF(rect), imageTextures.get(i).getImageTextureId(), textureId, frameBuffer, shapeBuffer, textureBuffer);
94 | preImageTexture = imageTextures.get(i);
95 | }
96 | GLES20.glFinish();
97 | }
98 |
99 | protected void drawImage(RectF rectF, int imageTextureId, int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textureBuffer) {
100 | GLES20.glEnableVertexAttribArray(glCamPostionLoc);
101 | GLES20.glEnableVertexAttribArray(glCamTextureCoordLoc);
102 | shapeBuffer.position(0);
103 | GLES20.glVertexAttribPointer(glCamPostionLoc, 2,
104 | GLES20.GL_FLOAT, false,
105 | 2 * 4, shapeBuffer);
106 | textureBuffer.position(0);
107 | GLES20.glVertexAttribPointer(glCamTextureCoordLoc, 2,
108 | GLES20.GL_FLOAT, false,
109 | 2 * 4, textureBuffer);
110 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer);
111 | GLES20.glUseProgram(glProgram);
112 | GLES20.glUniform4f(glImageRectLoc, rectF.left, rectF.top, rectF.right, rectF.bottom);
113 | // GLES20.glUniform1f(glImageAngelLoc, (float)(30.0f*Math.PI/180)); // updates the image rotation angle (in radians)
114 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
115 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture);
116 | GLES20.glUniform1i(glCamTextureLoc, 0);
117 | GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
118 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, imageTextureId);
119 | GLES20.glUniform1i(glImageTextureLoc, 1);
120 | GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer);
121 | GLES20.glDisableVertexAttribArray(glCamPostionLoc);
122 | GLES20.glDisableVertexAttribArray(glCamTextureCoordLoc);
123 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
124 | GLES20.glUseProgram(0);
125 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
126 | }
127 |
128 | @Override
129 | public void onDestroy() {
130 | super.onDestroy();
131 | GLES20.glDeleteProgram(glProgram);
132 | destroyImageTexture();
133 | }
134 |
135 | protected void destroyImageTexture() {
136 | for (ImageTexture imageTexture : imageTextures) {
137 | imageTexture.destroy();
138 | }
139 | }
140 |
141 | private RectF convertToRectF(Rect iconRect) {
142 | RectF iconRectF = new RectF();
143 | iconRectF.top = iconRect.top / (float) outVideoHeight;
144 | iconRectF.bottom = iconRect.bottom / (float) outVideoHeight;
145 | iconRectF.left = iconRect.left / (float) outVideoWidth;
146 | iconRectF.right = iconRect.right / (float) outVideoWidth;
147 | return iconRectF;
148 | }
149 |
150 | public static class ImageDrawData {
151 | public int resId = 0;
152 | public Rect rect;
153 | }
154 | }
155 |
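For illustration only, a minimal sketch (not part of the repository) of building this filter with two overlays; mRecorderClient and context are assumed from RecordingActivity, the Rect coordinates are output-video pixels as convertToRectF() implies, and R.drawable.nose_0 is taken from res/drawable-xhdpi/nose_0.png:

// Sketch only: overlay two images; each entry draws on top of the previous pass.
ArrayList<DrawMultiImageFilter.ImageDrawData> infos = new ArrayList<>();

DrawMultiImageFilter.ImageDrawData logo = new DrawMultiImageFilter.ImageDrawData();
logo.resId = R.drawable.t;
logo.rect = new Rect(100, 100, 238, 151); // left, top, right, bottom in output-video pixels
infos.add(logo);

DrawMultiImageFilter.ImageDrawData sticker = new DrawMultiImageFilter.ImageDrawData();
sticker.resId = R.drawable.nose_0;
sticker.rect = new Rect(200, 300, 328, 428); // hypothetical placement
infos.add(sticker);

mRecorderClient.setHardVideoFilter(new DrawMultiImageFilter(context, infos));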
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/filter/image/ImageDrawConstants.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.filter.image;
2 |
3 | /**
4 | * Created by ICE on 2017/11/6.
5 | */
6 |
7 | public class ImageDrawConstants {
8 | public static final String Default_vertexShader_filter = "" +
9 | "attribute vec4 aCamPosition;\n" +
10 | "attribute vec2 aCamTextureCoord;\n" +
11 | "varying vec2 vCamTextureCoord;\n" +
12 | "void main(){\n" +
13 | " gl_Position= aCamPosition;\n" +
14 | " vCamTextureCoord = aCamTextureCoord;\n" +
15 | "}";
16 | public static final String Default_fragmentshader_filter = "" +
17 | "precision highp float;\n" +
18 | "varying highp vec2 vCamTextureCoord;\n" +
19 | "uniform sampler2D uCamTexture;\n" +
20 | "void main(){\n" +
21 | " vec4 color = texture2D(uCamTexture, vCamTextureCoord);\n" +
22 | " gl_FragColor = color;\n" +
23 | "}";
24 |
25 | }
26 |
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/filter/image/ImageTexture.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.filter.image;
2 |
3 | import android.content.Context;
4 | import android.graphics.Bitmap;
5 | import android.opengl.GLES20;
6 | import android.support.annotation.DrawableRes;
7 |
8 | import com.icechn.videorecorder.tools.BitmapUtils;
9 | import com.icechn.videorecorder.tools.GLESTools;
10 |
11 | /**
12 | * Created by ICE on 2017/11/6.
13 | */
14 |
15 | public class ImageTexture {
16 | private int imageTextureId;
17 | private int frameBufferTextureId;
18 | private int frameBuffer;
19 | private int imageSize[];
20 | private int outWidth;
21 | private int outHeight;
22 |
23 | public ImageTexture(int outWidth, int outHeight) {
24 | imageSize = new int[2];
25 | this.outWidth = outWidth;
26 | this.outHeight = outHeight;
27 | }
28 |
29 | public ImageTexture load(Context context, String filePath, boolean isAssetsFile) {
30 | if (isAssetsFile) {
31 | return loadBitmap(BitmapUtils.loadBitmapFromAssets(context, filePath));
32 | } else {
33 | return loadBitmap(BitmapUtils.loadBitmapFromDisk(context, filePath));
34 | }
35 | }
36 |
37 | public ImageTexture load(Context context, @DrawableRes int resId) {
38 | return loadBitmap(BitmapUtils.loadBitmapFromRaw(context, resId));
39 | }
40 |
41 | public ImageTexture loadBitmap(Bitmap bitmap) {
42 | if (bitmap != null) {
43 | imageTextureId = GLESTools.loadTexture(bitmap, GLESTools.NO_TEXTURE);
44 | imageSize[0] = bitmap.getWidth();
45 | imageSize[1] = bitmap.getHeight();
46 | int[] frameBufferArr = new int[1];
47 | int[] frameBufferTextureArr = new int[1];
48 | GLESTools.createFrameBuff(frameBufferArr,
49 | frameBufferTextureArr,
50 | outWidth,
51 | outHeight);
52 | frameBuffer = frameBufferArr[0];
53 | frameBufferTextureId = frameBufferTextureArr[0];
54 | bitmap.recycle();
55 | }
56 | return this;
57 | }
58 |
59 | public void setImageTextureId(int imageTextureId) {
60 | this.imageTextureId = imageTextureId;
61 | }
62 |
63 | public int getImageTextureId() {
64 | return imageTextureId;
65 | }
66 | public int getTextureId() {
67 | return frameBufferTextureId;
68 | }
69 | public int getFrameBuffer() {
70 | return frameBuffer;
71 | }
72 |
73 | public int getImageWidth() {
74 | return imageSize[0];
75 | }
76 |
77 | public int getImageHeight() {
78 | return imageSize[1];
79 | }
80 |
81 | public float getImageRatio() {
82 | return 1.0f * imageSize[0] / imageSize[1];
83 | }
84 |
85 | public void destroy() {
86 | GLES20.glDeleteTextures(2, new int[]{imageTextureId, frameBufferTextureId}, 0);
87 | GLES20.glDeleteFramebuffers(1, new int[]{frameBuffer}, 0);
88 | }
89 | }
90 |
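For illustration only, a minimal sketch of the two load paths above; outVideoWidth, outVideoHeight and context are assumed from the calling filter, a current GL context is required (load() creates a texture and a framebuffer), and the assets path is hypothetical:

// Sketch only: fill an ImageTexture from a drawable resource or from an assets file.
ImageTexture fromResource = new ImageTexture(outVideoWidth, outVideoHeight)
        .load(context, R.drawable.t);                    // decodes via BitmapUtils.loadBitmapFromRaw()
ImageTexture fromAssets = new ImageTexture(outVideoWidth, outVideoHeight)
        .load(context, "stickers/watermark.png", true);  // hypothetical assets path, decodes via loadBitmapFromAssets()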
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/filter/softaudiofilter/BaseSoftAudioFilter.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.filter.softaudiofilter;
2 |
3 | /**
4 | * Created by lake on 14/06/16.
5 | * Librestreaming project.
6 | */
7 | public class BaseSoftAudioFilter {
8 | protected int SIZE;
9 | protected int SIZE_HALF;
10 |
11 | public void onInit(int size) {
12 | SIZE = size;
13 | SIZE_HALF = size/2;
14 | }
15 |
16 | /**
17 | * Process one buffer of PCM audio.
18 | * @param orignBuff the original audio buffer
19 | * @param targetBuff an output buffer the filter may write into
20 | * @param presentationTimeMs presentation time of this buffer in milliseconds
21 | * @param sequenceNum sequence number of this buffer
22 | * @return false to use orignBuff, true to use targetBuff
23 | */
24 | public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
25 | return false;
26 | }
27 |
28 | public void onDestroy() {
29 |
30 | }
31 | }
32 |
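The onFrame() contract above is the whole filter API: return false to keep orignBuff, true to have the pipeline take targetBuff. SetVolumeAudioFilter below edits orignBuff in place; as a hedged sketch (not part of the repository), a filter that uses the targetBuff path could look like this:

// Sketch only: write the result into targetBuff and return true so the pipeline uses it.
public class MuteAudioFilter extends BaseSoftAudioFilter {
    @Override
    public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
        java.util.Arrays.fill(targetBuff, 0, SIZE, (byte) 0); // output silence instead of the input PCM
        return true;
    }
}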
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/filter/softaudiofilter/SetVolumeAudioFilter.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.filter.softaudiofilter;
2 |
3 | /**
4 | * Created by lake on 14/06/16.
5 | * Librestreaming project.
6 | */
7 | public class SetVolumeAudioFilter extends BaseSoftAudioFilter {
8 | private float volumeScale=1.0f;
9 |
10 | public SetVolumeAudioFilter() {
11 | }
12 |
13 | /**
14 | * @param scale volume scale factor, 0.0 or greater; 1.0 leaves the volume unchanged
15 | */
16 | public void setVolumeScale(float scale) {
17 | volumeScale = scale;
18 | }
19 |
20 | @Override
21 | public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
22 | for (int i = 0; i < SIZE; i += 2) {
23 | short origin = (short) ((orignBuff[i + 1] << 8) | (orignBuff[i] & 0xff)); // 16-bit little-endian PCM sample
24 | int scaled = Math.max(Short.MIN_VALUE, Math.min(Short.MAX_VALUE, (int) (origin * volumeScale))); // clamp to avoid wrap-around when scaling up
25 | orignBuff[i + 1] = (byte) (scaled >> 8);
26 | orignBuff[i] = (byte) scaled;
27 | }
28 | return false;
29 | }
30 | }
31 |
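For illustration only, a short usage sketch; mRecorderClient is assumed from RecordingActivity, which attaches this filter via setSoftAudioFilter():

// Sketch only: attach the filter and halve the recorded volume.
SetVolumeAudioFilter volumeFilter = new SetVolumeAudioFilter();
volumeFilter.setVolumeScale(0.5f); // 1.0f leaves the volume unchanged
mRecorderClient.setSoftAudioFilter(volumeFilter);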
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/model/AudioBuff.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 |
3 | public class AudioBuff {
4 | public boolean isReadyToFill;
5 | public int audioFormat = -1;
6 | public byte[] buff;
7 |
8 | public AudioBuff(int audioFormat, int size) {
9 | isReadyToFill = true;
10 | this.audioFormat = audioFormat;
11 | buff = new byte[size];
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/model/MediaCodecGLWapper.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 |
3 | public class MediaCodecGLWapper extends ScreenGLWapper {
4 | }
5 |
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/model/MediaConfig.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 |
3 | public class MediaConfig {
4 |
5 | public static final int Rending_Model_OpenGLES = MediaMakerConfig.RENDERING_MODE_OPENGLES;
6 |
7 | public static class DirectionMode {
8 | public static final int FLAG_DIRECTION_FLIP_HORIZONTAL = MediaMakerConfig.FLAG_DIRECTION_FLIP_HORIZONTAL;
9 | public static final int FLAG_DIRECTION_FLIP_VERTICAL = MediaMakerConfig.FLAG_DIRECTION_FLIP_VERTICAL;
10 | public static final int FLAG_DIRECTION_ROATATION_0 = MediaMakerConfig.FLAG_DIRECTION_ROATATION_0;
11 | public static final int FLAG_DIRECTION_ROATATION_90 = MediaMakerConfig.FLAG_DIRECTION_ROATATION_90;
12 | public static final int FLAG_DIRECTION_ROATATION_180 = MediaMakerConfig.FLAG_DIRECTION_ROATATION_180;
13 | public static final int FLAG_DIRECTION_ROATATION_270 = MediaMakerConfig.FLAG_DIRECTION_ROATATION_270;
14 | }
15 |
16 | }
17 |
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/model/MediaMakerConfig.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 |
3 | import android.util.Log;
4 |
5 | import java.lang.reflect.Field;
6 | import java.lang.reflect.Modifier;
7 |
8 | public class MediaMakerConfig {
9 |
10 | public static final int RENDERING_MODE_OPENGLES = 2;
11 | /**
12 | * must stay in sync with the JNI definitions
13 | */
14 | public static final int FLAG_DIRECTION_FLIP_HORIZONTAL = 0x01;
15 | public static final int FLAG_DIRECTION_FLIP_VERTICAL = 0x02;
16 | public static final int FLAG_DIRECTION_ROATATION_0 = 0x10;
17 | public static final int FLAG_DIRECTION_ROATATION_90 = 0x20;
18 | public static final int FLAG_DIRECTION_ROATATION_180 = 0x40;
19 | public static final int FLAG_DIRECTION_ROATATION_270 = 0x80;
20 |
21 | public boolean done;
22 | public boolean printDetailMsg;
23 | public int renderingMode;
24 | public int frontCameraDirectionMode;
25 | public int backCameraDirectionMode;
26 | public boolean isPortrait;
27 | public int previewVideoWidth;
28 | public int previewVideoHeight;
29 | public int videoWidth;
30 | public int videoHeight;
31 | public int videoFPS;
32 | public int videoGOP;
33 | public float cropRatio;
34 | public int previewColorFormat;
35 | public int previewBufferSize;
36 | public int mediacodecAVCColorFormat;
37 | public int mediacdoecAVCBitRate;
38 | public int videoBufferQueueNum;
39 | public int audioBufferQueueNum;
40 | public int audioRecoderFormat;
41 | public int audioRecoderSampleRate;
42 | public int audioRecoderChannelConfig;
43 | public int audioRecoderSliceSize;
44 | public int audioRecoderSource;
45 | public int audioRecoderBufferSize;
46 | public int previewMaxFps;
47 | public int previewMinFps;
48 | public int mediacodecAVCFrameRate;
49 | public int mediacodecAVCIFrameInterval;
50 | public int mediacodecAVCProfile;
51 | public int mediacodecAVClevel;
52 |
53 | public int mediacodecAACProfile;
54 | public int mediacodecAACSampleRate;
55 | public int mediacodecAACChannelCount;
56 | public int mediacodecAACBitRate;
57 | public int mediacodecAACMaxInputSize;
58 |
59 | //face detect
60 | public boolean isFaceDetectEnable = false;
61 | public boolean isSquare = false;
62 |
63 | public boolean saveVideoEnable = false;
64 | public String saveVideoPath;
65 |
66 | public MediaMakerConfig() {
67 | done = false;
68 | printDetailMsg = false;
69 | videoWidth = -1;
70 | videoHeight = -1;
71 | videoFPS=-1;
72 | videoGOP=1;
73 | previewColorFormat = -1;
74 | mediacodecAVCColorFormat = -1;
75 | mediacdoecAVCBitRate = -1;
76 | videoBufferQueueNum = -1;
77 | audioBufferQueueNum = -1;
78 | mediacodecAVCFrameRate = -1;
79 | mediacodecAVCIFrameInterval = -1;
80 | mediacodecAVCProfile = -1;
81 | mediacodecAVClevel = -1;
82 | mediacodecAACProfile = -1;
83 | mediacodecAACSampleRate = -1;
84 | mediacodecAACChannelCount = -1;
85 | mediacodecAACBitRate = -1;
86 | mediacodecAACMaxInputSize = -1;
87 | }
88 |
89 | public void dump() {
90 | Log.e("",this.toString());
91 | }
92 |
93 | @Override
94 | public String toString() {
95 | StringBuilder sb = new StringBuilder();
96 | sb.append("ResParameter:");
97 | Field[] fields = this.getClass().getDeclaredFields();
98 | for (Field field : fields) {
99 | if (Modifier.isStatic(field.getModifiers())) {
100 | continue;
101 | }
102 | field.setAccessible(true);
103 | try {
104 | sb.append(field.getName());
105 | sb.append('=');
106 | sb.append(field.get(this));
107 | sb.append(';');
108 | } catch (IllegalAccessException e) {
109 | }
110 | }
111 | return sb.toString();
112 | }
113 | }
114 |
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/model/OffScreenGLWapper.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 |
3 | import android.opengl.EGLConfig;
4 | import android.opengl.EGLContext;
5 | import android.opengl.EGLDisplay;
6 | import android.opengl.EGLSurface;
7 |
8 | public class OffScreenGLWapper{
9 | public EGLConfig eglConfig;
10 | public EGLDisplay eglDisplay;
11 | public EGLSurface eglSurface;
12 | public EGLContext eglContext;
13 |
14 | public int cam2dProgram;
15 | public int cam2dTextureMatrix;
16 | public int cam2dTextureLoc;
17 | public int cam2dPostionLoc;
18 | public int cam2dTextureCoordLoc;
19 |
20 | public int camProgram;
21 | public int camTextureLoc;
22 | public int camPostionLoc;
23 | public int camTextureCoordLoc;
24 |
25 | }
26 |
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/model/RecordConfig.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 |
3 |
4 | import android.hardware.Camera;
5 |
6 | public class RecordConfig {
7 | private Size targetVideoSize;
8 | private int videoBufferQueueNum;
9 | private int bitRate;
10 | private int renderingMode;
11 | private int defaultCamera;
12 | private int frontCameraDirectionMode;
13 | private int backCameraDirectionMode;
14 | private int videoFPS;
15 | private int videoGOP;
16 | private boolean printDetailMsg;
17 |
18 |
19 | private RecordConfig() {
20 | }
21 |
22 | public static RecordConfig obtain() {
23 | RecordConfig res = new RecordConfig();
24 | res.setRenderingMode(MediaConfig.Rending_Model_OpenGLES);
25 | res.setTargetVideoSize(new Size(640, 480));
26 | res.setVideoFPS(25);
27 | res.setVideoGOP(1);
28 | res.setVideoBufferQueueNum(5);
29 | res.setBitRate(2000000);
30 | res.setPrintDetailMsg(false);
31 | res.setDefaultCamera(Camera.CameraInfo.CAMERA_FACING_BACK);
32 | res.setBackCameraDirectionMode(MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0);
33 | res.setFrontCameraDirectionMode(MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0);
34 | return res;
35 | }
36 |
37 |
38 | /**
39 | * set the default camera to start stream
40 | */
41 | public void setDefaultCamera(int defaultCamera) {
42 | this.defaultCamera = defaultCamera;
43 | }
44 |
45 | /**
46 | * set front camera rotation & flip
47 | * @param frontCameraDirectionMode {@link MediaConfig.DirectionMode}
48 | */
49 | public void setFrontCameraDirectionMode(int frontCameraDirectionMode) {
50 | this.frontCameraDirectionMode = frontCameraDirectionMode;
51 | }
52 | /**
53 | * set back camera rotation & flip
54 | * @param backCameraDirectionMode {@link MediaConfig.DirectionMode}
55 | */
56 | public void setBackCameraDirectionMode(int backCameraDirectionMode) {
57 | this.backCameraDirectionMode = backCameraDirectionMode;
58 | }
59 |
60 | /**
61 | * set the renderingMode used in soft mode;
62 | * it has no effect in hard mode
63 | * @param renderingMode {@link MediaConfig#Rending_Model_OpenGLES}
64 | */
65 | public void setRenderingMode(int renderingMode) {
66 | this.renderingMode = renderingMode;
67 | }
68 |
69 | /**
70 | * not used for now
71 | * @param printDetailMsg
72 | */
73 | public void setPrintDetailMsg(boolean printDetailMsg) {
74 | this.printDetailMsg = printDetailMsg;
75 | }
76 |
77 | /**
78 | * set the target video size.
79 | * the real video size may differ from it, depending on the device.
80 | * @param videoSize
81 | */
82 | public void setTargetVideoSize(Size videoSize) {
83 | targetVideoSize = videoSize;
84 | }
85 |
86 | /**
87 | * set video buffer number for soft mode.
88 | * a larger num means smoother video but more memory.
89 | * @param num
90 | */
91 | public void setVideoBufferQueueNum(int num) {
92 | videoBufferQueueNum = num;
93 | }
94 |
95 | /**
96 | * set video bitrate
97 | * @param bitRate
98 | */
99 | public void setBitRate(int bitRate) {
100 | this.bitRate = bitRate;
101 | }
102 |
103 | public int getVideoFPS() {
104 | return videoFPS;
105 | }
106 |
107 | public void setVideoFPS(int videoFPS) {
108 | this.videoFPS = videoFPS;
109 | }
110 |
111 | public int getVideoGOP(){
112 | return videoGOP;
113 | }
114 |
115 | public void setVideoGOP(int videoGOP){
116 | this.videoGOP = videoGOP;
117 | }
118 |
119 | public int getVideoBufferQueueNum() {
120 | return videoBufferQueueNum;
121 | }
122 |
123 | public int getBitRate() {
124 | return bitRate;
125 | }
126 |
127 | public Size getTargetVideoSize() {
128 | return targetVideoSize;
129 | }
130 |
131 | public int getDefaultCamera() {
132 | return defaultCamera;
133 | }
134 |
135 | public int getBackCameraDirectionMode() {
136 | return backCameraDirectionMode;
137 | }
138 |
139 | public int getFrontCameraDirectionMode() {
140 | return frontCameraDirectionMode;
141 | }
142 |
143 | public int getRenderingMode() {
144 | return renderingMode;
145 | }
146 |
147 | public boolean isPrintDetailMsg() {
148 | return printDetailMsg;
149 | }
150 |
151 | private boolean square = false;
152 | public void setSquare(boolean enable) {
153 | this.square = enable;
154 | }
155 | public boolean isSquare() {
156 | return this.square;
157 | }
158 |
159 | public boolean isSaveVideoEnable() {
160 | return true;
161 | }
162 |
163 | private String saveVideoPath = null;
164 | public void setSaveVideoPath(String path) {
165 | this.saveVideoPath = path;
166 | }
167 | public String getSaveVideoPath() {
168 | return this.saveVideoPath;
169 | }
170 |
171 | }
172 |
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/model/ScreenGLWapper.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 |
3 | import android.opengl.EGLConfig;
4 | import android.opengl.EGLContext;
5 | import android.opengl.EGLDisplay;
6 | import android.opengl.EGLSurface;
7 |
8 | public class ScreenGLWapper {
9 | public EGLDisplay eglDisplay;
10 | public EGLConfig eglConfig;
11 | public EGLSurface eglSurface;
12 | public EGLContext eglContext;
13 |
14 | public int drawProgram;
15 | public int drawTextureLoc;
16 | public int drawPostionLoc;
17 | public int drawTextureCoordLoc;
18 | }
19 |
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/model/Size.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.model;
2 |
3 | public final class Size {
4 | /**
5 | * Create a new immutable Size instance.
6 | *
7 | * @param width The width of the size, in pixels
8 | * @param height The height of the size, in pixels
9 | */
10 | public Size(int width, int height) {
11 | mWidth = width;
12 | mHeight = height;
13 | }
14 |
15 | /**
16 | * Get the width of the size (in pixels).
17 | *
18 | * @return width
19 | */
20 | public int getWidth() {
21 | return mWidth;
22 | }
23 |
24 | /**
25 | * Get the height of the size (in pixels).
26 | *
27 | * @return height
28 | */
29 | public int getHeight() {
30 | return mHeight;
31 | }
32 |
33 | /**
34 | * Check if this size is equal to another size.
35 | *
36 | * Two sizes are equal if and only if both their widths and heights are
37 | * equal.
38 | *
39 | *
40 | * A size object is never equal to any other type of object.
41 | *
42 | *
43 | * @return {@code true} if the objects were equal, {@code false} otherwise
44 | */
45 | @Override
46 | public boolean equals(final Object obj) {
47 | if (obj == null) {
48 | return false;
49 | }
50 | if (this == obj) {
51 | return true;
52 | }
53 | if (obj instanceof Size) {
54 | Size other = (Size) obj;
55 | return mWidth == other.mWidth && mHeight == other.mHeight;
56 | }
57 | return false;
58 | }
59 |
60 | /**
61 | * Return the size represented as a string with the format {@code "WxH"}
62 | *
63 | * @return string representation of the size
64 | */
65 | @Override
66 | public String toString() {
67 | return mWidth + "x" + mHeight;
68 | }
69 |
70 | private static NumberFormatException invalidSize(String s) {
71 | throw new NumberFormatException("Invalid Size: \"" + s + "\"");
72 | }
73 |
74 | /**
75 | * {@inheritDoc}
76 | */
77 | @Override
78 | public int hashCode() {
79 | // assuming most sizes are <2^16, doing a rotate will give us perfect hashing
80 | return mHeight ^ ((mWidth << (Integer.SIZE / 2)) | (mWidth >>> (Integer.SIZE / 2)));
81 | }
82 |
83 | private final int mWidth;
84 | private final int mHeight;
85 | }
86 |
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/tools/BitmapUtils.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.tools;
2 |
3 | import android.content.Context;
4 | import android.graphics.Bitmap;
5 | import android.graphics.BitmapFactory;
6 | import android.graphics.ImageFormat;
7 | import android.graphics.Rect;
8 | import android.graphics.YuvImage;
9 | import android.os.Environment;
10 | import android.util.Log;
11 |
12 | import java.io.BufferedOutputStream;
13 | import java.io.ByteArrayOutputStream;
14 | import java.io.File;
15 | import java.io.FileInputStream;
16 | import java.io.FileOutputStream;
17 | import java.io.IOException;
18 | import java.io.InputStream;
19 |
20 | public class BitmapUtils {
21 | public static Bitmap loadBitmapFromAssets(Context context, String filePath) {
22 | InputStream inputStream = null;
23 | try {
24 | inputStream = context.getResources().getAssets().open(filePath);
25 | } catch (IOException e) {
26 | e.printStackTrace();
27 | }
28 | if (inputStream == null) return null;
29 | BitmapFactory.Options options = new BitmapFactory.Options();
30 | options.inScaled = false;
31 | Bitmap bitmap = BitmapFactory.decodeStream(inputStream, null, options); // pass options so inScaled = false is honored
32 | return bitmap;
33 | }
34 | public static Bitmap loadBitmapFromDisk(Context context, String filePath) {
35 | InputStream inputStream = null;
36 | try {
37 | inputStream = new FileInputStream(filePath);
38 | } catch (IOException e) {
39 | e.printStackTrace();
40 | }
41 | if (inputStream == null) return null;
42 | BitmapFactory.Options options = new BitmapFactory.Options();
43 | options.inScaled = false;
44 | Bitmap bitmap = BitmapFactory.decodeStream(inputStream, null, options); // pass options so inScaled = false is honored
45 | return bitmap;
46 | }
47 |
48 | public static Bitmap loadBitmapFromRaw(Context context, int resourceId) {
49 | BitmapFactory.Options options = new BitmapFactory.Options();
50 | options.inScaled = false;
51 | Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
52 | return bitmap;
53 | }
54 |
55 | public static void saveBitmap(byte[] buffer, int width, int height) {
56 | try {
57 | // use image.compressToJpeg() to convert the NV21 YUV frame data into JPEG
58 | YuvImage image = new YuvImage(buffer, ImageFormat.NV21, width,
59 | height, null);
60 | if (image != null) {
61 | ByteArrayOutputStream outstream = new ByteArrayOutputStream();
62 | image.compressToJpeg(new Rect(0, 0, width, height), 80, outstream);
63 |
64 | ByteArrayOutputStream stream = new ByteArrayOutputStream();
65 | image.compressToJpeg(new Rect(0,0,width,height),80,stream);
66 | Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(),0,stream.size());
67 |
68 | saveBitmap(bmp);
69 |
70 | outstream.flush();
71 | }
72 | } catch (Exception ex) {
73 | Log.e("Sys", "Error:" + ex.getMessage());
74 | }
75 | }
76 |
77 | // save the bitmap as a JPEG file
78 | private static void saveBitmap(Bitmap b){
79 | String path = Environment.getExternalStorageDirectory()+ "/Omoshiroi/photo/";
80 | File folder=new File(path);
81 | if(!folder.exists()&&!folder.mkdirs()){
82 | Log.i("SaveBitmap", "save pic fail");
83 | return;
84 | }
85 | long dataTake = System.currentTimeMillis();
86 | final String jpegName=path+ dataTake +".jpg";
87 | try {
88 | FileOutputStream fout = new FileOutputStream(jpegName);
89 | BufferedOutputStream bos = new BufferedOutputStream(fout);
90 | b.compress(Bitmap.CompressFormat.JPEG, 100, bos);
91 | bos.flush();
92 | bos.close();
93 | } catch (IOException e) {
94 | e.printStackTrace();
95 | }
96 | Log.i("SaveBitmap", "save pic success:"+jpegName);
97 |
98 | }
99 | }
100 |
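For illustration only, a hedged sketch of feeding saveBitmap(byte[], int, int) from the legacy Camera API, whose default preview format is NV21; the open camera instance is assumed:

// Sketch only: capture one preview frame and write it to /Omoshiroi/photo/ as a JPEG.
camera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        Camera.Size previewSize = camera.getParameters().getPreviewSize();
        BitmapUtils.saveBitmap(data, previewSize.width, previewSize.height);
    }
});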
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/tools/ByteArrayTools.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.tools;
2 |
3 | public class ByteArrayTools {
4 | public static void intToByteArrayFull(byte[] dst, int pos, int interger) {
5 | dst[pos] = (byte) ((interger >> 24) & 0xFF);
6 | dst[pos + 1] = (byte) ((interger >> 16) & 0xFF);
7 | dst[pos + 2] = (byte) ((interger >> 8) & 0xFF);
8 | dst[pos + 3] = (byte) ((interger) & 0xFF);
9 | }
10 |
11 | public static void intToByteArrayTwoByte(byte[] dst, int pos, int interger) {
12 | dst[pos] = (byte) ((interger >> 8) & 0xFF);
13 | dst[pos + 1] = (byte) ((interger) & 0xFF);
14 | }
15 | }
16 |
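A quick worked example of the big-endian packing these helpers perform:

// intToByteArrayFull writes the 32-bit value most-significant byte first.
byte[] dst = new byte[4];
ByteArrayTools.intToByteArrayFull(dst, 0, 0x11223344);
// dst is now { 0x11, 0x22, 0x33, 0x44 }
ByteArrayTools.intToByteArrayTwoByte(dst, 0, 0xABCD);
// dst[0..1] is now { 0xAB, 0xCD }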
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/tools/GLESTools.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.tools;
2 |
3 | import android.content.res.Resources;
4 | import android.graphics.Bitmap;
5 | import android.opengl.GLES20;
6 | import android.opengl.GLUtils;
7 | import android.util.Log;
8 |
9 | import java.io.BufferedReader;
10 | import java.io.InputStream;
11 | import java.io.InputStreamReader;
12 |
13 | public class GLESTools {
14 | public static int FLOAT_SIZE_BYTES = 4;
15 | public static int SHORT_SIZE_BYTES = 2;
16 |
17 | public static String readTextFile(Resources res, int resId) {
18 | InputStream inputStream = res.openRawResource(resId);
19 | BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
20 | String line;
21 | StringBuilder result = new StringBuilder();
22 | try {
23 | while ((line = br.readLine()) != null) {
24 | result.append(line);
25 | result.append("\n");
26 | }
27 | } catch (Exception e) {
28 | e.printStackTrace();
29 | return null;
30 | }
31 | return result.toString();
32 | }
33 |
34 | public static int createProgram(Resources res, int vertexShaderResId, int fragmentShaderResId) {
35 | String vertexShaderCode = readTextFile(res, vertexShaderResId);
36 | String fragmentShaderCode = readTextFile(res, fragmentShaderResId);
37 | return createProgram(vertexShaderCode, fragmentShaderCode);
38 | }
39 |
40 | public static int createProgram(String vertexShaderCode, String fragmentShaderCode) {
41 | if (vertexShaderCode == null || fragmentShaderCode == null) {
42 | throw new RuntimeException("invalid shader code");
43 | }
44 | int vertexShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
45 | int fragmentShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
46 |
47 | GLES20.glShaderSource(vertexShader, vertexShaderCode);
48 | GLES20.glShaderSource(fragmentShader, fragmentShaderCode);
49 | int[] status = new int[1];
50 | GLES20.glCompileShader(vertexShader);
51 | GLES20.glGetShaderiv(vertexShader, GLES20.GL_COMPILE_STATUS, status, 0);
52 | if (GLES20.GL_FALSE == status[0]) {
53 | throw new RuntimeException("vertext shader compile,failed:" + GLES20.glGetShaderInfoLog(vertexShader));
54 | }
55 | GLES20.glCompileShader(fragmentShader);
56 | GLES20.glGetShaderiv(fragmentShader, GLES20.GL_COMPILE_STATUS, status, 0);
57 | if (GLES20.GL_FALSE == status[0]) {
58 | throw new RuntimeException("fragment shader compile,failed:" + GLES20.glGetShaderInfoLog(fragmentShader));
59 | }
60 | int program = GLES20.glCreateProgram();
61 | GLES20.glAttachShader(program, vertexShader);
62 | GLES20.glAttachShader(program, fragmentShader);
63 | GLES20.glLinkProgram(program);
64 | GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
65 | if (GLES20.GL_FALSE == status[0]) {
66 | throw new RuntimeException("link program,failed:" + GLES20.glGetProgramInfoLog(program));
67 | }
68 | return program;
69 | }
70 |
71 | public static void checkGlError(String op) {
72 | int error = GLES20.glGetError();
73 | if (error != GLES20.GL_NO_ERROR) {
74 | String msg = op + ": glError 0x" + Integer.toHexString(error);
75 | Log.d("",msg);
76 | throw new RuntimeException(msg);
77 | }
78 | }
79 |
80 | public static final int NO_TEXTURE = -1;
81 |
82 | public static int loadTexture(final Bitmap image, final int reUseTexture) {
83 | int[] texture = new int[1];
84 | if (reUseTexture == NO_TEXTURE) {
85 | GLES20.glGenTextures(1, texture, 0);
86 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture[0]);
87 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
88 | GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
89 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
90 | GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
91 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
92 | GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
93 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
94 | GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
95 | GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, image, 0);
96 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
97 | } else {
98 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, reUseTexture);
99 | GLUtils.texSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, image);
100 | texture[0] = reUseTexture;
101 | }
102 | return texture[0];
103 | }
104 |
105 | public static void createFrameBuff(int[] frameBuffer, int[] frameBufferTex, int width, int height) {
106 | GLES20.glGenFramebuffers(1, frameBuffer, 0);
107 | GLES20.glGenTextures(1, frameBufferTex, 0);
108 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameBufferTex[0]);
109 | GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
110 | GLESTools.checkGlError("createCamFrameBuff");
111 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
112 | GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
113 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
114 | GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
115 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
116 | GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
117 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
118 | GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
119 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer[0]);
120 | GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, frameBufferTex[0], 0);
121 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
122 | GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
123 | GLESTools.checkGlError("createCamFrameBuff");
124 | }
125 |
126 | // load a text file (e.g. a shader) from assets by path
127 | public static String uRes(Resources mRes, String path) {
128 | StringBuilder result = new StringBuilder();
129 | try {
130 | InputStream is = mRes.getAssets().open(path);
131 | int ch;
132 | byte[] buffer = new byte[1024];
133 | while (-1 != (ch = is.read(buffer))) {
134 | result.append(new String(buffer, 0, ch));
135 | }
136 | } catch (Exception e) {
137 | return null;
138 | }
139 | return result.toString().replaceAll("\\r\\n", "\n");
140 | }
141 |
142 | }
143 |
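For illustration only, a minimal sketch of using these helpers with the default shaders defined in ImageDrawConstants; it assumes a current GL context:

// Sketch only: compile/link the pass-through shaders and look up the handles they declare.
int program = GLESTools.createProgram(ImageDrawConstants.Default_vertexShader_filter,
        ImageDrawConstants.Default_fragmentshader_filter);
GLESTools.checkGlError("createProgram");
int positionLoc = GLES20.glGetAttribLocation(program, "aCamPosition");
int texCoordLoc = GLES20.glGetAttribLocation(program, "aCamTextureCoord");
int cameraTexLoc = GLES20.glGetUniformLocation(program, "uCamTexture");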
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/tools/TimeHandler.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.tools;
2 |
3 | import android.os.Handler;
4 | import android.os.Looper;
5 | import android.os.Message;
6 |
7 | public class TimeHandler
8 | extends Handler {
9 | public static final int WHAT_233=0;
10 | private long delayTimeInMils;
11 | private boolean freeNow;
12 | private Task task;
13 | private boolean shouldContinue;
14 |
15 | public TimeHandler(Looper looper, Task task) {
16 | super(looper);
17 | this.task = task;
18 | freeNow=true;
19 | shouldContinue=false;
20 | }
21 |
22 | public void clearMsg() {
23 | while (hasMessages(WHAT_233)) {
24 | removeMessages(WHAT_233);
25 | }
26 | shouldContinue = false;
27 | freeNow = true;
28 | }
29 |
30 | public void sendSingleMsg(long timeDelayed) {
31 | clearMsg();
32 | freeNow = false;
33 | shouldContinue = false;
34 | sendEmptyMessageDelayed(WHAT_233, timeDelayed);
35 | }
36 |
37 | public void sendLoopMsg(long timeDelayed, long timeDelayedInLoop) {
38 | clearMsg();
39 | freeNow = false;
40 | delayTimeInMils = timeDelayedInLoop;
41 | shouldContinue = true;
42 | sendEmptyMessageDelayed(WHAT_233, timeDelayed);
43 | }
44 |
45 | public void handleMessage(Message paramMessage) {
46 | if (task != null) {
47 | task.run();
48 | }
49 | if (shouldContinue) {
50 | sendEmptyMessageDelayed(WHAT_233, delayTimeInMils);
51 | }
52 | }
53 |
54 | public boolean isFreeNow() {
55 | return freeNow;
56 | }
57 |
58 | public interface Task {
59 | void run();
60 | }
61 | }
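For illustration only, a short usage sketch of the loop mode:

// Sketch only: run a task every 40 ms on the main looper, starting after a 1 s delay.
TimeHandler timeHandler = new TimeHandler(Looper.getMainLooper(), new TimeHandler.Task() {
    @Override
    public void run() {
        // periodic work goes here
    }
});
timeHandler.sendLoopMsg(1000L, 40L);
// ...and stop it later:
timeHandler.clearMsg();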
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/tools/VideoSplicer.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.tools;
2 |
3 | import android.media.MediaCodec;
4 | import android.media.MediaCodec.BufferInfo;
5 | import android.media.MediaExtractor;
6 | import android.media.MediaFormat;
7 | import android.media.MediaMuxer;
8 | import android.util.Log;
9 |
10 | import java.io.IOException;
11 | import java.nio.ByteBuffer;
12 | import java.util.ArrayList;
13 | import java.util.Iterator;
14 |
15 | /**
16 | * Created by ICE on 2018/2/6.
17 | */
18 |
19 | //@TargetApi(18)
20 | public class VideoSplicer {
21 |
22 | private final String TAG = "VideoSplicer";
23 | private ArrayList<String> mVideoList;
24 | private String mOutFilename;
25 |
26 | private MediaMuxer mMuxer;
27 | private ByteBuffer mReadBuf;
28 | private int mOutAudioTrackIndex;
29 | private int mOutVideoTrackIndex;
30 | private MediaFormat mAudioFormat;
31 | private MediaFormat mVideoFormat;
32 |
33 | public VideoSplicer(ArrayList<String> videoList, String outFilename) {
34 | mVideoList = videoList;
35 | this.mOutFilename = outFilename;
36 | mReadBuf = ByteBuffer.allocate(1048576);
37 | }
38 |
39 | public boolean joinVideo() {
40 | boolean getAudioFormat = false;
41 | boolean getVideoFormat = false;
42 | Iterator<String> videoIterator = mVideoList.iterator();
43 |
44 | //--------step 1: use MediaExtractor to read the track formats MediaMuxer needs to create the output file
45 | while (videoIterator.hasNext()) {
46 | String videoPath = videoIterator.next();
47 | MediaExtractor extractor = new MediaExtractor();
48 |
49 | try {
50 | extractor.setDataSource(videoPath);
51 | } catch (Exception ex) {
52 | ex.printStackTrace();
53 | }
54 |
55 | int trackIndex;
56 | if (!getVideoFormat) {
57 | trackIndex = this.selectTrack(extractor, "video/");
58 | if (trackIndex < 0) {
59 | Log.e(TAG, "No video track found in " + videoPath);
60 | } else {
61 | extractor.selectTrack(trackIndex);
62 | mVideoFormat = extractor.getTrackFormat(trackIndex);
63 | getVideoFormat = true;
64 | }
65 | }
66 |
67 | if (!getAudioFormat) {
68 | trackIndex = this.selectTrack(extractor, "audio/");
69 | if (trackIndex < 0) {
70 | Log.e(TAG, "No audio track found in " + videoPath);
71 | } else {
72 | extractor.selectTrack(trackIndex);
73 | mAudioFormat = extractor.getTrackFormat(trackIndex);
74 | getAudioFormat = true;
75 | }
76 | }
77 |
78 | extractor.release();
79 | if (getVideoFormat && getAudioFormat) {
80 | break;
81 | }
82 | }
83 |
84 | try {
85 | mMuxer = new MediaMuxer(this.mOutFilename, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
86 | } catch (IOException e) {
87 | e.printStackTrace();
88 | }
89 | if (getVideoFormat) {
90 | mOutVideoTrackIndex = mMuxer.addTrack(mVideoFormat);
91 | }
92 | if (getAudioFormat) {
93 | mOutAudioTrackIndex = mMuxer.addTrack(mAudioFormat);
94 | }
95 | mMuxer.start();
96 | //--------step 1 end---------------------------//
97 |
98 |
99 | //--------step 2: iterate over the files, read samples with MediaExtractor, write them with MediaMuxer, and track the timestamps
100 | long ptsOffset = 0L;
101 | Iterator<String> videoPathIterator = mVideoList.iterator();
102 | while (videoPathIterator.hasNext()) {
103 | String videoPath = videoPathIterator.next();
104 | boolean hasVideo = true;
105 | boolean hasAudio = true;
106 | MediaExtractor videoExtractor = new MediaExtractor();
107 |
108 | try {
109 | videoExtractor.setDataSource(videoPath);
110 | } catch (Exception e) {
111 | e.printStackTrace();
112 | }
113 |
114 | int inVideoTrackIndex = this.selectTrack(videoExtractor, "video/");
115 | if (inVideoTrackIndex < 0) {
116 | hasVideo = false;
117 | } else {
118 | videoExtractor.selectTrack(inVideoTrackIndex); // only select the track when it exists; selectTrack(-1) would throw
119 | }
120 | MediaExtractor audioExtractor = new MediaExtractor();
121 |
122 | try {
123 | audioExtractor.setDataSource(videoPath);
124 | } catch (Exception e) {
125 | e.printStackTrace();
126 | }
127 |
128 | int inAudioTrackIndex = this.selectTrack(audioExtractor, "audio/");
129 | if (inAudioTrackIndex < 0) {
130 | hasAudio = false;
131 | } else {
132 | audioExtractor.selectTrack(inAudioTrackIndex); // only select the track when it exists
133 | }
134 | boolean bMediaDone = false;
135 | long presentationTimeUs = 0L;
136 | long audioPts = 0L;
137 | long videoPts = 0L;
138 |
139 | while (!bMediaDone) {
140 | if (!hasVideo && !hasAudio) {
141 | break;
142 | }
143 |
144 | int outTrackIndex;
145 | MediaExtractor extractor;
146 | int currenttrackIndex;
147 | if ((!hasVideo || audioPts - videoPts <= 50000L) && hasAudio) {
148 | currenttrackIndex = inAudioTrackIndex;
149 | outTrackIndex = mOutAudioTrackIndex;
150 | extractor = audioExtractor;
151 | } else {
152 | currenttrackIndex = inVideoTrackIndex;
153 | outTrackIndex = mOutVideoTrackIndex;
154 | extractor = videoExtractor;
155 | }
156 |
157 | mReadBuf.rewind();
158 | int chunkSize = extractor.readSampleData(mReadBuf, 0); // read one sample of frame data
159 | if (chunkSize < 0) {
160 | if (currenttrackIndex == inVideoTrackIndex) {
161 | hasVideo = false;
162 | } else if (currenttrackIndex == inAudioTrackIndex) {
163 | hasAudio = false;
164 | }
165 | } else {
166 | if (extractor.getSampleTrackIndex() != currenttrackIndex) {
167 | Log.e(TAG, "WEIRD: got sample from track " + extractor.getSampleTrackIndex() + ", expected " + currenttrackIndex);
168 | }
169 |
170 | presentationTimeUs = extractor.getSampleTime(); // pts of this sample
171 | if (currenttrackIndex == inVideoTrackIndex) {
172 | videoPts = presentationTimeUs;
173 | } else {
174 | audioPts = presentationTimeUs;
175 | }
176 |
177 | BufferInfo info = new BufferInfo();
178 | info.offset = 0;
179 | info.size = chunkSize;
180 | info.presentationTimeUs = ptsOffset + presentationTimeUs; // re-base the pts onto the joined timeline
181 | if ((extractor.getSampleFlags() & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
182 | info.flags = MediaCodec.BUFFER_FLAG_KEY_FRAME;
183 | }
184 |
185 | mReadBuf.rewind();
186 | Log.i(TAG, String.format("write sample track %d, size %d, pts %d flag %d", outTrackIndex, info.size, info.presentationTimeUs, info.flags));
187 | mMuxer.writeSampleData(outTrackIndex, mReadBuf, info); // write the sample into the output file
188 | extractor.advance();
189 | }
190 | }
191 |
192 | //record the last pts of the current file and use it as the pts offset for the next file
193 | ptsOffset += videoPts > audioPts ? videoPts : audioPts;
194 | ptsOffset += 10000L; // leave a 10 ms gap between the last frame of one file and the first frame of the next; a rough estimate, but good enough
195 |
196 | Log.i(TAG, "finish one file, ptsOffset " + ptsOffset);
197 |
198 | videoExtractor.release();
199 | audioExtractor.release();
200 | }
201 |
202 | if (mMuxer != null) {
203 | try {
204 | mMuxer.stop();
205 | mMuxer.release();
206 | } catch (Exception e) {
207 | Log.e(TAG, "Muxer close error. No data was written");
208 | }
209 |
210 | mMuxer = null;
211 | }
212 |
213 | Log.i(TAG, "video join finished");
214 | return true;
215 | }
216 |
217 | private int selectTrack(MediaExtractor extractor, String mimePrefix) {
218 | int numTracks = extractor.getTrackCount();
219 |
220 | for (int i = 0; i < numTracks; ++i) {
221 | MediaFormat format = extractor.getTrackFormat(i);
222 | String mime = format.getString("mime");
223 | if (mime.startsWith(mimePrefix)) {
224 | return i;
225 | }
226 | }
227 |
228 | return -1;
229 | }
230 | }
231 |
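For illustration only, a hedged usage sketch; the file paths are hypothetical, and joinVideo() blocks until muxing finishes, so it should run off the main thread:

// Sketch only: concatenate two previously recorded MP4 files into one output file.
ArrayList<String> parts = new ArrayList<>();
parts.add("/sdcard/live_save_video_part1.mp4");
parts.add("/sdcard/live_save_video_part2.mp4");
boolean joined = new VideoSplicer(parts, "/sdcard/live_save_video_joined.mp4").joinVideo();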
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/ui/AspectTextureView.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.ui;
2 |
3 | import android.content.Context;
4 | import android.util.AttributeSet;
5 | import android.view.TextureView;
6 | import android.view.View;
7 |
8 | public class AspectTextureView extends TextureView {
9 | public static final int MODE_FITXY = 0;
10 | public static final int MODE_INSIDE = 1;
11 | public static final int MODE_OUTSIDE = 2;
12 | private double targetAspect = -1;
13 | private int aspectMode = MODE_OUTSIDE;
14 |
15 | public AspectTextureView(Context context) {
16 | super(context);
17 | }
18 |
19 | public AspectTextureView(Context context, AttributeSet attrs) {
20 | super(context, attrs);
21 | }
22 |
23 | public AspectTextureView(Context context, AttributeSet attrs, int defStyleAttr) {
24 | super(context, attrs, defStyleAttr);
25 | }
26 |
27 | /**
28 | * @param mode {@link #MODE_FITXY},{@link #MODE_INSIDE},{@link #MODE_OUTSIDE}
29 | * @param aspectRatio width/height
30 | */
31 | public void setAspectRatio(int mode, double aspectRatio) {
32 | if (mode != MODE_INSIDE && mode != MODE_OUTSIDE && mode != MODE_FITXY) {
33 | throw new IllegalArgumentException("illegal mode");
34 | }
35 | if (aspectRatio < 0) {
36 | throw new IllegalArgumentException("illegal aspect ratio");
37 | }
38 | if (targetAspect != aspectRatio || aspectMode != mode) {
39 | targetAspect = aspectRatio;
40 | aspectMode = mode;
41 | requestLayout();
42 | }
43 | }
44 |
45 | @Override
46 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
47 | if (targetAspect > 0) {
48 | int initialWidth = MeasureSpec.getSize(widthMeasureSpec);
49 | int initialHeight = MeasureSpec.getSize(heightMeasureSpec);
50 |
51 | double viewAspectRatio = (double) initialWidth / initialHeight;
52 | double aspectDiff = targetAspect / viewAspectRatio - 1;
53 |
54 | if (Math.abs(aspectDiff) > 0.01 && aspectMode != MODE_FITXY) {
55 | if (aspectMode == MODE_INSIDE) {
56 | if (aspectDiff > 0) {
57 | initialHeight = (int) (initialWidth / targetAspect);
58 | } else {
59 | initialWidth = (int) (initialHeight * targetAspect);
60 | }
61 | } else if (aspectMode == MODE_OUTSIDE) {
62 | if (aspectDiff > 0) {
63 | initialWidth = (int) (initialHeight * targetAspect);
64 | } else {
65 | initialHeight = (int) (initialWidth / targetAspect);
66 | }
67 | }
68 | widthMeasureSpec = MeasureSpec.makeMeasureSpec(initialWidth, MeasureSpec.EXACTLY);
69 | heightMeasureSpec = MeasureSpec.makeMeasureSpec(initialHeight, MeasureSpec.EXACTLY);
70 | }
71 | }
72 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
73 | }
74 |
75 | @Override
76 | public void layout(int l, int t, int r, int b) {
77 | View p = (View) getParent();
78 | if (p != null) {
79 | int pw = p.getMeasuredWidth();
80 | int ph = p.getMeasuredHeight();
81 | int w = getMeasuredWidth();
82 | int h = getMeasuredHeight();
83 | t = (ph - h) / 2;
84 | l = (pw - w) / 2;
85 | r += l;
86 | b += t;
87 | }
88 | super.layout(l, t, r, b);
89 | }
90 | }
91 |
--------------------------------------------------------------------------------
/app/src/main/java/com/icechn/videorecorder/ui/RecordingActivity.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder.ui;
2 |
3 | import android.content.Intent;
4 | import android.content.res.Configuration;
5 | import android.graphics.Rect;
6 | import android.graphics.SurfaceTexture;
7 | import android.hardware.Camera;
8 | import android.os.Bundle;
9 | import android.os.Environment;
10 | import android.os.Handler;
11 | import android.support.v7.app.AppCompatActivity;
12 | import android.util.Log;
13 | import android.view.TextureView;
14 | import android.view.View;
15 | import android.widget.Button;
16 | import android.widget.Toast;
17 |
18 | import com.icechn.videorecorder.R;
19 | import com.icechn.videorecorder.client.RecorderClient;
20 | import com.icechn.videorecorder.core.listener.IVideoChange;
21 | import com.icechn.videorecorder.filter.image.DrawMultiImageFilter;
22 | import com.icechn.videorecorder.filter.image.DrawMultiImageFilter.ImageDrawData;
23 | import com.icechn.videorecorder.filter.softaudiofilter.SetVolumeAudioFilter;
24 | import com.icechn.videorecorder.model.MediaConfig;
25 | import com.icechn.videorecorder.model.RecordConfig;
26 | import com.icechn.videorecorder.model.Size;
27 |
28 | import java.util.ArrayList;
29 |
30 |
31 | public class RecordingActivity extends AppCompatActivity implements
32 | TextureView.SurfaceTextureListener, View.OnClickListener, IVideoChange {
33 | public static final String IS_SQUARE = "is_square";
34 | protected RecorderClient mRecorderClient;
35 | protected AspectTextureView mTextureView;
36 | protected Handler mainHander;
37 | protected Button btn_toggle;
38 | protected boolean started;
39 | protected String mSaveVideoPath = null;
40 | protected boolean mIsSquare = false;
41 | RecordConfig recordConfig;
42 |
43 | @Override
44 | protected void onCreate(Bundle savedInstanceState) {
45 | Intent i = getIntent();
46 | mIsSquare = i.getBooleanExtra(IS_SQUARE, false);
47 | mSaveVideoPath = Environment.getExternalStorageDirectory().getPath() + "/live_save_video" + System.currentTimeMillis() + ".mp4";
48 | started = false;
49 | super.onCreate(savedInstanceState);
50 | setContentView(R.layout.activity_streaming);
51 | mTextureView = (AspectTextureView) findViewById(R.id.preview_textureview);
52 | mTextureView.setKeepScreenOn(true);
53 | mTextureView.setSurfaceTextureListener(this);
54 |
55 | btn_toggle = (Button) findViewById(R.id.btn_toggle);
56 | btn_toggle.setOnClickListener(this);
57 |
58 | findViewById(R.id.btn_swap).setOnClickListener(this);
59 | findViewById(R.id.btn_flash).setOnClickListener(this);
60 |
61 | prepareStreamingClient();
62 | onSetFilters();
63 | }
64 |
65 | @Override
66 | protected void onResume() {
67 | super.onResume();
68 | }
69 |
70 | @Override
71 | protected void onPause() {
72 | super.onPause();
73 | }
74 |
75 | @Override
76 | protected void onDestroy() {
77 | if (mainHander != null) {
78 | mainHander.removeCallbacksAndMessages(null);
79 | }
80 | if (started) {
81 | mRecorderClient.stopRecording();
82 | }
83 | if (mRecorderClient != null) {
84 | mRecorderClient.destroy();
85 | }
86 | super.onDestroy();
87 | }
88 |
89 | private void prepareStreamingClient() {
90 | mRecorderClient = new RecorderClient();
91 |
92 | recordConfig = RecordConfig.obtain();
93 | if (mIsSquare) {
94 | recordConfig.setTargetVideoSize(new Size(480, 480));
95 | } else {
96 | recordConfig.setTargetVideoSize(new Size(640, 480));
97 | }
98 | recordConfig.setSquare(true);
99 | recordConfig.setBitRate(750 * 1024);
100 | recordConfig.setVideoFPS(20);
101 | recordConfig.setVideoGOP(1);
102 | recordConfig.setRenderingMode(MediaConfig.Rending_Model_OpenGLES);
103 | //camera
104 | recordConfig.setDefaultCamera(Camera.CameraInfo.CAMERA_FACING_FRONT);
105 | int frontDirection, backDirection;
106 | Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
107 | Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_FRONT, cameraInfo);
108 | frontDirection = cameraInfo.orientation;
109 | Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_BACK, cameraInfo);
110 | backDirection = cameraInfo.orientation;
111 | if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
112 | recordConfig.setFrontCameraDirectionMode((frontDirection == 90 ? MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_270 : MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_90) | MediaConfig.DirectionMode.FLAG_DIRECTION_FLIP_HORIZONTAL);
113 | recordConfig.setBackCameraDirectionMode((backDirection == 90 ? MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_90 : MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_270));
114 | } else {
115 | recordConfig.setBackCameraDirectionMode((backDirection == 90 ? MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0 : MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_180));
116 | recordConfig.setFrontCameraDirectionMode((frontDirection == 90 ? MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_180 : MediaConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0) | MediaConfig.DirectionMode.FLAG_DIRECTION_FLIP_HORIZONTAL);
117 | }
118 | //save video
119 | recordConfig.setSaveVideoPath(mSaveVideoPath);
120 |
121 | if (!mRecorderClient.prepare(this, recordConfig)) {
122 | mRecorderClient = null;
123 | Log.e("RecordingActivity", "prepare,failed!!");
124 | Toast.makeText(this, "StreamingClient prepare failed", Toast.LENGTH_LONG).show();
125 | finish();
126 | return;
127 | }
128 |
129 | //resize textureview
130 | Size s = mRecorderClient.getVideoSize();
131 | mTextureView.setAspectRatio(AspectTextureView.MODE_INSIDE, ((double) s.getWidth()) / s.getHeight());
132 |
133 | mRecorderClient.setVideoChangeListener(this);
134 |
135 | mRecorderClient.setSoftAudioFilter(new SetVolumeAudioFilter());
136 | }
137 |
138 | protected void onSetFilters() {
139 | ArrayList<ImageDrawData> infos = new ArrayList<>();
140 | ImageDrawData data = new ImageDrawData();
141 | data.resId = R.drawable.t;
142 | data.rect = new Rect(100, 100, 238, 151);
143 | infos.add(data);
144 | mRecorderClient.setHardVideoFilter(new DrawMultiImageFilter(this, infos));
145 | }
146 |
147 | @Override
148 | public void onVideoSizeChanged(int width, int height) {
149 | mTextureView.setAspectRatio(AspectTextureView.MODE_INSIDE, ((double) width) / height);
150 | }
151 |
152 | @Override
153 | public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
154 | if (mRecorderClient != null) {
155 | mRecorderClient.startPreview(surface, width, height);
156 | }
157 | }
158 |
159 | @Override
160 | public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
161 | if (mRecorderClient != null) {
162 | mRecorderClient.updatePreview(width, height);
163 | }
164 | }
165 |
166 | @Override
167 | public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
168 | if (mRecorderClient != null) {
169 | mRecorderClient.stopPreview(true);
170 | }
171 | return false;
172 | }
173 |
174 | @Override
175 | public void onSurfaceTextureUpdated(SurfaceTexture surface) {
176 |
177 | }
178 |
179 | @Override
180 | public void onClick(View v) {
181 | switch (v.getId()) {
182 | case R.id.btn_toggle:
183 | if (!started) {
184 | btn_toggle.setText("stop");
185 | mRecorderClient.startRecording();
186 | } else {
187 | btn_toggle.setText("start");
188 | mRecorderClient.stopRecording();
189 | Toast.makeText(RecordingActivity.this, "Video saved to " + mSaveVideoPath, Toast.LENGTH_SHORT).show();
190 | }
191 | started = !started;
192 | break;
193 | case R.id.btn_swap:
194 | mRecorderClient.swapCamera();
195 | findViewById(R.id.btn_flash).setVisibility(mRecorderClient.isFrontCamera() ? View.GONE : View.VISIBLE);
196 | break;
197 | case R.id.btn_flash:
198 | mRecorderClient.toggleFlashLight();
199 | break;
200 | }
201 | }
202 |
203 |
204 | }
205 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xhdpi/nose_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/drawable-xhdpi/nose_0.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xhdpi/t.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/drawable-xhdpi/t.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xhdpi/teeth_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/drawable-xhdpi/teeth_0.png
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
(XML markup not preserved in this dump)
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_streaming.xml:
--------------------------------------------------------------------------------
(XML markup not preserved in this dump)
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_video_record.xml:
--------------------------------------------------------------------------------
(XML markup not preserved in this dump; judging by RecordingActivity above, this layout provides the AspectTextureView preview and the btn_toggle / btn_swap / btn_flash controls)
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 | <color name="colorPrimary">#3F51B5</color>
4 | <color name="colorPrimaryDark">#303F9F</color>
5 | <color name="colorAccent">#FF4081</color>
6 | </resources>
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 8dp
4 | 8dp
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 | <string name="app_name">VideoRecorder</string>
3 | </resources>
4 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
(XML markup not preserved in this dump)
--------------------------------------------------------------------------------
/app/src/test/java/com/icechn/videorecorder/ExampleUnitTest.java:
--------------------------------------------------------------------------------
1 | package com.icechn.videorecorder;
2 |
3 | import org.junit.Test;
4 |
5 | import static org.junit.Assert.*;
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 | * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
11 | */
12 | public class ExampleUnitTest {
13 | @Test
14 | public void addition_isCorrect() throws Exception {
15 | assertEquals(4, 2 + 2);
16 | }
17 | }
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 |
3 | buildscript {
4 | repositories {
5 | jcenter()
6 | }
7 | dependencies {
8 | classpath 'com.android.tools.build:gradle:2.3.3'
9 |
10 | // NOTE: Do not place your application dependencies here; they belong
11 | // in the individual module build.gradle files
12 | }
13 | }
14 |
15 | allprojects {
16 | repositories {
17 | jcenter()
18 | }
19 | }
20 |
21 | task clean(type: Delete) {
22 | delete rootProject.buildDir
23 | }
24 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 |
3 | # IDE (e.g. Android Studio) users:
4 | # Gradle settings configured through the IDE *will override*
5 | # any settings specified in this file.
6 |
7 | # For more details on how to configure your build environment visit
8 | # http://www.gradle.org/docs/current/userguide/build_environment.html
9 |
10 | # Specifies the JVM arguments used for the daemon process.
11 | # The setting is particularly useful for tweaking memory settings.
12 | org.gradle.jvmargs=-Xmx1536m
13 |
14 | # When configured, Gradle will run in incubating parallel mode.
15 | # This option should only be used with decoupled projects. More details, visit
16 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
17 | # org.gradle.parallel=true
18 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ICECHN/VideoRecorderWithOpenGL/680fe55aaf2fdc7c55b4e48bc85dd1a9845d3e89/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Tue Nov 07 10:52:28 CST 2017
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # Attempt to set APP_HOME
46 | # Resolve links: $0 may be a link
47 | PRG="$0"
48 | # Need this for relative symlinks.
49 | while [ -h "$PRG" ] ; do
50 | ls=`ls -ld "$PRG"`
51 | link=`expr "$ls" : '.*-> \(.*\)$'`
52 | if expr "$link" : '/.*' > /dev/null; then
53 | PRG="$link"
54 | else
55 | PRG=`dirname "$PRG"`"/$link"
56 | fi
57 | done
58 | SAVED="`pwd`"
59 | cd "`dirname \"$PRG\"`/" >/dev/null
60 | APP_HOME="`pwd -P`"
61 | cd "$SAVED" >/dev/null
62 |
63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
64 |
65 | # Determine the Java command to use to start the JVM.
66 | if [ -n "$JAVA_HOME" ] ; then
67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
68 | # IBM's JDK on AIX uses strange locations for the executables
69 | JAVACMD="$JAVA_HOME/jre/sh/java"
70 | else
71 | JAVACMD="$JAVA_HOME/bin/java"
72 | fi
73 | if [ ! -x "$JAVACMD" ] ; then
74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
75 |
76 | Please set the JAVA_HOME variable in your environment to match the
77 | location of your Java installation."
78 | fi
79 | else
80 | JAVACMD="java"
81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
82 |
83 | Please set the JAVA_HOME variable in your environment to match the
84 | location of your Java installation."
85 | fi
86 |
87 | # Increase the maximum file descriptors if we can.
88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
89 | MAX_FD_LIMIT=`ulimit -H -n`
90 | if [ $? -eq 0 ] ; then
91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
92 | MAX_FD="$MAX_FD_LIMIT"
93 | fi
94 | ulimit -n $MAX_FD
95 | if [ $? -ne 0 ] ; then
96 | warn "Could not set maximum file descriptor limit: $MAX_FD"
97 | fi
98 | else
99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
100 | fi
101 | fi
102 |
103 | # For Darwin, add options to specify how the application appears in the dock
104 | if $darwin; then
105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
106 | fi
107 |
108 | # For Cygwin, switch paths to Windows format before running java
109 | if $cygwin ; then
110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
112 | JAVACMD=`cygpath --unix "$JAVACMD"`
113 |
114 | # We build the pattern for arguments to be converted via cygpath
115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
116 | SEP=""
117 | for dir in $ROOTDIRSRAW ; do
118 | ROOTDIRS="$ROOTDIRS$SEP$dir"
119 | SEP="|"
120 | done
121 | OURCYGPATTERN="(^($ROOTDIRS))"
122 | # Add a user-defined pattern to the cygpath arguments
123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
125 | fi
126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
127 | i=0
128 | for arg in "$@" ; do
129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
131 |
132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
134 | else
135 | eval `echo args$i`="\"$arg\""
136 | fi
137 | i=$((i+1))
138 | done
139 | case $i in
140 | (0) set -- ;;
141 | (1) set -- "$args0" ;;
142 | (2) set -- "$args0" "$args1" ;;
143 | (3) set -- "$args0" "$args1" "$args2" ;;
144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
150 | esac
151 | fi
152 |
153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
154 | function splitJvmOpts() {
155 | JVM_OPTS=("$@")
156 | }
157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
159 |
160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
161 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windowz variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app'
2 |
--------------------------------------------------------------------------------