├── .gitignore
├── README.md
├── app
├── .gitignore
├── build.gradle
├── proguard-rules.pro
└── src
│ ├── androidTest
│ └── java
│ │ └── com
│ │ └── jscheng
│ │ └── scamera
│ │ └── ExampleInstrumentedTest.java
│ ├── main
│ ├── AndroidManifest.xml
│ ├── java
│ │ └── com
│ │ │ └── jscheng
│ │ │ └── scamera
│ │ │ ├── BaseActivity.java
│ │ │ ├── record
│ │ │ └── VideoEncoder.java
│ │ │ ├── render
│ │ │ ├── BaseRenderDrawer.java
│ │ │ ├── CameraSurfaceRender.java
│ │ │ ├── DisplayRenderDrawer.java
│ │ │ ├── OriginalImageRenderDrawer.java
│ │ │ ├── OriginalRenderDrawer.java
│ │ │ ├── RecordRenderDrawer.java
│ │ │ ├── RenderDrawerGroups.java
│ │ │ └── WaterMarkRenderDrawer.java
│ │ │ ├── util
│ │ │ ├── CameraUtil.java
│ │ │ ├── DimemsionUtil.java
│ │ │ ├── EGLHelper.java
│ │ │ ├── GlesUtil.java
│ │ │ ├── LogUtil.java
│ │ │ ├── PermisstionUtil.java
│ │ │ └── StorageUtil.java
│ │ │ ├── view
│ │ │ ├── CameraFragment.java
│ │ │ ├── CameraSensor.java
│ │ │ └── MainActivity.java
│ │ │ └── widget
│ │ │ ├── CameraFocusView.java
│ │ │ ├── CameraGLSurfaceView.java
│ │ │ ├── CameraProgressButton.java
│ │ │ └── CameraSwitchView.java
│ └── res
│ │ ├── drawable-v24
│ │ └── ic_launcher_foreground.xml
│ │ ├── drawable
│ │ └── ic_launcher_background.xml
│ │ ├── layout
│ │ ├── activity_main.xml
│ │ └── fragment_camera.xml
│ │ ├── mipmap-anydpi-v26
│ │ ├── ic_launcher.xml
│ │ └── ic_launcher_round.xml
│ │ ├── mipmap-hdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_round.png
│ │ ├── mipmap-mdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_round.png
│ │ ├── mipmap-xhdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_round.png
│ │ ├── mipmap-xxhdpi
│ │ ├── camera_close.png
│ │ ├── camera_flash_close.png
│ │ ├── camera_flash_open.png
│ │ ├── camera_ok.png
│ │ ├── camera_switch.png
│ │ ├── ic_launcher.png
│ │ ├── ic_launcher_round.png
│ │ └── watermark.png
│ │ ├── mipmap-xxxhdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_round.png
│ │ └── values
│ │ ├── attrs.xml
│ │ ├── colors.xml
│ │ ├── strings.xml
│ │ └── styles.xml
│ └── test
│ └── java
│ └── com
│ └── jscheng
│ └── scamera
│ └── ExampleUnitTest.java
├── build.gradle
├── gradle.properties
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/libraries
5 | /.idea/modules.xml
6 | /.idea/workspace.xml
7 | .DS_Store
8 | /build
9 | /captures
10 | .externalNativeBuild
11 | /.idea
12 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # FboCamera 结合 OpenGL 和 Camera 应用
2 |
3 | 1. 利用 GLSurfaceView 预览
4 | 2. 离屏渲染 FBO
5 | 3. 增加水印特效
6 | 4. 录制视频,存放在Scamera目录下
7 |
8 | [《OpenGLES mediaCodec 预览录制视频 添加水印》](https://blog.csdn.net/qq_15893929/article/details/82864976)
9 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | android {
4 | compileSdkVersion 28
5 | defaultConfig {
6 | applicationId "com.jscheng.scamera"
7 | minSdkVersion 21
8 | targetSdkVersion 28
9 | versionCode 1
10 | versionName "1.0"
11 | testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
12 | }
13 | buildTypes {
14 | release {
15 | minifyEnabled false
16 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
17 | }
18 | }
19 | }
20 |
21 | dependencies {
22 | implementation fileTree(dir: 'libs', include: ['*.jar'])
23 | implementation 'com.android.support:appcompat-v7:28.0.0-rc01'
24 | implementation 'com.android.support.constraint:constraint-layout:1.1.2'
25 | testImplementation 'junit:junit:4.12'
26 | androidTestImplementation 'com.android.support.test:runner:1.0.2'
27 | androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
28 | }
29 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/app/src/androidTest/java/com/jscheng/scamera/ExampleInstrumentedTest.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera;
2 |
3 | import android.content.Context;
4 | import android.support.test.InstrumentationRegistry;
5 | import android.support.test.runner.AndroidJUnit4;
6 |
7 | import org.junit.Test;
8 | import org.junit.runner.RunWith;
9 |
10 | import static org.junit.Assert.*;
11 |
12 | /**
13 | * Instrumented test, which will execute on an Android device.
14 | *
15 | * @see Testing documentation
16 | */
17 | @RunWith(AndroidJUnit4.class)
18 | public class ExampleInstrumentedTest {
19 | @Test
20 | public void useAppContext() {
21 | // Context of the app under test.
22 | Context appContext = InstrumentationRegistry.getTargetContext();
23 |
24 | assertEquals("com.jscheng.scamera", appContext.getPackageName());
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
17 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/BaseActivity.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera;
2 |
3 | import android.content.DialogInterface;
4 | import android.content.Intent;
5 | import android.content.pm.PackageManager;
6 | import android.net.Uri;
7 | import android.support.annotation.NonNull;
8 | import android.support.v7.app.AlertDialog;
9 | import android.support.v7.app.AppCompatActivity;
10 | import android.widget.Toast;
11 |
/**
 * Created By Chengjunsen on 2018/8/22
 *
 * Common base class for activities in this app. Currently adds no behavior
 * beyond {@link AppCompatActivity}; the imports above (permissions, dialogs,
 * intents) suggest shared helpers were planned here — NOTE(review): either
 * add them or collapse subclasses onto AppCompatActivity directly.
 */
public class BaseActivity extends AppCompatActivity {

}
19 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/record/VideoEncoder.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.record;
2 |
3 | import android.media.MediaCodec;
4 | import android.media.MediaCodecInfo;
5 | import android.media.MediaFormat;
6 | import android.media.MediaMuxer;
7 | import android.util.Log;
8 | import android.view.Surface;
9 |
10 | import java.io.File;
11 | import java.io.IOException;
12 | import java.nio.ByteBuffer;
13 |
14 | import static com.jscheng.scamera.util.LogUtil.TAG;
15 |
16 | /**
17 | * Created By Chengjunsen on 2018/9/20
18 | */
19 | public class VideoEncoder {
20 | private static final int FRAME_RATE = 30;
21 | private static final int IFRAME_INTERVAL = 10;
22 |
23 | private Surface mInputSurface;
24 | private MediaMuxer mMuxer;
25 | private MediaCodec mEncoder;
26 | private MediaCodec.BufferInfo mBufferInfo;
27 | private int mTrackIndex;
28 | private boolean mMuxerStarted;
29 |
30 | public VideoEncoder(int width, int height, File outputFile)
31 | throws IOException {
32 | int bitRate = height * width * 3 * 8 * FRAME_RATE / 256;
33 | mBufferInfo = new MediaCodec.BufferInfo();
34 | MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
35 | format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
36 | MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
37 | format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
38 | format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
39 | format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
40 | Log.d(TAG, "format: " + format);
41 |
42 | mEncoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
43 | mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
44 | mInputSurface = mEncoder.createInputSurface();
45 | mEncoder.start();
46 | mMuxer = new MediaMuxer(outputFile.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
47 |
48 | mTrackIndex = -1;
49 | mMuxerStarted = false;
50 | }
51 |
52 | public Surface getInputSurface() {
53 | return mInputSurface;
54 | }
55 |
56 | public void release() {
57 | Log.d(TAG, "releasing encoder objects");
58 | if (mEncoder != null) {
59 | mEncoder.stop();
60 | mEncoder.release();
61 | mEncoder = null;
62 | }
63 | if (mMuxer != null) {
64 | mMuxer.stop();
65 | mMuxer.release();
66 | mMuxer = null;
67 | }
68 | }
69 |
70 | public void drainEncoder(boolean endOfStream) {
71 | final int TIMEOUT_USEC = 10000;
72 | if (endOfStream) {
73 | Log.d(TAG, "sending EOS to encoder");
74 | mEncoder.signalEndOfInputStream();
75 | }
76 |
77 | while (true) {
78 | int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
79 | if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
80 | Log.d(TAG, "MediaCodec.INFO_TRY_AGAIN_LATER");
81 | // no output available yet
82 | if (!endOfStream) {
83 | break;
84 | } else {
85 | Log.d(TAG, "no output available, spinning to await EOS");
86 | }
87 | } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
88 | Log.d(TAG, "MediaCodec.INFO_OUTPUT_FORMAT_CHANGED");
89 | if (mMuxerStarted) {
90 | throw new RuntimeException("format changed twice");
91 | }
92 | MediaFormat newFormat = mEncoder.getOutputFormat();
93 | Log.d(TAG, "encoder output format changed: " + newFormat);
94 |
95 | mTrackIndex = mMuxer.addTrack(newFormat);
96 | mMuxer.start();
97 | mMuxerStarted = true;
98 | } else if (encoderStatus < 0) {
99 | Log.d(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
100 | } else {
101 | ByteBuffer encodedData = mEncoder.getOutputBuffer(encoderStatus);
102 | if (encodedData == null) {
103 | throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
104 | }
105 | if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
106 | Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
107 | mBufferInfo.size = 0;
108 | }
109 |
110 | if (mBufferInfo.size != 0) {
111 | if (!mMuxerStarted) {
112 | throw new RuntimeException("muxer hasn't started");
113 | }
114 | encodedData.position(mBufferInfo.offset);
115 | encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
116 | mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
117 | Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" + mBufferInfo.presentationTimeUs);
118 | } else {
119 | Log.d(TAG, "drainEncoder mBufferInfo: " + mBufferInfo.size);
120 | }
121 | mEncoder.releaseOutputBuffer(encoderStatus, false);
122 | if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
123 | if (!endOfStream) {
124 | Log.w(TAG, "reached end of stream unexpectedly");
125 | } else {
126 | Log.d(TAG, "end of stream reached");
127 | }
128 | break;
129 | }
130 | }
131 | }
132 | }
133 | }
134 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/render/BaseRenderDrawer.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.render;
2 |
3 | import android.opengl.GLES30;
4 |
5 | import com.jscheng.scamera.util.GlesUtil;
6 |
7 | import java.nio.ByteBuffer;
8 | import java.nio.ByteOrder;
9 | import java.nio.FloatBuffer;
10 |
/**
 * Created By Chengjunsen on 2018/8/27
 *
 * Base class for one render pass. Owns the shared shader program handle and
 * five static vertex-buffer objects (quad vertices plus four sets of texture
 * coordinates) that subclasses bind in their onDraw() implementations.
 */
public abstract class BaseRenderDrawer {
    protected int width;   // viewport width in pixels, set in surfaceChangedSize()

    protected int height;  // viewport height in pixels

    protected int mProgram; // linked GL program created from the subclass shader sources

    // vertex coordinate buffer (full-screen quad)
    private FloatBuffer mVertexBuffer;
    protected int mVertexBufferId;

    // texture coordinate buffer for the front camera
    private FloatBuffer mFrontTextureBuffer;
    protected int mFrontTextureBufferId;

    // texture coordinate buffer for the back camera
    private FloatBuffer mBackTextureBuffer;
    protected int mBackTextureBufferId;

    // texture coordinates used when drawing to the display
    private FloatBuffer mDisplayTextureBuffer;
    protected int mDisplayTextureBufferId;

    // texture coordinates used when drawing into an FBO (vertically flipped vs display)
    private FloatBuffer mFrameTextureBuffer;
    protected int mFrameTextureBufferId;

    protected float vertexData[] = {
            -1f, -1f,// bottom-left
            1f, -1f, // bottom-right
            -1f, 1f, // top-left
            1f, 1f,  // top-right
    };

    protected float frontTextureData[] = {
            1f, 1f, // top-right
            1f, 0f, // bottom-right
            0f, 1f, // top-left
            0f, 0f  // bottom-left
    };

    protected float backTextureData[] = {
            0f, 1f, // top-left
            0f, 0f, // bottom-left
            1f, 1f, // top-right
            1f, 0f  // bottom-right
    };

    protected float displayTextureData[] = {
            0f, 1f,
            1f, 1f,
            0f, 0f,
            1f, 0f,
    };

    protected float frameBufferData[] = {
            0f, 0f,
            1f, 0f,
            0f, 1f,
            1f, 1f
    };

    protected final int CoordsPerVertexCount = 2;

    protected final int VertexCount = vertexData.length / CoordsPerVertexCount;

    protected final int VertexStride = CoordsPerVertexCount * 4; // bytes per vertex (2 floats)

    protected final int CoordsPerTextureCount = 2;

    protected final int TextureStride = CoordsPerTextureCount * 4; // bytes per texture coord pair

    public BaseRenderDrawer() {

    }

    /** Compiles/links the program and uploads the shared VBOs. Call on the GL thread. */
    public void create() {
        mProgram = GlesUtil.createProgram(getVertexSource(), getFragmentSource());
        initVertexBufferObjects();
        onCreated();
    }

    /** Records the new surface size and notifies the subclass. */
    public void surfaceChangedSize(int width, int height) {
        this.width = width;
        this.height = height;
        onChanged(width, height);
    }

    /**
     * Template method for one frame: clear, select program, set viewport, delegate.
     * NOTE(review): timestamp and transformMatrix are unused here — presumably
     * consumed by subclasses that override draw(); confirm against RecordRenderDrawer.
     */
    public void draw(long timestamp, float[] transformMatrix){
        clear();
        useProgram();
        viewPort(0, 0, width, height);
        onDraw();
    }

    protected void clear(){
        GLES30.glClearColor(1.0f, 1.0f, 1.0f, 0.0f);
        GLES30.glClear(GLES30.GL_COLOR_BUFFER_BIT | GLES30.GL_DEPTH_BUFFER_BIT);
    }

    /** Uploads the five static coordinate arrays into GPU buffer objects. */
    protected void initVertexBufferObjects() {
        int[] vbo = new int[5];
        GLES30.glGenBuffers(5, vbo, 0);

        mVertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        mVertexBuffer.position(0);
        mVertexBufferId = vbo[0];
        // ARRAY_BUFFER takes Float*Array data, whereas ELEMENT_ARRAY_BUFFER must use Uint*Array
        GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mVertexBufferId);
        GLES30.glBufferData(GLES30.GL_ARRAY_BUFFER, vertexData.length * 4, mVertexBuffer, GLES30.GL_STATIC_DRAW);


        mBackTextureBuffer = ByteBuffer.allocateDirect(backTextureData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(backTextureData);
        mBackTextureBuffer.position(0);
        mBackTextureBufferId = vbo[1];
        GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mBackTextureBufferId);
        GLES30.glBufferData(GLES30.GL_ARRAY_BUFFER, backTextureData.length * 4, mBackTextureBuffer, GLES30.GL_STATIC_DRAW);

        mFrontTextureBuffer = ByteBuffer.allocateDirect(frontTextureData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(frontTextureData);
        mFrontTextureBuffer.position(0);
        mFrontTextureBufferId = vbo[2];
        GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mFrontTextureBufferId);
        GLES30.glBufferData(GLES30.GL_ARRAY_BUFFER, frontTextureData.length * 4, mFrontTextureBuffer, GLES30.GL_STATIC_DRAW);

        mDisplayTextureBuffer = ByteBuffer.allocateDirect(displayTextureData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(displayTextureData);
        mDisplayTextureBuffer.position(0);
        mDisplayTextureBufferId = vbo[3];
        GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mDisplayTextureBufferId);
        GLES30.glBufferData(GLES30.GL_ARRAY_BUFFER, displayTextureData.length * 4, mDisplayTextureBuffer, GLES30.GL_STATIC_DRAW);

        mFrameTextureBuffer = ByteBuffer.allocateDirect(frameBufferData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(frameBufferData);
        mFrameTextureBuffer.position(0);
        mFrameTextureBufferId = vbo[4];
        GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mFrameTextureBufferId);
        GLES30.glBufferData(GLES30.GL_ARRAY_BUFFER, frameBufferData.length * 4, mFrameTextureBuffer, GLES30.GL_STATIC_DRAW);

        // leave no buffer bound
        GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER,0);
    }

    protected void useProgram(){
        GLES30.glUseProgram(mProgram);
    }

    protected void viewPort(int x, int y, int width, int height) {
        GLES30.glViewport(x, y, width, height);
    }

    /** Supplies the texture this pass reads from. */
    public abstract void setInputTextureId(int textureId);

    /** Returns the texture this pass produced (or passed through). */
    public abstract int getOutputTextureId();

    protected abstract String getVertexSource();

    protected abstract String getFragmentSource();

    /** Called once after the program and VBOs exist. */
    protected abstract void onCreated();

    /** Called whenever the surface size changes. */
    protected abstract void onChanged(int width, int height);

    /** Issues the actual draw calls for this pass. */
    protected abstract void onDraw();

}
189 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/render/CameraSurfaceRender.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.render;
2 |
3 | import android.content.Context;
4 | import android.graphics.SurfaceTexture;
5 | import android.opengl.EGL14;
6 | import android.opengl.GLSurfaceView;
7 | import android.util.Log;
8 |
9 | import com.jscheng.scamera.util.GlesUtil;
10 |
11 | import javax.microedition.khronos.egl.EGLConfig;
12 | import javax.microedition.khronos.opengles.GL10;
13 |
14 | import static com.jscheng.scamera.util.LogUtil.TAG;
15 |
16 | /**
17 | * Created By Chengjunsen on 2018/8/27
18 | */
19 | public class CameraSurfaceRender implements GLSurfaceView.Renderer {
20 |
21 | private CameraSufaceRenderCallback mCallback;
22 | private RenderDrawerGroups mRenderGroups;
23 | private int width, height;
24 | private int mCameraTextureId;
25 | private SurfaceTexture mCameraTexture;
26 | private float[] mTransformMatrix;
27 | private long timestamp;
28 |
29 | public CameraSurfaceRender(Context context) {
30 | this.mRenderGroups = new RenderDrawerGroups(context);
31 | mTransformMatrix = new float[16];
32 | }
33 |
34 | @Override
35 | public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
36 | mCameraTextureId = GlesUtil.createCameraTexture();
37 | mRenderGroups.setInputTexture(mCameraTextureId);
38 | mRenderGroups.create();
39 | initCameraTexture();
40 | if (mCallback != null) {
41 | mCallback.onCreate();
42 | }
43 | }
44 |
45 | public void initCameraTexture() {
46 | mCameraTexture = new SurfaceTexture(mCameraTextureId);
47 | mCameraTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
48 | @Override
49 | public void onFrameAvailable(SurfaceTexture surfaceTexture) {
50 | if (mCallback != null) {
51 | mCallback.onRequestRender();
52 | }
53 | }
54 | });
55 | }
56 |
57 | @Override
58 | public void onSurfaceChanged(GL10 gl10, int width, int height) {
59 | this.width = width;
60 | this.height = height;
61 | mRenderGroups.surfaceChangedSize(width, height);
62 | Log.d(TAG, "currentEGLContext: " + EGL14.eglGetCurrentContext().toString());
63 | if (mCallback != null) {
64 | mCallback.onChanged(width, height);
65 | }
66 | }
67 |
68 | @Override
69 | public void onDrawFrame(GL10 gl10) {
70 | if (mCameraTexture != null) {
71 | mCameraTexture.updateTexImage();
72 | timestamp = mCameraTexture.getTimestamp();
73 | mCameraTexture.getTransformMatrix(mTransformMatrix);
74 | mRenderGroups.draw(timestamp, mTransformMatrix);
75 | }
76 | if (mCallback != null) {
77 | mCallback.onDraw();
78 | }
79 | }
80 |
81 | public SurfaceTexture getCameraSurfaceTexture() {
82 | return mCameraTexture;
83 | }
84 |
85 | public void setCallback(CameraSufaceRenderCallback mCallback) {
86 | this.mCallback = mCallback;
87 | }
88 |
89 | public void releaseSurfaceTexture() {
90 | if (mCameraTexture != null) {
91 | mCameraTexture.release();
92 | mCameraTexture = null;
93 | }
94 | }
95 |
96 | public void resumeSurfaceTexture() {
97 | initCameraTexture();
98 | }
99 |
100 | public void startRecord() {
101 | mRenderGroups.startRecord();
102 | }
103 |
104 | public void stopRecord() {
105 | mRenderGroups.stopRecord();
106 | }
107 |
108 | public interface CameraSufaceRenderCallback {
109 | void onRequestRender();
110 | void onCreate();
111 | void onChanged(int width, int height);
112 | void onDraw();
113 | }
114 | }
115 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/render/DisplayRenderDrawer.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.render;
2 |
3 | import android.opengl.GLES30;
4 |
5 | import com.jscheng.scamera.render.BaseRenderDrawer;
6 |
/**
 * Created By Chengjunsen on 2018/8/27
 *
 * Final render pass: draws the input 2D texture to the currently bound
 * target (the screen) using the display texture coordinates.
 */
public class DisplayRenderDrawer extends BaseRenderDrawer {
    private int av_Position; // attribute: vertex position
    private int af_Position; // attribute: texture coordinate
    private int s_Texture;   // uniform: sampler2D
    private int mTextureId;  // input texture, passed through unchanged

    @Override
    protected void onCreated() {
    }

    // NOTE(review): attribute/uniform locations are looked up here rather than in
    // onCreated(); this works because create() links the program before the first
    // surfaceChangedSize(), but onCreated() would be the more natural home.
    @Override
    protected void onChanged(int width, int height) {
        av_Position = GLES30.glGetAttribLocation(mProgram, "av_Position");
        af_Position = GLES30.glGetAttribLocation(mProgram, "af_Position");
        s_Texture = GLES30.glGetUniformLocation(mProgram, "s_Texture");
    }

    @Override
    protected void onDraw() {
        GLES30.glEnableVertexAttribArray(av_Position);
        GLES30.glEnableVertexAttribArray(af_Position);

        GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mVertexBufferId);
        GLES30.glVertexAttribPointer(av_Position, CoordsPerVertexCount, GLES30.GL_FLOAT, false, 0, 0);
        // GLES30.glVertexAttribPointer(av_Position, CoordsPerVertexCount, GLES30.GL_FLOAT, false, VertexStride, mVertexBuffer);
        // Attribute data comes from the bound GPU buffer, not client RAM, so the last two args are 0.
        GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mDisplayTextureBufferId);
        GLES30.glVertexAttribPointer(af_Position, CoordsPerTextureCount, GLES30.GL_FLOAT, false, 0, 0);
        // GLES30.glVertexAttribPointer(af_Position, CoordsPerTextureCount, GLES30.GL_FLOAT, false, TextureStride, mDisplayTextureBuffer);

        GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, 0);
        GLES30.glActiveTexture(GLES30.GL_TEXTURE0);
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, mTextureId);
        GLES30.glUniform1i(s_Texture, 0);
        // GL_TRIANGLE_STRIP reuses shared vertices of the quad
        GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, VertexCount);
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, 0);
        GLES30.glDisableVertexAttribArray(af_Position);
        GLES30.glDisableVertexAttribArray(av_Position);
    }

    // NOTE(review): unused helper — onDraw() binds the texture inline instead.
    private void bind2DTexture(int textureId, int textureType) {
        GLES30.glActiveTexture(GLES30.GL_TEXTURE0 + textureType);
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, textureId);
        GLES30.glUniform1i(s_Texture, textureType);
    }

    // NOTE(review): unused helper (and typo'd name) — see bind2DTexture above.
    private void unBind2DTexure() {
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, 0);
    }

    @Override
    public void setInputTextureId(int textureId) {
        this.mTextureId = textureId;
    }

    @Override
    public int getOutputTextureId() {
        return mTextureId;
    }

    @Override
    protected String getVertexSource() {
        final String source = "attribute vec4 av_Position; " +
                "attribute vec2 af_Position; " +
                "varying vec2 v_texPo; " +
                "void main() { " +
                "    v_texPo = af_Position; " +
                "    gl_Position = av_Position; " +
                "}";
        return source;
    }

    // NOTE(review): the shader computes a grayscale value ('tc'/'color') that is
    // never used — leftover from the commented-out grayscale output below.
    @Override
    protected String getFragmentSource() {
        final String source = "precision mediump float;\n" +
                "varying vec2 v_texPo;\n" +
                "uniform sampler2D s_Texture;\n" +
                "void main() {\n" +
                "   vec4 tc = texture2D(s_Texture, v_texPo);\n" +
                "   float color = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;\n" +
                "   gl_FragColor = texture2D(s_Texture, v_texPo);\n" +
                //"   gl_FragColor = vec4(color, color, color, 1);\n" +
                "}";
        return source;
    }
}
97 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/render/OriginalImageRenderDrawer.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.render;
2 |
3 | import android.content.Context;
4 | import android.opengl.GLES30;
5 |
6 | import com.jscheng.scamera.R;
7 | import com.jscheng.scamera.util.GlesUtil;
8 | import com.jscheng.scamera.render.BaseRenderDrawer;
9 |
/**
 * Offscreen pass that renders a bitmap texture (the launcher icon) into an
 * FBO-attached texture, which downstream passes read via getOutputTextureId().
 */
public class OriginalImageRenderDrawer extends BaseRenderDrawer {
    private int mInputTextureId;  // texture sampled by this pass
    private int mOutputTextureId; // FBO color attachment produced by this pass
    private int avPosition;       // attribute: vertex position
    private int afPosition;       // attribute: texture coordinate
    private int sTexture;         // uniform: sampler2D
    private Context mContext;     // used to load the bitmap resource
    private int mFrameBuffer;     // FBO this pass renders into

    public OriginalImageRenderDrawer(Context context) {
        this.mContext = context;
    }

    // NOTE(review): the value set here is overwritten in onChanged(), which
    // loads R.mipmap.ic_launcher into mInputTextureId — confirm this is intended.
    @Override
    public void setInputTextureId(int textureId) {
        mInputTextureId = textureId;
    }

    @Override
    public int getOutputTextureId() {
        return mOutputTextureId;
    }

    @Override
    protected void onCreated() {
    }

    // NOTE(review): a new texture/FBO/bitmap texture is created on every size
    // change without deleting the previous ones — potential GL object leak if
    // the surface resizes repeatedly; verify and add glDelete* calls if so.
    @Override
    protected void onChanged(int width, int height) {
        mOutputTextureId = GlesUtil.createFrameTexture(width, height);
        mFrameBuffer = GlesUtil.createFrameBuffer();
        GlesUtil.bindFrameTexture(mFrameBuffer, mOutputTextureId);
        mInputTextureId = GlesUtil.loadBitmapTexture(mContext, R.mipmap.ic_launcher);
        avPosition = GLES30.glGetAttribLocation(mProgram, "av_Position");
        afPosition = GLES30.glGetAttribLocation(mProgram, "af_Position");
        sTexture = GLES30.glGetUniformLocation(mProgram, "sTexture");
    }

    @Override
    protected void onDraw() {
        bindFrameBuffer();

        GLES30.glEnableVertexAttribArray(avPosition);
        GLES30.glEnableVertexAttribArray(afPosition);
        // set the vertex positions
        //GLES30.glVertexAttribPointer(avPosition, CoordsPerVertexCount, GLES30.GL_FLOAT, false, VertexStride, mVertexBuffer);
        // set the texture coordinates
        //GLES30.glVertexAttribPointer(afPosition, CoordsPerTextureCount, GLES30.GL_FLOAT, false, TextureStride, mDisplayTextureBuffer);

        GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mVertexBufferId);
        GLES30.glVertexAttribPointer(avPosition, CoordsPerVertexCount, GLES30.GL_FLOAT, false, 0, 0);
        GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mDisplayTextureBufferId);
        GLES30.glVertexAttribPointer(afPosition, CoordsPerTextureCount, GLES30.GL_FLOAT, false, 0, 0);
        GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, 0);
        GLES30.glActiveTexture(GLES30.GL_TEXTURE0);
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, mInputTextureId);
        GLES30.glUniform1i(sTexture, 0);
        // GL_TRIANGLE_STRIP reuses shared vertices of the quad
        GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, VertexCount);
        GLES30.glDisableVertexAttribArray(avPosition);
        GLES30.glDisableVertexAttribArray(afPosition);

        unBindFrameBuffer();
    }

    /** Redirects rendering into this pass's FBO. */
    public void bindFrameBuffer() {
        GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, mFrameBuffer);
    }

    /** Restores the default framebuffer. */
    public void unBindFrameBuffer() {
        GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, 0);
    }

    @Override
    protected String getVertexSource() {
        final String source =
                "attribute vec4 av_Position; " +
                "attribute vec2 af_Position; " +
                "varying vec2 v_texPo; " +
                "void main() { " +
                "    v_texPo = af_Position; " +
                "    gl_Position = av_Position; " +
                "}";
        return source;
    }

    @Override
    protected String getFragmentSource() {
        final String source =
                "precision mediump float; " +
                "varying vec2 v_texPo; " +
                "uniform sampler2D sTexture; " +
                "void main() { " +
                "   gl_FragColor = texture2D(sTexture, v_texPo); " +
                "} ";
        return source;
    }
}
108 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/render/OriginalRenderDrawer.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.render;
2 |
3 | import android.graphics.SurfaceTexture;
4 | import android.opengl.GLES11Ext;
5 | import android.opengl.GLES30;
6 |
7 | import com.jscheng.scamera.render.BaseRenderDrawer;
8 | import com.jscheng.scamera.util.CameraUtil;
9 | import com.jscheng.scamera.util.GlesUtil;
10 |
11 | /**
12 | * Created By Chengjunsen on 2018/8/27
13 | */
14 | public class OriginalRenderDrawer extends BaseRenderDrawer {
15 | private int av_Position;
16 | private int af_Position;
17 | private int s_Texture;
18 | private int mInputTextureId;
19 | private int mOutputTextureId;
20 |
21 | @Override
22 | protected void onCreated() {
23 | }
24 |
25 | @Override
26 | protected void onChanged(int width, int height) {
27 | mOutputTextureId = GlesUtil.createFrameTexture(width, height);
28 |
29 | av_Position = GLES30.glGetAttribLocation(mProgram, "av_Position");
30 | af_Position = GLES30.glGetAttribLocation(mProgram, "af_Position");
31 | s_Texture = GLES30.glGetUniformLocation(mProgram, "s_Texture");
32 | }
33 |
34 | @Override
35 | protected void onDraw() {
36 | if (mInputTextureId == 0 || mOutputTextureId == 0) {
37 | return;
38 | }
39 | GLES30.glEnableVertexAttribArray(av_Position);
40 | GLES30.glEnableVertexAttribArray(af_Position);
41 | //GLES30.glVertexAttribPointer(av_Position, CoordsPerVertexCount, GLES30.GL_FLOAT, false, VertexStride, mVertexBuffer);
42 | GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mVertexBufferId);
43 | GLES30.glVertexAttribPointer(av_Position, CoordsPerVertexCount, GLES30.GL_FLOAT, false, 0, 0);
44 | if (CameraUtil.isBackCamera()) {
45 | GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mBackTextureBufferId);
46 | } else {
47 | GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mFrontTextureBufferId);
48 | }
49 | GLES30.glVertexAttribPointer(af_Position, CoordsPerTextureCount, GLES30.GL_FLOAT, false, 0, 0);
50 | GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, 0);
51 | bindTexture(mInputTextureId);
52 | GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, VertexCount);
53 | unBindTexure();
54 | GLES30.glDisableVertexAttribArray(av_Position);
55 | GLES30.glDisableVertexAttribArray(af_Position);
56 | }
57 |
58 | private void bindTexture(int textureId) {
59 | GLES30.glActiveTexture(GLES30.GL_TEXTURE0);
60 | GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
61 | GLES30.glUniform1i(s_Texture, 0);
62 | }
63 |
64 | private void unBindTexure() {
65 | GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
66 | }
67 |
68 | @Override
69 | public void setInputTextureId(int textureId) {
70 |     // Camera OES texture fed in by the pipeline owner.
70 |     mInputTextureId = textureId;
71 | }
72 |
73 | @Override
74 | public int getOutputTextureId() {
75 |     // The FBO color texture allocated in onChanged(); 0 before the first
75 |     // surface-size callback.
75 |     return mOutputTextureId;
76 | }
77 |
78 | @Override
79 | protected String getVertexSource() {
80 |     // Pass-through vertex shader: forwards the texture coordinate and
80 |     // emits the position unchanged.
80 |     final String source = "attribute vec4 av_Position; " +
81 |             "attribute vec2 af_Position; " +
82 |             "varying vec2 v_texPo; " +
83 |             "void main() { " +
84 |             "    v_texPo = af_Position; " +
85 |             "    gl_Position = av_Position; " +
86 |             "}";
87 |     return source;
88 | }
89 |
90 | @Override
91 | protected String getFragmentSource() {
92 |     // Samples the camera image via samplerExternalOES; the extension
92 |     // directive is required for external OES textures in ES shaders.
92 |     final String source = "#extension GL_OES_EGL_image_external : require \n" +
93 |             "precision mediump float; " +
94 |             "varying vec2 v_texPo; " +
95 |             "uniform samplerExternalOES s_Texture; " +
96 |             "void main() { " +
97 |             "   gl_FragColor = texture2D(s_Texture, v_texPo); " +
98 |             "} ";
99 |     return source;
100 | }
101 | }
102 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/render/RecordRenderDrawer.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.render;
2 |
3 | import android.content.Context;
4 | import android.graphics.Bitmap;
5 | import android.graphics.BitmapFactory;
6 | import android.opengl.EGL14;
7 | import android.opengl.EGLContext;
8 | import android.opengl.EGLSurface;
9 | import android.opengl.GLES30;
10 | import android.os.Handler;
11 | import android.os.Looper;
12 | import android.os.Message;
13 | import android.util.Log;
14 |
15 | import com.jscheng.scamera.R;
16 | import com.jscheng.scamera.record.VideoEncoder;
17 | import com.jscheng.scamera.util.EGLHelper;
18 | import com.jscheng.scamera.util.GlesUtil;
19 | import com.jscheng.scamera.util.StorageUtil;
20 | import java.io.File;
21 | import java.io.IOException;
22 |
23 | import static com.jscheng.scamera.util.LogUtil.TAG;
24 |
25 | /**
26 | * Created By Chengjunsen on 2018/9/21
27 | */
28 | public class RecordRenderDrawer extends BaseRenderDrawer implements Runnable{
29 | // 绘制的纹理 ID
30 | private int mTextureId;
31 | private VideoEncoder mVideoEncoder;
32 | private String mVideoPath;
33 | private Handler mMsgHandler;
34 | private EGLHelper mEglHelper;
35 | private EGLSurface mEglSurface;
36 | private boolean isRecording;
37 | private EGLContext mEglContext;
38 |
39 | private int av_Position;
40 | private int af_Position;
41 | private int s_Texture;
42 |
43 |
44 | public RecordRenderDrawer(Context context) {
45 | this.mVideoEncoder = null;
46 | this.mEglHelper = null;
47 | this.mTextureId = 0;
48 | this.isRecording = false;
49 | new Thread(this).start();
50 | }
51 |
52 | @Override
53 | public void setInputTextureId(int textureId) {
54 | this.mTextureId = textureId;
55 | Log.d(TAG, "setInputTextureId: " + textureId);
56 | }
57 |
58 | @Override
59 | public int getOutputTextureId() {
60 | return mTextureId;
61 | }
62 |
63 | @Override
64 | public void create() {
65 | mEglContext = EGL14.eglGetCurrentContext();
66 | }
67 |
68 | public void startRecord() {
69 | Log.d(TAG, "startRecord context : " + mEglContext.toString());
70 | Message msg = mMsgHandler.obtainMessage(MsgHandler.MSG_START_RECORD, width, height, mEglContext);
71 | mMsgHandler.sendMessage(msg);
72 | isRecording = true;
73 | }
74 |
75 | public void stopRecord() {
76 | Log.d(TAG, "stopRecord");
77 | isRecording = false;
78 | mMsgHandler.sendMessage(mMsgHandler.obtainMessage(MsgHandler.MSG_STOP_RECORD));
79 | }
80 |
81 | public void quit() {
82 | mMsgHandler.sendMessage(mMsgHandler.obtainMessage(MsgHandler.MSG_QUIT));
83 | }
84 |
85 | @Override
86 | public void surfaceChangedSize(int width, int height) {
87 | this.width = width;
88 | this.height = height;
89 | }
90 |
91 | @Override
92 | public void draw(long timestamp, float[] transformMatrix) {
93 | if (isRecording) {
94 | Log.d(TAG, "draw: ");
95 | Message msg = mMsgHandler.obtainMessage(MsgHandler.MSG_FRAME, timestamp);
96 | mMsgHandler.sendMessage(msg);
97 | }
98 | }
99 |
100 | @Override
101 | public void run() {
102 | Looper.prepare();
103 | mMsgHandler = new MsgHandler();
104 | Looper.loop();
105 | }
106 |
107 | private class MsgHandler extends Handler {
108 | public static final int MSG_START_RECORD = 1;
109 | public static final int MSG_STOP_RECORD = 2;
110 | public static final int MSG_UPDATE_CONTEXT = 3;
111 | public static final int MSG_UPDATE_SIZE = 4;
112 | public static final int MSG_FRAME = 5;
113 | public static final int MSG_QUIT = 6;
114 |
115 | public MsgHandler() {
116 |
117 | }
118 |
119 | @Override
120 | public void handleMessage(Message msg) {
121 | switch (msg.what) {
122 | case MSG_START_RECORD:
123 | prepareVideoEncoder((EGLContext) msg.obj, msg.arg1, msg.arg2);
124 | break;
125 | case MSG_STOP_RECORD:
126 | stopVideoEncoder();
127 | break;
128 | case MSG_UPDATE_CONTEXT:
129 | updateEglContext((EGLContext) msg.obj);
130 | break;
131 | case MSG_UPDATE_SIZE:
132 | updateChangedSize(msg.arg1, msg.arg2);
133 | break;
134 | case MSG_FRAME:
135 | drawFrame((long)msg.obj);
136 | break;
137 | case MSG_QUIT:
138 | quitLooper();
139 | break;
140 | default:
141 | break;
142 | }
143 | }
144 | }
145 |
146 | private void prepareVideoEncoder(EGLContext context, int width, int height) {
147 | try {
148 | mEglHelper = new EGLHelper();
149 | mEglHelper.createGL(context);
150 | mVideoPath = StorageUtil.getVedioPath(true) + "glvideo.mp4";
151 | mVideoEncoder = new VideoEncoder(width, height, new File(mVideoPath));
152 | mEglSurface = mEglHelper.createWindowSurface(mVideoEncoder.getInputSurface());
153 | boolean error = mEglHelper.makeCurrent(mEglSurface);
154 | if (!error) {
155 | Log.e(TAG, "prepareVideoEncoder: make current error");
156 | }
157 | onCreated();
158 | } catch (IOException e) {
159 | e.printStackTrace();
160 | }
161 | }
162 |
163 | private void stopVideoEncoder() {
164 | mVideoEncoder.drainEncoder(true);
165 | if (mEglHelper != null) {
166 | mEglHelper.destroySurface(mEglSurface);
167 | mEglHelper.destroyGL();
168 | mEglSurface = EGL14.EGL_NO_SURFACE;
169 | mVideoEncoder.release();
170 | mEglHelper = null;
171 | mVideoEncoder = null;
172 | }
173 | }
174 |
175 | private void updateEglContext(EGLContext context) {
176 | mEglSurface = EGL14.EGL_NO_SURFACE;
177 | mEglHelper.destroyGL();
178 | mEglHelper.createGL(context);
179 | mEglSurface = mEglHelper.createWindowSurface(mVideoEncoder.getInputSurface());
180 | boolean error = mEglHelper.makeCurrent(mEglSurface);
181 | if (!error) {
182 | Log.e(TAG, "prepareVideoEncoder: make current error");
183 | }
184 | }
185 |
186 | private void drawFrame(long timeStamp) {
187 | Log.d(TAG, "drawFrame: " + timeStamp );
188 | mEglHelper.makeCurrent(mEglSurface);
189 | mVideoEncoder.drainEncoder(false);
190 | onDraw();
191 | mEglHelper.setPresentationTime(mEglSurface, timeStamp);
192 | mEglHelper.swapBuffers(mEglSurface);
193 | }
194 |
195 | private void updateChangedSize(int width, int height) {
196 | onChanged(width, height);
197 | }
198 |
199 | private void quitLooper() {
200 | Looper.myLooper().quit();
201 | }
202 |
203 | @Override
204 | protected void onCreated() {
205 | mProgram = GlesUtil.createProgram(getVertexSource(), getFragmentSource());
206 | initVertexBufferObjects();
207 | av_Position = GLES30.glGetAttribLocation(mProgram, "av_Position");
208 | af_Position = GLES30.glGetAttribLocation(mProgram, "af_Position");
209 | s_Texture = GLES30.glGetUniformLocation(mProgram, "s_Texture");
210 | Log.d(TAG, "onCreated: av_Position " + av_Position);
211 | Log.d(TAG, "onCreated: af_Position " + af_Position);
212 | Log.d(TAG, "onCreated: s_Texture " + s_Texture);
213 | Log.e(TAG, "onCreated: error " + GLES30.glGetError());
214 | }
215 |
216 | @Override
217 | protected void onChanged(int width, int height) {
218 |
219 | }
220 |
221 | @Override
222 | protected void onDraw() {
223 | clear();
224 | useProgram();
225 | viewPort(0, 0, width, height);
226 |
227 | GLES30.glEnableVertexAttribArray(av_Position);
228 | GLES30.glEnableVertexAttribArray(af_Position);
229 | // GLES30.glVertexAttribPointer(av_Position, CoordsPerVertexCount, GLES30.GL_FLOAT, false, VertexStride, mVertexBuffer);
230 | // GLES30.glVertexAttribPointer(af_Position, CoordsPerTextureCount, GLES30.GL_FLOAT, false, TextureStride, mDisplayTextureBuffer);
231 | GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mVertexBufferId);
232 | GLES30.glVertexAttribPointer(av_Position, CoordsPerVertexCount, GLES30.GL_FLOAT, false, 0, 0);
233 | GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mDisplayTextureBufferId);
234 | GLES30.glVertexAttribPointer(af_Position, CoordsPerTextureCount, GLES30.GL_FLOAT, false, 0, 0);
235 | GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, 0);
236 | GLES30.glActiveTexture(GLES30.GL_TEXTURE0);
237 | GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, mTextureId);
238 | GLES30.glUniform1i(s_Texture, 0);
239 | // 绘制 GLES30.GL_TRIANGLE_STRIP:复用坐标
240 | GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, VertexCount);
241 | GLES30.glDisableVertexAttribArray(av_Position);
242 | GLES30.glDisableVertexAttribArray(af_Position);
243 | GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, 0);
244 | }
245 |
246 | @Override
247 | protected String getVertexSource() {
248 | final String source = "attribute vec4 av_Position; " +
249 | "attribute vec2 af_Position; " +
250 | "varying vec2 v_texPo; " +
251 | "void main() { " +
252 | " v_texPo = af_Position; " +
253 | " gl_Position = av_Position; " +
254 | "}";
255 | return source;
256 | }
257 |
258 | @Override
259 | protected String getFragmentSource() {
260 | final String source = "precision mediump float;\n" +
261 | "varying vec2 v_texPo;\n" +
262 | "uniform sampler2D s_Texture;\n" +
263 | "void main() {\n" +
264 | " vec4 tc = texture2D(s_Texture, v_texPo);\n" +
265 | " gl_FragColor = texture2D(s_Texture, v_texPo);\n" +
266 | "}";
267 | return source;
268 | }
269 | }
270 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/render/RenderDrawerGroups.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.render;
2 |
3 | import android.content.Context;
4 | import android.opengl.EGLContext;
5 | import android.opengl.GLES30;
6 | import android.util.Log;
7 |
8 | import com.jscheng.scamera.util.GlesUtil;
9 |
10 | import static com.jscheng.scamera.util.LogUtil.TAG;
11 |
12 | /**
13 | * Created By Chengjunsen on 2018/8/31
14 | * 统一管理所有的RenderDrawer 和 FBO
15 | */
16 | public class RenderDrawerGroups {
17 | private int mInputTexture;
18 | private int mFrameBuffer;
19 | private OriginalRenderDrawer mOriginalDrawer;
20 | private WaterMarkRenderDrawer mWaterMarkDrawer;
21 | private DisplayRenderDrawer mDisplayDrawer;
22 | private RecordRenderDrawer mRecordDrawer;
23 |
24 | public RenderDrawerGroups(Context context) {
25 | this.mOriginalDrawer = new OriginalRenderDrawer();
26 | this.mWaterMarkDrawer = new WaterMarkRenderDrawer(context);
27 | this.mDisplayDrawer = new DisplayRenderDrawer();
28 | this.mRecordDrawer = new RecordRenderDrawer(context);
29 | this.mFrameBuffer = 0;
30 | this.mInputTexture = 0;
31 | }
32 |
33 | public void setInputTexture(int texture) {
34 | this.mInputTexture = texture;
35 | }
36 |
37 | public void bindFrameBuffer(int textureId) {
38 | GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, mFrameBuffer);
39 | GLES30.glFramebufferTexture2D(GLES30.GL_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0, GLES30.GL_TEXTURE_2D, textureId, 0);
40 | }
41 |
42 | public void unBindFrameBuffer() {
43 | GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, 0);
44 | }
45 |
46 | public void deleteFrameBuffer() {
47 | GLES30.glDeleteFramebuffers(1, new int[]{mFrameBuffer}, 0);
48 | GLES30.glDeleteTextures(1, new int[]{mInputTexture}, 0);
49 | }
50 |
51 | public void create() {
52 | this.mOriginalDrawer.create();
53 | this.mWaterMarkDrawer.create();
54 | this.mDisplayDrawer.create();
55 | this.mRecordDrawer.create();
56 | }
57 |
58 | public void surfaceChangedSize(int width, int height) {
59 | mFrameBuffer = GlesUtil.createFrameBuffer();
60 | mOriginalDrawer.surfaceChangedSize(width, height);
61 | mWaterMarkDrawer.surfaceChangedSize(width, height);
62 | mDisplayDrawer.surfaceChangedSize(width, height);
63 | mRecordDrawer.surfaceChangedSize(width, height);
64 |
65 | this.mOriginalDrawer.setInputTextureId(mInputTexture);
66 | int textureId = this.mOriginalDrawer.getOutputTextureId();
67 | mWaterMarkDrawer.setInputTextureId(textureId);
68 | mDisplayDrawer.setInputTextureId(textureId);
69 | mRecordDrawer.setInputTextureId(textureId);
70 | }
71 |
72 | public void drawRender(BaseRenderDrawer drawer, boolean useFrameBuffer, long timestamp, float[] transformMatrix) {
73 | if (useFrameBuffer) {
74 | bindFrameBuffer(drawer.getOutputTextureId());
75 | }
76 | drawer.draw(timestamp, transformMatrix);
77 | if (useFrameBuffer) {
78 | unBindFrameBuffer();
79 | }
80 | }
81 |
82 | public void draw(long timestamp, float[] transformMatrix) {
83 | if (mInputTexture == 0 || mFrameBuffer == 0) {
84 | Log.e(TAG, "draw: mInputTexture or mFramebuffer or list is zero");
85 | return;
86 | }
87 | drawRender(mOriginalDrawer, true, timestamp, transformMatrix);
88 | // 绘制顺序会控制着 水印绘制哪一层
89 | //drawRender(mWaterMarkDrawer, true, timestamp, transformMatrix);
90 | drawRender(mDisplayDrawer, false, timestamp, transformMatrix);
91 | drawRender(mWaterMarkDrawer, true, timestamp, transformMatrix);
92 | drawRender(mRecordDrawer, false, timestamp, transformMatrix);
93 | }
94 |
95 | public void startRecord() {
96 | mRecordDrawer.startRecord();
97 | }
98 |
99 | public void stopRecord() {
100 | mRecordDrawer.stopRecord();
101 | }
102 | }
103 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/render/WaterMarkRenderDrawer.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.render;
2 |
3 | import android.content.Context;
4 | import android.graphics.Bitmap;
5 | import android.graphics.BitmapFactory;
6 | import android.opengl.GLES30;
7 |
8 | import com.jscheng.scamera.R;
9 | import com.jscheng.scamera.render.BaseRenderDrawer;
10 | import com.jscheng.scamera.util.GlesUtil;
11 |
12 | /**
13 | * Created By Chengjunsen on 2018/8/29
14 | */
15 | public class WaterMarkRenderDrawer extends BaseRenderDrawer{
16 | private int mMarkTextureId;
17 | private int mInputTextureId;
18 | private Bitmap mBitmap;
19 | private int avPosition;
20 | private int afPosition;
21 | private int sTexture;
22 |
23 | public WaterMarkRenderDrawer(Context context) {
24 | mBitmap = BitmapFactory.decodeResource(context.getResources(), R.mipmap.watermark);
25 | }
26 | @Override
27 | public void setInputTextureId(int textureId) {
28 | this.mInputTextureId = textureId;
29 | }
30 |
31 | @Override
32 | public int getOutputTextureId() {
33 | return mInputTextureId;
34 | }
35 |
36 | @Override
37 | protected void onCreated() {
38 |
39 | }
40 |
41 | @Override
42 | protected void onChanged(int width, int height) {
43 | mMarkTextureId = GlesUtil.loadBitmapTexture(mBitmap);
44 | avPosition = GLES30.glGetAttribLocation(mProgram, "av_Position");
45 | afPosition = GLES30.glGetAttribLocation(mProgram, "af_Position");
46 | sTexture = GLES30.glGetUniformLocation(mProgram, "sTexture");
47 | }
48 |
49 | @Override
50 | public void draw(long timestamp, float[] transformMatrix) {
51 | useProgram();
52 | //clear();
53 | viewPort(40, 75, mBitmap.getWidth() * 2, mBitmap.getHeight() * 2);
54 | GLES30.glDisable(GLES30.GL_DEPTH_TEST);
55 | GLES30.glEnable(GLES30.GL_BLEND);
56 | GLES30.glBlendFunc(GLES30.GL_SRC_COLOR, GLES30.GL_DST_ALPHA);
57 | onDraw();
58 | GLES30.glDisable(GLES30.GL_BLEND);
59 | }
60 |
61 | @Override
62 | protected void onDraw() {
63 | GLES30.glEnableVertexAttribArray(avPosition);
64 | GLES30.glEnableVertexAttribArray(afPosition);
65 | //设置顶点位置值
66 | //GLES30.glVertexAttribPointer(avPosition, CoordsPerVertexCount, GLES30.GL_FLOAT, false, VertexStride, mVertexBuffer);
67 | //设置纹理位置值
68 | //GLES30.glVertexAttribPointer(afPosition, CoordsPerTextureCount, GLES30.GL_FLOAT, false, TextureStride, mFrameTextureBuffer);
69 | GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mVertexBufferId);
70 | GLES30.glVertexAttribPointer(avPosition, CoordsPerVertexCount, GLES30.GL_FLOAT, false, 0, 0);
71 | GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, mFrameTextureBufferId);
72 | GLES30.glVertexAttribPointer(afPosition, CoordsPerTextureCount, GLES30.GL_FLOAT, false, 0, 0);
73 | GLES30.glBindBuffer(GLES30.GL_ARRAY_BUFFER, 0);
74 | GLES30.glActiveTexture(GLES30.GL_TEXTURE0);
75 | GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, mMarkTextureId);
76 | GLES30.glUniform1i(sTexture, 0);
77 | //绘制 GLES30.GL_TRIANGLE_STRIP:复用坐标
78 | GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, VertexCount);
79 | GLES30.glDisableVertexAttribArray(avPosition);
80 | GLES30.glDisableVertexAttribArray(afPosition);
81 | }
82 |
83 | @Override
84 | protected String getVertexSource() {
85 | final String source =
86 | "attribute vec4 av_Position; " +
87 | "attribute vec2 af_Position; " +
88 | "varying vec2 v_texPo; " +
89 | "void main() { " +
90 | " v_texPo = af_Position; " +
91 | " gl_Position = av_Position; " +
92 | "}";
93 | return source;
94 | }
95 |
96 | @Override
97 | protected String getFragmentSource() {
98 | final String source =
99 | "precision mediump float; " +
100 | "varying vec2 v_texPo; " +
101 | "uniform sampler2D sTexture; " +
102 | "void main() { " +
103 | " gl_FragColor = texture2D(sTexture, v_texPo); " +
104 | "} ";
105 | return source;
106 | }
107 | }
108 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/util/CameraUtil.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.util;
2 |
3 |
4 | import android.app.Activity;
5 | import android.content.Context;
6 | import android.content.pm.PackageManager;
7 | import android.graphics.ImageFormat;
8 | import android.graphics.Point;
9 | import android.graphics.Rect;
10 | import android.graphics.SurfaceTexture;
11 | import android.hardware.Camera;
12 | import android.os.Build;
13 | import android.util.Log;
14 | import android.util.Size;
15 | import android.view.Surface;
16 | import android.view.SurfaceHolder;
17 |
18 | import java.io.IOException;
19 | import java.util.ArrayList;
20 | import java.util.Collections;
21 | import java.util.Comparator;
22 | import java.util.Iterator;
23 | import java.util.List;
24 |
25 | import static com.jscheng.scamera.util.LogUtil.TAG;
26 |
27 | /**
28 | * Created By Chengjunsen on 2018/8/23
29 | */
30 | public class CameraUtil {
31 | private static Camera mCamera = null;
32 | private static int mCameraID = Camera.CameraInfo.CAMERA_FACING_BACK;
33 |
34 | /**
35 | * 检查camera硬件
36 | * @param context
37 | * @return
38 | */
39 | private boolean checkCameraHardware(Context context) {
40 | if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)){
41 | return true;
42 | } else {
43 | return false;
44 | }
45 | }
46 |
47 | public static void openCamera() {
48 | mCamera = Camera.open(mCameraID);
49 | if (mCamera == null) {
50 | throw new RuntimeException("Unable to open camera");
51 | }
52 | }
53 |
54 | public static Camera getCamera() {
55 | return mCamera;
56 | }
57 |
58 | public static void releaseCamera() {
59 | if (mCamera != null) {
60 | mCamera.stopPreview();
61 | mCamera.release();
62 | mCamera = null;
63 | }
64 | }
65 |
66 | public static void switchCameraId() {
67 | mCameraID = isBackCamera() ? Camera.CameraInfo.CAMERA_FACING_FRONT : Camera.CameraInfo.CAMERA_FACING_BACK;
68 | }
69 |
70 | public static boolean isBackCamera() {
71 | return mCameraID == Camera.CameraInfo.CAMERA_FACING_BACK;
72 | }
73 |
74 | public static void setDisplay(SurfaceTexture surfaceTexture) {
75 | try {
76 | if (mCamera != null) {
77 | mCamera.setPreviewTexture(surfaceTexture);
78 | }
79 | } catch (IOException e) {
80 | e.printStackTrace();
81 | }
82 | }
83 |
84 | public static void startPreview(Activity activity, int width, int height) {
85 | if (mCamera != null) {
86 | int mOrientation = getCameraPreviewOrientation(activity, mCameraID);
87 | mCamera.setDisplayOrientation(mOrientation);
88 |
89 | Camera.Parameters parameters = mCamera.getParameters();
90 | Camera.Size bestPreviewSize = getOptimalSize(parameters.getSupportedPreviewSizes(), width, height);
91 | parameters.setPreviewSize(bestPreviewSize.width, bestPreviewSize.height);
92 | Camera.Size bestPictureSize = getOptimalSize(parameters.getSupportedPictureSizes(), width, height);
93 | parameters.setPictureSize(bestPictureSize.width, bestPictureSize.height);
94 | parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
95 | mCamera.setParameters(parameters);
96 | mCamera.startPreview();
97 | Log.e(TAG, "camera startPreview: (" + width + " x " + height +")");
98 | }
99 | }
100 |
101 | /**
102 | * 获取最合适的尺寸
103 | * @param supportList
104 | * @param width
105 | * @param height
106 | * @return
107 | */
108 | private static Camera.Size getOptimalSize(List supportList, int width, int height) {
109 | // camera的宽度是大于高度的,这里要保证expectWidth > expectHeight
110 | int expectWidth = Math.max(width, height);
111 | int expectHeight = Math.min(width, height);
112 | // 根据宽度进行排序
113 | Collections.sort(supportList, new Comparator() {
114 | @Override
115 | public int compare(Camera.Size pre, Camera.Size after) {
116 | if (pre.width > after.width) {
117 | return 1;
118 | } else if (pre.width < after.width) {
119 | return -1;
120 | }
121 | return 0;
122 | }
123 | });
124 |
125 | Camera.Size result = supportList.get(0);
126 | boolean widthOrHeight = false; // 判断存在宽或高相等的Size
127 | // 辗转计算宽高最接近的值
128 | for (Camera.Size size: supportList) {
129 | // 如果宽高相等,则直接返回
130 | if (size.width == expectWidth && size.height == expectHeight) {
131 | result = size;
132 | break;
133 | }
134 | // 仅仅是宽度相等,计算高度最接近的size
135 | if (size.width == expectWidth) {
136 | widthOrHeight = true;
137 | if (Math.abs(result.height - expectHeight)
138 | > Math.abs(size.height - expectHeight)) {
139 | result = size;
140 | }
141 | }
142 | // 高度相等,则计算宽度最接近的Size
143 | else if (size.height == expectHeight) {
144 | widthOrHeight = true;
145 | if (Math.abs(result.width - expectWidth)
146 | > Math.abs(size.width - expectWidth)) {
147 | result = size;
148 | }
149 | }
150 | // 如果之前的查找不存在宽或高相等的情况,则计算宽度和高度都最接近的期望值的Size
151 | else if (!widthOrHeight) {
152 | if (Math.abs(result.width - expectWidth)
153 | > Math.abs(size.width - expectWidth)
154 | && Math.abs(result.height - expectHeight)
155 | > Math.abs(size.height - expectHeight)) {
156 | result = size;
157 | }
158 | }
159 | }
160 | return result;
161 | }
162 |
163 | public static int getCameraPreviewOrientation(Activity activity, int cameraId) {
164 | if (mCamera == null) {
165 | throw new RuntimeException("mCamera is null");
166 | }
167 | Camera.CameraInfo info = new Camera.CameraInfo();
168 | Camera.getCameraInfo(cameraId, info);
169 | int result;
170 | int degrees = getRotation(activity);
171 | //前置
172 | if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
173 | result = (info.orientation + degrees) % 360;
174 | result = (360 - result) % 360;
175 | }
176 | //后置
177 | else {
178 | result = (info.orientation - degrees + 360) % 360;
179 | }
180 | return result;
181 | }
182 |
183 | /**
184 | * 对焦
185 | * @param focusPoint 焦点位置
186 | * @param screenSize 屏幕尺寸
187 | * @param callback 对焦成功或失败的callback
188 | * @return
189 | */
190 | public static boolean newCameraFocus(Point focusPoint, Size screenSize, Camera.AutoFocusCallback callback) {
191 | if (mCamera == null) {
192 | throw new RuntimeException("mCamera is null");
193 | }
194 | Point cameraFoucusPoint = convertToCameraPoint(screenSize, focusPoint);
195 | Rect cameraFoucusRect = convertToCameraRect(cameraFoucusPoint, 100);
196 | Camera.Parameters parameters = mCamera.getParameters();
197 | if (Build.VERSION.SDK_INT > 14) {
198 | if (parameters.getMaxNumFocusAreas() <= 0) {
199 | return focus(callback);
200 | }
201 | clearCameraFocus();
202 | List focusAreas = new ArrayList();
203 | // 100是权重
204 | focusAreas.add(new Camera.Area(cameraFoucusRect, 100));
205 | parameters.setFocusAreas(focusAreas);
206 | // 设置感光区域
207 | parameters.setMeteringAreas(focusAreas);
208 | try {
209 | mCamera.setParameters(parameters);
210 | } catch (Exception e) {
211 | e.printStackTrace();
212 | return false;
213 | }
214 | }
215 | return focus(callback);
216 | }
217 |
218 | private static boolean focus(Camera.AutoFocusCallback callback) {
219 | if (mCamera == null) {
220 | return false;
221 | }
222 | mCamera.cancelAutoFocus();
223 | mCamera.autoFocus(callback);
224 | return true;
225 | }
226 |
227 | /**
228 | * 清除焦点
229 | */
230 | public static void clearCameraFocus() {
231 | if (mCamera == null) {
232 | throw new RuntimeException("mCamera is null");
233 | }
234 | mCamera.cancelAutoFocus();
235 | Camera.Parameters parameters = mCamera.getParameters();
236 | parameters.setFocusAreas(null);
237 | parameters.setMeteringAreas(null);
238 | try {
239 | mCamera.setParameters(parameters);
240 | } catch (Exception e) {
241 | e.printStackTrace();
242 | }
243 | }
244 |
245 | /**
246 | * 将屏幕坐标转换成camera坐标
247 | * @param screenSize
248 | * @param focusPoint
249 | * @return cameraPoint
250 | */
251 | private static Point convertToCameraPoint(Size screenSize, Point focusPoint){
252 | int newX = focusPoint.y * 2000/screenSize.getHeight() - 1000;
253 | int newY = -focusPoint.x * 2000/screenSize.getWidth() + 1000;
254 | return new Point(newX, newY);
255 | }
256 |
257 | private static Rect convertToCameraRect(Point centerPoint, int radius) {
258 | int left = limit(centerPoint.x - radius, 1000, -1000);
259 | int right = limit(centerPoint.x + radius, 1000, -1000);
260 | int top = limit(centerPoint.y - radius, 1000, -1000);
261 | int bottom = limit(centerPoint.y + radius, 1000, -1000);
262 | return new Rect(left, top, right, bottom);
263 | }
264 |
265 | private static int limit(int s, int max, int min) {
266 | if (s > max) { return max; }
267 | if (s < min) { return min; }
268 | return s;
269 | }
270 |
271 | public static int getRotation(Activity activity) {
272 | int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
273 | int degrees = 0;
274 | switch (rotation) {
275 | case Surface.ROTATION_0:
276 | degrees = 0;
277 | break;
278 | case Surface.ROTATION_90:
279 | degrees = 90;
280 | break;
281 | case Surface.ROTATION_180:
282 | degrees = 180;
283 | break;
284 | case Surface.ROTATION_270:
285 | degrees = 270;
286 | break;
287 | }
288 | return degrees;
289 | }
290 | }
291 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/util/DimemsionUtil.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.util;
2 |
3 | import android.content.Context;
4 |
5 | /**
6 | * Created By Chengjunsen on 2018/8/24
7 | */
8 | public class DimemsionUtil {
9 | public static int dp2px(Context context, float dpValue){
10 | float scale=context.getResources().getDisplayMetrics().density;
11 | return (int)(dpValue*scale + 0.5f);
12 | }
13 |
14 | public static int px2dp(Context context,float pxValue){
15 | float scale=context.getResources().getDisplayMetrics().density;
16 | return (int)(pxValue/scale + 0.5f);
17 | }
18 |
19 | public static int sp2px(Context context,float spValue){
20 | float fontScale=context.getResources().getDisplayMetrics().scaledDensity;
21 | return (int) (spValue*fontScale + 0.5f);
22 | }
23 |
24 | public static int px2sp(Context context,float pxValue){
25 | float fontScale=context.getResources().getDisplayMetrics().scaledDensity;
26 | return (int) (pxValue/fontScale + 0.5f);
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/util/EGLHelper.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.util;
2 |
3 | import android.content.Context;
4 | import android.opengl.EGL14;
5 | import android.opengl.EGLConfig;
6 | import android.opengl.EGLContext;
7 | import android.opengl.EGLDisplay;
8 | import android.opengl.EGLExt;
9 | import android.opengl.EGLSurface;
10 | import android.util.Log;
11 |
12 | import javax.microedition.khronos.egl.EGL10;
13 |
14 | import static com.jscheng.scamera.util.LogUtil.TAG;
15 |
16 | /**
17 | * Created By Chengjunsen on 2018/9/20
18 | */
19 | public class EGLHelper {
20 | private EGLDisplay mEglDisplay = EGL14.EGL_NO_DISPLAY;
21 | private EGLConfig mEglConfig;
22 | private EGLContext mEglContext = EGL14.EGL_NO_CONTEXT;
23 |
24 | /**
25 | * 创建openGL环境
26 | */
27 | public void createGL() {
28 | createGL(EGL14.EGL_NO_CONTEXT);
29 | }
30 |
31 | public void createGL(EGLContext mEglContext) {
32 | // 设置显示设备
33 | setDisplay(EGL14.EGL_DEFAULT_DISPLAY);
34 | // 设置属性
35 | setConfig();
36 | // 创建上下文
37 | createContext(mEglContext);
38 | }
39 |
40 | /**
41 | * 设置显示设备
42 | */
43 | public void setDisplay(int key) {
44 | // 获取显示默认设备
45 | mEglDisplay = EGL14.eglGetDisplay(key);
46 | // 初始化
47 | int version[] = new int[2];
48 | if (!EGL14.eglInitialize(mEglDisplay, version, 0, version, 1)) {
49 | throw new RuntimeException("EGL error" + EGL14.eglGetError());
50 | }
51 | Log.d(TAG, EGL14.eglQueryString(mEglDisplay, EGL14.EGL_VENDOR));
52 | Log.d(TAG, EGL14.eglQueryString(mEglDisplay, EGL14.EGL_VERSION));
53 | Log.d(TAG, EGL14.eglQueryString(mEglDisplay, EGL14.EGL_EXTENSIONS));
54 | }
55 |
56 | public void setConfig() {
57 | int configAttribs[] = {
58 | EGL10.EGL_SURFACE_TYPE, EGL10.EGL_WINDOW_BIT, // 渲染类型
59 | EGL10.EGL_RED_SIZE, 8, // 指定 RGB 中的 R 大小(bits)
60 | EGL10.EGL_GREEN_SIZE, 8, // 指定 G 大小
61 | EGL10.EGL_BLUE_SIZE, 8, // 指定 B 大小
62 | EGL10.EGL_ALPHA_SIZE, 8, // 指定 Alpha 大小
63 | EGL10.EGL_DEPTH_SIZE, 8, // 指定深度(Z Buffer) 大小
64 | EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT, // 指定渲染 api 类别,
65 | EGL10.EGL_NONE
66 | };
67 | setConfig(configAttribs);
68 | }
69 |
70 | public void setConfig(int configAttribs[]) {
71 | int numConfigs[] = new int[1];
72 | EGLConfig configs[] = new EGLConfig[1];
73 | if (!EGL14.eglChooseConfig(mEglDisplay, configAttribs, 0, configs, 0, configs.length, numConfigs, 0)) {
74 | throw new RuntimeException("EGL error " + EGL14.eglGetError());
75 | }
76 | mEglConfig = configs[0];
77 | }
78 |
79 | public void createContext(EGLContext context) {
80 | // 创建openGL上下文
81 | int contextAttribs[] = {
82 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 3,
83 | EGL14.EGL_NONE
84 | };
85 | mEglContext = EGL14.eglCreateContext(mEglDisplay, mEglConfig, context, contextAttribs, 0);
86 | if (mEglContext == EGL14.EGL_NO_CONTEXT) {
87 | throw new RuntimeException("EGL error " + EGL14.eglGetError());
88 | }
89 | }
90 |
91 | public void destroyGL() {
92 | EGL14.eglDestroyContext(mEglDisplay, mEglContext);
93 | mEglContext = EGL14.EGL_NO_CONTEXT;
94 | mEglDisplay = EGL14.EGL_NO_DISPLAY;
95 | }
96 |
97 | public EGLSurface createWindowSurface(EGLConfig config, Object surface) {
98 | EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEglDisplay, config, surface, new int[]{EGL14.EGL_NONE}, 0);
99 | if (eglSurface == EGL14.EGL_NO_SURFACE) {
100 | Log.d(TAG, "createWindowSurface" + EGL14.eglGetError());
101 | return null;
102 | }
103 | return eglSurface;
104 | }
105 |
106 | public EGLSurface createWindowSurface(Object surface) {
107 | return createWindowSurface(mEglConfig, surface);
108 | }
109 |
110 | public EGLSurface createPbufferSurface(EGLConfig config, int width, int height) {
111 | return EGL14.eglCreatePbufferSurface(mEglDisplay, config, new int[]{EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE}, 0);
112 | }
113 |
114 | public boolean makeCurrent(EGLSurface draw, EGLSurface read, EGLContext context) {
115 | if (!EGL14.eglMakeCurrent(mEglDisplay, draw, read, context)) {
116 | Log.d(TAG, "makeCurrent" + EGL14.eglGetError());
117 | return false;
118 | }
119 | return true;
120 | }
121 |
    /** Binds the same surface for both draw and read with the given context. */
    public boolean makeCurrent(EGLSurface surface, EGLContext context) {
        return makeCurrent(surface, surface, context);
    }
125 |
    /** Binds the surface using the context created by createContext(). */
    public boolean makeCurrent(EGLSurface surface) {
        return makeCurrent(surface, mEglContext);
    }
129 |
130 | public boolean setPresentationTime(EGLSurface surface, long timeStamp) {
131 | if (!EGLExt.eglPresentationTimeANDROID(mEglDisplay, surface, timeStamp)) {
132 | Log.d(TAG, "setPresentationTime" + EGL14.eglGetError());
133 | return false;
134 | }
135 | return true;
136 | }
137 |
138 | public boolean swapBuffers(EGLSurface surface) {
139 | if (!EGL14.eglSwapBuffers(mEglDisplay, surface)) {
140 | Log.d(TAG, "swapBuffers" + EGL14.eglGetError());
141 | return false;
142 | }
143 | return true;
144 | }
145 |
    /**
     * Full EGL teardown: unbinds the current context, destroys the given
     * surface and context (null-tolerant), then terminates the display.
     * The eglMakeCurrent(NO_SURFACE/NO_CONTEXT) call must happen first, while
     * the display connection is still alive.
     *
     * @return always true
     */
    public boolean destroyGL(EGLSurface surface, EGLContext context) {
        EGL14.eglMakeCurrent(mEglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
        if (surface != null) {
            EGL14.eglDestroySurface(mEglDisplay, surface);
        }
        if (context != null) {
            EGL14.eglDestroyContext(mEglDisplay, context);
        }
        EGL14.eglTerminate(mEglDisplay);
        return true;
    }
157 |
    /** Destroys a single EGL surface; the display and context stay alive. */
    public void destroySurface(EGLSurface surface) {
        EGL14.eglDestroySurface(mEglDisplay, surface);
    }
161 |
162 | }
163 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/util/GlesUtil.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.util;
2 |
3 | import android.content.Context;
4 | import android.graphics.Bitmap;
5 | import android.graphics.BitmapFactory;
6 | import android.opengl.GLES11Ext;
7 | import android.opengl.GLES30;
8 | import android.opengl.GLES30;
9 | import android.opengl.GLUtils;
10 | import android.util.Log;
11 |
12 | import java.nio.IntBuffer;
13 |
14 | import javax.microedition.khronos.opengles.GL10;
15 |
16 | import static com.jscheng.scamera.util.LogUtil.TAG;
17 |
18 | /**
19 | * Created By Chengjunsen on 2018/8/29
20 | */
21 | public class GlesUtil {
22 |
23 | public static int createProgram(String vertexSource, String fragmentSource) {
24 | int mVertexShader = loadShader(GLES30.GL_VERTEX_SHADER, vertexSource);
25 | int mFragmentShader = loadShader(GLES30.GL_FRAGMENT_SHADER, fragmentSource);
26 | int program = GLES30.glCreateProgram();
27 | GLES30.glAttachShader(program, mVertexShader);
28 | GLES30.glAttachShader(program, mFragmentShader);
29 | GLES30.glLinkProgram(program);
30 | int [] status = new int[1];
31 | GLES30.glGetProgramiv(program, GLES30.GL_LINK_STATUS, status, 0);
32 | if (status[0] != GLES30.GL_TRUE) {
33 | Log.e(TAG, "createProgam: link error");
34 | Log.e(TAG, "createProgam: " + GLES30.glGetProgramInfoLog(program));
35 | GLES30.glDeleteProgram(program);
36 | return 0;
37 | }
38 | GLES30.glDeleteShader(mVertexShader);
39 | GLES30.glDeleteShader(mFragmentShader);
40 | return program;
41 | }
42 |
43 | public static int loadShader(int shaderType, String shaderSource) {
44 | int shader = GLES30.glCreateShader(shaderType);
45 | GLES30.glShaderSource(shader, shaderSource);
46 | GLES30.glCompileShader(shader);
47 | int status[] = new int[1];
48 | GLES30.glGetShaderiv(shader, GLES30.GL_COMPILE_STATUS, status, 0);
49 | if (status[0] == 0) {
50 | Log.e(TAG, "loadShader: compiler error");
51 | Log.e(TAG, "loadShader: " + GLES30.glGetShaderInfoLog(shader) );
52 | GLES30.glDeleteShader(shader);
53 | return 0;
54 | }
55 | return shader;
56 | }
57 |
58 | public static void checkFrameBufferError() {
59 | int status= GLES30.glCheckFramebufferStatus(GLES30.GL_FRAMEBUFFER);
60 | if(status !=GLES30.GL_FRAMEBUFFER_COMPLETE) {
61 | Log.e(TAG, "checkFrameBuffer error: " + status);
62 | throw new RuntimeException("status:" + status + ", hex:" + Integer.toHexString(status));
63 | }
64 | }
65 |
66 | public static void checkError() {
67 | if (GLES30.glGetError() != GLES30.GL_NO_ERROR) {
68 | Log.e(TAG, "createOutputTexture: " + GLES30.glGetError() );
69 | }
70 | }
71 |
72 | public static int createPixelsBuffer(int width, int height) {
73 | int[] buffers = new int[1];
74 | GLES30.glGenBuffers(1, buffers, 0);
75 | checkError();
76 | return buffers[0];
77 | }
78 |
79 | public static void createPixelsBuffers(int[] buffers, int width, int height) {
80 | GLES30.glGenBuffers(buffers.length, buffers, 0);
81 | for (int i = 0; i < buffers.length; i++) {
82 | GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, buffers[i]);
83 | GLES30.glBufferData(GLES30.GL_PIXEL_PACK_BUFFER, width * height * 4, null, GLES30.GL_DYNAMIC_READ);
84 | }
85 | GLES30.glBindBuffer(GLES30.GL_PIXEL_PACK_BUFFER, 0);
86 | }
87 |
88 | public static int createFrameBuffer() {
89 | int[] buffers = new int[1];
90 | GLES30.glGenFramebuffers(1, buffers, 0);
91 | checkError();
92 | return buffers[0];
93 | }
94 |
95 | public static int createRenderBuffer() {
96 | int[] render = new int[1];
97 | GLES30.glGenRenderbuffers(1, render, 0);
98 | checkError();
99 | return render[0];
100 | }
101 |
102 | public static int createCameraTexture() {
103 | int[] texture = new int[1];
104 | GLES30.glGenTextures(1, texture, 0);
105 | GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
106 | GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
107 | GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
108 | GLES30.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
109 | GLES30.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
110 | GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
111 | return texture[0];
112 | }
113 |
114 | public static int createFrameTexture(int width, int height) {
115 | if (width <= 0 || height <= 0) {
116 | Log.e(TAG, "createOutputTexture: width or height is 0");
117 | return -1;
118 | }
119 | int[] textures = new int[1];
120 | GLES30.glGenTextures(1, textures, 0);
121 | if (textures[0] == 0) {
122 | Log.e(TAG, "createFrameTexture: glGenTextures is 0");
123 | return -1;
124 | }
125 | GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, textures[0]);
126 | GLES30.glTexImage2D(GLES30.GL_TEXTURE_2D, 0, GLES30.GL_RGBA, width, height, 0, GLES30.GL_RGBA, GLES30.GL_UNSIGNED_BYTE, null);
127 | //设置环绕方向S,截取纹理坐标到[1/2n,1-1/2n]。将导致永远不会与border融合
128 | GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE);
129 | //设置环绕方向T,截取纹理坐标到[1/2n,1-1/2n]。将导致永远不会与border融合
130 | GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE);
131 | //设置缩小过滤为使用纹理中坐标最接近的一个像素的颜色作为需要绘制的像素颜色
132 | GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_NEAREST);
133 | //设置放大过滤为使用纹理中坐标最接近的若干个颜色,通过加权平均算法得到需要绘制的像素颜色
134 | GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_NEAREST);
135 | GlesUtil.checkError();
136 | return textures[0];
137 | }
138 |
139 | public static int loadBitmapTexture(Bitmap bitmap) {
140 | int[] textureIds = new int[1];
141 | GLES30.glGenTextures(1, textureIds, 0);
142 | if (textureIds[0] == 0) {
143 | Log.e(TAG, "loadBitmapTexture: glGenTextures is 0");
144 | return -1;
145 | }
146 | //绑定纹理
147 | GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, textureIds[0]);
148 | //设置缩小过滤为使用纹理中坐标最接近的一个像素的颜色作为需要绘制的像素颜色
149 | GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER,GLES30.GL_NEAREST);
150 | //设置放大过滤为使用纹理中坐标最接近的若干个颜色,通过加权平均算法得到需要绘制的像素颜色
151 | GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D,GLES30.GL_TEXTURE_MAG_FILTER,GLES30.GL_LINEAR);
152 | //设置环绕方向S,截取纹理坐标到[1/2n,1-1/2n]。将导致永远不会与border融合
153 | GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_S,GLES30.GL_CLAMP_TO_EDGE);
154 | //设置环绕方向T,截取纹理坐标到[1/2n,1-1/2n]。将导致永远不会与border融合
155 | GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_T,GLES30.GL_CLAMP_TO_EDGE);
156 | //根据以上指定的参数,生成一个2D纹理
157 | GLUtils.texImage2D(GLES30.GL_TEXTURE_2D, 0, bitmap, 0);
158 | GLES30.glGenerateMipmap(GLES30.GL_TEXTURE_2D);
159 | GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, 0);
160 | return textureIds[0];
161 | }
162 |
163 | public static int loadBitmapTexture(Context context, int resourceId) {
164 | Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId);
165 | if (bitmap == null) {
166 | Log.e(TAG, "loadBitmapTexture:bitmap is null");
167 | return -1;
168 | }
169 | int textureId = loadBitmapTexture(bitmap);
170 | bitmap.recycle();
171 | return textureId;
172 | }
173 |
174 | public static void bindFrameTexture(int frameBufferId, int textureId){
175 | GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, frameBufferId);
176 | GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, textureId);
177 | GLES30.glFramebufferTexture2D(GLES30.GL_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0, GLES30.GL_TEXTURE_2D, textureId, 0);
178 | GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, 0);
179 | GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, 0);
180 | GlesUtil.checkError();
181 | }
182 |
183 | public static void bindFrameRender(int frameBufferId, int renderId, int width, int height) {
184 | GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, frameBufferId);
185 | GLES30.glBindRenderbuffer(GLES30.GL_RENDERBUFFER, renderId);
186 | GLES30.glRenderbufferStorage(GLES30.GL_RENDERBUFFER, GLES30.GL_DEPTH_COMPONENT16, width, height);
187 | GLES30.glFramebufferRenderbuffer(GLES30.GL_FRAMEBUFFER, GLES30.GL_DEPTH_ATTACHMENT, GLES30.GL_RENDERBUFFER, renderId);
188 | GLES30.glBindRenderbuffer(GLES30.GL_RENDERBUFFER, 0);
189 | GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, 0);
190 | }
191 | }
192 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/util/LogUtil.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.util;
2 |
3 | /**
4 | * Created By Chengjunsen on 2018/8/25
5 | */
/**
 * Created By Chengjunsen on 2018/8/25
 *
 * Holds the single logcat tag shared by the whole app.
 */
public class LogUtil {
    /** Tag used by every Log.x call in this project. */
    public static final String TAG = "CJS";
}
9 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/util/PermisstionUtil.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.util;
2 |
3 | import android.Manifest;
4 | import android.app.Activity;
5 | import android.content.Context;
6 | import android.content.DialogInterface;
7 | import android.content.Intent;
8 | import android.content.pm.PackageManager;
9 | import android.net.Uri;
10 | import android.os.Build;
11 | import android.support.v4.app.ActivityCompat;
12 | import android.support.v7.app.AlertDialog;
13 | import android.widget.Toast;
14 | import java.util.ArrayList;
15 | import java.util.List;
16 |
17 | /**
18 | * Created By Chengjunsen on 2018/8/22
19 | */
20 | public class PermisstionUtil {
21 | public static final String[] CALENDAR;
22 | public static final String[] CAMERA;
23 | public static final String[] CONTACTS;
24 | public static final String[] LOCATION;
25 | public static final String[] MICROPHONE;
26 | public static final String[] PHONE;
27 | public static final String[] SENSORS;
28 | public static final String[] SMS;
29 | public static final String[] STORAGE;
30 |
31 | /**
32 | * 单个权限请求检测
33 | * @param context
34 | * @param permissionName
35 | * @return
36 | */
37 | public static boolean isPermissionGranted(Context context, String permissionName) {
38 | if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
39 | return true;
40 | }
41 | //判断是否需要请求允许权限
42 | int hasPermision = context.checkSelfPermission(permissionName);
43 | if (hasPermision != PackageManager.PERMISSION_GRANTED) {
44 | return false;
45 | }
46 | return true;
47 | }
48 |
49 | /**
50 | * 多个权限请求检测,返回list,如果list.size为空说明权限全部有了不需要请求,否则请求没有的
51 | * @param context
52 | * @param permArray
53 | * @return
54 | */
55 | public static List isPermissionsAllGranted(Context context, String[] permArray) {
56 | List list = new ArrayList<>();
57 | if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
58 | return list;
59 | }
60 | for (int i = 0; permArray != null && i < permArray.length; i++) {
61 | //获得批量请求但被禁止的权限列表
62 | if (PackageManager.PERMISSION_GRANTED != context.checkSelfPermission(permArray[i])) {
63 | list.add(permArray[i]);
64 | }
65 | }
66 | return list;
67 | }
68 |
69 | /**
70 | * 判断是否已拒绝过权限
71 | * @param context
72 | * @param permission
73 | * @return
74 | */
75 | public static boolean judgePermission(Context context, String permission) {
76 | if (ActivityCompat.shouldShowRequestPermissionRationale((Activity) context, permission)) {
77 | return true;
78 | } else {
79 | return false;
80 | }
81 | }
82 |
83 | /**
84 | * 判断是否已拒绝过权限
85 | * @param context
86 | * @param permissions
87 | * @return
88 | */
89 | public static boolean judgePermission(Context context, String[] permissions) {
90 | for (String permission : permissions) {
91 | if (ActivityCompat.shouldShowRequestPermissionRationale((Activity) context, permission)) {
92 | return true;
93 | }
94 | }
95 | return false;
96 | }
97 |
98 | /**
99 | * 请求权限
100 | * @param context
101 | * @param permission
102 | * @param requestCode
103 | */
104 | public static void requestPermission(Context context, String permission, int requestCode) {
105 | ActivityCompat.requestPermissions((Activity) context, new String[]{permission}, requestCode);
106 | }
107 |
108 | /**
109 | * 请求多个权限
110 | * @param context
111 | * @param permissions
112 | * @param requestCode
113 | */
114 | public static void requestPermissions(Context context, String[] permissions, int requestCode) {
115 | ActivityCompat.requestPermissions((Activity) context, permissions, requestCode);
116 | }
117 |
118 |
119 | /**
120 | * 申请权限复合接口
121 | * 检查权限 -> 申请权限 -> 被拒绝则打开对话窗口
122 | * @param context
123 | * @param permissions
124 | * @param requestCode
125 | * @param hint
126 | * @return
127 | */
128 | public static boolean checkPermissionsAndRequest(Context context, String[] permissions, int requestCode, String hint) {
129 | List notGrantPermissions = PermisstionUtil.isPermissionsAllGranted(context, permissions);
130 | if(notGrantPermissions.isEmpty()){
131 | return true;
132 | }
133 | if (PermisstionUtil.judgePermission(context, permissions)) {
134 | PermisstionUtil.showPermissionAlterDialog(context, hint);
135 | } else {
136 | PermisstionUtil.requestPermissions(context, permissions, requestCode);
137 | }
138 | return false;
139 | }
140 |
141 | public static void showPermissionAlterDialog(final Context context, String hint) {
142 | new AlertDialog.Builder(context)
143 | .setTitle("提示")
144 | .setMessage(hint)
145 | .setNegativeButton("取消", new DialogInterface.OnClickListener() {
146 | @Override
147 | public void onClick(DialogInterface dialog, int which) {
148 | dialog.dismiss();
149 | }
150 | })
151 | .setPositiveButton("设置", new DialogInterface.OnClickListener() {
152 | @Override
153 | public void onClick(DialogInterface dialog, int which) {
154 | //前往应用详情界面
155 | try {
156 | Uri packUri = Uri.parse("package:" + context.getPackageName());
157 | Intent intent = new Intent(android.provider.Settings.ACTION_APPLICATION_DETAILS_SETTINGS, packUri);
158 | intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
159 | context.startActivity(intent);
160 | } catch (Exception e) {
161 | Toast.makeText(context, "跳转失败", Toast.LENGTH_SHORT).show();
162 | }
163 | dialog.dismiss();
164 | }
165 | }).create().show();
166 | }
167 | static {
168 | if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
169 | CALENDAR = new String[]{};
170 | CAMERA = new String[]{};
171 | CONTACTS = new String[]{};
172 | LOCATION = new String[]{};
173 | MICROPHONE = new String[]{};
174 | PHONE = new String[]{};
175 | SENSORS = new String[]{};
176 | SMS = new String[]{};
177 | STORAGE = new String[]{};
178 | } else {
179 | CALENDAR = new String[]{
180 | Manifest.permission.READ_CALENDAR,
181 | Manifest.permission.WRITE_CALENDAR};
182 |
183 | CAMERA = new String[]{
184 | Manifest.permission.CAMERA};
185 |
186 | CONTACTS = new String[]{
187 | Manifest.permission.READ_CONTACTS,
188 | Manifest.permission.WRITE_CONTACTS,
189 | Manifest.permission.GET_ACCOUNTS};
190 |
191 | LOCATION = new String[]{
192 | Manifest.permission.ACCESS_FINE_LOCATION,
193 | Manifest.permission.ACCESS_COARSE_LOCATION};
194 |
195 | MICROPHONE = new String[]{
196 | Manifest.permission.RECORD_AUDIO};
197 |
198 | PHONE = new String[]{
199 | Manifest.permission.READ_PHONE_STATE,
200 | Manifest.permission.CALL_PHONE,
201 | Manifest.permission.READ_CALL_LOG,
202 | Manifest.permission.WRITE_CALL_LOG,
203 | Manifest.permission.USE_SIP,
204 | Manifest.permission.PROCESS_OUTGOING_CALLS};
205 |
206 | SENSORS = new String[]{
207 | Manifest.permission.BODY_SENSORS};
208 |
209 | SMS = new String[]{
210 | Manifest.permission.SEND_SMS,
211 | Manifest.permission.RECEIVE_SMS,
212 | Manifest.permission.READ_SMS,
213 | Manifest.permission.RECEIVE_WAP_PUSH,
214 | Manifest.permission.RECEIVE_MMS};
215 |
216 | STORAGE = new String[]{
217 | Manifest.permission.READ_EXTERNAL_STORAGE,
218 | Manifest.permission.WRITE_EXTERNAL_STORAGE};
219 | }
220 | }
221 |
222 | }
223 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/util/StorageUtil.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.util;
2 |
3 | import android.os.Environment;
4 |
5 | import java.io.File;
6 |
7 | /**
8 | * Created By Chengjunsen on 2018/9/5
9 | */
10 | public class StorageUtil {
11 | public static String getDirName() {
12 | return "SCamera";
13 | }
14 |
15 | public static String getSDPath() {
16 | // 判断是否挂载
17 | if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
18 | return Environment.getExternalStorageDirectory().getAbsolutePath();
19 | }
20 | return Environment.getRootDirectory().getAbsolutePath();
21 | }
22 |
23 | public static String getImagePath() {
24 | String path = getSDPath() + "/" + getDirName() + "/image/";
25 | checkDirExist(path);
26 | return path;
27 | }
28 |
29 | public static String getVedioPath(boolean checkDirExist) {
30 | String path = getSDPath() + "/" + getDirName() + "/video/";
31 | checkDirExist(path);
32 | return path;
33 | }
34 |
35 | public static boolean checkDirExist(String path) {
36 | File mDir = new File(path);
37 | if (!mDir.exists()) {
38 | return mDir.mkdirs();
39 | }
40 | return true;
41 | }
42 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/view/CameraFragment.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.view;
2 |
3 | import android.annotation.SuppressLint;
4 | import android.content.Context;
5 | import android.content.pm.PackageManager;
6 | import android.graphics.Bitmap;
7 | import android.graphics.BitmapFactory;
8 | import android.graphics.Point;
9 | import android.graphics.SurfaceTexture;
10 | import android.hardware.Camera;
11 | import android.os.Bundle;
12 | import android.os.Handler;
13 | import android.os.Looper;
14 | import android.os.Message;
15 | import android.support.annotation.NonNull;
16 | import android.support.annotation.Nullable;
17 | import android.support.v4.app.Fragment;
18 | import android.util.Log;
19 | import android.util.Size;
20 | import android.view.LayoutInflater;
21 | import android.view.MotionEvent;
22 | import android.view.View;
23 | import android.view.ViewGroup;
24 | import android.widget.ImageView;
25 |
26 | import com.jscheng.scamera.R;
27 | import com.jscheng.scamera.util.CameraUtil;
28 | import com.jscheng.scamera.util.PermisstionUtil;
29 | import com.jscheng.scamera.widget.CameraFocusView;
30 | import com.jscheng.scamera.widget.CameraGLSurfaceView;
31 | import com.jscheng.scamera.widget.CameraProgressButton;
32 | import com.jscheng.scamera.widget.CameraSwitchView;
33 |
34 | import java.nio.ByteBuffer;
35 | import java.util.concurrent.ExecutorService;
36 | import java.util.concurrent.Executors;
37 |
38 | import static com.jscheng.scamera.util.LogUtil.TAG;
39 |
40 | /**
41 | * Created By Chengjunsen on 2018/8/22
42 | */
43 | public class CameraFragment extends Fragment implements CameraProgressButton.Listener, CameraGLSurfaceView.CameraGLSurfaceViewCallback, CameraSensor.CameraSensorListener{
44 | private final static int REQUEST_CODE = 1;
45 | private final static int MSG_START_PREVIEW = 1;
46 | private final static int MSG_SWITCH_CAMERA = 2;
47 | private final static int MSG_RELEASE_PREVIEW = 3;
48 | private final static int MSG_MANUAL_FOCUS = 4;
49 | private final static int MSG_ROCK = 5;
50 |
51 | private CameraGLSurfaceView mCameraView;
52 | private CameraSensor mCameraSensor;
53 | private CameraProgressButton mProgressBtn;
54 | private CameraFocusView mFocusView;
55 | private CameraSwitchView mSwitchView;
56 | private boolean isFocusing;
57 | private Size mPreviewSize;
58 | private Handler mCameraHanlder;
59 |
60 | @Override
61 | public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
62 | View contentView = inflater.inflate(R.layout.fragment_camera, container, false);
63 | initCameraHandler();
64 | initView(contentView);
65 | return contentView;
66 | }
67 |
68 | private void initView(View contentView) {
69 | isFocusing = false;
70 | mPreviewSize = null;
71 | mCameraView = contentView.findViewById(R.id.camera_view);
72 | mProgressBtn = contentView.findViewById(R.id.progress_btn);
73 | mFocusView = contentView.findViewById(R.id.focus_view);
74 | mSwitchView = contentView.findViewById(R.id.switch_view);
75 |
76 | mCameraSensor = new CameraSensor(getContext());
77 | mCameraSensor.setCameraSensorListener(this);
78 | mProgressBtn.setProgressListener(this);
79 |
80 | mCameraView.setCallback(this);
81 | mCameraView.setOnTouchListener(new View.OnTouchListener() {
82 | @Override
83 | public boolean onTouch(View view, MotionEvent event) {
84 | if (event.getAction() == MotionEvent.ACTION_DOWN) {
85 | focus((int)event.getX(), (int)event.getY(), false);
86 | return true;
87 | }
88 | return false;
89 | }
90 | });
91 | mSwitchView.setOnClickListener(new View.OnClickListener(){
92 | @Override
93 | public void onClick(View view) {
94 | mCameraHanlder.sendEmptyMessage(MSG_SWITCH_CAMERA);
95 | }
96 | });
97 | }
98 |
99 | private void initCameraHandler() {
100 | mCameraHanlder = new Handler(Looper.getMainLooper()) {
101 | @Override
102 | public void handleMessage(Message msg) {
103 | switch (msg.what) {
104 | case MSG_START_PREVIEW:
105 | startPreview();
106 | break;
107 | case MSG_RELEASE_PREVIEW:
108 | releasePreview();
109 | break;
110 | case MSG_SWITCH_CAMERA:
111 | switchCamera();
112 | break;
113 | case MSG_MANUAL_FOCUS:
114 | manualFocus(msg.arg1, msg.arg2);
115 | break;
116 | case MSG_ROCK:
117 | autoFocus();
118 | break;
119 | default:
120 | break;
121 | }
122 | }
123 | };
124 | }
125 |
126 | @Override
127 | public void onAttach(Context context) {
128 | super.onAttach(context);
129 | }
130 |
131 | @Override
132 | public void onDetach() {
133 | mCameraHanlder.sendEmptyMessage(MSG_RELEASE_PREVIEW);
134 | super.onDetach();
135 | }
136 |
137 | @Override
138 | public void onSurfaceViewCreate(SurfaceTexture texture) {
139 |
140 | }
141 |
142 | @Override
143 | public void onSurfaceViewChange(int width, int height) {
144 | Log.e(TAG, "surfaceChanged: ( " + width +" x " + height +" )");
145 | mPreviewSize = new Size(width, height);
146 | mCameraHanlder.sendEmptyMessage(MSG_START_PREVIEW);
147 | }
148 |
149 | public void startPreview() {
150 | if (mPreviewSize != null && requestPermission() ) {
151 | if (CameraUtil.getCamera() == null) {
152 | CameraUtil.openCamera();
153 | Log.e(TAG, "openCamera" );
154 | CameraUtil.setDisplay(mCameraView.getSurfaceTexture());
155 | }
156 | CameraUtil.startPreview(getActivity(), mPreviewSize.getWidth(), mPreviewSize.getHeight());
157 | mCameraSensor.start();
158 | mSwitchView.setOrientation(mCameraSensor.getX(), mCameraSensor.getY(), mCameraSensor.getZ());
159 | }
160 | }
161 |
162 | public void releasePreview() {
163 | CameraUtil.releaseCamera();
164 | mCameraSensor.stop();
165 | mFocusView.cancelFocus();
166 | Log.e(TAG, "releasePreview releaseCamera" );
167 | }
168 |
169 | public void switchCamera() {
170 | mFocusView.cancelFocus();
171 | if (CameraUtil.getCamera() != null && mPreviewSize != null) {
172 | mCameraView.releaseSurfaceTexture();
173 | CameraUtil.releaseCamera();
174 | CameraUtil.switchCameraId();
175 | CameraUtil.openCamera();
176 | mCameraView.resumeSurfaceTexture();
177 | CameraUtil.setDisplay(mCameraView.getSurfaceTexture());
178 | CameraUtil.startPreview(getActivity(), mPreviewSize.getWidth(), mPreviewSize.getHeight());
179 | }
180 | }
181 |
182 | public void autoFocus() {
183 | if (CameraUtil.isBackCamera() && CameraUtil.getCamera() != null) {
184 | focus(mCameraView.getWidth() / 2, mCameraView.getHeight() / 2, true);
185 | }
186 | mSwitchView.setOrientation(mCameraSensor.getX(), mCameraSensor.getY(), mCameraSensor.getZ());
187 | }
188 |
189 | public void manualFocus(int x, int y) {
190 | focus(x, y, false);
191 | }
192 |
193 | private void focus(final int x, final int y, final boolean isAutoFocus) {
194 | if (CameraUtil.getCamera() == null || !CameraUtil.isBackCamera()) {
195 | return;
196 | }
197 | if (isFocusing && isAutoFocus) {
198 | return;
199 | }
200 | isFocusing = true;
201 | Point focusPoint = new Point(x, y);
202 | Size screenSize = new Size(mCameraView.getWidth(), mCameraView.getHeight());
203 | if (!isAutoFocus) {
204 | mFocusView.beginFocus(x, y);
205 | }
206 | CameraUtil.newCameraFocus(focusPoint, screenSize, new Camera.AutoFocusCallback() {
207 | @Override
208 | public void onAutoFocus(boolean success, Camera camera) {
209 | isFocusing = false;
210 | if (!isAutoFocus) {
211 | mFocusView.endFocus(success);
212 | }
213 | }
214 | });
215 | }
216 |
217 | @Override
218 | public void onPause() {
219 | super.onPause();
220 | releasePreview();
221 | }
222 |
223 | @Override
224 | public void onResume() {
225 | super.onResume();
226 | startPreview();
227 | }
228 |
229 | @Override
230 | public void onShortPress() {
231 | if (requestPermission()) {
232 | takePicture();
233 | }
234 | }
235 |
236 | @SuppressLint("NewApi")
237 | private void takePicture() {
238 |
239 | }
240 |
241 | @Override
242 | public void onStartLongPress() {
243 | if (requestPermission()) {
244 | mCameraView.startRecord();
245 | }
246 | }
247 |
248 | @Override
249 | public void onEndLongPress() {
250 | mCameraView.stopRecord();
251 | }
252 |
253 | @Override
254 | public void onEndMaxProgress() {
255 | }
256 |
257 | @Override
258 | public void onRock() {
259 | mCameraHanlder.sendEmptyMessage(MSG_ROCK);
260 | }
261 |
262 | private boolean requestPermission() {
263 | return PermisstionUtil.checkPermissionsAndRequest(getContext(), PermisstionUtil.CAMERA, REQUEST_CODE, "请求相机权限被拒绝")
264 | && PermisstionUtil.checkPermissionsAndRequest(getContext(), PermisstionUtil.STORAGE, REQUEST_CODE, "请求访问SD卡权限被拒绝");
265 | }
266 |
267 | @Override
268 | public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
269 | super.onRequestPermissionsResult(requestCode, permissions, grantResults);
270 | if (requestCode == requestCode ) {
271 | mCameraHanlder.sendEmptyMessage(MSG_START_PREVIEW);
272 | }
273 | }
274 | }
275 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/view/CameraSensor.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.view;
2 |
3 | import android.content.Context;
4 | import android.hardware.Sensor;
5 | import android.hardware.SensorEvent;
6 | import android.hardware.SensorEventListener;
7 | import android.hardware.SensorManager;
8 |
9 | /**
10 | * Created By Chengjunsen on 2018/8/24
11 | */
12 | public class CameraSensor implements SensorEventListener{
13 | private SensorManager mSensorManager;
14 | private Sensor mSensor;
15 | private int lastX, lastY, lastZ;
16 | private CameraSensorListener mCameraSensorListener;
17 |
18 | public CameraSensor(Context context) {
19 | mSensorManager = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
20 | mSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
21 | mCameraSensorListener = null;
22 | reset();
23 | }
24 | /**
25 | * 方向改变时会调用
26 | */
27 | @Override
28 | public void onSensorChanged(SensorEvent sensorEvent) {
29 | if (sensorEvent.sensor == null) {
30 | return;
31 | }
32 | if (sensorEvent.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
33 | int x = (int) sensorEvent.values[0];
34 | int y = (int) sensorEvent.values[1];
35 | int z = (int) sensorEvent.values[2];
36 | int px = Math.abs(lastX - x);
37 | int py = Math.abs(lastY - y);
38 | int pz = Math.abs(lastZ - z);
39 | lastX = x;
40 | lastY = y;
41 | lastZ = z;
42 |
43 | if (px > 2.5 || py > 2.5 || pz > 2.5) {
44 | if (mCameraSensorListener != null) {
45 | mCameraSensorListener.onRock();
46 | }
47 | }
48 | }
49 | }
50 |
51 | /**
52 | * 精度改变会调用
53 | */
54 | @Override
55 | public void onAccuracyChanged(Sensor sensor, int i) {
56 |
57 | }
58 |
59 | public void start() {
60 | // samplingPeriodUs:指定获取传感器频率, SensorManager.SENSOR_DELAY_NORMAL:正常频率
61 | mSensorManager.registerListener(this, mSensor, SensorManager.SENSOR_DELAY_NORMAL);
62 | reset();
63 | }
64 |
65 | public void stop() {
66 | mSensorManager.unregisterListener(this, mSensor);
67 | }
68 |
69 | private void reset() {
70 | lastX = 0;
71 | lastY = 0;
72 | lastZ = 0;
73 | }
74 |
75 | public void setCameraSensorListener(CameraSensorListener listener) {
76 | mCameraSensorListener = listener;
77 | }
78 |
79 | public int getX() {
80 | return lastX;
81 | }
82 |
83 | public int getY() {
84 | return lastY;
85 | }
86 |
87 | public int getZ() {
88 | return lastZ;
89 | }
90 |
91 | public interface CameraSensorListener {
92 | void onRock();
93 | }
94 | }
95 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/view/MainActivity.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.view;
2 |
3 | import android.support.v4.app.FragmentTransaction;
4 | import android.support.v7.app.AppCompatActivity;
5 | import android.os.Bundle;
6 | import android.view.Window;
7 | import android.view.WindowManager;
8 |
9 | import com.jscheng.scamera.BaseActivity;
10 | import com.jscheng.scamera.R;
11 |
12 | /**
13 | * Created By Chengjunsen on 2018/8/22
14 | */
15 | public class MainActivity extends BaseActivity {
16 | private CameraFragment mCameraFragment;
17 | @Override
18 | protected void onCreate(Bundle savedInstanceState) {
19 | super.onCreate(savedInstanceState);
20 | requestWindowFeature(Window.FEATURE_NO_TITLE);
21 | getWindow().setFlags(WindowManager.LayoutParams. FLAG_FULLSCREEN, WindowManager.LayoutParams. FLAG_FULLSCREEN);
22 | setContentView(R.layout.activity_main);
23 |
24 | mCameraFragment = new CameraFragment();
25 | FragmentTransaction transaction = getSupportFragmentManager().beginTransaction();
26 | transaction.add(R.id.main_container, mCameraFragment);
27 | transaction.commit();
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/widget/CameraFocusView.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.widget;
2 |
3 | import android.content.Context;
4 | import android.content.res.TypedArray;
5 | import android.graphics.Canvas;
6 | import android.graphics.Color;
7 | import android.graphics.Paint;
8 | import android.os.Handler;
9 | import android.os.Looper;
10 | import android.support.annotation.Nullable;
11 | import android.util.AttributeSet;
12 | import android.view.View;
13 |
14 | import com.jscheng.scamera.R;
15 |
16 | /**
17 | * Created By Chengjunsen on 2018/8/24
18 | * 相机对焦view
19 | */
20 | public class CameraFocusView extends View {
21 | private int mStrokeWidth;
22 | private int prepareColor;
23 | private int finishColor;
24 | private int mPaintColor;
25 | private boolean isFocusing;
26 | private Paint mPaint;
27 | private int mDuration;
28 | private Handler mDurationHandler;
29 |
30 | public CameraFocusView(Context context) {
31 | super(context);
32 | init(context, null);
33 | }
34 |
35 | public CameraFocusView(Context context, @Nullable AttributeSet attrs) {
36 | super(context, attrs);
37 | init(context, attrs);
38 | }
39 |
40 | public CameraFocusView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
41 | super(context, attrs, defStyleAttr);
42 | init(context, attrs);
43 | }
44 |
45 | private void init(Context context, @Nullable AttributeSet attrs) {
46 | TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.camera_focus_view);
47 | this.mStrokeWidth = (int) typedArray.getDimension(R.styleable.camera_focus_view_stroke_width, 5);
48 | this.prepareColor = typedArray.getColor(R.styleable.camera_focus_view_prepare_color, Color.RED);
49 | this.finishColor = typedArray.getColor(R.styleable.camera_focus_view_finish_color, Color.YELLOW);
50 | this.mPaint = new Paint();
51 | this.mPaintColor = prepareColor;
52 | this.mDuration = 1000;
53 | this.mDurationHandler = new Handler(Looper.getMainLooper());
54 | this.setVisibility(GONE);
55 | }
56 |
57 | public void beginFocus(int centerX, int centerY) {
58 | mPaintColor = prepareColor;
59 | isFocusing = true;
60 | int x = centerX - getMeasuredWidth()/2;
61 | int y = centerY - getMeasuredHeight()/2;
62 | setX(x);
63 | setY(y);
64 | setVisibility(VISIBLE);
65 | invalidate();
66 | }
67 |
68 | public void endFocus(boolean isSuccess) {
69 | isFocusing = false;
70 | if (isSuccess) {
71 | mPaintColor = finishColor;
72 | mDurationHandler.postDelayed(new Runnable() {
73 | @Override
74 | public void run() {
75 | if (!isFocusing) {
76 | setVisibility(GONE);
77 | }
78 | }
79 | }, mDuration);
80 | invalidate();
81 | } else {
82 | setVisibility(GONE);
83 | }
84 | }
85 | public void cancelFocus() {
86 | isFocusing = false;
87 | setVisibility(GONE);
88 | }
89 | public void setDuration(int duration) {
90 | mDuration = duration;
91 | }
92 |
93 | @Override
94 | protected void onDraw(Canvas canvas) {
95 | super.onDraw(canvas);
96 | int width = getMeasuredWidth();
97 | int height = getMeasuredHeight();
98 | mPaint.setStrokeWidth(mStrokeWidth);
99 | mPaint.setAntiAlias(true);
100 | mPaint.setColor(mPaintColor);
101 | mPaint.setStyle(Paint.Style.FILL);
102 | canvas.drawLine(0, 0, width/3, 0, mPaint);
103 | canvas.drawLine(width*2/3, 0, width, 0, mPaint);
104 | canvas.drawLine(0, height, width/3, height, mPaint);
105 | canvas.drawLine(width*2/3, height, width, height, mPaint);
106 |
107 | canvas.drawLine(0, 0, 0, height/3, mPaint);
108 | canvas.drawLine(0, height*2/3, 0, height, mPaint);
109 | canvas.drawLine(width, 0, width, height/3, mPaint);
110 | canvas.drawLine(width, height*2/3, width, height, mPaint);
111 | }
112 | }
113 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/widget/CameraGLSurfaceView.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.widget;
2 |
3 | import android.content.Context;
4 | import android.graphics.SurfaceTexture;
5 | import android.opengl.EGL14;
6 | import android.opengl.GLSurfaceView;
7 | import android.util.AttributeSet;
8 | import com.jscheng.scamera.render.CameraSurfaceRender;
9 | import java.nio.ByteBuffer;
10 |
11 | /**
12 | * Created By Chengjunsen on 2018/8/25
13 | */
14 | public class CameraGLSurfaceView extends GLSurfaceView implements CameraSurfaceRender.CameraSufaceRenderCallback{
15 | private CameraSurfaceRender mRender;
16 | private CameraGLSurfaceViewCallback mCallback;
17 |
18 | public CameraGLSurfaceView(Context context) {
19 | super(context, null);
20 | }
21 |
22 | public CameraGLSurfaceView(Context context, AttributeSet attrs) {
23 | super(context, attrs);
24 | init(context);
25 | }
26 |
27 | private void init(Context context) {
28 | setEGLContextClientVersion(3);
29 | setDebugFlags(GLSurfaceView.DEBUG_CHECK_GL_ERROR);
30 | mRender = new CameraSurfaceRender(context);
31 | mRender.setCallback(this);
32 | this.setRenderer(mRender);
33 | this.setRenderMode(RENDERMODE_WHEN_DIRTY);
34 | }
35 |
36 | public SurfaceTexture getSurfaceTexture() {
37 | return mRender.getCameraSurfaceTexture();
38 | }
39 |
40 | @Override
41 | public void onRequestRender() {
42 | requestRender();
43 | }
44 |
45 | @Override
46 | public void onCreate() {
47 | if (mCallback != null) {
48 | mCallback.onSurfaceViewCreate(getSurfaceTexture());
49 | }
50 | }
51 |
52 | @Override
53 | public void onChanged(int width, int height) {
54 | if (mCallback != null) {
55 | mCallback.onSurfaceViewChange(width, height);
56 | }
57 | }
58 |
59 | @Override
60 | public void onDraw() {
61 |
62 | }
63 |
64 | public void setCallback(CameraGLSurfaceViewCallback mCallback) {
65 | this.mCallback = mCallback;
66 | }
67 |
68 | public void releaseSurfaceTexture() {
69 | mRender.releaseSurfaceTexture();
70 | }
71 |
72 | public void resumeSurfaceTexture() {
73 | mRender.resumeSurfaceTexture();
74 | }
75 |
76 | public void startRecord() {
77 | mRender.startRecord();
78 | }
79 |
80 | public void stopRecord() {
81 | mRender.stopRecord();
82 | }
83 |
84 | public interface CameraGLSurfaceViewCallback {
85 | void onSurfaceViewCreate(SurfaceTexture texture);
86 | void onSurfaceViewChange(int width, int height);
87 | }
88 | }
89 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/widget/CameraProgressButton.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.widget;
2 |
3 | import android.content.Context;
4 | import android.content.res.TypedArray;
5 | import android.graphics.Canvas;
6 | import android.graphics.Color;
7 | import android.graphics.Paint;
8 | import android.graphics.RectF;
9 | import android.os.Handler;
10 | import android.os.Looper;
11 | import android.os.Message;
12 | import android.support.annotation.Nullable;
13 | import android.util.AttributeSet;
14 | import android.view.MotionEvent;
15 | import android.view.View;
16 |
17 | import com.jscheng.scamera.R;
18 |
19 | /**
20 | * Created By Chengjunsen on 2018/8/22
21 | */
22 | public class CameraProgressButton extends View{
23 | private int mMaxProgress = 10000; // 默认10s
24 | private Paint mBgPaint;
25 | private Paint mStrokePaint;
26 | private RectF mRectF;
27 | private int progress;
28 | private int mCircleColor;
29 | private int mCircleLineColor;
30 | private Handler mTouchHandler;
31 | private Listener mListener;
32 |
33 | public CameraProgressButton(Context context) {
34 | super(context);
35 | init(context, null);
36 | }
37 |
38 | public CameraProgressButton(Context context, @Nullable AttributeSet attrs) {
39 | super(context, attrs);
40 | init(context, attrs);
41 | }
42 |
43 | private void init(Context context,@Nullable AttributeSet attrs) {
44 | TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.camera_progress_button);
45 | mCircleColor = typedArray.getColor(R.styleable.camera_progress_button_circle, Color.RED);
46 | mCircleLineColor = typedArray.getColor(R.styleable.camera_progress_button_circle_line, Color.BLACK);
47 |
48 | mStrokePaint = new Paint();
49 | mBgPaint = new Paint();
50 | mRectF = new RectF();
51 | progress = 0;
52 | mTouchHandler = new InnerTouchHandler();
53 | mListener = null;
54 | }
55 |
56 | @Override
57 | protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
58 | super.onLayout(changed, left, top, right, bottom);
59 | }
60 |
61 | @Override
62 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
63 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
64 | }
65 |
66 | @Override
67 | protected void onDraw(Canvas canvas) {
68 | int width = Math.min(getWidth(), getHeight());
69 | int radius = progress == 0 ? width/3 : width/2;
70 | int mStrokeWidth = width / 10;
71 |
72 | int centerX = width/2;
73 | int centerY = width/2;
74 |
75 | float progressSweepAngle = 0;
76 | if (progress > 0 && progress < mMaxProgress) {
77 | progressSweepAngle = ((float) progress / mMaxProgress) * 360;
78 | } else if (progress >= mMaxProgress) {
79 | progressSweepAngle = 360;
80 | }
81 |
82 | // 设置画笔相关属性
83 | mStrokePaint.setAntiAlias(true);
84 | mStrokePaint.setStrokeWidth(mStrokeWidth);
85 | mStrokePaint.setStyle(Paint.Style.STROKE);
86 | mStrokePaint.setColor(mCircleLineColor);
87 |
88 | // 位置
89 | mRectF.left = centerX - radius + mStrokeWidth/2;
90 | mRectF.top = centerY - radius + mStrokeWidth/2;
91 | mRectF.right = centerX + radius - mStrokeWidth/2;
92 | mRectF.bottom = centerY + radius - mStrokeWidth/2;
93 |
94 | // 实心圆
95 | mBgPaint.setAntiAlias(true);
96 | mBgPaint.setStrokeWidth(mStrokeWidth);
97 | mBgPaint.setStyle(Paint.Style.FILL);
98 | mBgPaint.setColor(mCircleColor);
99 |
100 | canvas.drawCircle(centerX, centerY, radius - mStrokeWidth, mBgPaint);
101 | canvas.drawArc(mRectF, -90, progressSweepAngle, false, mStrokePaint);
102 | }
103 |
104 | public void setMaxProgress(int maxProgress) {
105 | this.mMaxProgress = maxProgress;
106 | }
107 |
108 | public void setProgress(int progress) {
109 | this.progress = progress > mMaxProgress ? mMaxProgress : progress;
110 | invalidate();
111 | }
112 |
113 | public int getProgress() {
114 | return progress;
115 | }
116 |
117 | @Override
118 | public boolean onTouchEvent(MotionEvent event) {
119 | switch (event.getAction()) {
120 | case MotionEvent.ACTION_DOWN:
121 | mTouchHandler.sendEmptyMessage(InnerTouchHandler.ACTION_DOWN);
122 | return true;
123 | case MotionEvent.ACTION_UP:
124 | mTouchHandler.sendEmptyMessage(InnerTouchHandler.ACTION_UP);
125 | return true;
126 | default:
127 | return super.onTouchEvent(event);
128 | }
129 | }
130 |
131 | public interface Listener {
132 | void onShortPress();
133 | void onStartLongPress();
134 | void onEndLongPress();
135 | void onEndMaxProgress();
136 | }
137 |
138 | public void setProgressListener(Listener mListener) {
139 | this.mListener = mListener;
140 | }
141 |
142 | private class InnerTouchHandler extends Handler {
143 | public static final int ACTION_DOWN = 1;
144 | public static final int ACTION_UP = 2;
145 | public static final int SCHEDULE_PRESSING = 3;
146 | public int LONG_PRESS_DURATION = 300;
147 | public int EACH_DURATION = 100;
148 |
149 | private boolean isPress = false;
150 | public InnerTouchHandler() {
151 | super(Looper.getMainLooper());
152 | }
153 |
154 | @Override
155 | public void handleMessage(Message msg) {
156 | switch (msg.what) {
157 | case ACTION_DOWN:
158 | if (!isPress) {
159 | isPress = true;
160 | sendEmptyMessageDelayed(SCHEDULE_PRESSING, LONG_PRESS_DURATION);
161 | }
162 | break;
163 | case ACTION_UP:
164 | if (isPress) {
165 | if (mListener != null) {
166 | if (getProgress() == 0) {
167 | mListener.onShortPress();
168 | } else {
169 | mListener.onEndLongPress();
170 | }
171 | }
172 | isPress = false;
173 | setProgress(0);
174 | }
175 | break;
176 | case SCHEDULE_PRESSING:
177 | if (isPress) {
178 | int endProgress = getProgress() + EACH_DURATION;
179 | if (mListener != null) {
180 | if (getProgress() == 0) {
181 | mListener.onStartLongPress();
182 | } else if (endProgress >= mMaxProgress){
183 | mListener.onEndMaxProgress();
184 | }
185 | }
186 | setProgress(endProgress);
187 | sendEmptyMessageDelayed(SCHEDULE_PRESSING, EACH_DURATION);
188 | }
189 | break;
190 | default:
191 | break;
192 | }
193 | }
194 | }
195 | }
196 |
--------------------------------------------------------------------------------
/app/src/main/java/com/jscheng/scamera/widget/CameraSwitchView.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera.widget;
2 |
3 | import android.content.Context;
4 | import android.support.annotation.Nullable;
5 | import android.util.AttributeSet;
6 | import android.util.Log;
7 |
8 | /**
9 | * Created By Chengjunsen on 2018/8/24
10 | */
11 | public class CameraSwitchView extends android.support.v7.widget.AppCompatImageView {
12 | private static final int ORIENTATION_UP = 0;
13 | private static final int ORIENTATION_BOTTOM = 180;
14 | private static final int ORIENTATION_LEFT = 90;
15 | private static final int ORIENTATION_RIGHT = 270;
16 | private Context mContext;
17 | public CameraSwitchView(Context context) {
18 | super(context);
19 | init(context);
20 | }
21 |
22 | public CameraSwitchView(Context context, @Nullable AttributeSet attrs) {
23 | super(context, attrs);
24 | init(context);
25 | }
26 |
27 | public CameraSwitchView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
28 | super(context, attrs, defStyleAttr);
29 | init(context);
30 | }
31 |
32 | private void init(Context context) {
33 | mContext = context;
34 | }
35 |
36 | public void setOrientation(int x, int y, int z) {
37 | int type = getOrientationPosition(x, y, z);
38 | this.setRotation(type);
39 | }
40 |
41 | private int getOrientationPosition(int x, int y, int z) {
42 | if (Math.abs(x) > Math.abs(y)) { //横屏倾斜
43 | if (x > 4) { //左边倾斜
44 | return ORIENTATION_LEFT;
45 | } else if (x < -4) { //右边倾斜
46 | return ORIENTATION_RIGHT;
47 | } else {
48 | return ORIENTATION_UP;
49 | }
50 | } else {
51 | if (y > 7) { // 左边倾斜
52 | return ORIENTATION_UP;
53 | } else if (y < -7) { //右边倾斜
54 | return ORIENTATION_BOTTOM;
55 | } else {
56 | return ORIENTATION_UP;
57 | }
58 | }
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |
7 |
12 |
13 |
19 |
22 |
25 |
26 |
27 |
28 |
34 |
35 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
10 |
15 |
20 |
25 |
30 |
35 |
40 |
45 |
50 |
55 |
60 |
65 |
70 |
75 |
80 |
85 |
90 |
95 |
100 |
105 |
110 |
115 |
120 |
125 |
130 |
135 |
140 |
145 |
150 |
155 |
160 |
165 |
170 |
171 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
9 |
10 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/fragment_camera.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
12 |
13 |
21 |
22 |
33 |
34 |
39 |
40 |
45 |
46 |
51 |
52 |
57 |
58 |
62 |
63 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/camera_close.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-xxhdpi/camera_close.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/camera_flash_close.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-xxhdpi/camera_flash_close.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/camera_flash_open.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-xxhdpi/camera_flash_open.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/camera_ok.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-xxhdpi/camera_ok.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/camera_switch.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-xxhdpi/camera_switch.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/watermark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-xxhdpi/watermark.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/values/attrs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #3F51B5
4 | #303F9F
5 | #FF4081
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | SCamera
3 |
4 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/app/src/test/java/com/jscheng/scamera/ExampleUnitTest.java:
--------------------------------------------------------------------------------
1 | package com.jscheng.scamera;
2 |
3 | import org.junit.Test;
4 |
5 | import static org.junit.Assert.*;
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 | * @see Testing documentation
11 | */
12 | public class ExampleUnitTest {
13 | @Test
14 | public void addition_isCorrect() {
15 | assertEquals(4, 2 + 2);
16 | }
17 | }
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {

    // Repositories used to resolve the build plugins themselves.
    repositories {
        google()
        jcenter()
    }
    dependencies {
        // Android Gradle plugin version used to build every module.
        classpath 'com.android.tools.build:gradle:3.1.2'


        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

// Repositories shared by every module for resolving app dependencies.
allprojects {
    repositories {
        google()
        jcenter()
    }
}

// `gradlew clean` removes the root project's build output directory.
task clean(type: Delete) {
    delete rootProject.buildDir
}
28 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx1536m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChyengJason/FboCamera/e802bc19f12875da215be5035978354f3863a37e/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Wed Aug 22 14:10:18 CST 2018
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-4.4-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Attempt to set APP_HOME
10 | # Resolve links: $0 may be a link
11 | PRG="$0"
12 | # Need this for relative symlinks.
13 | while [ -h "$PRG" ] ; do
14 | ls=`ls -ld "$PRG"`
15 | link=`expr "$ls" : '.*-> \(.*\)$'`
16 | if expr "$link" : '/.*' > /dev/null; then
17 | PRG="$link"
18 | else
19 | PRG=`dirname "$PRG"`"/$link"
20 | fi
21 | done
22 | SAVED="`pwd`"
23 | cd "`dirname \"$PRG\"`/" >/dev/null
24 | APP_HOME="`pwd -P`"
25 | cd "$SAVED" >/dev/null
26 |
27 | APP_NAME="Gradle"
28 | APP_BASE_NAME=`basename "$0"`
29 |
30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
31 | DEFAULT_JVM_OPTS=""
32 |
33 | # Use the maximum available, or set MAX_FD != -1 to use that value.
34 | MAX_FD="maximum"
35 |
36 | warn () {
37 | echo "$*"
38 | }
39 |
40 | die () {
41 | echo
42 | echo "$*"
43 | echo
44 | exit 1
45 | }
46 |
47 | # OS specific support (must be 'true' or 'false').
48 | cygwin=false
49 | msys=false
50 | darwin=false
51 | nonstop=false
52 | case "`uname`" in
53 | CYGWIN* )
54 | cygwin=true
55 | ;;
56 | Darwin* )
57 | darwin=true
58 | ;;
59 | MINGW* )
60 | msys=true
61 | ;;
62 | NONSTOP* )
63 | nonstop=true
64 | ;;
65 | esac
66 |
67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
68 |
69 | # Determine the Java command to use to start the JVM.
70 | if [ -n "$JAVA_HOME" ] ; then
71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
72 | # IBM's JDK on AIX uses strange locations for the executables
73 | JAVACMD="$JAVA_HOME/jre/sh/java"
74 | else
75 | JAVACMD="$JAVA_HOME/bin/java"
76 | fi
77 | if [ ! -x "$JAVACMD" ] ; then
78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
79 |
80 | Please set the JAVA_HOME variable in your environment to match the
81 | location of your Java installation."
82 | fi
83 | else
84 | JAVACMD="java"
85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
86 |
87 | Please set the JAVA_HOME variable in your environment to match the
88 | location of your Java installation."
89 | fi
90 |
91 | # Increase the maximum file descriptors if we can.
92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
93 | MAX_FD_LIMIT=`ulimit -H -n`
94 | if [ $? -eq 0 ] ; then
95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
96 | MAX_FD="$MAX_FD_LIMIT"
97 | fi
98 | ulimit -n $MAX_FD
99 | if [ $? -ne 0 ] ; then
100 | warn "Could not set maximum file descriptor limit: $MAX_FD"
101 | fi
102 | else
103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
104 | fi
105 | fi
106 |
107 | # For Darwin, add options to specify how the application appears in the dock
108 | if $darwin; then
109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
110 | fi
111 |
112 | # For Cygwin, switch paths to Windows format before running java
113 | if $cygwin ; then
114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
116 | JAVACMD=`cygpath --unix "$JAVACMD"`
117 |
118 | # We build the pattern for arguments to be converted via cygpath
119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 | SEP=""
121 | for dir in $ROOTDIRSRAW ; do
122 | ROOTDIRS="$ROOTDIRS$SEP$dir"
123 | SEP="|"
124 | done
125 | OURCYGPATTERN="(^($ROOTDIRS))"
126 | # Add a user-defined pattern to the cygpath arguments
127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 | fi
130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 | i=0
132 | for arg in "$@" ; do
133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 |
136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 | else
139 | eval `echo args$i`="\"$arg\""
140 | fi
141 | i=$((i+1))
142 | done
143 | case $i in
144 | (0) set -- ;;
145 | (1) set -- "$args0" ;;
146 | (2) set -- "$args0" "$args1" ;;
147 | (3) set -- "$args0" "$args1" "$args2" ;;
148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 | esac
155 | fi
156 |
157 | # Escape application args
158 | save () {
159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
160 | echo " "
161 | }
162 | APP_ARGS=$(save "$@")
163 |
164 | # Collect all arguments for the java command, following the shell quoting and substitution rules
165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
166 |
167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
169 | cd "$(dirname "$0")"
170 | fi
171 |
172 | exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
@rem Gradle wrapper launcher for Windows: locates a JVM, builds the classpath
@rem to gradle-wrapper.jar and delegates to org.gradle.wrapper.GradleWrapperMain.
@rem Set DEBUG to any non-empty value to see each command echoed.
2 | @rem ##########################################################################
3 | @rem
4 | @rem  Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 | 
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 | 
@rem %~dp0 = drive+path of this script; used as APP_HOME so the wrapper jar is
@rem found relative to the script, not to the caller's current directory.
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 | 
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS=
18 | 
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 | 
@rem No JAVA_HOME: probe the PATH-resolved java; exit status 0 means usable.
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 | 
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 | 
32 | goto fail
33 | 
34 | :findJavaFromJavaHome
@rem Strip any quotes the user put into JAVA_HOME before composing the path.
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 | 
38 | if exist "%JAVA_EXE%" goto init
39 | 
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 | 
46 | goto fail
47 | 
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 | 
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | 
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 | 
@rem Despite the "slurp" label there is no loop here: if any argument exists,
@rem %* captures the entire argument list in one assignment.
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 | 
61 | set CMD_LINE_ARGS=%*
62 | 
63 | :execute
64 | @rem Setup the command line
65 | 
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 | 
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 | 
71 | :end
72 | @rem End local scope for the variables with windows NT shell
@rem NOTE(review): string-comparing "%ERRORLEVEL%" relies on the dynamic
@rem ERRORLEVEL pseudo-variable not being shadowed by a user-set environment
@rem variable of the same name; later wrapper versions use `%ERRORLEVEL% equ 0`.
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 | 
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 | 
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 | 
84 | :omega
85 | 
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
// Gradle settings: registers the build's modules; ':app' is the sole
// Android application module (see app/build.gradle).
1 | include ':app'
2 |
--------------------------------------------------------------------------------