├── app
│   ├── .gitignore
│   ├── src
│   │   └── main
│   │       ├── res
│   │       │   ├── values
│   │       │   │   ├── strings.xml
│   │       │   │   ├── colors.xml
│   │       │   │   └── styles.xml
│   │       │   ├── mipmap-hdpi
│   │       │   │   ├── ic_launcher.png
│   │       │   │   └── ic_launcher_round.png
│   │       │   ├── mipmap-mdpi
│   │       │   │   ├── ic_launcher.png
│   │       │   │   └── ic_launcher_round.png
│   │       │   ├── mipmap-xhdpi
│   │       │   │   ├── ic_launcher.png
│   │       │   │   └── ic_launcher_round.png
│   │       │   ├── mipmap-xxhdpi
│   │       │   │   ├── ic_launcher.png
│   │       │   │   └── ic_launcher_round.png
│   │       │   ├── mipmap-xxxhdpi
│   │       │   │   ├── ic_launcher.png
│   │       │   │   └── ic_launcher_round.png
│   │       │   └── layout
│   │       │       └── activity_main.xml
│   │       ├── java
│   │       │   └── io
│   │       │       └── github
│   │       │           └── junyuecao
│   │       │               └── croppedscreenrecorder
│   │       │                   ├── RecordCallback.java
│   │       │                   ├── gles
│   │       │                   │   ├── OffscreenSurface.java
│   │       │                   │   ├── FullFrameRect.java
│   │       │                   │   ├── WindowSurface.java
│   │       │                   │   ├── FlatShadedProgram.java
│   │       │                   │   ├── Sprite2d.java
│   │       │                   │   ├── EglSurfaceBase.java
│   │       │                   │   ├── Drawable2d.java
│   │       │                   │   ├── GlUtil.java
│   │       │                   │   ├── GeneratedTexture.java
│   │       │                   │   ├── EglCore.java
│   │       │                   │   └── Texture2dProgram.java
│   │       │                   ├── CroppedDrawable2d.java
│   │       │                   ├── Utils.java
│   │       │                   ├── MainFrameRect.java
│   │       │                   ├── MainActivity.java
│   │       │                   ├── ScreenCapture.java
│   │       │                   ├── VideoEncoderCore.java
│   │       │                   └── TextureMovieEncoder.java
│   │       └── AndroidManifest.xml
│   ├── proguard-rules.pro
│   └── build.gradle
├── settings.gradle
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── .gitignore
├── README.md
├── gradle.properties
├── gradlew.bat
└── gradlew
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app'
2 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/src/main/java/io/github/junyuecao/croppedscreenrecorder/gles/OffscreenSurface.java:
--------------------------------------------------------------------------------
22 | * It's good practice to explicitly release() the surface, preferably from a "finally" block.
23 | */
24 | public class OffscreenSurface extends EglSurfaceBase {
25 | /**
26 | * Creates an off-screen surface with the specified width and height.
27 | */
28 | public OffscreenSurface(EglCore eglCore, int width, int height) {
29 | super(eglCore);
30 | createOffscreenSurface(width, height);
31 | }
32 |
33 | /**
34 | * Releases any resources associated with the surface.
35 | */
36 | public void release() {
37 | releaseEglSurface();
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
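A minimal usage sketch for OffscreenSurface (not part of the repository): the pattern follows the Javadoc advice above — make the surface current, do the GL work, and call release() from a finally block. The EglCore constructor arguments and EglCore.release() are assumed from the Grafika-style API this project is based on; the EglCore.java actually shipped here may differ.

    // Hypothetical sketch; EglCore's constructor signature and release() are assumptions.
    EglCore eglCore = new EglCore(null, 0);                      // new EGL context, default flags (assumed)
    OffscreenSurface surface = new OffscreenSurface(eglCore, 640, 480);
    try {
        surface.makeCurrent();                                   // bind context + pbuffer (from EglSurfaceBase)
        // ... issue GLES20 draw calls, read pixels, saveFrame(), etc. ...
    } finally {
        surface.release();                                       // free the EGL surface
        eglCore.release();                                       // assumed: EglCore exposes a release() as well
    }
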
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
48 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/app/src/main/java/io/github/junyuecao/croppedscreenrecorder/gles/FullFrameRect.java:
--------------------------------------------------------------------------------
40 | * This must be called with the appropriate EGL context current (i.e. the one that was
41 | * current when the constructor was called). If we're about to destroy the EGL context,
42 | * there's no value in having the caller make it current just to do this cleanup, so you
43 | * can pass a flag that will tell this function to skip any EGL-context-specific cleanup.
44 | */
45 | public void release(boolean doEglCleanup) {
46 | if (mProgram != null) {
47 | if (doEglCleanup) {
48 | mProgram.release();
49 | }
50 | mProgram = null;
51 | }
52 | }
53 |
54 | /**
55 | * Returns the program currently in use.
56 | */
57 | public Texture2dProgram getProgram() {
58 | return mProgram;
59 | }
60 |
61 | /**
62 | * Changes the program. The previous program will be released.
63 | *
64 | * The appropriate EGL context must be current. 65 | */ 66 | public void changeProgram(Texture2dProgram program) { 67 | mProgram.release(); 68 | mProgram = program; 69 | } 70 | 71 | /** 72 | * Creates a texture object suitable for use with drawFrame(). 73 | */ 74 | public int createTextureObject() { 75 | return mProgram.createTextureObject(); 76 | } 77 | 78 | /** 79 | * Draws a viewport-filling rect, texturing it with the specified texture object. 80 | */ 81 | public void drawFrame(int textureId, float[] texMatrix) { 82 | // Use the identity matrix for MVP so our 2x2 FULL_RECTANGLE covers the viewport. 83 | mProgram.draw(GlUtil.IDENTITY_MATRIX, mRectDrawable.getVertexArray(), 0, 84 | mRectDrawable.getVertexCount(), mRectDrawable.getCoordsPerVertex(), 85 | mRectDrawable.getVertexStride(), 86 | texMatrix, mRectDrawable.getTexCoordArray(), textureId, 87 | mRectDrawable.getTexCoordStride()); 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /app/src/main/java/io/github/junyuecao/croppedscreenrecorder/gles/WindowSurface.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2013 Google Inc. All rights reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package io.github.junyuecao.croppedscreenrecorder.gles; 18 | 19 | import android.graphics.SurfaceTexture; 20 | import android.view.Surface; 21 | 22 | /** 23 | * Recordable EGL window surface. 24 | *
25 | * It's good practice to explicitly release() the surface, preferably from a "finally" block.
26 | */
27 | public class WindowSurface extends EglSurfaceBase {
28 | private Surface mSurface;
29 | private boolean mReleaseSurface;
30 |
31 | /**
32 | * Associates an EGL surface with the native window surface.
33 | *
34 | * Set releaseSurface to true if you want the Surface to be released when release() is
35 | * called. This is convenient, but can interfere with framework classes that expect to
36 | * manage the Surface themselves (e.g. if you release a SurfaceView's Surface, the
37 | * surfaceDestroyed() callback won't fire).
38 | */
39 | public WindowSurface(EglCore eglCore, Surface surface, boolean releaseSurface) {
40 | super(eglCore);
41 | createWindowSurface(surface);
42 | mSurface = surface;
43 | mReleaseSurface = releaseSurface;
44 | }
45 |
46 | /**
47 | * Associates an EGL surface with the SurfaceTexture.
48 | */
49 | public WindowSurface(EglCore eglCore, SurfaceTexture surfaceTexture) {
50 | super(eglCore);
51 | createWindowSurface(surfaceTexture);
52 | }
53 |
54 | /**
55 | * Releases any resources associated with the EGL surface (and, if configured to do so,
56 | * with the Surface as well).
57 | *
58 | * Does not require that the surface's EGL context be current.
59 | */
60 | public void release() {
61 | releaseEglSurface();
62 | if (mSurface != null) {
63 | if (mReleaseSurface) {
64 | mSurface.release();
65 | }
66 | mSurface = null;
67 | }
68 | }
69 |
70 | /**
71 | * Recreate the EGLSurface, using the new EglBase. The caller should have already
72 | * freed the old EGLSurface with releaseEglSurface().
73 | *
74 | * This is useful when we want to update the EGLSurface associated with a Surface.
75 | * For example, if we want to share with a different EGLContext, which can only
76 | * be done by tearing down and recreating the context. (That's handled by the caller;
77 | * this just creates a new EGLSurface for the Surface we were handed earlier.)
78 | *
79 | * If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a 80 | * context somewhere, the create call will fail with complaints from the Surface 81 | * about already being connected. 82 | */ 83 | public void recreate(EglCore newEglCore) { 84 | if (mSurface == null) { 85 | throw new RuntimeException("not yet implemented for SurfaceTexture"); 86 | } 87 | mEglCore = newEglCore; // switch to new context 88 | createWindowSurface(mSurface); // create new surface 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /app/src/main/java/io/github/junyuecao/croppedscreenrecorder/CroppedDrawable2d.java: -------------------------------------------------------------------------------- 1 | package io.github.junyuecao.croppedscreenrecorder; 2 | 3 | import io.github.junyuecao.croppedscreenrecorder.gles.Drawable2d; 4 | 5 | import java.nio.ByteBuffer; 6 | import java.nio.ByteOrder; 7 | import java.nio.FloatBuffer; 8 | 9 | 10 | /** 11 | * Tweaked version of Drawable2d that crop the texture coordinates. 12 | */ 13 | public class CroppedDrawable2d extends Drawable2d { 14 | private static final String TAG = "CroppedDrawable2d"; 15 | 16 | private static final int SIZEOF_FLOAT = 4; 17 | 18 | private FloatBuffer mTweakedTexCoordArray; 19 | private float mTopCropped = 0.0f; 20 | private float mBottomCropped = 1.0f; 21 | private boolean mRecalculate; 22 | 23 | /** 24 | * Trivial constructor. 25 | */ 26 | public CroppedDrawable2d(Prefab shape) { 27 | super(shape); 28 | mRecalculate = true; 29 | } 30 | 31 | public float getBottomCropped() { 32 | return mBottomCropped; 33 | } 34 | 35 | /** 36 | * @param bottomCropped defines the proportion to be cut on the top 37 | */ 38 | public void setBottomCropped(float bottomCropped) { 39 | if (bottomCropped < 0.0f || bottomCropped > 1.0f) { 40 | throw new RuntimeException("invalid crop " + bottomCropped); 41 | } 42 | mBottomCropped = bottomCropped; 43 | mRecalculate = true; 44 | } 45 | 46 | /** 47 | * @param crop defines the proportion to be cut on the top 48 | */ 49 | public void setTopCropped(float crop) { 50 | if (crop < 0.0f || crop > 1.0f) { 51 | throw new RuntimeException("invalid crop " + crop); 52 | } 53 | mTopCropped = crop; 54 | mRecalculate = true; 55 | } 56 | 57 | /** 58 | * Returns the array of texture coordinates. The first time this is called, we generate 59 | * a modified version of the array from the parent class. 60 | *
61 | * To avoid allocations, this returns internal state. The caller must not modify it. 62 | * 63 | * @see Drawable2d#FULL_RECTANGLE_TEX_COORDS 64 | */ 65 | @Override 66 | public FloatBuffer getTexCoordArray() { 67 | if (mRecalculate) { 68 | //Log.v(TAG, "Scaling to " + mScale); 69 | FloatBuffer parentBuf = super.getTexCoordArray(); 70 | int count = parentBuf.capacity(); 71 | 72 | if (mTweakedTexCoordArray == null) { 73 | ByteBuffer bb = ByteBuffer.allocateDirect(count * SIZEOF_FLOAT); 74 | bb.order(ByteOrder.nativeOrder()); 75 | mTweakedTexCoordArray = bb.asFloatBuffer(); 76 | } 77 | 78 | // Texture coordinates range from 0.0 to 1.0, inclusive. We do a simple scale 79 | // here, but we could get much fancier if we wanted to (say) zoom in and pan 80 | // around. 81 | FloatBuffer fb = mTweakedTexCoordArray; 82 | for (int i = 0; i < count; i++) { 83 | float fl = parentBuf.get(i); 84 | if (i == 0 || i == 4) { 85 | fl = 0.0f; 86 | } else if (i == 2 || i == 6) { 87 | fl = 1.0f; 88 | } else if (i == 1 || i == 3) { 89 | // Crop the bottom 90 | fl = mBottomCropped; 91 | } else if (i == 5 || i == 7) { 92 | // Crop the top 93 | fl = 1.0f - mTopCropped; 94 | } 95 | 96 | fb.put(i, fl); 97 | } 98 | 99 | mRecalculate = false; 100 | } 101 | 102 | return mTweakedTexCoordArray; 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /app/src/main/java/io/github/junyuecao/croppedscreenrecorder/Utils.java: -------------------------------------------------------------------------------- 1 | package io.github.junyuecao.croppedscreenrecorder; 2 | 3 | import android.content.Context; 4 | import android.content.res.Configuration; 5 | import android.content.res.Resources; 6 | import android.graphics.Point; 7 | import android.os.Build; 8 | import android.util.DisplayMetrics; 9 | import android.view.Display; 10 | import android.view.KeyCharacterMap; 11 | import android.view.KeyEvent; 12 | import android.view.ViewConfiguration; 13 | import android.view.WindowManager; 14 | 15 | import java.lang.reflect.Method; 16 | 17 | public class Utils { 18 | /** 19 | * in pixels 20 | */ 21 | public static int getStatusBarHeight(Context context) { 22 | int result = 0; 23 | int resourceId = context.getResources().getIdentifier("status_bar_height", "dimen", "android"); 24 | if (resourceId > 0) { 25 | result = context.getResources().getDimensionPixelSize(resourceId); 26 | } 27 | return result; 28 | } 29 | 30 | /** 31 | * @param context context 32 | * @return pixels in float 33 | */ 34 | public static int getNavBarHeight(Context context) { 35 | int result = 0; 36 | boolean hasMenuKey = ViewConfiguration.get(context).hasPermanentMenuKey(); 37 | boolean hasBackKey = KeyCharacterMap.deviceHasKey(KeyEvent.KEYCODE_BACK); 38 | 39 | if(!hasMenuKey && !hasBackKey) { 40 | //The device has a navigation bar 41 | Resources resources = context.getResources(); 42 | 43 | int orientation = resources.getConfiguration().orientation; 44 | int resourceId; 45 | if (isTablet(context)){ 46 | resourceId = resources.getIdentifier(orientation == Configuration.ORIENTATION_PORTRAIT ? "navigation_bar_height" : "navigation_bar_height_landscape", "dimen", "android"); 47 | } else { 48 | resourceId = resources.getIdentifier(orientation == Configuration.ORIENTATION_PORTRAIT ? 
"navigation_bar_height" : "navigation_bar_width", "dimen", "android"); 49 | } 50 | 51 | if (resourceId > 0) { 52 | return resources.getDimensionPixelSize(resourceId); 53 | } 54 | } 55 | return result; 56 | } 57 | 58 | private static boolean isTablet(Context c) { 59 | return (c.getResources().getConfiguration().screenLayout 60 | & Configuration.SCREENLAYOUT_SIZE_MASK) 61 | >= Configuration.SCREENLAYOUT_SIZE_LARGE; 62 | } 63 | 64 | public static int getRealHeight(Context context) { 65 | final DisplayMetrics metrics = new DisplayMetrics(); 66 | WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); 67 | Display display = wm.getDefaultDisplay(); 68 | Method mGetRawH = null; 69 | 70 | int realHeight; 71 | // For JellyBeans and onward 72 | if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { 73 | display.getRealMetrics(metrics); 74 | realHeight = metrics.heightPixels; 75 | } else { 76 | try { 77 | // Below Jellybeans you can use reflection method 78 | mGetRawH = Display.class.getMethod("getRawHeight"); 79 | realHeight = (Integer) mGetRawH.invoke(display); 80 | } catch (Exception e){ 81 | display.getMetrics(metrics); 82 | realHeight = metrics.heightPixels; 83 | } 84 | } 85 | 86 | return realHeight; 87 | } 88 | 89 | public static int getScreenWidth(Context context) { 90 | if (context != null) { 91 | WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); 92 | Point point = new Point(); 93 | if (wm != null) { 94 | wm.getDefaultDisplay().getSize(point); 95 | return point.x; 96 | } 97 | } 98 | return 320; 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /app/src/main/java/io/github/junyuecao/croppedscreenrecorder/MainFrameRect.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014 Google Inc. All rights reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package io.github.junyuecao.croppedscreenrecorder; 18 | 19 | import io.github.junyuecao.croppedscreenrecorder.gles.Drawable2d; 20 | import io.github.junyuecao.croppedscreenrecorder.gles.GlUtil; 21 | import io.github.junyuecao.croppedscreenrecorder.gles.Texture2dProgram; 22 | 23 | /** 24 | * This class is used to cut the top and bottom area of the screen and 25 | * just keep the center main part. 26 | * 27 | * In this demo, we'll cut the status bar and navigation bar of the screen 28 | */ 29 | public class MainFrameRect { 30 | private final CroppedDrawable2d mRectDrawable; 31 | private Texture2dProgram mProgram; 32 | 33 | /** 34 | * Prepares the object. 35 | * 36 | * @param program The program to use. FullFrameRect takes ownership, and will release 37 | * the program when no longer needed. 38 | */ 39 | public MainFrameRect(Texture2dProgram program) { 40 | mRectDrawable = new CroppedDrawable2d(Drawable2d.Prefab.FULL_RECTANGLE); 41 | mProgram = program; 42 | } 43 | 44 | /** 45 | * Releases resources. 
46 | *
47 | * This must be called with the appropriate EGL context current (i.e. the one that was 48 | * current when the constructor was called). If we're about to destroy the EGL context, 49 | * there's no value in having the caller make it current just to do this cleanup, so you 50 | * can pass a flag that will tell this function to skip any EGL-context-specific cleanup. 51 | */ 52 | public void release(boolean doEglCleanup) { 53 | if (mProgram != null) { 54 | if (doEglCleanup) { 55 | mProgram.release(); 56 | } 57 | mProgram = null; 58 | } 59 | } 60 | 61 | /** 62 | * Returns the program currently in use. 63 | */ 64 | public Texture2dProgram getProgram() { 65 | return mProgram; 66 | } 67 | 68 | /** 69 | * Changes the program. The previous program will be released. 70 | *
71 | * The appropriate EGL context must be current. 72 | */ 73 | public void changeProgram(Texture2dProgram program) { 74 | mProgram.release(); 75 | mProgram = program; 76 | } 77 | 78 | /** 79 | * @param bottomCropped defines the bottom area to be cut. from 0f-1f. 80 | */ 81 | public void setBottomCropped(float bottomCropped) { 82 | mRectDrawable.setBottomCropped(bottomCropped); 83 | } 84 | 85 | /** 86 | * @param topCropped defines the top area to be cut. from 0f-1f. 87 | */ 88 | public void setTopCropped(float topCropped) { 89 | mRectDrawable.setTopCropped(topCropped); 90 | } 91 | 92 | /** 93 | * Creates a texture object suitable for use with drawFrame(). 94 | */ 95 | public int createTextureObject() { 96 | return mProgram.createTextureObject(); 97 | } 98 | 99 | /** 100 | * Draws a viewport-filling rect, texturing it with the specified texture object. 101 | */ 102 | public void drawFrame(int textureId, float[] texMatrix) { 103 | // Use the identity matrix for MVP so our 2x2 FULL_RECTANGLE covers the viewport. 104 | mProgram.draw(GlUtil.IDENTITY_MATRIX, mRectDrawable.getVertexArray(), 0, 105 | mRectDrawable.getVertexCount(), mRectDrawable.getCoordsPerVertex(), 106 | mRectDrawable.getVertexStride(), 107 | texMatrix, mRectDrawable.getTexCoordArray(), textureId, 108 | mRectDrawable.getTexCoordStride()); 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /app/src/main/java/io/github/junyuecao/croppedscreenrecorder/gles/FlatShadedProgram.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014 Google Inc. All rights reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package io.github.junyuecao.croppedscreenrecorder.gles; 18 | 19 | import android.opengl.GLES20; 20 | import android.util.Log; 21 | 22 | import java.nio.FloatBuffer; 23 | 24 | /** 25 | * GL program and supporting functions for flat-shaded rendering. 26 | */ 27 | public class FlatShadedProgram { 28 | private static final String TAG = GlUtil.TAG; 29 | 30 | private static final String VERTEX_SHADER = 31 | "uniform mat4 uMVPMatrix;" + 32 | "attribute vec4 aPosition;" + 33 | "void main() {" + 34 | " gl_Position = uMVPMatrix * aPosition;" + 35 | "}"; 36 | 37 | private static final String FRAGMENT_SHADER = 38 | "precision mediump float;" + 39 | "uniform vec4 uColor;" + 40 | "void main() {" + 41 | " gl_FragColor = uColor;" + 42 | "}"; 43 | 44 | // Handles to the GL program and various components of it. 45 | private int mProgramHandle = -1; 46 | private int muColorLoc = -1; 47 | private int muMVPMatrixLoc = -1; 48 | private int maPositionLoc = -1; 49 | 50 | 51 | /** 52 | * Prepares the program in the current EGL context. 
53 | */ 54 | public FlatShadedProgram() { 55 | mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER); 56 | if (mProgramHandle == 0) { 57 | throw new RuntimeException("Unable to create program"); 58 | } 59 | Log.d(TAG, "Created program " + mProgramHandle); 60 | 61 | // get locations of attributes and uniforms 62 | 63 | maPositionLoc = GLES20.glGetAttribLocation(mProgramHandle, "aPosition"); 64 | GlUtil.checkLocation(maPositionLoc, "aPosition"); 65 | muMVPMatrixLoc = GLES20.glGetUniformLocation(mProgramHandle, "uMVPMatrix"); 66 | GlUtil.checkLocation(muMVPMatrixLoc, "uMVPMatrix"); 67 | muColorLoc = GLES20.glGetUniformLocation(mProgramHandle, "uColor"); 68 | GlUtil.checkLocation(muColorLoc, "uColor"); 69 | } 70 | 71 | /** 72 | * Releases the program. 73 | */ 74 | public void release() { 75 | GLES20.glDeleteProgram(mProgramHandle); 76 | mProgramHandle = -1; 77 | } 78 | 79 | /** 80 | * Issues the draw call. Does the full setup on every call. 81 | * 82 | * @param mvpMatrix The 4x4 projection matrix. 83 | * @param color A 4-element color vector. 84 | * @param vertexBuffer Buffer with vertex data. 85 | * @param firstVertex Index of first vertex to use in vertexBuffer. 86 | * @param vertexCount Number of vertices in vertexBuffer. 87 | * @param coordsPerVertex The number of coordinates per vertex (e.g. x,y is 2). 88 | * @param vertexStride Width, in bytes, of the data for each vertex (often vertexCount * 89 | * sizeof(float)). 90 | */ 91 | public void draw(float[] mvpMatrix, float[] color, FloatBuffer vertexBuffer, 92 | int firstVertex, int vertexCount, int coordsPerVertex, int vertexStride) { 93 | GlUtil.checkGlError("draw start"); 94 | 95 | // Select the program. 96 | GLES20.glUseProgram(mProgramHandle); 97 | GlUtil.checkGlError("glUseProgram"); 98 | 99 | // Copy the model / view / projection matrix over. 100 | GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mvpMatrix, 0); 101 | GlUtil.checkGlError("glUniformMatrix4fv"); 102 | 103 | // Copy the color vector in. 104 | GLES20.glUniform4fv(muColorLoc, 1, color, 0); 105 | GlUtil.checkGlError("glUniform4fv "); 106 | 107 | // Enable the "aPosition" vertex attribute. 108 | GLES20.glEnableVertexAttribArray(maPositionLoc); 109 | GlUtil.checkGlError("glEnableVertexAttribArray"); 110 | 111 | // Connect vertexBuffer to "aPosition". 112 | GLES20.glVertexAttribPointer(maPositionLoc, coordsPerVertex, 113 | GLES20.GL_FLOAT, false, vertexStride, vertexBuffer); 114 | GlUtil.checkGlError("glVertexAttribPointer"); 115 | 116 | // Draw the rect. 117 | GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, firstVertex, vertexCount); 118 | GlUtil.checkGlError("glDrawArrays"); 119 | 120 | // Done -- disable vertex array and program. 121 | GLES20.glDisableVertexAttribArray(maPositionLoc); 122 | GLES20.glUseProgram(0); 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 10 | DEFAULT_JVM_OPTS="" 11 | 12 | APP_NAME="Gradle" 13 | APP_BASE_NAME=`basename "$0"` 14 | 15 | # Use the maximum available, or set MAX_FD != -1 to use that value. 
16 | MAX_FD="maximum" 17 | 18 | warn ( ) { 19 | echo "$*" 20 | } 21 | 22 | die ( ) { 23 | echo 24 | echo "$*" 25 | echo 26 | exit 1 27 | } 28 | 29 | # OS specific support (must be 'true' or 'false'). 30 | cygwin=false 31 | msys=false 32 | darwin=false 33 | case "`uname`" in 34 | CYGWIN* ) 35 | cygwin=true 36 | ;; 37 | Darwin* ) 38 | darwin=true 39 | ;; 40 | MINGW* ) 41 | msys=true 42 | ;; 43 | esac 44 | 45 | # Attempt to set APP_HOME 46 | # Resolve links: $0 may be a link 47 | PRG="$0" 48 | # Need this for relative symlinks. 49 | while [ -h "$PRG" ] ; do 50 | ls=`ls -ld "$PRG"` 51 | link=`expr "$ls" : '.*-> \(.*\)$'` 52 | if expr "$link" : '/.*' > /dev/null; then 53 | PRG="$link" 54 | else 55 | PRG=`dirname "$PRG"`"/$link" 56 | fi 57 | done 58 | SAVED="`pwd`" 59 | cd "`dirname \"$PRG\"`/" >/dev/null 60 | APP_HOME="`pwd -P`" 61 | cd "$SAVED" >/dev/null 62 | 63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 64 | 65 | # Determine the Java command to use to start the JVM. 66 | if [ -n "$JAVA_HOME" ] ; then 67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 68 | # IBM's JDK on AIX uses strange locations for the executables 69 | JAVACMD="$JAVA_HOME/jre/sh/java" 70 | else 71 | JAVACMD="$JAVA_HOME/bin/java" 72 | fi 73 | if [ ! -x "$JAVACMD" ] ; then 74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 75 | 76 | Please set the JAVA_HOME variable in your environment to match the 77 | location of your Java installation." 78 | fi 79 | else 80 | JAVACMD="java" 81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 82 | 83 | Please set the JAVA_HOME variable in your environment to match the 84 | location of your Java installation." 85 | fi 86 | 87 | # Increase the maximum file descriptors if we can. 88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then 89 | MAX_FD_LIMIT=`ulimit -H -n` 90 | if [ $? -eq 0 ] ; then 91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 92 | MAX_FD="$MAX_FD_LIMIT" 93 | fi 94 | ulimit -n $MAX_FD 95 | if [ $? 
-ne 0 ] ; then 96 | warn "Could not set maximum file descriptor limit: $MAX_FD" 97 | fi 98 | else 99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 100 | fi 101 | fi 102 | 103 | # For Darwin, add options to specify how the application appears in the dock 104 | if $darwin; then 105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 106 | fi 107 | 108 | # For Cygwin, switch paths to Windows format before running java 109 | if $cygwin ; then 110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 112 | JAVACMD=`cygpath --unix "$JAVACMD"` 113 | 114 | # We build the pattern for arguments to be converted via cygpath 115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 116 | SEP="" 117 | for dir in $ROOTDIRSRAW ; do 118 | ROOTDIRS="$ROOTDIRS$SEP$dir" 119 | SEP="|" 120 | done 121 | OURCYGPATTERN="(^($ROOTDIRS))" 122 | # Add a user-defined pattern to the cygpath arguments 123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 125 | fi 126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 127 | i=0 128 | for arg in "$@" ; do 129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 131 | 132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 134 | else 135 | eval `echo args$i`="\"$arg\"" 136 | fi 137 | i=$((i+1)) 138 | done 139 | case $i in 140 | (0) set -- ;; 141 | (1) set -- "$args0" ;; 142 | (2) set -- "$args0" "$args1" ;; 143 | (3) set -- "$args0" "$args1" "$args2" ;; 144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 150 | esac 151 | fi 152 | 153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules 154 | function splitJvmOpts() { 155 | JVM_OPTS=("$@") 156 | } 157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS 158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" 159 | 160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" 161 | -------------------------------------------------------------------------------- /app/src/main/java/io/github/junyuecao/croppedscreenrecorder/gles/Sprite2d.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014 Google Inc. All rights reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package io.github.junyuecao.croppedscreenrecorder.gles; 18 | 19 | import android.opengl.Matrix; 20 | 21 | /** 22 | * Base class for a 2d object. Includes position, scale, rotation, and flat-shaded color. 23 | */ 24 | public class Sprite2d { 25 | private static final String TAG = GlUtil.TAG; 26 | 27 | private Drawable2d mDrawable; 28 | private float mColor[]; 29 | private int mTextureId; 30 | private float mAngle; 31 | private float mScaleX, mScaleY; 32 | private float mPosX, mPosY; 33 | 34 | private float[] mModelViewMatrix; 35 | private boolean mMatrixReady; 36 | 37 | private float[] mScratchMatrix = new float[16]; 38 | 39 | public Sprite2d(Drawable2d drawable) { 40 | mDrawable = drawable; 41 | mColor = new float[4]; 42 | mColor[3] = 1.0f; 43 | mTextureId = -1; 44 | 45 | mModelViewMatrix = new float[16]; 46 | mMatrixReady = false; 47 | } 48 | 49 | /** 50 | * Re-computes mModelViewMatrix, based on the current values for rotation, scale, and 51 | * translation. 52 | */ 53 | private void recomputeMatrix() { 54 | float[] modelView = mModelViewMatrix; 55 | 56 | Matrix.setIdentityM(modelView, 0); 57 | Matrix.translateM(modelView, 0, mPosX, mPosY, 0.0f); 58 | if (mAngle != 0.0f) { 59 | Matrix.rotateM(modelView, 0, mAngle, 0.0f, 0.0f, 1.0f); 60 | } 61 | Matrix.scaleM(modelView, 0, mScaleX, mScaleY, 1.0f); 62 | mMatrixReady = true; 63 | } 64 | 65 | /** 66 | * Returns the sprite scale along the X axis. 67 | */ 68 | public float getScaleX() { 69 | return mScaleX; 70 | } 71 | 72 | /** 73 | * Returns the sprite scale along the Y axis. 74 | */ 75 | public float getScaleY() { 76 | return mScaleY; 77 | } 78 | 79 | /** 80 | * Sets the sprite scale (size). 81 | */ 82 | public void setScale(float scaleX, float scaleY) { 83 | mScaleX = scaleX; 84 | mScaleY = scaleY; 85 | mMatrixReady = false; 86 | } 87 | 88 | /** 89 | * Gets the sprite rotation angle, in degrees. 90 | */ 91 | public float getRotation() { 92 | return mAngle; 93 | } 94 | 95 | /** 96 | * Sets the sprite rotation angle, in degrees. Sprite will rotate counter-clockwise. 97 | */ 98 | public void setRotation(float angle) { 99 | // Normalize. We're not expecting it to be way off, so just iterate. 100 | while (angle >= 360.0f) { 101 | angle -= 360.0f; 102 | } 103 | while (angle <= -360.0f) { 104 | angle += 360.0f; 105 | } 106 | mAngle = angle; 107 | mMatrixReady = false; 108 | } 109 | 110 | /** 111 | * Returns the position on the X axis. 112 | */ 113 | public float getPositionX() { 114 | return mPosX; 115 | } 116 | 117 | /** 118 | * Returns the position on the Y axis. 119 | */ 120 | public float getPositionY() { 121 | return mPosY; 122 | } 123 | 124 | /** 125 | * Sets the sprite position. 126 | */ 127 | public void setPosition(float posX, float posY) { 128 | mPosX = posX; 129 | mPosY = posY; 130 | mMatrixReady = false; 131 | } 132 | 133 | /** 134 | * Returns the model-view matrix. 135 | *
136 | * To avoid allocations, this returns internal state. The caller must not modify it.
137 | */
138 | public float[] getModelViewMatrix() {
139 | if (!mMatrixReady) {
140 | recomputeMatrix();
141 | }
142 | return mModelViewMatrix;
143 | }
144 |
145 | /**
146 | * Sets color to use for flat-shaded rendering. Has no effect on textured rendering.
147 | */
148 | public void setColor(float red, float green, float blue) {
149 | mColor[0] = red;
150 | mColor[1] = green;
151 | mColor[2] = blue;
152 | }
153 |
154 | /**
155 | * Sets texture to use for textured rendering. Has no effect on flat-shaded rendering.
156 | */
157 | public void setTexture(int textureId) {
158 | mTextureId = textureId;
159 | }
160 |
161 | /**
162 | * Returns the color.
163 | *
164 | * To avoid allocations, this returns internal state. The caller must not modify it. 165 | */ 166 | public float[] getColor() { 167 | return mColor; 168 | } 169 | 170 | /** 171 | * Draws the rectangle with the supplied program and projection matrix. 172 | */ 173 | public void draw(FlatShadedProgram program, float[] projectionMatrix) { 174 | // Compute model/view/projection matrix. 175 | Matrix.multiplyMM(mScratchMatrix, 0, projectionMatrix, 0, getModelViewMatrix(), 0); 176 | 177 | program.draw(mScratchMatrix, mColor, mDrawable.getVertexArray(), 0, 178 | mDrawable.getVertexCount(), mDrawable.getCoordsPerVertex(), 179 | mDrawable.getVertexStride()); 180 | } 181 | 182 | /** 183 | * Draws the rectangle with the supplied program and projection matrix. 184 | */ 185 | public void draw(Texture2dProgram program, float[] projectionMatrix) { 186 | // Compute model/view/projection matrix. 187 | Matrix.multiplyMM(mScratchMatrix, 0, projectionMatrix, 0, getModelViewMatrix(), 0); 188 | 189 | program.draw(mScratchMatrix, mDrawable.getVertexArray(), 0, 190 | mDrawable.getVertexCount(), mDrawable.getCoordsPerVertex(), 191 | mDrawable.getVertexStride(), GlUtil.IDENTITY_MATRIX, mDrawable.getTexCoordArray(), 192 | mTextureId, mDrawable.getTexCoordStride()); 193 | } 194 | 195 | @Override 196 | public String toString() { 197 | return "[Sprite2d pos=" + mPosX + "," + mPosY + 198 | " scale=" + mScaleX + "," + mScaleY + " angle=" + mAngle + 199 | " color={" + mColor[0] + "," + mColor[1] + "," + mColor[2] + 200 | "} drawable=" + mDrawable + "]"; 201 | } 202 | } 203 | -------------------------------------------------------------------------------- /app/src/main/java/io/github/junyuecao/croppedscreenrecorder/MainActivity.java: -------------------------------------------------------------------------------- 1 | package io.github.junyuecao.croppedscreenrecorder; 2 | 3 | import android.Manifest; 4 | import android.content.Intent; 5 | import android.media.projection.MediaProjection; 6 | import android.os.Build; 7 | import android.os.Bundle; 8 | import android.os.Handler; 9 | import android.support.annotation.NonNull; 10 | import android.support.annotation.RequiresApi; 11 | import android.support.v4.view.MotionEventCompat; 12 | import android.support.v7.app.AppCompatActivity; 13 | import android.view.MotionEvent; 14 | import android.view.View; 15 | import android.widget.Button; 16 | import android.widget.LinearLayout; 17 | import android.widget.TextView; 18 | import android.widget.Toast; 19 | import permissions.dispatcher.NeedsPermission; 20 | import permissions.dispatcher.OnNeverAskAgain; 21 | import permissions.dispatcher.OnPermissionDenied; 22 | import permissions.dispatcher.RuntimePermissions; 23 | 24 | import java.util.Date; 25 | import java.util.Timer; 26 | import java.util.TimerTask; 27 | 28 | @RuntimePermissions 29 | public class MainActivity extends AppCompatActivity implements View.OnClickListener, View.OnTouchListener, 30 | RecordCallback { 31 | 32 | ScreenCapture mScreenCapture; 33 | Timer mTimer; 34 | TextView mTime; 35 | Handler mHandler; 36 | private Button mStart; 37 | private LinearLayout mRecordLayout; 38 | private Button mRecord; 39 | private Button mCancel; 40 | 41 | @Override 42 | public void onClick(View v) { 43 | switch (v.getId()) { 44 | case R.id.start: 45 | MainActivityPermissionsDispatcher.tryRecordScreenWithCheck(this); 46 | break; 47 | case R.id.cancel: 48 | mRecordLayout.setVisibility(View.GONE); 49 | mStart.setVisibility(View.VISIBLE); 50 | mScreenCapture.stopProjection(); 51 | break; 52 
| } 53 | } 54 | 55 | @Override 56 | public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, 57 | @NonNull int[] grantResults) { 58 | super.onRequestPermissionsResult(requestCode, permissions, grantResults); 59 | // NOTE: delegate the permission handling to generated method 60 | MainActivityPermissionsDispatcher.onRequestPermissionsResult(this, requestCode, grantResults); 61 | } 62 | 63 | @Override 64 | public boolean onTouch(View v, MotionEvent event) { 65 | if (v.getId() == R.id.record) { 66 | // Touch and hold to record, release to stop record 67 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { 68 | switch (MotionEventCompat.getActionMasked(event)) { 69 | case MotionEvent.ACTION_DOWN: 70 | mScreenCapture.attachRecorder(); 71 | return true; 72 | case MotionEvent.ACTION_UP: 73 | mScreenCapture.detachRecorder(); 74 | return true; 75 | } 76 | } 77 | } 78 | return false; 79 | } 80 | 81 | @Override 82 | public void onRecordSuccess(String filePath, String coverPath, long duration) { 83 | Toast.makeText(this, "Record successfully: " + filePath, Toast.LENGTH_SHORT).show(); 84 | } 85 | 86 | @Override 87 | public void onRecordFailed(Throwable e, long duration) { 88 | Toast.makeText(this, "Record failed with error : " + e.getMessage(), Toast.LENGTH_SHORT).show(); 89 | } 90 | 91 | @Override 92 | public void onRecordedDurationChanged(long ms) { 93 | // We don't need it yet 94 | } 95 | 96 | @Override 97 | protected void onCreate(Bundle savedInstanceState) { 98 | super.onCreate(savedInstanceState); 99 | setContentView(R.layout.activity_main); 100 | mHandler = new Handler(); 101 | mTime = (TextView) findViewById(R.id.time); 102 | 103 | mStart = (Button) findViewById(R.id.start); 104 | mRecordLayout = (LinearLayout) findViewById(R.id.recordLayout); 105 | mRecord = (Button) findViewById(R.id.record); 106 | mCancel = (Button) findViewById(R.id.cancel); 107 | 108 | mStart.setOnClickListener(this); 109 | mCancel.setOnClickListener(this); 110 | mRecord.setOnTouchListener(this); 111 | } 112 | 113 | @Override 114 | protected void onResume() { 115 | super.onResume(); 116 | mTimer = new Timer(); 117 | mTimer.scheduleAtFixedRate(new TimerTask() { 118 | @Override 119 | public void run() { 120 | mHandler.post(new Runnable() { 121 | @Override 122 | public void run() { 123 | mTime.setText("" + System.currentTimeMillis() + "\n" + (new Date().toString())); 124 | } 125 | }); 126 | } 127 | }, 0, 100); 128 | } 129 | 130 | @Override 131 | protected void onPause() { 132 | super.onPause(); 133 | mTimer.cancel(); 134 | mTimer = null; 135 | } 136 | 137 | @Override 138 | protected void onDestroy() { 139 | super.onDestroy(); 140 | if (mScreenCapture != null && mScreenCapture.isProjecting()) { 141 | mScreenCapture.stopProjection(); 142 | } 143 | } 144 | 145 | @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) 146 | @NeedsPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) 147 | void tryRecordScreen() { 148 | if (mScreenCapture == null) { 149 | mScreenCapture = new ScreenCapture(this); 150 | } 151 | mScreenCapture.setMediaProjectionReadyListener(new ScreenCapture.OnMediaProjectionReadyListener() { 152 | @Override 153 | public void onMediaProjectionReady(MediaProjection mediaProjection) { 154 | mRecordLayout.setVisibility(View.VISIBLE); 155 | mStart.setVisibility(View.GONE); 156 | } 157 | }); 158 | mScreenCapture.setRecordCallback(this); 159 | mScreenCapture.requestScreenCapture(); 160 | } 161 | 162 | @OnPermissionDenied(Manifest.permission.WRITE_EXTERNAL_STORAGE) 163 | void showDenied() 
{ 164 | Toast.makeText(this, "Need permission to work properly", Toast.LENGTH_SHORT).show(); 165 | } 166 | 167 | @OnNeverAskAgain(Manifest.permission.WRITE_EXTERNAL_STORAGE) 168 | void showNeverAsk() { 169 | Toast.makeText(this, "Need permission to work properly", Toast.LENGTH_SHORT).show(); 170 | } 171 | 172 | @Override 173 | protected void onActivityResult(int requestCode, int resultCode, Intent data) { 174 | super.onActivityResult(requestCode, resultCode, data); 175 | 176 | if (requestCode == ScreenCapture.CAPTURE_REQUEST_CODE && resultCode == RESULT_OK) { 177 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { 178 | mScreenCapture.startProjection(data); 179 | } 180 | return; 181 | } 182 | } 183 | 184 | } 185 | -------------------------------------------------------------------------------- /app/src/main/java/io/github/junyuecao/croppedscreenrecorder/gles/EglSurfaceBase.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2013 Google Inc. All rights reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package io.github.junyuecao.croppedscreenrecorder.gles; 18 | 19 | import android.graphics.Bitmap; 20 | import android.opengl.EGL14; 21 | import android.opengl.EGLSurface; 22 | import android.opengl.GLES20; 23 | import android.util.Log; 24 | 25 | import java.io.BufferedOutputStream; 26 | import java.io.File; 27 | import java.io.FileOutputStream; 28 | import java.io.IOException; 29 | import java.nio.ByteBuffer; 30 | import java.nio.ByteOrder; 31 | 32 | /** 33 | * Common base class for EGL surfaces. 34 | *
35 | * There can be multiple surfaces associated with a single context.
36 | */
37 | public class EglSurfaceBase {
38 | protected static final String TAG = GlUtil.TAG;
39 |
40 | // EglCore object we're associated with. It may be associated with multiple surfaces.
41 | protected EglCore mEglCore;
42 |
43 | private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
44 | private int mWidth = -1;
45 | private int mHeight = -1;
46 |
47 | protected EglSurfaceBase(EglCore eglCore) {
48 | mEglCore = eglCore;
49 | }
50 |
51 | /**
52 | * Creates a window surface.
53 | *
54 | * @param surface May be a Surface or SurfaceTexture. 55 | */ 56 | public void createWindowSurface(Object surface) { 57 | if (mEGLSurface != EGL14.EGL_NO_SURFACE) { 58 | throw new IllegalStateException("surface already created"); 59 | } 60 | mEGLSurface = mEglCore.createWindowSurface(surface); 61 | 62 | // Don't cache width/height here, because the size of the underlying surface can change 63 | // out from under us (see e.g. HardwareScalerActivity). 64 | //mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH); 65 | //mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT); 66 | } 67 | 68 | /** 69 | * Creates an off-screen surface. 70 | */ 71 | public void createOffscreenSurface(int width, int height) { 72 | if (mEGLSurface != EGL14.EGL_NO_SURFACE) { 73 | throw new IllegalStateException("surface already created"); 74 | } 75 | mEGLSurface = mEglCore.createOffscreenSurface(width, height); 76 | mWidth = width; 77 | mHeight = height; 78 | } 79 | 80 | /** 81 | * Returns the surface's width, in pixels. 82 | *
83 | * If this is called on a window surface, and the underlying surface is in the process 84 | * of changing size, we may not see the new size right away (e.g. in the "surfaceChanged" 85 | * callback). The size should match after the next buffer swap. 86 | */ 87 | public int getWidth() { 88 | if (mWidth < 0) { 89 | return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH); 90 | } else { 91 | return mWidth; 92 | } 93 | } 94 | 95 | /** 96 | * Returns the surface's height, in pixels. 97 | */ 98 | public int getHeight() { 99 | if (mHeight < 0) { 100 | return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT); 101 | } else { 102 | return mHeight; 103 | } 104 | } 105 | 106 | /** 107 | * Release the EGL surface. 108 | */ 109 | public void releaseEglSurface() { 110 | mEglCore.releaseSurface(mEGLSurface); 111 | mEGLSurface = EGL14.EGL_NO_SURFACE; 112 | mWidth = mHeight = -1; 113 | } 114 | 115 | /** 116 | * Makes our EGL context and surface current. 117 | */ 118 | public void makeCurrent() { 119 | mEglCore.makeCurrent(mEGLSurface); 120 | } 121 | 122 | /** 123 | * Makes our EGL context and surface current for drawing, using the supplied surface 124 | * for reading. 125 | */ 126 | public void makeCurrentReadFrom(EglSurfaceBase readSurface) { 127 | mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface); 128 | } 129 | 130 | /** 131 | * Calls eglSwapBuffers. Use this to "publish" the current frame. 132 | * 133 | * @return false on failure 134 | */ 135 | public boolean swapBuffers() { 136 | boolean result = mEglCore.swapBuffers(mEGLSurface); 137 | if (!result) { 138 | Log.d(TAG, "WARNING: swapBuffers() failed"); 139 | } 140 | return result; 141 | } 142 | 143 | /** 144 | * Sends the presentation time stamp to EGL. 145 | * 146 | * @param nsecs Timestamp, in nanoseconds. 147 | */ 148 | public void setPresentationTime(long nsecs) { 149 | mEglCore.setPresentationTime(mEGLSurface, nsecs); 150 | } 151 | 152 | /** 153 | * Saves the EGL surface to a file. 154 | *
155 | * Expects that this object's EGL surface is current. 156 | */ 157 | public void saveFrame(File file) throws IOException { 158 | if (!mEglCore.isCurrent(mEGLSurface)) { 159 | throw new RuntimeException("Expected EGL context/surface is not current"); 160 | } 161 | 162 | // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA 163 | // data (i.e. a byte of red, followed by a byte of green...). While the Bitmap 164 | // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the 165 | // Bitmap "copy pixels" method wants the same format GL provides. 166 | // 167 | // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling 168 | // here often. 169 | // 170 | // Making this even more interesting is the upside-down nature of GL, which means 171 | // our output will look upside down relative to what appears on screen if the 172 | // typical GL conventions are used. 173 | 174 | String filename = file.toString(); 175 | 176 | int width = getWidth(); 177 | int height = getHeight(); 178 | ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4); 179 | buf.order(ByteOrder.LITTLE_ENDIAN); 180 | GLES20.glReadPixels(0, 0, width, height, 181 | GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf); 182 | GlUtil.checkGlError("glReadPixels"); 183 | buf.rewind(); 184 | 185 | BufferedOutputStream bos = null; 186 | try { 187 | bos = new BufferedOutputStream(new FileOutputStream(filename)); 188 | Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); 189 | bmp.copyPixelsFromBuffer(buf); 190 | bmp.compress(Bitmap.CompressFormat.PNG, 90, bos); 191 | bmp.recycle(); 192 | } finally { 193 | if (bos != null) bos.close(); 194 | } 195 | Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'"); 196 | } 197 | } 198 | -------------------------------------------------------------------------------- /app/src/main/java/io/github/junyuecao/croppedscreenrecorder/gles/Drawable2d.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014 Google Inc. All rights reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package io.github.junyuecao.croppedscreenrecorder.gles; 18 | 19 | import java.nio.FloatBuffer; 20 | 21 | /** 22 | * Base class for stuff we like to draw. 23 | */ 24 | public class Drawable2d { 25 | private static final int SIZEOF_FLOAT = 4; 26 | 27 | /** 28 | * Simple equilateral triangle (1.0 per side). Centered on (0,0). 
29 | */
30 | private static final float TRIANGLE_COORDS[] = {
31 | 0.0f, 0.577350269f, // 0 top
32 | -0.5f, -0.288675135f, // 1 bottom left
33 | 0.5f, -0.288675135f // 2 bottom right
34 | };
35 | private static final float TRIANGLE_TEX_COORDS[] = {
36 | 0.5f, 0.0f, // 0 top center
37 | 0.0f, 1.0f, // 1 bottom left
38 | 1.0f, 1.0f, // 2 bottom right
39 | };
40 | private static final FloatBuffer TRIANGLE_BUF =
41 | GlUtil.createFloatBuffer(TRIANGLE_COORDS);
42 | private static final FloatBuffer TRIANGLE_TEX_BUF =
43 | GlUtil.createFloatBuffer(TRIANGLE_TEX_COORDS);
44 |
45 | /**
46 | * Simple square, specified as a triangle strip. The square is centered on (0,0) and has
47 | * a size of 1x1.
48 | *
49 | * Triangles are 0-1-2 and 2-1-3 (counter-clockwise winding).
50 | */
51 | private static final float RECTANGLE_COORDS[] = {
52 | -0.5f, -0.5f, // 0 bottom left
53 | 0.5f, -0.5f, // 1 bottom right
54 | -0.5f, 0.5f, // 2 top left
55 | 0.5f, 0.5f, // 3 top right
56 | };
57 | private static final float RECTANGLE_TEX_COORDS[] = {
58 | 0.0f, 1.0f, // 0 bottom left
59 | 1.0f, 1.0f, // 1 bottom right
60 | 0.0f, 0.0f, // 2 top left
61 | 1.0f, 0.0f // 3 top right
62 | };
63 | private static final FloatBuffer RECTANGLE_BUF =
64 | GlUtil.createFloatBuffer(RECTANGLE_COORDS);
65 | private static final FloatBuffer RECTANGLE_TEX_BUF =
66 | GlUtil.createFloatBuffer(RECTANGLE_TEX_COORDS);
67 |
68 | /**
69 | * A "full" square, extending from -1 to +1 in both dimensions. When the model/view/projection
70 | * matrix is identity, this will exactly cover the viewport.
71 | *
72 | * The texture coordinates are Y-inverted relative to RECTANGLE. (This seems to work out 73 | * right with external textures from SurfaceTexture.) 74 | */ 75 | private static final float FULL_RECTANGLE_COORDS[] = { 76 | -1.0f, -1.0f, // 0 bottom left 77 | 1.0f, -1.0f, // 1 bottom right 78 | -1.0f, 1.0f, // 2 top left 79 | 1.0f, 1.0f, // 3 top right 80 | }; 81 | private static final float FULL_RECTANGLE_TEX_COORDS[] = { 82 | 0.0f, 0.0f, // 0 bottom left 83 | 1.0f, 0.0f, // 1 bottom right 84 | 0.0f, 1.0f, // 2 top left 85 | 1.0f, 1.0f // 3 top right 86 | }; 87 | private static final FloatBuffer FULL_RECTANGLE_BUF = 88 | GlUtil.createFloatBuffer(FULL_RECTANGLE_COORDS); 89 | private static final FloatBuffer FULL_RECTANGLE_TEX_BUF = 90 | GlUtil.createFloatBuffer(FULL_RECTANGLE_TEX_COORDS); 91 | 92 | 93 | private FloatBuffer mVertexArray; 94 | private FloatBuffer mTexCoordArray; 95 | private int mVertexCount; 96 | private int mCoordsPerVertex; 97 | private int mVertexStride; 98 | private int mTexCoordStride; 99 | private Prefab mPrefab; 100 | 101 | /** 102 | * Enum values for constructor. 103 | */ 104 | public enum Prefab { 105 | TRIANGLE, RECTANGLE, FULL_RECTANGLE 106 | } 107 | 108 | /** 109 | * Prepares a drawable from a "pre-fabricated" shape definition. 110 | *
111 | * Does no EGL/GL operations, so this can be done at any time. 112 | */ 113 | public Drawable2d(Prefab shape) { 114 | switch (shape) { 115 | case TRIANGLE: 116 | mVertexArray = TRIANGLE_BUF; 117 | mTexCoordArray = TRIANGLE_TEX_BUF; 118 | mCoordsPerVertex = 2; 119 | mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT; 120 | mVertexCount = TRIANGLE_COORDS.length / mCoordsPerVertex; 121 | break; 122 | case RECTANGLE: 123 | mVertexArray = RECTANGLE_BUF; 124 | mTexCoordArray = RECTANGLE_TEX_BUF; 125 | mCoordsPerVertex = 2; 126 | mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT; 127 | mVertexCount = RECTANGLE_COORDS.length / mCoordsPerVertex; 128 | break; 129 | case FULL_RECTANGLE: 130 | mVertexArray = FULL_RECTANGLE_BUF; 131 | mTexCoordArray = FULL_RECTANGLE_TEX_BUF; 132 | mCoordsPerVertex = 2; 133 | mVertexStride = mCoordsPerVertex * SIZEOF_FLOAT; 134 | mVertexCount = FULL_RECTANGLE_COORDS.length / mCoordsPerVertex; 135 | break; 136 | default: 137 | throw new RuntimeException("Unknown shape " + shape); 138 | } 139 | mTexCoordStride = 2 * SIZEOF_FLOAT; 140 | mPrefab = shape; 141 | } 142 | 143 | /** 144 | * Returns the array of vertices. 145 | *
146 | * To avoid allocations, this returns internal state. The caller must not modify it.
147 | */
148 | public FloatBuffer getVertexArray() {
149 | return mVertexArray;
150 | }
151 |
152 | /**
153 | * Returns the array of texture coordinates.
154 | *
155 | * To avoid allocations, this returns internal state. The caller must not modify it. 156 | */ 157 | public FloatBuffer getTexCoordArray() { 158 | return mTexCoordArray; 159 | } 160 | 161 | /** 162 | * Returns the number of vertices stored in the vertex array. 163 | */ 164 | public int getVertexCount() { 165 | return mVertexCount; 166 | } 167 | 168 | /** 169 | * Returns the width, in bytes, of the data for each vertex. 170 | */ 171 | public int getVertexStride() { 172 | return mVertexStride; 173 | } 174 | 175 | /** 176 | * Returns the width, in bytes, of the data for each texture coordinate. 177 | */ 178 | public int getTexCoordStride() { 179 | return mTexCoordStride; 180 | } 181 | 182 | /** 183 | * Returns the number of position coordinates per vertex. This will be 2 or 3. 184 | */ 185 | public int getCoordsPerVertex() { 186 | return mCoordsPerVertex; 187 | } 188 | 189 | @Override 190 | public String toString() { 191 | if (mPrefab != null) { 192 | return "[Drawable2d: " + mPrefab + "]"; 193 | } else { 194 | return "[Drawable2d: ...]"; 195 | } 196 | } 197 | } 198 | -------------------------------------------------------------------------------- /app/src/main/java/io/github/junyuecao/croppedscreenrecorder/gles/GlUtil.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014 Google Inc. All rights reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package io.github.junyuecao.croppedscreenrecorder.gles; 18 | 19 | import android.opengl.GLES20; 20 | import android.opengl.GLES30; 21 | import android.opengl.Matrix; 22 | import android.util.Log; 23 | 24 | import java.nio.ByteBuffer; 25 | import java.nio.ByteOrder; 26 | import java.nio.FloatBuffer; 27 | 28 | /** 29 | * Some OpenGL utility functions. 30 | */ 31 | public class GlUtil { 32 | public static final String TAG = "Grafika"; 33 | 34 | /** Identity matrix for general use. Don't modify or life will get weird. */ 35 | public static final float[] IDENTITY_MATRIX; 36 | static { 37 | IDENTITY_MATRIX = new float[16]; 38 | Matrix.setIdentityM(IDENTITY_MATRIX, 0); 39 | } 40 | 41 | private static final int SIZEOF_FLOAT = 4; 42 | 43 | 44 | private GlUtil() {} // do not instantiate 45 | 46 | /** 47 | * Creates a new program from the supplied vertex and fragment shaders. 48 | * 49 | * @return A handle to the program, or 0 on failure. 
50 | */ 51 | public static int createProgram(String vertexSource, String fragmentSource) { 52 | int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); 53 | if (vertexShader == 0) { 54 | return 0; 55 | } 56 | int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource); 57 | if (pixelShader == 0) { 58 | return 0; 59 | } 60 | 61 | int program = GLES20.glCreateProgram(); 62 | checkGlError("glCreateProgram"); 63 | if (program == 0) { 64 | Log.e(TAG, "Could not create program"); 65 | } 66 | GLES20.glAttachShader(program, vertexShader); 67 | checkGlError("glAttachShader"); 68 | GLES20.glAttachShader(program, pixelShader); 69 | checkGlError("glAttachShader"); 70 | GLES20.glLinkProgram(program); 71 | int[] linkStatus = new int[1]; 72 | GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); 73 | if (linkStatus[0] != GLES20.GL_TRUE) { 74 | Log.e(TAG, "Could not link program: "); 75 | Log.e(TAG, GLES20.glGetProgramInfoLog(program)); 76 | GLES20.glDeleteProgram(program); 77 | program = 0; 78 | } 79 | return program; 80 | } 81 | 82 | /** 83 | * Compiles the provided shader source. 84 | * 85 | * @return A handle to the shader, or 0 on failure. 86 | */ 87 | public static int loadShader(int shaderType, String source) { 88 | int shader = GLES20.glCreateShader(shaderType); 89 | checkGlError("glCreateShader type=" + shaderType); 90 | GLES20.glShaderSource(shader, source); 91 | GLES20.glCompileShader(shader); 92 | int[] compiled = new int[1]; 93 | GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0); 94 | if (compiled[0] == 0) { 95 | Log.e(TAG, "Could not compile shader " + shaderType + ":"); 96 | Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader)); 97 | GLES20.glDeleteShader(shader); 98 | shader = 0; 99 | } 100 | return shader; 101 | } 102 | 103 | /** 104 | * Checks to see if a GLES error has been raised. 105 | */ 106 | public static void checkGlError(String op) { 107 | int error = GLES20.glGetError(); 108 | if (error != GLES20.GL_NO_ERROR) { 109 | String msg = op + ": glError 0x" + Integer.toHexString(error); 110 | Log.e(TAG, msg); 111 | throw new RuntimeException(msg); 112 | } 113 | } 114 | 115 | /** 116 | * Checks to see if the location we obtained is valid. GLES returns -1 if a label 117 | * could not be found, but does not set the GL error. 118 | *
119 | * Throws a RuntimeException if the location is invalid. 120 | */ 121 | public static void checkLocation(int location, String label) { 122 | if (location < 0) { 123 | throw new RuntimeException("Unable to locate '" + label + "' in program"); 124 | } 125 | } 126 | 127 | /** 128 | * Creates a texture from raw data. 129 | * 130 | * @param data Image data, in a "direct" ByteBuffer. 131 | * @param width Texture width, in pixels (not bytes). 132 | * @param height Texture height, in pixels. 133 | * @param format Image data format (use constant appropriate for glTexImage2D(), e.g. GL_RGBA). 134 | * @return Handle to texture. 135 | */ 136 | public static int createImageTexture(ByteBuffer data, int width, int height, int format) { 137 | int[] textureHandles = new int[1]; 138 | int textureHandle; 139 | 140 | GLES20.glGenTextures(1, textureHandles, 0); 141 | textureHandle = textureHandles[0]; 142 | GlUtil.checkGlError("glGenTextures"); 143 | 144 | // Bind the texture handle to the 2D texture target. 145 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle); 146 | 147 | // Configure min/mag filtering, i.e. what scaling method do we use if what we're rendering 148 | // is smaller or larger than the source image. 149 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, 150 | GLES20.GL_LINEAR); 151 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, 152 | GLES20.GL_LINEAR); 153 | GlUtil.checkGlError("loadImageTexture"); 154 | 155 | // Load the data from the buffer into the texture handle. 156 | GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, format, 157 | width, height, /*border*/ 0, format, GLES20.GL_UNSIGNED_BYTE, data); 158 | GlUtil.checkGlError("loadImageTexture"); 159 | 160 | return textureHandle; 161 | } 162 | 163 | /** 164 | * Allocates a direct float buffer, and populates it with the float array data. 165 | */ 166 | public static FloatBuffer createFloatBuffer(float[] coords) { 167 | // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it. 168 | ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * SIZEOF_FLOAT); 169 | bb.order(ByteOrder.nativeOrder()); 170 | FloatBuffer fb = bb.asFloatBuffer(); 171 | fb.put(coords); 172 | fb.position(0); 173 | return fb; 174 | } 175 | 176 | /** 177 | * Writes GL version info to the log. 178 | */ 179 | public static void logVersionInfo() { 180 | Log.i(TAG, "vendor : " + GLES20.glGetString(GLES20.GL_VENDOR)); 181 | Log.i(TAG, "renderer: " + GLES20.glGetString(GLES20.GL_RENDERER)); 182 | Log.i(TAG, "version : " + GLES20.glGetString(GLES20.GL_VERSION)); 183 | 184 | if (false) { 185 | int[] values = new int[1]; 186 | GLES30.glGetIntegerv(GLES30.GL_MAJOR_VERSION, values, 0); 187 | int majorVersion = values[0]; 188 | GLES30.glGetIntegerv(GLES30.GL_MINOR_VERSION, values, 0); 189 | int minorVersion = values[0]; 190 | if (GLES30.glGetError() == GLES30.GL_NO_ERROR) { 191 | Log.i(TAG, "iversion: " + majorVersion + "." + minorVersion); 192 | } 193 | } 194 | } 195 | } 196 | -------------------------------------------------------------------------------- /app/src/main/java/io/github/junyuecao/croppedscreenrecorder/gles/GeneratedTexture.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014 Google Inc. All rights reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package io.github.junyuecao.croppedscreenrecorder.gles; 18 | 19 | import android.opengl.GLES20; 20 | 21 | import java.nio.ByteBuffer; 22 | 23 | /** 24 | * Code for generating images useful for testing textures. 25 | */ 26 | public class GeneratedTexture { 27 | //private static final String TAG = GlUtil.TAG; 28 | 29 | public enum Image { COARSE, FINE }; 30 | 31 | // Basic colors, in little-endian RGBA. 32 | private static final int BLACK = 0x00000000; 33 | private static final int RED = 0x000000ff; 34 | private static final int GREEN = 0x0000ff00; 35 | private static final int BLUE = 0x00ff0000; 36 | private static final int MAGENTA = RED | BLUE; 37 | private static final int YELLOW = RED | GREEN; 38 | private static final int CYAN = GREEN | BLUE; 39 | private static final int WHITE = RED | GREEN | BLUE; 40 | private static final int OPAQUE = (int) 0xff000000L; 41 | private static final int HALF = (int) 0x80000000L; 42 | private static final int LOW = (int) 0x40000000L; 43 | private static final int TRANSP = 0; 44 | 45 | private static final int GRID[] = new int[] { // must be 16 elements 46 | OPAQUE|RED, OPAQUE|YELLOW, OPAQUE|GREEN, OPAQUE|MAGENTA, 47 | OPAQUE|WHITE, LOW|RED, LOW|GREEN, OPAQUE|YELLOW, 48 | OPAQUE|MAGENTA, TRANSP|GREEN, HALF|RED, OPAQUE|BLACK, 49 | OPAQUE|CYAN, OPAQUE|MAGENTA, OPAQUE|CYAN, OPAQUE|BLUE, 50 | }; 51 | 52 | private static final int TEX_SIZE = 64; // must be power of 2 53 | private static final int FORMAT = GLES20.GL_RGBA; 54 | private static final int BYTES_PER_PIXEL = 4; // RGBA 55 | 56 | // Generate test image data. This must come after the other values are initialized. 57 | private static final ByteBuffer sCoarseImageData = generateCoarseData(); 58 | private static final ByteBuffer sFineImageData = generateFineData(); 59 | 60 | 61 | /** 62 | * Creates a test texture in the current GL context. 63 | *
64 | * This follows image conventions, so the pixel data at offset zero is intended to appear 65 | * in the top-left corner. Color values for non-opaque alpha will be pre-multiplied. 66 | * 67 | * @return Handle to texture. 68 | */ 69 | public static int createTestTexture(Image which) { 70 | ByteBuffer buf; 71 | switch (which) { 72 | case COARSE: 73 | buf = sCoarseImageData; 74 | break; 75 | case FINE: 76 | buf = sFineImageData; 77 | break; 78 | default: 79 | throw new RuntimeException("unknown image"); 80 | } 81 | return GlUtil.createImageTexture(buf, TEX_SIZE, TEX_SIZE, FORMAT); 82 | } 83 | 84 | /** 85 | * Generates a "coarse" test image. We want to create a 4x4 block pattern with obvious color 86 | * values in the corners, so that we can confirm orientation and coverage. We also 87 | * leave a couple of alpha holes to check that channel. Single pixels are set in two of 88 | * the corners to make it easy to see if we're cutting the texture off at the edge. 89 | *
90 | * Like most image formats, the pixel data begins with the top-left corner, which is
91 | * upside-down relative to OpenGL conventions. The texture coordinates should be flipped
92 | * vertically. Using an asymmetric pattern lets us check that we're doing that right.
93 | *
94 | * Colors use pre-multiplied alpha (so set glBlendFunc appropriately).
95 | *
96 | * @return A direct ByteBuffer with the 8888 RGBA data.
97 | */
98 | private static ByteBuffer generateCoarseData() {
99 | byte[] buf = new byte[TEX_SIZE * TEX_SIZE * BYTES_PER_PIXEL];
100 | final int scale = TEX_SIZE / 4; // convert 64x64 --> 4x4
101 |
102 | for (int i = 0; i < buf.length; i += BYTES_PER_PIXEL) {
103 | int texRow = (i / BYTES_PER_PIXEL) / TEX_SIZE;
104 | int texCol = (i / BYTES_PER_PIXEL) % TEX_SIZE;
105 |
106 | int gridRow = texRow / scale; // 0-3
107 | int gridCol = texCol / scale; // 0-3
108 | int gridIndex = (gridRow * 4) + gridCol; // 0-15
109 |
110 | int color = GRID[gridIndex];
111 |
112 | // override the pixels in two corners to check coverage
113 | if (i == 0) {
114 | color = OPAQUE | WHITE;
115 | } else if (i == buf.length - BYTES_PER_PIXEL) {
116 | color = OPAQUE | WHITE;
117 | }
118 |
119 | // extract RGBA; use "int" instead of "byte" to get unsigned values
120 | int red = color & 0xff;
121 | int green = (color >> 8) & 0xff;
122 | int blue = (color >> 16) & 0xff;
123 | int alpha = (color >> 24) & 0xff;
124 |
125 | // pre-multiply colors and store in buffer
126 | float alphaM = alpha / 255.0f;
127 | buf[i] = (byte) (red * alphaM);
128 | buf[i+1] = (byte) (green * alphaM);
129 | buf[i+2] = (byte) (blue * alphaM);
130 | buf[i+3] = (byte) alpha;
131 | }
132 |
133 | ByteBuffer byteBuf = ByteBuffer.allocateDirect(buf.length);
134 | byteBuf.put(buf);
135 | byteBuf.position(0);
136 | return byteBuf;
137 | }
138 |
139 | /**
140 | * Generates a fine-grained test image.
141 | *
142 | * @return A direct ByteBuffer with the 8888 RGBA data.
143 | */
144 | private static ByteBuffer generateFineData() {
145 | byte[] buf = new byte[TEX_SIZE * TEX_SIZE * BYTES_PER_PIXEL];
146 |
147 | // top/left: single-pixel red/blue
148 | checkerPattern(buf, 0, 0, TEX_SIZE / 2, TEX_SIZE / 2,
149 | OPAQUE|RED, OPAQUE|BLUE, 0x01);
150 | // bottom/right: two-pixel red/green
151 | checkerPattern(buf, TEX_SIZE / 2, TEX_SIZE / 2, TEX_SIZE, TEX_SIZE,
152 | OPAQUE|RED, OPAQUE|GREEN, 0x02);
153 | // bottom/left: four-pixel blue/green
154 | checkerPattern(buf, 0, TEX_SIZE / 2, TEX_SIZE / 2, TEX_SIZE,
155 | OPAQUE|BLUE, OPAQUE|GREEN, 0x04);
156 | // top/right: eight-pixel black/white
157 | checkerPattern(buf, TEX_SIZE / 2, 0, TEX_SIZE, TEX_SIZE / 2,
158 | OPAQUE|WHITE, OPAQUE|BLACK, 0x08);
159 |
160 | ByteBuffer byteBuf = ByteBuffer.allocateDirect(buf.length);
161 | byteBuf.put(buf);
162 | byteBuf.position(0);
163 | return byteBuf;
164 | }
165 |
166 | private static void checkerPattern(byte[] buf, int left, int top, int right, int bottom,
167 | int color1, int color2, int bit) {
168 | for (int row = top; row < bottom; row++) {
169 | int rowOffset = row * TEX_SIZE * BYTES_PER_PIXEL;
170 | for (int col = left; col < right; col++) {
171 | int offset = rowOffset + col * BYTES_PER_PIXEL;
172 | int color;
173 | if (((row & bit) ^ (col & bit)) == 0) {
174 | color = color1;
175 | } else {
176 | color = color2;
177 | }
178 |
179 | // extract RGBA; use "int" instead of "byte" to get unsigned values
180 | int red = color & 0xff;
181 | int green = (color >> 8) & 0xff;
182 | int blue = (color >> 16) & 0xff;
183 | int alpha = (color >> 24) & 0xff;
184 |
185 | // pre-multiply colors and store in buffer
186 | float alphaM = alpha / 255.0f;
187 | buf[offset] = (byte) (red * alphaM);
188 | buf[offset+1] = (byte) (green * alphaM);
189 | buf[offset+2] = (byte) (blue * alphaM);
190 | buf[offset+3] = (byte) alpha;
191 | }
192 | }
193 | }
194 | }
195 |
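Both generators above store premultiplied alpha: each color channel is scaled by alpha/255 before it is written, which is what the "pre-multiply colors" comments refer to, while alpha itself is stored unscaled. A minimal sketch of that conversion for a single little-endian RGBA color int, mirroring the loop bodies above (the helper name is illustrative, not part of this file):

    // Expand one little-endian RGBA color int into premultiplied R, G, B, A bytes.
    static void putPremultiplied(int color, byte[] out, int offset) {
        int red = color & 0xff;
        int green = (color >> 8) & 0xff;
        int blue = (color >> 16) & 0xff;
        int alpha = (color >> 24) & 0xff;
        float alphaM = alpha / 255.0f;        // 0.0 (transparent) .. 1.0 (opaque)
        out[offset] = (byte) (red * alphaM);
        out[offset + 1] = (byte) (green * alphaM);
        out[offset + 2] = (byte) (blue * alphaM);
        out[offset + 3] = (byte) alpha;       // alpha is not scaled
    }

With premultiplied data the matching blend mode is glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA), which is what the "set glBlendFunc appropriately" note above points at.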
--------------------------------------------------------------------------------
/app/src/main/java/io/github/junyuecao/croppedscreenrecorder/ScreenCapture.java:
--------------------------------------------------------------------------------
1 | package io.github.junyuecao.croppedscreenrecorder;
2 |
3 | import static android.content.Context.MEDIA_PROJECTION_SERVICE;
4 | import static io.github.junyuecao.croppedscreenrecorder.VideoEncoderCore.DEFAULT_CHANNEL_CONFIG;
5 | import static io.github.junyuecao.croppedscreenrecorder.VideoEncoderCore.DEFAULT_DATA_FORMAT;
6 | import static io.github.junyuecao.croppedscreenrecorder.VideoEncoderCore.DEFAULT_SAMPLE_RATE;
7 |
8 | import android.app.Activity;
9 | import android.content.Context;
10 | import android.content.Intent;
11 | import android.hardware.display.DisplayManager;
12 | import android.hardware.display.VirtualDisplay;
13 | import android.media.AudioRecord;
14 | import android.media.MediaRecorder;
15 | import android.media.projection.MediaProjection;
16 | import android.media.projection.MediaProjectionManager;
17 | import android.opengl.EGL14;
18 | import android.opengl.EGLContext;
19 | import android.os.Build;
20 | import android.os.Environment;
21 | import android.support.annotation.NonNull;
22 | import android.support.annotation.RequiresApi;
23 | import android.util.DisplayMetrics;
24 | import android.util.Log;
25 | import android.view.Surface;
26 |
27 | import java.io.File;
28 | import java.lang.ref.WeakReference;
29 | import java.nio.ByteBuffer;
30 |
31 | /**
32 | * Screen capture.
33 | * Process:
34 | * 1. Request capture permission,
35 | * 2. start the projection (running),
36 | * 3. attach the encoder (recording),
37 | * 4. detach the encoder when finished,
38 | * 5. close the projection and destroy.
39 | */
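The first two steps above go through MediaProjectionManager. A minimal sketch using the standard framework calls (where "activity" is the host Activity; this is illustrative, not necessarily the exact wiring inside this class):

    // Step 1: ask the user for screen-capture permission.
    MediaProjectionManager mpm = (MediaProjectionManager)
            activity.getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    activity.startActivityForResult(mpm.createScreenCaptureIntent(), CAPTURE_REQUEST_CODE);

    // Step 2: in onActivityResult(), turn the grant into a MediaProjection.
    // if (requestCode == CAPTURE_REQUEST_CODE && resultCode == Activity.RESULT_OK) {
    //     MediaProjection projection = mpm.getMediaProjection(resultCode, data);
    //     // steps 3-5: create the VirtualDisplay, attach/detach the encoder, then stop the projection.
    // }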
40 | @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
41 | public class ScreenCapture {
42 |
43 | private static final String TAG = "ScreenCapture";
44 | public static final int CAPTURE_REQUEST_CODE = 8080;
45 | private final WeakReference
--------------------------------------------------------------------------------
/app/src/main/java/io/github/junyuecao/croppedscreenrecorder/gles/EglCore.java:
--------------------------------------------------------------------------------
32 | * The EGLContext must only be attached to one thread at a time. This class is not thread-safe.
33 | */
34 | public final class EglCore {
35 | private static final String TAG = GlUtil.TAG;
36 |
37 | /**
38 | * Constructor flag: surface must be recordable. This discourages EGL from using a
39 | * pixel format that cannot be converted efficiently to something usable by the video
40 | * encoder.
41 | */
42 | public static final int FLAG_RECORDABLE = 0x01;
43 |
44 | /**
45 | * Constructor flag: ask for GLES3, fall back to GLES2 if not available. Without this
46 | * flag, GLES2 is used.
47 | */
48 | public static final int FLAG_TRY_GLES3 = 0x02;
49 |
50 | // Android-specific extension.
51 | private static final int EGL_RECORDABLE_ANDROID = 0x3142;
52 |
53 | private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
54 | private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
55 | private EGLConfig mEGLConfig = null;
56 | private int mGlVersion = -1;
57 |
58 |
59 | /**
60 | * Prepares EGL display and context.
61 | *
62 | * Equivalent to EglCore(null, 0).
63 | */
64 | public EglCore() {
65 | this(null, 0);
66 | }
67 |
68 | /**
69 | * Prepares EGL display and context.
70 | *
71 | * @param sharedContext The context to share, or null if sharing is not desired.
72 | * @param flags Configuration bit flags, e.g. FLAG_RECORDABLE.
73 | */
74 | public EglCore(EGLContext sharedContext, int flags) {
75 | if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
76 | throw new RuntimeException("EGL already set up");
77 | }
78 |
79 | if (sharedContext == null) {
80 | sharedContext = EGL14.EGL_NO_CONTEXT;
81 | }
82 |
83 | mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
84 | if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
85 | throw new RuntimeException("unable to get EGL14 display");
86 | }
87 | int[] version = new int[2];
88 | if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
89 | mEGLDisplay = null;
90 | throw new RuntimeException("unable to initialize EGL14");
91 | }
92 |
93 | // Try to get a GLES3 context, if requested.
94 | if ((flags & FLAG_TRY_GLES3) != 0) {
95 | //Log.d(TAG, "Trying GLES 3");
96 | EGLConfig config = getConfig(flags, 3);
97 | if (config != null) {
98 | int[] attrib3_list = {
99 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 3,
100 | EGL14.EGL_NONE
101 | };
102 | EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
103 | attrib3_list, 0);
104 |
105 | if (EGL14.eglGetError() == EGL14.EGL_SUCCESS) {
106 | //Log.d(TAG, "Got GLES 3 config");
107 | mEGLConfig = config;
108 | mEGLContext = context;
109 | mGlVersion = 3;
110 | }
111 | }
112 | }
113 | if (mEGLContext == EGL14.EGL_NO_CONTEXT) { // GLES 2 only, or GLES 3 attempt failed
114 | //Log.d(TAG, "Trying GLES 2");
115 | EGLConfig config = getConfig(flags, 2);
116 | if (config == null) {
117 | throw new RuntimeException("Unable to find a suitable EGLConfig");
118 | }
119 | int[] attrib2_list = {
120 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
121 | EGL14.EGL_NONE
122 | };
123 | EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
124 | attrib2_list, 0);
125 | checkEglError("eglCreateContext");
126 | mEGLConfig = config;
127 | mEGLContext = context;
128 | mGlVersion = 2;
129 | }
130 |
131 | // Confirm with query.
132 | int[] values = new int[1];
133 | EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION,
134 | values, 0);
135 | Log.d(TAG, "EGLContext created, client version " + values[0]);
136 | }
137 |
138 | /**
139 | * Finds a suitable EGLConfig.
140 | *
141 | * @param flags Bit flags from constructor.
142 | * @param version Must be 2 or 3.
143 | */
144 | private EGLConfig getConfig(int flags, int version) {
145 | int renderableType = EGL14.EGL_OPENGL_ES2_BIT;
146 | if (version >= 3) {
147 | renderableType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR;
148 | }
149 |
150 | // The actual surface is generally RGBA or RGBX, so situationally omitting alpha
151 | // doesn't really help. It can also lead to a huge performance hit on glReadPixels()
152 | // when reading into a GL_RGBA buffer.
153 | int[] attribList = {
154 | EGL14.EGL_RED_SIZE, 8,
155 | EGL14.EGL_GREEN_SIZE, 8,
156 | EGL14.EGL_BLUE_SIZE, 8,
157 | EGL14.EGL_ALPHA_SIZE, 8,
158 | //EGL14.EGL_DEPTH_SIZE, 16,
159 | //EGL14.EGL_STENCIL_SIZE, 8,
160 | EGL14.EGL_RENDERABLE_TYPE, renderableType,
161 | EGL14.EGL_NONE, 0, // placeholder for recordable [@-3]
162 | EGL14.EGL_NONE
163 | };
164 | if ((flags & FLAG_RECORDABLE) != 0) {
165 | attribList[attribList.length - 3] = EGL_RECORDABLE_ANDROID;
166 | attribList[attribList.length - 2] = 1;
167 | }
168 | EGLConfig[] configs = new EGLConfig[1];
169 | int[] numConfigs = new int[1];
170 | if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
171 | numConfigs, 0)) {
172 | Log.w(TAG, "unable to find RGB8888 / " + version + " EGLConfig");
173 | return null;
174 | }
175 | return configs[0];
176 | }
177 |
178 | /**
179 | * Discards all resources held by this class, notably the EGL context. This must be
180 | * called from the thread where the context was created.
181 | *
182 | * On completion, no context will be current.
183 | */
184 | public void release() {
185 | if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
186 | // Android is unusual in that it uses a reference-counted EGLDisplay. So for
187 | // every eglInitialize() we need an eglTerminate().
188 | EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
189 | EGL14.EGL_NO_CONTEXT);
190 | EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
191 | EGL14.eglReleaseThread();
192 | EGL14.eglTerminate(mEGLDisplay);
193 | }
194 |
195 | mEGLDisplay = EGL14.EGL_NO_DISPLAY;
196 | mEGLContext = EGL14.EGL_NO_CONTEXT;
197 | mEGLConfig = null;
198 | }
199 |
200 | @Override
201 | protected void finalize() throws Throwable {
202 | try {
203 | if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
204 | // We're limited here -- finalizers don't run on the thread that holds
205 | // the EGL state, so if a surface or context is still current on another
206 | // thread we can't fully release it here. Exceptions thrown from here
207 | // are quietly discarded. Complain in the log file.
208 | Log.w(TAG, "WARNING: EglCore was not explicitly released -- state may be leaked");
209 | release();
210 | }
211 | } finally {
212 | super.finalize();
213 | }
214 | }
215 |
216 | /**
217 | * Destroys the specified surface. Note the EGLSurface won't actually be destroyed if it's
218 | * still current in a context.
219 | */
220 | public void releaseSurface(EGLSurface eglSurface) {
221 | EGL14.eglDestroySurface(mEGLDisplay, eglSurface);
222 | }
223 |
224 | /**
225 | * Creates an EGL surface associated with a Surface.
226 | *
227 | * If this is destined for MediaCodec, the EGLConfig should have the "recordable" attribute.
228 | */
229 | public EGLSurface createWindowSurface(Object surface) {
230 | if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
231 | throw new RuntimeException("invalid surface: " + surface);
232 | }
233 |
234 | // Create a window surface, and attach it to the Surface we received.
235 | int[] surfaceAttribs = {
236 | EGL14.EGL_NONE
237 | };
238 | EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface,
239 | surfaceAttribs, 0);
240 | checkEglError("eglCreateWindowSurface");
241 | if (eglSurface == null) {
242 | throw new RuntimeException("surface was null");
243 | }
244 | return eglSurface;
245 | }
246 |
247 | /**
248 | * Creates an EGL surface associated with an offscreen buffer.
249 | */
250 | public EGLSurface createOffscreenSurface(int width, int height) {
251 | int[] surfaceAttribs = {
252 | EGL14.EGL_WIDTH, width,
253 | EGL14.EGL_HEIGHT, height,
254 | EGL14.EGL_NONE
255 | };
256 | EGLSurface eglSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig,
257 | surfaceAttribs, 0);
258 | checkEglError("eglCreatePbufferSurface");
259 | if (eglSurface == null) {
260 | throw new RuntimeException("surface was null");
261 | }
262 | return eglSurface;
263 | }
264 |
265 | /**
266 | * Makes our EGL context current, using the supplied surface for both "draw" and "read".
267 | */
268 | public void makeCurrent(EGLSurface eglSurface) {
269 | if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
270 | // called makeCurrent() before create?
271 | Log.d(TAG, "NOTE: makeCurrent w/o display");
272 | }
273 | if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
274 | throw new RuntimeException("eglMakeCurrent failed");
275 | }
276 | }
277 |
278 | /**
279 | * Makes our EGL context current, using the supplied "draw" and "read" surfaces.
280 | */
281 | public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) {
282 | if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
283 | // called makeCurrent() before create?
284 | Log.d(TAG, "NOTE: makeCurrent w/o display");
285 | }
286 | if (!EGL14.eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) {
287 | throw new RuntimeException("eglMakeCurrent(draw,read) failed");
288 | }
289 | }
290 |
291 | /**
292 | * Makes no context current.
293 | */
294 | public void makeNothingCurrent() {
295 | if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
296 | EGL14.EGL_NO_CONTEXT)) {
297 | throw new RuntimeException("eglMakeCurrent failed");
298 | }
299 | }
300 |
301 | /**
302 | * Calls eglSwapBuffers. Use this to "publish" the current frame.
303 | *
304 | * @return false on failure
305 | */
306 | public boolean swapBuffers(EGLSurface eglSurface) {
307 | return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface);
308 | }
309 |
310 | /**
311 | * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
312 | */
313 | public void setPresentationTime(EGLSurface eglSurface, long nsecs) {
314 | EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs);
315 | }
316 |
317 | /**
318 | * Returns true if our context and the specified surface are current.
319 | */
320 | public boolean isCurrent(EGLSurface eglSurface) {
321 | return mEGLContext.equals(EGL14.eglGetCurrentContext()) &&
322 | eglSurface.equals(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW));
323 | }
324 |
325 | /**
326 | * Performs a simple surface query.
327 | */
328 | public int querySurface(EGLSurface eglSurface, int what) {
329 | int[] value = new int[1];
330 | EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0);
331 | return value[0];
332 | }
333 |
334 | /**
335 | * Queries a string value.
336 | */
337 | public String queryString(int what) {
338 | return EGL14.eglQueryString(mEGLDisplay, what);
339 | }
340 |
341 | /**
342 | * Returns the GLES version this context is configured for (currently 2 or 3).
343 | */
344 | public int getGlVersion() {
345 | return mGlVersion;
346 | }
347 |
348 | /**
349 | * Writes the current display, context, and surface to the log.
350 | */
351 | public static void logCurrent(String msg) {
352 | EGLDisplay display;
353 | EGLContext context;
354 | EGLSurface surface;
355 |
356 | display = EGL14.eglGetCurrentDisplay();
357 | context = EGL14.eglGetCurrentContext();
358 | surface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
359 | Log.i(TAG, "Current EGL (" + msg + "): display=" + display + ", context=" + context +
360 | ", surface=" + surface);
361 | }
362 |
363 | /**
364 | * Checks for EGL errors. Throws an exception if an error has been raised.
365 | */
366 | private void checkEglError(String msg) {
367 | int error;
368 | if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
369 | throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
370 | }
371 | }
372 | }
373 |
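Taken together, the usual lifecycle when this class feeds a video encoder is: create the core with FLAG_RECORDABLE, wrap the codec's input Surface in a window surface, then time-stamp and swap each frame. A hedged sketch (the variable names are illustrative, not from this project):

    EglCore eglCore = new EglCore(null, EglCore.FLAG_RECORDABLE);
    EGLSurface window = eglCore.createWindowSurface(encoderInputSurface); // Surface from MediaCodec
    eglCore.makeCurrent(window);
    // per frame: issue GL draw calls, then
    eglCore.setPresentationTime(window, frameTimeNanos);  // timestamp in nanoseconds
    eglCore.swapBuffers(window);                          // publish the frame
    // on shutdown:
    eglCore.releaseSurface(window);
    eglCore.release();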
--------------------------------------------------------------------------------
/app/src/main/java/io/github/junyuecao/croppedscreenrecorder/gles/Texture2dProgram.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package io.github.junyuecao.croppedscreenrecorder.gles;
18 |
19 | import android.opengl.GLES11Ext;
20 | import android.opengl.GLES20;
21 | import android.util.Log;
22 |
23 | import java.nio.FloatBuffer;
24 |
25 | /**
26 | * GL program and supporting functions for textured 2D shapes.
27 | */
28 | public class Texture2dProgram {
29 | private static final String TAG = GlUtil.TAG;
30 |
31 | public enum ProgramType {
32 | TEXTURE_2D, TEXTURE_EXT, TEXTURE_EXT_BW, TEXTURE_EXT_FILT
33 | }
34 |
35 | // Simple vertex shader, used for all programs.
36 | private static final String VERTEX_SHADER =
37 | "uniform mat4 uMVPMatrix;\n" +
38 | "uniform mat4 uTexMatrix;\n" +
39 | "attribute vec4 aPosition;\n" +
40 | "attribute vec4 aTextureCoord;\n" +
41 | "varying vec2 vTextureCoord;\n" +
42 | "void main() {\n" +
43 | " gl_Position = uMVPMatrix * aPosition;\n" +
44 | " vTextureCoord = (uTexMatrix * aTextureCoord).xy;\n" +
45 | "}\n";
46 |
47 | // Simple fragment shader for use with "normal" 2D textures.
48 | private static final String FRAGMENT_SHADER_2D =
49 | "precision mediump float;\n" +
50 | "varying vec2 vTextureCoord;\n" +
51 | "uniform sampler2D sTexture;\n" +
52 | "void main() {\n" +
53 | " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
54 | "}\n";
55 |
56 | // Simple fragment shader for use with external 2D textures (e.g. what we get from
57 | // SurfaceTexture).
58 | private static final String FRAGMENT_SHADER_EXT =
59 | "#extension GL_OES_EGL_image_external : require\n" +
60 | "precision mediump float;\n" +
61 | "varying vec2 vTextureCoord;\n" +
62 | "uniform samplerExternalOES sTexture;\n" +
63 | "void main() {\n" +
64 | " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
65 | "}\n";
66 |
67 | // Fragment shader that converts color to black & white with a simple transformation.
68 | private static final String FRAGMENT_SHADER_EXT_BW =
69 | "#extension GL_OES_EGL_image_external : require\n" +
70 | "precision mediump float;\n" +
71 | "varying vec2 vTextureCoord;\n" +
72 | "uniform samplerExternalOES sTexture;\n" +
73 | "void main() {\n" +
74 | " vec4 tc = texture2D(sTexture, vTextureCoord);\n" +
75 | " float color = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;\n" +
76 | " gl_FragColor = vec4(color, color, color, 1.0);\n" +
77 | "}\n";
78 |
79 | // Fragment shader with a convolution filter. The upper-left half will be drawn normally,
80 | // the lower-right half will have the filter applied, and a thin red line will be drawn
81 | // at the border.
82 | //
83 | // This is not optimized for performance. Some things that might make this faster:
84 | // - Remove the conditionals. They're used to present a half & half view with a red
85 | // stripe across the middle, but that's only useful for a demo.
86 | // - Unroll the loop. Ideally the compiler does this for you when it's beneficial.
87 | // - Bake the filter kernel into the shader, instead of passing it through a uniform
88 | // array. That, combined with loop unrolling, should reduce memory accesses.
89 | public static final int KERNEL_SIZE = 9;
90 | private static final String FRAGMENT_SHADER_EXT_FILT =
91 | "#extension GL_OES_EGL_image_external : require\n" +
92 | "#define KERNEL_SIZE " + KERNEL_SIZE + "\n" +
93 | "precision highp float;\n" +
94 | "varying vec2 vTextureCoord;\n" +
95 | "uniform samplerExternalOES sTexture;\n" +
96 | "uniform float uKernel[KERNEL_SIZE];\n" +
97 | "uniform vec2 uTexOffset[KERNEL_SIZE];\n" +
98 | "uniform float uColorAdjust;\n" +
99 | "void main() {\n" +
100 | " int i = 0;\n" +
101 | " vec4 sum = vec4(0.0);\n" +
102 | " if (vTextureCoord.x < vTextureCoord.y - 0.005) {\n" +
103 | " for (i = 0; i < KERNEL_SIZE; i++) {\n" +
104 | " vec4 texc = texture2D(sTexture, vTextureCoord + uTexOffset[i]);\n" +
105 | " sum += texc * uKernel[i];\n" +
106 | " }\n" +
107 | " sum += uColorAdjust;\n" +
108 | " } else if (vTextureCoord.x > vTextureCoord.y + 0.005) {\n" +
109 | " sum = texture2D(sTexture, vTextureCoord);\n" +
110 | " } else {\n" +
111 | " sum.r = 1.0;\n" +
112 | " }\n" +
113 | " gl_FragColor = sum;\n" +
114 | "}\n";
115 |
116 | private ProgramType mProgramType;
117 |
118 | // Handles to the GL program and various components of it.
119 | private int mProgramHandle;
120 | private int muMVPMatrixLoc;
121 | private int muTexMatrixLoc;
122 | private int muKernelLoc;
123 | private int muTexOffsetLoc;
124 | private int muColorAdjustLoc;
125 | private int maPositionLoc;
126 | private int maTextureCoordLoc;
127 |
128 | private int mTextureTarget;
129 |
130 | private float[] mKernel = new float[KERNEL_SIZE];
131 | private float[] mTexOffset;
132 | private float mColorAdjust;
133 |
134 |
135 | /**
136 | * Prepares the program in the current EGL context.
137 | */
138 | public Texture2dProgram(ProgramType programType) {
139 | mProgramType = programType;
140 |
141 | switch (programType) {
142 | case TEXTURE_2D:
143 | mTextureTarget = GLES20.GL_TEXTURE_2D;
144 | mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_2D);
145 | break;
146 | case TEXTURE_EXT:
147 | mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
148 | mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXT);
149 | break;
150 | case TEXTURE_EXT_BW:
151 | mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
152 | mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXT_BW);
153 | break;
154 | case TEXTURE_EXT_FILT:
155 | mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
156 | mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXT_FILT);
157 | break;
158 | default:
159 | throw new RuntimeException("Unhandled type " + programType);
160 | }
161 | if (mProgramHandle == 0) {
162 | throw new RuntimeException("Unable to create program");
163 | }
164 | Log.d(TAG, "Created program " + mProgramHandle + " (" + programType + ")");
165 |
166 | // get locations of attributes and uniforms
167 |
168 | maPositionLoc = GLES20.glGetAttribLocation(mProgramHandle, "aPosition");
169 | GlUtil.checkLocation(maPositionLoc, "aPosition");
170 | maTextureCoordLoc = GLES20.glGetAttribLocation(mProgramHandle, "aTextureCoord");
171 | GlUtil.checkLocation(maTextureCoordLoc, "aTextureCoord");
172 | muMVPMatrixLoc = GLES20.glGetUniformLocation(mProgramHandle, "uMVPMatrix");
173 | GlUtil.checkLocation(muMVPMatrixLoc, "uMVPMatrix");
174 | muTexMatrixLoc = GLES20.glGetUniformLocation(mProgramHandle, "uTexMatrix");
175 | GlUtil.checkLocation(muTexMatrixLoc, "uTexMatrix");
176 | muKernelLoc = GLES20.glGetUniformLocation(mProgramHandle, "uKernel");
177 | if (muKernelLoc < 0) {
178 | // no kernel in this one
179 | muKernelLoc = -1;
180 | muTexOffsetLoc = -1;
181 | muColorAdjustLoc = -1;
182 | } else {
183 | // has kernel, must also have tex offset and color adj
184 | muTexOffsetLoc = GLES20.glGetUniformLocation(mProgramHandle, "uTexOffset");
185 | GlUtil.checkLocation(muTexOffsetLoc, "uTexOffset");
186 | muColorAdjustLoc = GLES20.glGetUniformLocation(mProgramHandle, "uColorAdjust");
187 | GlUtil.checkLocation(muColorAdjustLoc, "uColorAdjust");
188 |
189 | // initialize default values
190 | setKernel(new float[] {0f, 0f, 0f, 0f, 1f, 0f, 0f, 0f, 0f}, 0f);
191 | setTexSize(256, 256);
192 | }
193 | }
194 |
195 | /**
196 | * Releases the program.
197 | *
198 | * The appropriate EGL context must be current (i.e. the one that was used to create
199 | * the program).
200 | */
201 | public void release() {
202 | Log.d(TAG, "deleting program " + mProgramHandle);
203 | GLES20.glDeleteProgram(mProgramHandle);
204 | mProgramHandle = -1;
205 | }
206 |
207 | /**
208 | * Returns the program type.
209 | */
210 | public ProgramType getProgramType() {
211 | return mProgramType;
212 | }
213 |
214 | /**
215 | * Creates a texture object suitable for use with this program.
216 | *
217 | * On exit, the texture will be bound.
218 | */
219 | public int createTextureObject() {
220 | int[] textures = new int[1];
221 | GLES20.glGenTextures(1, textures, 0);
222 | GlUtil.checkGlError("glGenTextures");
223 |
224 | int texId = textures[0];
225 | GLES20.glBindTexture(mTextureTarget, texId);
226 | GlUtil.checkGlError("glBindTexture " + texId);
227 |
228 | GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
229 | GLES20.GL_NEAREST);
230 | GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
231 | GLES20.GL_LINEAR);
232 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
233 | GLES20.GL_CLAMP_TO_EDGE);
234 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
235 | GLES20.GL_CLAMP_TO_EDGE);
236 | GlUtil.checkGlError("glTexParameter");
237 |
238 | return texId;
239 | }
240 |
241 | /**
242 | * Configures the convolution filter values.
243 | *
244 | * @param values Normalized filter values; must be KERNEL_SIZE elements.
245 | */
246 | public void setKernel(float[] values, float colorAdj) {
247 | if (values.length != KERNEL_SIZE) {
248 | throw new IllegalArgumentException("Kernel size is " + values.length +
249 | " vs. " + KERNEL_SIZE);
250 | }
251 | System.arraycopy(values, 0, mKernel, 0, KERNEL_SIZE);
252 | mColorAdjust = colorAdj;
253 | //Log.d(TAG, "filt kernel: " + Arrays.toString(mKernel) + ", adj=" + colorAdj);
254 | }
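For example, a simple 3x3 box blur uses nine equal, normalized weights and no color adjustment; this only illustrates the expected argument shape for a Texture2dProgram instance (here called "program"):

    float ninth = 1.0f / 9.0f;
    program.setKernel(new float[] {
            ninth, ninth, ninth,
            ninth, ninth, ninth,
            ninth, ninth, ninth}, 0f);   // KERNEL_SIZE (9) weights, colorAdj = 0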
255 |
256 | /**
257 | * Sets the size of the texture. This is used to find adjacent texels when filtering.
258 | */
259 | public void setTexSize(int width, int height) {
260 | float rw = 1.0f / width;
261 | float rh = 1.0f / height;
262 |
263 | // Don't need to create a new array here, but it's syntactically convenient.
264 | mTexOffset = new float[] {
265 | -rw, -rh, 0f, -rh, rw, -rh,
266 | -rw, 0f, 0f, 0f, rw, 0f,
267 | -rw, rh, 0f, rh, rw, rh
268 | };
269 | //Log.d(TAG, "filt size: " + width + "x" + height + ": " + Arrays.toString(mTexOffset));
270 | }
271 |
272 | /**
273 | * Issues the draw call. Does the full setup on every call.
274 | *
275 | * @param mvpMatrix The 4x4 projection matrix.
276 | * @param vertexBuffer Buffer with vertex position data.
277 | * @param firstVertex Index of first vertex to use in vertexBuffer.
278 | * @param vertexCount Number of vertices in vertexBuffer.
279 | * @param coordsPerVertex The number of coordinates per vertex (e.g. x,y is 2).
280 | * @param vertexStride Width, in bytes, of the position data for each vertex (often
281 | * vertexCount * sizeof(float)).
282 | * @param texMatrix A 4x4 transformation matrix for texture coords. (Primarily intended
283 | * for use with SurfaceTexture.)
284 | * @param texBuffer Buffer with vertex texture data.
285 | * @param texStride Width, in bytes, of the texture data for each vertex.
286 | */
287 | public void draw(float[] mvpMatrix, FloatBuffer vertexBuffer, int firstVertex,
288 | int vertexCount, int coordsPerVertex, int vertexStride,
289 | float[] texMatrix, FloatBuffer texBuffer, int textureId, int texStride) {
290 | GlUtil.checkGlError("draw start");
291 |
292 | // Select the program.
293 | GLES20.glUseProgram(mProgramHandle);
294 | GlUtil.checkGlError("glUseProgram");
295 |
296 | // Set the texture.
297 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
298 | GLES20.glBindTexture(mTextureTarget, textureId);
299 |
300 | // Copy the model / view / projection matrix over.
301 | GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mvpMatrix, 0);
302 | GlUtil.checkGlError("glUniformMatrix4fv");
303 |
304 | // Copy the texture transformation matrix over.
305 | GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, texMatrix, 0);
306 | GlUtil.checkGlError("glUniformMatrix4fv");
307 |
308 | // Enable the "aPosition" vertex attribute.
309 | GLES20.glEnableVertexAttribArray(maPositionLoc);
310 | GlUtil.checkGlError("glEnableVertexAttribArray");
311 |
312 | // Connect vertexBuffer to "aPosition".
313 | GLES20.glVertexAttribPointer(maPositionLoc, coordsPerVertex,
314 | GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
315 | GlUtil.checkGlError("glVertexAttribPointer");
316 |
317 | // Enable the "aTextureCoord" vertex attribute.
318 | GLES20.glEnableVertexAttribArray(maTextureCoordLoc);
319 | GlUtil.checkGlError("glEnableVertexAttribArray");
320 |
321 | // Connect texBuffer to "aTextureCoord".
322 | GLES20.glVertexAttribPointer(maTextureCoordLoc, 2,
323 | GLES20.GL_FLOAT, false, texStride, texBuffer);
324 | GlUtil.checkGlError("glVertexAttribPointer");
325 |
326 | // Populate the convolution kernel, if present.
327 | if (muKernelLoc >= 0) {
328 | GLES20.glUniform1fv(muKernelLoc, KERNEL_SIZE, mKernel, 0);
329 | GLES20.glUniform2fv(muTexOffsetLoc, KERNEL_SIZE, mTexOffset, 0);
330 | GLES20.glUniform1f(muColorAdjustLoc, mColorAdjust);
331 | }
332 |
333 | // Draw the rect.
334 | GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, firstVertex, vertexCount);
335 | GlUtil.checkGlError("glDrawArrays");
336 |
337 | // Done -- disable vertex array, texture, and program.
338 | GLES20.glDisableVertexAttribArray(maPositionLoc);
339 | GLES20.glDisableVertexAttribArray(maTextureCoordLoc);
340 | GLES20.glBindTexture(mTextureTarget, 0);
341 | GLES20.glUseProgram(0);
342 | }
343 | }
344 |
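A typical way to combine this program with a Drawable2d is to stretch an external (SurfaceTexture) frame over the whole viewport. In the sketch below, textureId and texMatrix are assumed to come from a SurfaceTexture elsewhere in the pipeline:

    Texture2dProgram program = new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT);
    Drawable2d fullRect = new Drawable2d(Drawable2d.Prefab.FULL_RECTANGLE);
    program.draw(GlUtil.IDENTITY_MATRIX,
            fullRect.getVertexArray(), 0, fullRect.getVertexCount(),
            fullRect.getCoordsPerVertex(), fullRect.getVertexStride(),
            texMatrix, fullRect.getTexCoordArray(), textureId, fullRect.getTexCoordStride());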
--------------------------------------------------------------------------------
/app/src/main/java/io/github/junyuecao/croppedscreenrecorder/VideoEncoderCore.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package io.github.junyuecao.croppedscreenrecorder;
18 |
19 | import static android.os.Build.VERSION_CODES.LOLLIPOP;
20 |
21 | import android.media.AudioFormat;
22 | import android.media.MediaCodec;
23 | import android.media.MediaCodecInfo;
24 | import android.media.MediaFormat;
25 | import android.media.MediaMuxer;
26 | import android.os.Handler;
27 | import android.os.Looper;
28 | import android.support.annotation.RequiresApi;
29 | import android.util.Log;
30 | import android.view.Surface;
31 |
32 | import java.io.File;
33 | import java.io.IOException;
34 | import java.nio.ByteBuffer;
35 | import java.util.Timer;
36 | import java.util.TimerTask;
37 |
38 | /**
39 | * This class wraps up the core components used for surface-input video encoding.
40 | *
41 | * Once created, frames are fed to the input surface. Remember to provide the presentation
42 | * time stamp, and always call drainEncoder() before swapBuffers() to ensure that the
43 | * producer side doesn't get backed up.
44 | *
45 | * This class is not thread-safe, with one exception: it is valid to use the input surface
46 | * on one thread, and drain the output on a different thread.
47 | */
48 | @RequiresApi(LOLLIPOP)
49 | public class VideoEncoderCore {
50 | private static final String TAG = "VideoEncoderCore";
51 | private static final boolean VERBOSE = true;
52 | private static final int TIMEOUT_USEC = 10000;
53 | public static final int DEFAULT_SAMPLE_RATE = 48000;
54 | public static final int DEFAULT_CHANNEL_CONFIG = 1;
55 | public static final int DEFAULT_DATA_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
56 |
57 | public static final int MAX_INPUT_SIZE = 65536;
58 | private static final String VIDEO_MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC; // H.264 Advanced Video Coding
59 | private static final String AUDIO_MIME_TYPE = MediaFormat.MIMETYPE_AUDIO_AAC;
60 | /** fps */
61 | private static final int FRAME_RATE = 24;
62 | /** 5 seconds between I-frames */
63 | private static final int IFRAME_INTERVAL = 5;
64 | /** Save path */
65 | private final String mPath;
66 |
67 | private Surface mInputSurface;
68 | private MediaMuxer mMuxer;
69 | private MediaCodec mVideoEncoder;
70 | private MediaCodec mAudioEncoder;
71 | private MediaCodec.BufferInfo mVBufferInfo;
72 | private MediaCodec.BufferInfo mABufferInfo;
73 | private int mVTrackIndex;
74 | private int mATrackIndex;
75 | private boolean mMuxerStarted;
76 | private boolean mStreamEnded;
77 | private long mRecordStartedAt = 0;
78 |
79 | private RecordCallback mCallback;
80 | private Handler mMainHandler;
81 | // true while no audio has been written; if still true when recording ends, we write one dummy audio frame so the muxer can stop cleanly
82 | private boolean mIsAudioEmpty;
83 |
84 | private Runnable mRecordProgressChangeRunnable = new Runnable() {
85 |
86 | @Override
87 | public void run() {
88 | if (mCallback != null) {
89 | mCallback.onRecordedDurationChanged(System.currentTimeMillis() - mRecordStartedAt);
90 | }
91 | }
92 | };
93 | private String mCoverPath;
94 | private Timer mProgressTimer;
95 | private TimerTask mProgressTask = new TimerTask() {
96 | @Override
97 | public void run() {
98 | mMainHandler.post(mRecordProgressChangeRunnable);
99 | }
100 | };
101 |
102 | /**
103 | * Configures encoder and muxer state, and prepares the input Surface.
104 | */
105 | public VideoEncoderCore(int width, int height, int bitRate, File outputFile)
106 | throws IOException {
107 | mMainHandler = new Handler(Looper.getMainLooper());
108 | mVBufferInfo = new MediaCodec.BufferInfo();
109 | mABufferInfo = new MediaCodec.BufferInfo();
110 |
111 | MediaFormat videoFormat = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, width, height);
112 |
113 | // Set some properties. Failing to specify some of these can cause the MediaCodec
114 | // configure() call to throw an unhelpful exception.
115 | videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
116 | MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
117 | videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
118 | videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
119 | videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
120 | if (VERBOSE) {
121 | Log.d(TAG, "videoFormat: " + videoFormat);
122 | }
123 |
124 | // Create a MediaCodec encoder, and configure it with our videoFormat. Get a Surface
125 | // we can use for input and wrap it with a class that handles the EGL work.
126 | mVideoEncoder = MediaCodec.createEncoderByType(VIDEO_MIME_TYPE);
127 | mVideoEncoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
128 | mInputSurface = mVideoEncoder.createInputSurface();
129 | mVideoEncoder.start();
130 |
131 | MediaFormat audioFormat = MediaFormat.createAudioFormat(AUDIO_MIME_TYPE, DEFAULT_SAMPLE_RATE, DEFAULT_CHANNEL_CONFIG);
132 | audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
133 | audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 128000);
134 | audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, MAX_INPUT_SIZE);
135 | mIsAudioEmpty = true;
136 |
137 | mAudioEncoder = MediaCodec.createEncoderByType(AUDIO_MIME_TYPE);
138 | mAudioEncoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
139 | mAudioEncoder.start();
140 | mStreamEnded = false;
141 |
142 | // Create a MediaMuxer. We can't add the video track and start() the muxer here,
143 | // because our MediaFormat doesn't have the Magic Goodies. These can only be
144 | // obtained from the encoder after it has started processing data.
145 | //
146 | // We're not actually interested in multiplexing audio. We just want to convert
147 | // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
148 | mPath = outputFile.toString();
149 | mMuxer = new MediaMuxer(mPath,
150 | MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
151 |
152 | mVTrackIndex = -1;
153 | mATrackIndex = -1;
154 | mMuxerStarted = false;
155 | }
156 |
157 | /**
158 | * Returns the encoder's input surface.
159 | */
160 | public Surface getInputSurface() {
161 | return mInputSurface;
162 | }
163 |
164 | /**
165 | * Releases encoder resources.
166 | */
167 | public void release() {
168 | if (VERBOSE) {
169 | Log.d(TAG, "releasing encoder objects");
170 | }
171 |
172 | if (mVideoEncoder != null) {
173 | mVideoEncoder.stop();
174 | mVideoEncoder.release();
175 | mVideoEncoder = null;
176 | }
177 | if (mAudioEncoder != null) {
178 | mAudioEncoder.stop();
179 | mAudioEncoder.release();
180 | mAudioEncoder = null;
181 | }
182 | if (mProgressTimer != null) {
183 | mProgressTimer.cancel();
184 | mProgressTimer = null;
185 | }
186 | if (mMuxer != null) {
187 | try {
188 | if (mIsAudioEmpty) {
189 | // avoid an empty audio track: if no audio samples were written, muxer.stop() would fail
190 | byte[] bytes = new byte[2];
191 | ByteBuffer buffer = ByteBuffer.wrap(bytes);
192 | mABufferInfo.set(0, 2, System.nanoTime() / 1000, 0);
193 | buffer.position(mABufferInfo.offset);
194 | buffer.limit(mABufferInfo.offset + mABufferInfo.size);
195 | mMuxer.writeSampleData(mATrackIndex, buffer, mABufferInfo);
196 | }
197 | mMuxer.stop();
198 | if (mCallback != null) {
199 | mMainHandler.post(new Runnable() {
200 | @Override
201 | public void run() {
202 | mCallback.onRecordSuccess(mPath, mCoverPath, System.currentTimeMillis() - mRecordStartedAt);
203 | }
204 | });
205 | }
206 | } catch (final IllegalStateException e) {
207 | Log.w(TAG, "Record failed with error:", e);
208 | if (mCallback != null) {
209 | mMainHandler.post(new Runnable() {
210 | @Override
211 | public void run() {
212 | mCallback.onRecordFailed(e, System.currentTimeMillis() - mRecordStartedAt);
213 | }
214 | });
215 | }
216 | }
217 | try {
218 | mMuxer.release();
219 | } catch (IllegalStateException ex) {
220 | Log.w(TAG, "Record failed with error:", ex);
221 | }
222 |
223 | mMuxer = null;
224 | }
225 |
226 | }
227 |
228 | public String getCoverPath() {
229 | return mCoverPath;
230 | }
231 |
232 | public void setCoverPath(String coverPath) {
233 | mCoverPath = coverPath;
234 | }
235 |
236 | public RecordCallback getRecordCallback() {
237 | return mCallback;
238 | }
239 |
240 | public void setRecordCallback(RecordCallback callback) {
241 | mCallback = callback;
242 | }
243 |
244 | /**
245 | * Extracts all pending data from the encoder and forwards it to the muxer.
246 | *
247 | * If endOfStream is not set, this returns when there is no more data to drain. If it
248 | * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
249 | * Calling this with endOfStream set should be done once, right before stopping the muxer.
250 | *
251 | * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're
252 | * not recording audio.
253 | */
254 | public void drainEncoder(boolean endOfStream) {
255 | if (VERBOSE) {
256 | Log.d(TAG, "drainEncoder(" + endOfStream + ")");
257 | }
258 |
259 | if (endOfStream) {
260 | if (VERBOSE) {
261 | Log.d(TAG, "sending EOS to encoder");
262 | }
263 | mVideoEncoder.signalEndOfInputStream();
264 | mStreamEnded = true;
265 | }
266 |
267 | drainVideo(endOfStream);
268 | drainAudio(endOfStream);
269 |
270 | if (mMuxerStarted && mCallback != null) {
271 | mMainHandler.post(mRecordProgressChangeRunnable);
272 | }
273 | }
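The ordering the class comment insists on (always drain before swapBuffers()) can be summarized in one place. This is an illustrative sketch, not the project's actual render loop, and it borrows the EglCore helper from the gles package:

    static void encodeOneFrame(VideoEncoderCore encoderCore, EglCore eglCore,
                               EGLSurface encoderWindow, long timestampNs) {
        encoderCore.drainEncoder(false);     // pull pending output so the codec doesn't back up
        // ... draw the frame with GL into the encoder's input surface ...
        eglCore.setPresentationTime(encoderWindow, timestampNs);
        eglCore.swapBuffers(encoderWindow);  // submit the frame to the encoder
    }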
274 |
275 | private void drainVideo(boolean endOfStream) {
276 | while (true) {
277 | int encoderStatus = mVideoEncoder.dequeueOutputBuffer(mVBufferInfo, TIMEOUT_USEC);
278 | if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
279 | // no output available yet
280 | if (!endOfStream) {
281 | break; // out of while
282 | } else {
283 | if (mStreamEnded) {
284 | break;
285 | }
286 | if (VERBOSE) {
287 | Log.d(TAG, "no video output available, spinning to await EOS");
288 | }
289 | }
290 | } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
291 | // should happen before receiving buffers, and should only happen once
292 | if (mMuxerStarted) {
293 | throw new RuntimeException("format changed twice");
294 | }
295 | MediaFormat newFormat = mVideoEncoder.getOutputFormat();
296 | Log.d(TAG, "video encoder output format changed: " + newFormat);
297 |
298 | // now that we have the Magic Goodies, start the muxer
299 | mVTrackIndex = mMuxer.addTrack(newFormat);
300 | tryStartMuxer();
301 | } else if (encoderStatus < 0) {
302 | Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
303 | encoderStatus);
304 | // let's ignore it
305 | } else {
306 | if (mMuxerStarted) {
307 | // same as mVideoEncoder.getOutputBuffer(encoderStatus)
308 | ByteBuffer encodedData = mVideoEncoder.getOutputBuffer(encoderStatus);
309 |
310 | if (encodedData == null) {
311 | throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
312 | " was null");
313 | }
314 |
315 | if ((mVBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
316 | // The codec config data was pulled out and fed to the muxer when we got
317 | // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
318 | if (VERBOSE) {
319 | Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
320 | }
321 | mVBufferInfo.size = 0;
322 | }
323 |
324 | if (mVBufferInfo.size != 0) {
325 | if (!mMuxerStarted) {
326 | throw new RuntimeException("muxer hasn't started");
327 | }
328 |
329 | // adjust the ByteBuffer values to match BufferInfo (not needed?)
330 | encodedData.position(mVBufferInfo.offset);
331 | encodedData.limit(mVBufferInfo.offset + mVBufferInfo.size);
332 |
333 | mMuxer.writeSampleData(mVTrackIndex, encodedData, mVBufferInfo);
334 | if (VERBOSE) {
335 | Log.d(TAG, "sent " + mVBufferInfo.size + " video bytes to muxer, ts=" +
336 | mVBufferInfo.presentationTimeUs);
337 | }
338 | }
339 |
340 | mVideoEncoder.releaseOutputBuffer(encoderStatus, false);
341 |
342 | if ((mVBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
343 | if (!endOfStream) {
344 | Log.w(TAG, "reached end of stream unexpectedly");
345 | } else {
346 | if (VERBOSE) {
347 | Log.d(TAG, "end of video stream reached");
348 | }
349 | }
350 | break; // out of while
351 | }
352 | } else {
353 | Log.w(TAG, "Muxer is not started, just return");
354 | // let's ignore it
355 | mVideoEncoder.releaseOutputBuffer(encoderStatus, false);
356 | }
357 | }
358 | }
359 | }
360 |
361 | public void drainAudio(boolean endOfStream) {
362 | while (true) {
363 | // Pull encoded audio from the encoder's output buffers and write it to the muxer
364 | int index = mAudioEncoder.dequeueOutputBuffer(mABufferInfo, TIMEOUT_USEC);
365 | if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
366 | // no output available yet
367 | if (!endOfStream) {
368 | break; // out of while
369 | } else {
370 | if (mStreamEnded) {
371 | break;
372 | }
373 | if (VERBOSE) {
374 | Log.d(TAG, "no audio output available, spinning to await EOS");
375 | }
376 | }
377 |             } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
379 | if (mATrackIndex != -1) {
380 | throw new RuntimeException("format changed twice");
381 | }
382 | mATrackIndex = mMuxer.addTrack(mAudioEncoder.getOutputFormat());
383 | tryStartMuxer();
384 | } else if (index >= 0) {
385 | if (mMuxerStarted) {
386 | if ((mABufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
387 | // ignore codec config
388 | mABufferInfo.size = 0;
389 | }
390 |
391 | if (mABufferInfo.size != 0) {
392 | ByteBuffer out = mAudioEncoder.getOutputBuffer(index);
393 | out.position(mABufferInfo.offset);
394 | out.limit(mABufferInfo.offset + mABufferInfo.size);
395 | mMuxer.writeSampleData(mATrackIndex, out, mABufferInfo);
396 | mIsAudioEmpty = false;
397 | if (VERBOSE) {
398 | Log.d(TAG, "sent " + mABufferInfo.size + " audio bytes to muxer, ts=" +
399 | mABufferInfo.presentationTimeUs);
400 | }
401 | }
402 |
403 | mAudioEncoder.releaseOutputBuffer(index, false);
404 |
405 | if ((mABufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
406 | if (!endOfStream) {
407 | Log.w(TAG, "reached end of stream unexpectedly");
408 | } else {
409 | if (VERBOSE) {
410 | Log.d(TAG, "end of audio stream reached");
411 | }
412 | }
413 | mStreamEnded = true; // Audio stream ended
414 | break; // out of while
415 | }
416 | } else {
417 |                 Log.w(TAG, "Muxer is not started, dropping audio buffer");
418 |                 // release the buffer without writing it to the muxer
419 | mAudioEncoder.releaseOutputBuffer(index, false); // Don't forget to release it
420 | }
421 | }
422 | }
423 | }
424 |
425 | /**
426 | * Enqueue the audio frame buffers to the encoder
427 | *
428 | * @param buffer the data
429 | * @param size size of the data
430 | * @param endOfStream is this frame the end
431 | */
432 | public void enqueueAudioFrame(ByteBuffer buffer, int size, long presentTimeUs, boolean endOfStream) {
433 | boolean done = false;
434 | while (!done) {
435 | // Start to put data to InputBuffer
436 | int index = mAudioEncoder.dequeueInputBuffer(TIMEOUT_USEC);
437 |             if (index >= 0) { // got a free input buffer
438 |                 // (if the codec has none free, the INFO_TRY_AGAIN_LATER branch below spins until one is released)
439 | ByteBuffer in = mAudioEncoder.getInputBuffer(index);
440 | in.clear();
441 | if (size < 0) {
442 | size = 0;
443 | }
444 | if (buffer == null) {
445 | buffer = ByteBuffer.allocate(0);
446 | size = 0;
447 | }
448 | in.position(0);
449 | in.limit(size);
450 | buffer.position(0);
451 | buffer.limit(size);
452 | if (VERBOSE) {
453 | Log.d(TAG, "enqueueAudioFrame: "
454 | + "buffer [pos:" + buffer.position() + ", limit: " + buffer.limit() + "]"
455 | + "in [pos:" + in.position() + ", capacity: " + in.capacity() + "]");
456 | }
457 |                 in.put(buffer); // size must not exceed in.capacity(), otherwise in.limit(size) above throws
458 | int flag = endOfStream ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0;
459 | mAudioEncoder.queueInputBuffer(index, 0, size, presentTimeUs, flag);
460 | done = true; // Done passing the input to the codec, but still check for available output below
461 | } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
462 |                 // all input buffers are in use; spin until the codec releases one
463 |                 if (VERBOSE) {
464 |                     Log.d(TAG, "no audio input buffer available, spinning until one is free");
465 | }
466 | }
467 | }
468 | }
469 |
470 | /**
471 | * Enqueue the audio frame buffers to the encoder
472 | *
473 | * @param buffer the data
474 | * @param size size of the data
475 | * @param endOfStream is this frame the end
476 | */
477 | public void enqueueAudioFrame(ByteBuffer buffer, int size, boolean endOfStream) {
478 | enqueueAudioFrame(buffer, size, System.nanoTime() / 1000, endOfStream);
479 | }
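    /*
     * Illustrative sketch (annotation, not part of the original file): one way a capture loop
     * could feed PCM into the enqueueAudioFrame() overloads above. The AudioRecord setup and the
     * "recording", "audioRecord" and "encoderCore" names are assumptions made for this example.
     *
     *     ByteBuffer pcm = ByteBuffer.allocateDirect(2048);
     *     while (recording) {
     *         pcm.clear();
     *         int read = audioRecord.read(pcm, 2048);            // blocking read of 16-bit PCM
     *         encoderCore.enqueueAudioFrame(pcm, Math.max(read, 0), false);
     *     }
     *     encoderCore.enqueueAudioFrame(null, 0, true);          // mark the end of the audio stream
     */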
480 |
481 | private void tryStartMuxer() {
482 | if (mVTrackIndex != -1 // Video track is added
483 | && mATrackIndex != -1 // and audio track is added
484 | && !mMuxerStarted) { // and muxer not started
485 | // then start the muxer
486 | mMuxer.start();
487 | mMuxerStarted = true;
488 | mRecordStartedAt = System.currentTimeMillis();
489 | mProgressTimer = new Timer();
490 | mProgressTimer.schedule(mProgressTask, 0, 16);
491 | }
492 | }
493 | }
494 |
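/*
 * Summary sketch (annotation, not part of the original file): VideoEncoderCore is driven by its
 * caller roughly as follows. The method names are the ones defined above; the surrounding control
 * flow is illustrative only.
 *
 *     VideoEncoderCore core = new VideoEncoderCore(width, height, bitRate, outputFile);
 *     Surface input = core.getInputSurface();   // render each video frame into this surface
 *     core.drainEncoder(false);                 // call once per video frame to pull encoder output
 *     core.enqueueAudioFrame(pcm, size, false); // call once per captured audio buffer
 *     core.drainEncoder(true);                  // on stop: flush both codecs
 *     core.release();                           // then tear everything down
 */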
--------------------------------------------------------------------------------
/app/src/main/java/io/github/junyuecao/croppedscreenrecorder/TextureMovieEncoder.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2013 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package io.github.junyuecao.croppedscreenrecorder;
18 |
19 | import android.graphics.SurfaceTexture;
20 | import android.opengl.EGLContext;
21 | import android.opengl.GLES20;
22 | import android.os.Build;
23 | import android.os.Handler;
24 | import android.os.HandlerThread;
25 | import android.os.Looper;
26 | import android.os.Message;
27 | import android.support.annotation.NonNull;
28 | import android.support.annotation.RequiresApi;
29 | import android.util.Log;
30 | import android.view.Surface;
31 | import io.github.junyuecao.croppedscreenrecorder.gles.EglCore;
32 | import io.github.junyuecao.croppedscreenrecorder.gles.Texture2dProgram;
33 | import io.github.junyuecao.croppedscreenrecorder.gles.WindowSurface;
34 |
35 | import java.io.File;
36 | import java.io.IOException;
37 | import java.lang.ref.WeakReference;
38 | import java.nio.ByteBuffer;
39 |
40 |
41 | /**
42 | * Encode a movie from frames rendered from an external texture image.
43 | *
44 | * The object wraps an encoder running on a dedicated thread. The various control messages
45 | * may be sent from arbitrary threads (typically the app UI thread). The encoder thread
46 | * manages both sides of the encoder (feeding and draining); the only external input is
47 | * the GL texture.
48 | *
49 | * The design is complicated slightly by the need to create an EGL context that shares state
50 | * with a view that gets restarted if (say) the device orientation changes. When the view
51 | * in question is a GLSurfaceView, we don't have full control over the EGL context creation
52 | * on that side, so we have to bend a bit backwards here.
53 | *
54 | * To use:
55 | * - create a TextureMovieEncoder object and an EncoderConfig
56 | * - call startRecording() with the config
57 | * - call setTextureId() with the texture object that receives frames
58 | * - for each frame, after latching it with SurfaceTexture#updateTexImage(),
59 | *   call frameAvailable()
60 | * - call stopRecording() when finished
61 | *
62 | *
63 | *
64 | * TODO: tweak the API (esp. textureId) so it's less awkward for simple use cases.
65 | */
66 | @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
67 | public class TextureMovieEncoder implements Runnable, SurfaceTexture.OnFrameAvailableListener {
68 | private static final String TAG = "TextureMovieEncoder";
69 | private static final boolean VERBOSE = true;
70 |
71 | private static final int MSG_START_RECORDING = 0;
72 | private static final int MSG_STOP_RECORDING = 1;
73 | private static final int MSG_FRAME_AVAILABLE = 2;
74 | private static final int MSG_SET_TEXTURE_ID = 3;
75 | private static final int MSG_UPDATE_SHARED_CONTEXT = 4;
76 | private static final int MSG_AUDIO_FRAME_AVAILABLE = 5;
77 | private static final int MSG_QUIT = 6;
78 | // ----- accessed exclusively by encoder thread -----
79 | private WindowSurface mInputWindowSurface;
80 | private EglCore mEglCore;
81 | private MainFrameRect mFullScreen;
82 | private int mTextureId;
83 | private int mFrameNum;
84 | private VideoEncoderCore mVideoEncoder;
85 |
86 | // ----- accessed by multiple threads -----
87 | private volatile EncoderHandler mHandler;
88 |
89 | private final Object mReadyFence = new Object(); // guards ready/running
90 | private boolean mReady;
91 | private boolean mRunning;
92 | private Callback mCallback;
93 | private HandlerThread mVideoFrameSender;
94 | private Handler mVideoFrameHandler;
95 | private SurfaceTexture mSurfaceTexture;
96 | private Runnable mUpdate = new Runnable() {
97 | @Override
98 | public void run() {
99 | if (mSurfaceTexture != null) {
100 | mSurfaceTexture.updateTexImage();
101 | }
102 | }
103 | };
104 | private Surface mSurface;
105 | private float mTopCropped;
106 | private float mBottomCropped;
107 | private float[] mTransform;
108 | private RecordCallback mRecordCallback;
109 | // Should save first frame as a cover
110 | private boolean mFirstFrameSaved;
111 | private int mVideoWidth;
112 | private int mVideoHeight;
113 | private File mCoverImageFile;
114 |
115 | public Callback getCallback() {
116 | return mCallback;
117 | }
118 |
119 | public void setCallback(Callback callback) {
120 | mCallback = callback;
121 | }
122 |
123 | /**
124 | * Tells the video recorder to start recording. (Call from non-encoder thread.)
125 | *
126 | * Creates a new thread, which will create an encoder using the provided configuration.
127 | *
128 | * Returns after the recorder thread has started and is ready to accept Messages. The
129 | * encoder may not yet be fully configured.
130 | */
131 | public void startRecording(EncoderConfig config) {
132 | Log.d(TAG, "Encoder: startRecording()");
133 | synchronized(mReadyFence) {
134 | if (mRunning) {
135 | Log.w(TAG, "Encoder thread already running");
136 | return;
137 | }
138 | mRunning = true;
139 | new Thread(this, "TextureMovieEncoder").start();
140 | while (!mReady) {
141 | try {
142 | mReadyFence.wait();
143 | } catch (InterruptedException ie) {
144 | // ignore
145 | }
146 | }
147 | }
148 |
149 | mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
150 | }
151 |
152 | /**
153 | * Tells the video recorder to stop recording. (Call from non-encoder thread.)
154 | *
155 | * Returns immediately; the encoder/muxer may not yet be finished creating the movie.
156 | *
157 | * TODO: have the encoder thread invoke a callback on the UI thread just before it shuts down
158 | * so we can provide reasonable status UI (and let the caller know that movie encoding
159 | * has completed).
160 | */
161 | public void stopRecording() {
162 | synchronized(mReadyFence) {
163 | if (!mReady) {
164 | return;
165 | }
166 | }
167 |
168 | mHandler.removeCallbacks(mUpdate);
169 | synchronized(this) {
170 | mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
171 | mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
172 | }
173 | // We don't know when these will actually finish (or even start). We don't want to
174 | // delay the UI thread though, so we return immediately.
175 | }
176 |
177 | /**
178 | * Returns true if recording has been started.
179 | */
180 | public boolean isRecording() {
181 | synchronized(mReadyFence) {
182 | return mRunning;
183 | }
184 | }
185 |
186 | /**
187 | * Tells the video recorder to refresh its EGL surface. (Call from non-encoder thread.)
188 | */
189 | public void updateSharedContext(EGLContext sharedContext) {
190 | mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, sharedContext));
191 | }
192 |
193 |
194 | /**
195 | * @see #frameAvailable(SurfaceTexture, long)
196 | */
197 | public void frameAvailable(SurfaceTexture st) {
198 | frameAvailable(st, st.getTimestamp());
199 | }
200 |
201 | /**
202 | * Tells the video recorder that a new frame is available. (Call from non-encoder thread.)
203 | *
204 | * This function sends a message and returns immediately. This isn't sufficient -- we
205 | * don't want the caller to latch a new frame until we're done with this one -- but we
206 | * can get away with it so long as the input frame rate is reasonable and the encoder
207 | * thread doesn't stall.
208 | * TODO: either block here until the texture has been rendered onto the encoder surface,
209 | * or have a separate "block if still busy" method that the caller can execute immediately
210 | * before it calls updateTexImage(). The latter is preferred because we don't want to
211 | * stall the caller while this thread does work.
212 | * @param timestamp present timestamp in nanosecond
213 | */
214 | public void frameAvailable(SurfaceTexture st, long timestamp) {
215 | synchronized(mReadyFence) {
216 | if (!mReady) {
217 | return;
218 | }
219 | }
220 |
221 | if (mTransform == null) {
222 | mTransform = new float[16];
223 | }
224 | st.getTransformMatrix(mTransform);
225 | if (timestamp == 0) {
226 | // Seeing this after device is toggled off/on with power button. The
227 | // first frame back has a zero timestamp.
228 | //
229 | // MPEG4Writer thinks this is cause to abort() in native code, so it's very
230 | // important that we just ignore the frame.
231 | Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
232 | return;
233 | }
234 |
235 | mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
236 | (int) (timestamp >> 32), (int) timestamp, mTransform));
237 | }
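    /*
     * Note on the message above (annotation): a Message only carries two 32-bit int slots
     * (arg1/arg2), so the 64-bit nanosecond timestamp is split across them. The receiving handler
     * is expected to reassemble it as:
     *
     *     long timestampNanos = (((long) msg.arg1) << 32) | (((long) msg.arg2) & 0xffffffffL);
     */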
238 |
239 | public void audioFrameAvailable(ByteBuffer buffer, int size, boolean endOfStream) {
240 | synchronized(mReadyFence) {
241 | if (!mReady) {
242 | return;
243 | }
244 | }
245 | if (mVideoEncoder != null) {
246 | mVideoEncoder.enqueueAudioFrame(buffer, size, endOfStream);
247 | }
248 | }
249 |
250 | /**
251 | * Tells the video recorder what texture name to use. This is the external texture that
252 | * we're receiving camera previews in. (Call from non-encoder thread.)
253 | *
254 | * TODO: do something less clumsy
255 | */
256 | public void setTextureId(int id) {
257 | synchronized(mReadyFence) {
258 | if (!mReady) {
259 | return;
260 | }
261 | }
262 | mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
263 | }
264 |
265 | /**
266 | * Encoder thread entry point. Establishes Looper/Handler and waits for messages.
267 | *
268 | *
269 | * @see Thread#run()
270 | */
271 | @Override
272 | public void run() {
273 | // Establish a Looper for this thread, and define a Handler for it.
274 | Looper.prepare();
275 | synchronized(mReadyFence) {
276 | mHandler = new EncoderHandler(this);
277 | mReady = true;
278 | mReadyFence.notify();
279 | }
280 | Looper.loop();
281 |
282 | Log.d(TAG, "Encoder thread exiting");
283 | synchronized(mReadyFence) {
284 | mReady = mRunning = false;
285 | mHandler = null;
286 | }
287 | }
288 |
289 | public void setRecordCallback(RecordCallback recordCallback) {
290 | mRecordCallback = recordCallback;
291 | }
292 |
293 | public RecordCallback getRecordCallback() {
294 | return mRecordCallback;
295 | }
296 |
297 | /**
298 | * Starts recording.
299 | */
300 | private void handleStartRecording(EncoderConfig config) {
301 | Log.d(TAG, "handleStartRecording " + config);
302 | mFrameNum = 0;
303 | prepareEncoder(config);
304 | }
305 |
306 | /**
307 | * Handles notification of an available frame.
308 | *
309 | * The texture is rendered onto the encoder's input surface, along with a moving
310 | * box (just because we can).
311 | *
312 | *
313 | * @param transform The texture transform, from SurfaceTexture.
314 | * @param timestampNanos The frame's timestamp, from SurfaceTexture.
315 | */
316 | private void handleFrameAvailable(float[] transform, long timestampNanos) {
317 | if (VERBOSE) {
318 | Log.d(TAG, "handleFrameAvailable tr=" + transform);
319 | }
320 |
321 | mVideoEncoder.drainEncoder(false);
322 | mFullScreen.drawFrame(mTextureId, transform);
323 |
324 | if (BuildConfig.DEBUG) {
325 | drawBox(mFrameNum++);
326 | }
327 |
328 |         // used to save the first frame as a cover image (currently disabled)
329 | // saveFirstFrame();
330 |
331 | mInputWindowSurface.setPresentationTime(timestampNanos);
332 | mInputWindowSurface.swapBuffers();
333 | }
334 |
335 | // private void saveFirstFrame() {
336 | // if (mFirstFrameSaved) {
337 | // return;
338 | // }
339 | // int width = mInputWindowSurface.getWidth();
340 | // int height = mInputWindowSurface.getHeight();
341 | // ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
342 | // buf.order(ByteOrder.LITTLE_ENDIAN);
343 | // buf.rewind();
344 | // GLES20.glReadPixels(0, 0, width, height,
345 | // GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
346 | // new Thread(new ImageSaverThread(buf, mCoverImageFile, width, height)).start();
347 | // mVideoEncoder.setCoverPath(mCoverImageFile.getAbsolutePath());
348 | //
349 |     //         mFirstFrameSaved = true; // already saved
350 | // }
351 |
352 |     /**
353 |      * Builds the path of the cover image that accompanies a recording.
354 |      * @param mp4 the recorded mp4 file
355 |      * @return the cover (screenshot) file stored next to the mp4
356 |      */
357 | private File getCoverFile(@NonNull File mp4) {
358 | return new File(mp4.getParent(), "cover_" + mp4.getName().replace(".mp4", "") + ".jpg");
359 | }
360 |
361 | /**
362 | * Handles a request to stop encoding.
363 | */
364 | private void handleStopRecording() {
365 | Log.d(TAG, "handleStopRecording");
366 |
367 | mVideoEncoder.drainEncoder(true);
368 | releaseEncoder();
369 | }
370 |
371 | /**
372 | * Sets the texture name that SurfaceTexture will use when frames are received.
373 | */
374 | private void handleSetTexture(int id) {
375 | Log.d(TAG, "handleSetTexture " + id);
376 | mTextureId = id;
377 | }
378 |
379 | /**
380 | * Tears down the EGL surface and context we've been using to feed the MediaCodec input
381 | * surface, and replaces it with a new one that shares with the new context.
382 | *
383 | * This is useful if the old context we were sharing with went away (maybe a GLSurfaceView
384 | * that got torn down) and we need to hook up with the new one.
385 | */
386 | private void handleUpdateSharedContext(EGLContext newSharedContext) {
387 | Log.d(TAG, "handleUpdatedSharedContext " + newSharedContext);
388 |
389 | // Release the EGLSurface and EGLContext.
390 | mInputWindowSurface.releaseEglSurface();
391 | mFullScreen.release(false);
392 | mEglCore.release();
393 |
394 | // Create a new EGLContext and recreate the window surface.
395 | mEglCore = new EglCore(newSharedContext, EglCore.FLAG_RECORDABLE);
396 | mInputWindowSurface.recreate(mEglCore);
397 | mInputWindowSurface.makeCurrent();
398 |
399 | // Create new programs and such for the new context.
400 | mFullScreen = new MainFrameRect(new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT));
401 | mFullScreen.setTopCropped(mTopCropped);
402 | mFullScreen.setBottomCropped(mBottomCropped);
403 | }
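    /*
     * Annotation: the caller is expected to invoke updateSharedContext() from the GL thread that
     * owns the replacement context, e.g. (illustrative only):
     *
     *     encoder.updateSharedContext(EGL14.eglGetCurrentContext());
     */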
404 |
405 | private void handleAudioFrameAvailable(boolean endOfStream) {
406 | mVideoEncoder.drainAudio(endOfStream);
407 | }
408 |
409 | private void prepareEncoder(EncoderConfig config) {
410 | mTopCropped = config.mTopCropped;
411 | mBottomCropped = config.mBottomCropped;
412 | mVideoHeight = (int) (config.mHeight * (1f - mTopCropped - mBottomCropped));
413 | if (mVideoHeight % 2 != 0) {
414 | mVideoHeight += 1; // Pixels must be even
415 | }
416 | mVideoWidth = config.mWidth;
417 | mCoverImageFile = getCoverFile(config.mOutputFile);
418 | try {
419 | mVideoEncoder = new VideoEncoderCore(mVideoWidth, mVideoHeight, config.mBitRate, config.mOutputFile);
420 | mVideoEncoder.setRecordCallback(mRecordCallback);
421 | } catch (IOException ioe) {
422 | throw new RuntimeException(ioe);
423 | }
424 | mEglCore = new EglCore(config.mEglContext, EglCore.FLAG_RECORDABLE);
425 | mInputWindowSurface = new WindowSurface(mEglCore, mVideoEncoder.getInputSurface(), true);
426 | mInputWindowSurface.makeCurrent();
427 |
428 | mFullScreen = new MainFrameRect(
429 | new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT));
430 | mFullScreen.setTopCropped(config.mTopCropped);
431 | mFullScreen.setBottomCropped(config.mBottomCropped);
432 |
433 | mTextureId = mFullScreen.createTextureObject();
434 |
435 | Log.d(TAG, "Texture created id: " + mTextureId);
436 |
437 | mVideoFrameSender = new HandlerThread("SurfaceFrameSender");
438 | mVideoFrameSender.start();
439 | mVideoFrameHandler = new Handler(mVideoFrameSender.getLooper());
440 | mSurfaceTexture = new SurfaceTexture(mTextureId);
441 |         mSurfaceTexture.setOnFrameAvailableListener(this, mVideoFrameHandler); // deliver frame callbacks on a separate thread so the TextureMovieEncoder thread is not blocked
442 | mSurfaceTexture.setDefaultBufferSize(config.mWidth, config.mHeight);
443 | mSurface = new Surface(mSurfaceTexture);
444 |
445 | if (mCallback != null) {
446 | mCallback.onInputSurfacePrepared(mSurface);
447 | }
448 |
449 | mFirstFrameSaved = false;
450 | }
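    /*
     * Worked example of the height calculation above (numbers are illustrative): with
     * config.mHeight = 1920, mTopCropped = 0.07f and mBottomCropped = 0.10f we get
     * 1920 * (1 - 0.07 - 0.10) = 1593.6, truncated to 1593, which is odd and therefore bumped to
     * 1594, since video encoders typically reject odd frame dimensions.
     */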
451 |
452 | @Override
453 | public void onFrameAvailable(SurfaceTexture surfaceTexture) {
454 | mHandler.postDelayed(mUpdate, 16);
455 |
456 | frameAvailable(surfaceTexture);
457 | }
458 |
459 | private void releaseEncoder() {
460 | if (mVideoEncoder != null) {
461 | mVideoEncoder.release();
462 | mVideoEncoder = null;
463 | }
464 | if (mSurface != null) {
465 | mSurface.release();
466 | mSurface = null;
467 | }
468 | if (mInputWindowSurface != null) {
469 | mInputWindowSurface.release();
470 | mInputWindowSurface = null;
471 | }
472 | if (mFullScreen != null) {
473 | mFullScreen.release(false);
474 | mFullScreen = null;
475 | }
476 | if (mEglCore != null) {
477 | mEglCore.release();
478 | mEglCore = null;
479 | }
480 | if (mVideoFrameHandler != null) {
481 | mVideoFrameHandler = null;
482 | }
483 | if (mVideoFrameSender != null) {
484 | mVideoFrameSender.quit();
485 | mVideoFrameSender = null;
486 | }
487 | }
488 |
489 | /**
490 | * Draws a box, with position offset.
491 | */
492 | private void drawBox(int posn) {
493 | final int width = mInputWindowSurface.getWidth();
494 | int xpos = (posn * 4) % (width - 50);
495 | GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
496 | GLES20.glScissor(xpos, 0, 100, 100);
497 | GLES20.glClearColor(1.0f, 0.0f, 1.0f, 1.0f);
498 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
499 | GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
500 | }
501 |
502 | public interface Callback {
503 | /**
504 |          * Called when the encoder's input surface has been prepared.
505 |          * @param surface the prepared input surface
506 | */
507 | void onInputSurfacePrepared(Surface surface);
508 | }
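    /*
     * Illustrative implementation (not from the original source): a screen-recording caller could
     * hand the prepared surface to a MediaProjection virtual display. The mediaProjection, width,
     * height and densityDpi names are assumptions for this example.
     *
     *     @Override
     *     public void onInputSurfacePrepared(Surface surface) {
     *         mediaProjection.createVirtualDisplay("cropped-recorder", width, height, densityDpi,
     *                 DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR, surface, null, null);
     *     }
     */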
509 |
510 | /**
511 | * Encoder configuration.
512 | *
513 | * Object is immutable, which means we can safely pass it between threads without
514 | * explicit synchronization (and don't need to worry about it getting tweaked out from
515 | * under us).
516 | *
517 | * TODO: make frame rate and iframe interval configurable? Maybe use builder pattern
518 | * with reasonable defaults for those and bit rate.
519 | */
520 | public static class EncoderConfig {
521 | final File mOutputFile;
522 | final int mWidth;
523 | final int mHeight;
524 | final float mTopCropped;
525 | final float mBottomCropped;
526 | final int mBitRate;
527 | final EGLContext mEglContext;
528 |
529 | public EncoderConfig(File outputFile, int width, int height,
530 | float topCropped, float bottomCropped,
531 | int bitRate,
532 | EGLContext sharedEglContext) {
533 | mOutputFile = outputFile;
534 | mWidth = width;
535 | mHeight = height;
536 | mTopCropped = topCropped;
537 | mBottomCropped = bottomCropped;
538 | mBitRate = bitRate;
539 | mEglContext = sharedEglContext;
540 | }
541 |
542 | @Override
543 | public String toString() {
544 | return "EncoderConfig: " + mWidth + "x" + mHeight
545 | + ", Crop with: " + mTopCropped + " and " + mBottomCropped
546 | + "@" + mBitRate +
547 | " to '" + mOutputFile.toString() + "' ctxt=" + mEglContext;
548 | }
549 | }
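    /*
     * Illustrative construction (values are only an example): record a 720x1280 stream, cropping
     * the top 5% and bottom 10%, at 4 Mbps, sharing the caller's current EGL context. The
     * "encoder" and "outputDir" names are assumptions for this example.
     *
     *     EncoderConfig config = new EncoderConfig(
     *             new File(outputDir, "capture.mp4"),
     *             720, 1280, 0.05f, 0.10f, 4_000_000, EGL14.eglGetCurrentContext());
     *     encoder.startRecording(config);
     */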
550 |
551 | /**
552 | * Handles encoder state change requests. The handler is created on the encoder thread.
553 | */
554 |     private static class EncoderHandler extends Handler {
555 |         private WeakReference<TextureMovieEncoder> mWeakEncoder;