121 | * Throws a RuntimeException if the location is invalid. 122 | */ 123 | public static void checkLocation(int location, String label) { 124 | if (location < 0) { 125 | throw new RuntimeException("Unable to locate '" + label + "' in program"); 126 | } 127 | } 128 | 129 | /** 130 | * Creates a texture from raw data. 131 | * 132 | * @param data Image data, in a "direct" ByteBuffer. 133 | * @param width Texture width, in pixels (not bytes). 134 | * @param height Texture height, in pixels. 135 | * @param format Image data format (use constant appropriate for glTexImage2D(), e.g. GL_RGBA). 136 | * @return Handle to texture. 137 | */ 138 | public static int createImageTexture(ByteBuffer data, int width, int height, int format) { 139 | int[] textureHandles = new int[1]; 140 | int textureHandle; 141 | 142 | GLES20.glGenTextures(1, textureHandles, 0); 143 | textureHandle = textureHandles[0]; 144 | GlUtil.checkGlError("glGenTextures"); 145 | 146 | // Bind the texture handle to the 2D texture target. 147 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle); 148 | 149 | // Configure min/mag filtering, i.e. what scaling method do we use if what we're rendering 150 | // is smaller or larger than the source image. 151 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, 152 | GLES20.GL_LINEAR); 153 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, 154 | GLES20.GL_LINEAR); 155 | GlUtil.checkGlError("loadImageTexture"); 156 | 157 | // Load the data from the buffer into the texture handle. 158 | GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, format, 159 | width, height, /*border*/ 0, format, GLES20.GL_UNSIGNED_BYTE, data); 160 | GlUtil.checkGlError("loadImageTexture"); 161 | 162 | return textureHandle; 163 | } 164 | 165 | /** 166 | * Allocates a direct float buffer, and populates it with the float array data. 
167 | */ 168 | public static FloatBuffer createFloatBuffer(float[] coords) { 169 | // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it. 170 | ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * SIZEOF_FLOAT); 171 | bb.order(ByteOrder.nativeOrder()); 172 | FloatBuffer fb = bb.asFloatBuffer(); 173 | fb.put(coords); 174 | fb.position(0); 175 | return fb; 176 | } 177 | 178 | /** 179 | * Writes GL version info to the log. 180 | */ 181 | public static void logVersionInfo() { 182 | Log.i(TAG, "vendor : " + GLES20.glGetString(GLES20.GL_VENDOR)); 183 | Log.i(TAG, "renderer: " + GLES20.glGetString(GLES20.GL_RENDERER)); 184 | Log.i(TAG, "version : " + GLES20.glGetString(GLES20.GL_VERSION)); 185 | 186 | if (false) { 187 | int[] values = new int[1]; 188 | GLES30.glGetIntegerv(GLES30.GL_MAJOR_VERSION, values, 0); 189 | int majorVersion = values[0]; 190 | GLES30.glGetIntegerv(GLES30.GL_MINOR_VERSION, values, 0); 191 | int minorVersion = values[0]; 192 | if (GLES30.glGetError() == GLES30.GL_NO_ERROR) { 193 | Log.i(TAG, "iversion: " + majorVersion + "." 
package com.ryan.screenrecoder.coder;

import android.graphics.Bitmap;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.projection.MediaProjection;
import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.Surface;

import com.ryan.screenrecoder.application.SysValue;
import com.ryan.screenrecoder.bean.EventLogBean;
import com.ryan.screenrecoder.glec.EGLRender;

import org.greenrobot.eventbus.EventBus;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Captures the screen into a VirtualDisplay and encodes it to an H.264 (AVC)
 * elementary stream using MediaCodec's input-surface path. Encoded access
 * units are delivered to the registered {@link OnScreenCallBack}; key frames
 * are prefixed with the cached SPS/PPS so they are independently decodable.
 *
 * Created by ryan on 2017/2/23 0023.
 */
public class MediaEncoder extends Thread {

    private final String TAG = "MediaEncoder";

    private final String mime_type = MediaFormat.MIMETYPE_VIDEO_AVC;

    private DisplayManager displayManager;
    private MediaProjection projection;
    private MediaCodec mEncoder;
    private VirtualDisplay virtualDisplay;
    private MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
    private EGLRender eglRender;
    private Surface surface;

    // Screen geometry of the display being mirrored.
    private int screen_width;
    private int screen_height;
    private int screen_dpi;

    // Encoder parameters.
    private int frame_bit = 2000000;   // target bitrate, 2 Mbps
    private int frame_rate = 20;       // MediaCodec frame-rate hint (GOP pacing), NOT the output FPS
    private int frame_internal = 1;    // key-frame interval: one I-frame per group
    private final int TIMEOUT_US = 10000;
    private int video_fps = 30;        // actual output FPS, enforced by EGLRender
    private byte[] sps = null;         // cached from csd-0 on the first format change
    private byte[] pps = null;         // cached from csd-1 on the first format change

    private OnScreenCallBack onScreenCallBack;

    public void setOnScreenCallBack(OnScreenCallBack onScreenCallBack) {
        this.onScreenCallBack = onScreenCallBack;
    }

    /** Receives encoded stream data and screenshot results. */
    public interface OnScreenCallBack {
        void onScreenInfo(byte[] bytes);

        void onCutScreen(Bitmap bitmap);
    }

    public MediaEncoder(MediaProjection projection, int screen_width, int screen_height, int screen_dpi) {
        this.projection = projection;
        initScreenInfo(screen_width, screen_height, screen_dpi);
    }

    public MediaEncoder(DisplayManager displayManager, int screen_width, int screen_height, int screen_dpi) {
        this.displayManager = displayManager;
        initScreenInfo(screen_width, screen_height, screen_dpi);
    }

    private void initScreenInfo(int screen_width, int screen_height, int screen_dpi) {
        this.screen_width = screen_width;
        this.screen_height = screen_height;
        this.screen_dpi = screen_dpi;
    }

    /**
     * Sets the output video FPS.
     *
     * @param fps frames per second to render/encode
     * @return this, for chaining
     */
    public MediaEncoder setVideoFPS(int fps) {
        video_fps = fps;
        return this;
    }

    /**
     * Sets the video encoding bitrate.
     *
     * @param bit target bitrate in bits per second
     * @return this, for chaining
     */
    public MediaEncoder setVideoBit(int bit) {
        frame_bit = bit;
        return this;
    }

    @Override
    public void run() {
        super.run();
        try {
            prepareEncoder();
        } catch (IOException e) {
            // Bug fix: the original fell through after a failed prepare and then
            // dereferenced the null eglRender below. Without an encoder there is
            // nothing to record, so bail out.
            e.printStackTrace();
            return;
        }
        if (projection != null) {
            virtualDisplay = projection.createVirtualDisplay("screen", screen_width, screen_height, screen_dpi,
                    DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, eglRender.getDecodeSurface(), null, null);
        } else {
            virtualDisplay = displayManager.createVirtualDisplay("screen", screen_width, screen_height, screen_dpi,
                    eglRender.getDecodeSurface(), DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC);
        }
        startRecordScreen();
        release();
    }

    /**
     * Initializes the encoder and the EGL renderer that feeds its input surface.
     *
     * @throws IOException if the codec cannot be created
     */
    private void prepareEncoder() throws IOException {
        MediaFormat mediaFormat = MediaFormat.createVideoFormat(mime_type, screen_width, screen_height);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, frame_bit);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frame_rate);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, frame_internal);
        mEncoder = MediaCodec.createEncoderByType(mime_type);
        mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        surface = mEncoder.createInputSurface();
        eglRender = new EGLRender(surface, screen_width, screen_height, video_fps);
        eglRender.setCallBack(new EGLRender.onFrameCallBack() {
            @Override
            public void onUpdate() {
                // A frame was rendered into the input surface; drain the encoder.
                startEncode();
            }

            @Override
            public void onCutScreen(Bitmap bitmap) {
                if (onScreenCallBack != null) {
                    onScreenCallBack.onCutScreen(bitmap);
                }
            }
        });
        mEncoder.start();
    }

    /**
     * Runs the blocking render loop until {@link #stopScreen()} is called.
     */
    private void startRecordScreen() {
        // Bug fix: the original also called release() here, which ran a second
        // time when run() invoked release() after this method returned.
        eglRender.start();
    }

    /** Drains one output buffer from the encoder, if available. */
    private void startEncode() {
        ByteBuffer[] byteBuffers = null;
        if (SysValue.api < 21) {
            // Pre-Lollipop: output buffers must be fetched as an array up front.
            byteBuffers = mEncoder.getOutputBuffers();
        }
        int index = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // Happens once, before any data: capture SPS/PPS from the new format.
            resetOutputFormat();
        } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // No output within TIMEOUT_US; nothing to do this round.
        } else if (index >= 0) {
            if (SysValue.api < 21) {
                encodeToVideoTrack(byteBuffers[index]);
            } else {
                encodeToVideoTrack(mEncoder.getOutputBuffer(index));
            }
            mEncoder.releaseOutputBuffer(index, false);
        }
    }

    /**
     * Packages one encoded access unit and hands it to the callback.
     * Key frames are prefixed with SPS+PPS so every key frame can start a decode.
     *
     * @param encodeData output buffer positioned per {@link #mBufferInfo}
     */
    private void encodeToVideoTrack(ByteBuffer encodeData) {
        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            // Codec config (csd) is delivered via the format change; skip it here.
            Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
            mBufferInfo.size = 0;
        }
        if (mBufferInfo.size == 0) {
            Log.d(TAG, "info.size == 0, drop it.");
            encodeData = null;
        } else {
            Log.d(TAG, "got buffer, info: size=" + mBufferInfo.size
                    + ", presentationTimeUs=" + mBufferInfo.presentationTimeUs
                    + ", offset=" + mBufferInfo.offset);
        }
        if (encodeData != null) {
            encodeData.position(mBufferInfo.offset);
            encodeData.limit(mBufferInfo.offset + mBufferInfo.size);
            byte[] bytes;
            // Bug fix: test the key-frame bit with a mask. The original compared
            // flags == BUFFER_FLAG_KEY_FRAME, which misses frames carrying any
            // additional flag bits. Also guard against SPS/PPS not cached yet.
            boolean keyFrame = (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
            if (keyFrame && sps != null && pps != null) {
                // Prepend SPS and PPS to the key frame.
                bytes = new byte[mBufferInfo.size + sps.length + pps.length];
                System.arraycopy(sps, 0, bytes, 0, sps.length);
                System.arraycopy(pps, 0, bytes, sps.length, pps.length);
                encodeData.get(bytes, sps.length + pps.length, mBufferInfo.size);
            } else {
                bytes = new byte[mBufferInfo.size];
                encodeData.get(bytes, 0, mBufferInfo.size);
            }
            if (onScreenCallBack != null) {
                onScreenCallBack.onScreenInfo(bytes);
            }
            EventBus.getDefault().post(new EventLogBean("send:" + mBufferInfo.size + "\tflag:" + mBufferInfo.flags));
            Log.e("---", "send:" + mBufferInfo.size + "\tflag:" + mBufferInfo.flags);
        }
    }

    private int mVideoTrackIndex;

    /** Handles INFO_OUTPUT_FORMAT_CHANGED: logs the format and caches SPS/PPS. */
    private void resetOutputFormat() {
        MediaFormat newFormat = mEncoder.getOutputFormat();
        Log.i(TAG, "output format changed.\n new format: " + newFormat.toString());
        getSpsPpsByteBuffer(newFormat);
        Log.i(TAG, "started media muxer, videoIndex=" + mVideoTrackIndex);
    }

    /**
     * Caches the codec's SPS (csd-0) and PPS (csd-1) from the output format.
     *
     * @param newFormat format delivered on INFO_OUTPUT_FORMAT_CHANGED
     */
    private void getSpsPpsByteBuffer(MediaFormat newFormat) {
        // Bug fix: ByteBuffer.array() exposes the whole backing array, including
        // any arrayOffset/padding (and throws for buffers without one). Copy
        // exactly the remaining() bytes instead.
        ByteBuffer csd0 = newFormat.getByteBuffer("csd-0");
        ByteBuffer csd1 = newFormat.getByteBuffer("csd-1");
        sps = new byte[csd0.remaining()];
        csd0.get(sps);
        pps = new byte[csd1.remaining()];
        csd1.get(pps);
        EventBus.getDefault().post(new EventLogBean("编码器初始化完成"));
    }

    /** Requests the render loop to stop; run() then tears everything down. */
    public void stopScreen() {
        if (eglRender != null) {
            eglRender.stop();
        }
    }

    /** Releases codec and display resources; safe to call more than once. */
    public void release() {
        if (mEncoder != null) {
            mEncoder.stop();
            mEncoder.release();
            mEncoder = null;
        }
        if (virtualDisplay != null) {
            virtualDisplay.release();
            virtualDisplay = null;   // avoid releasing the same display twice
        }
    }

    /** Asks the renderer to capture the next rendered frame as a Bitmap. */
    public void cutScreen() {
        eglRender.cutScreen();
    }
}
package com.ryan.screenrecoder.glec;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.Surface;

import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Bridges a producer Surface (fed by a VirtualDisplay via SurfaceTexture) to a
 * MediaCodec encoder input surface. Owns two EGL surfaces: an offscreen pbuffer
 * for texture updates and a window surface wrapping the encoder input. The
 * blocking {@link #start()} loop re-draws the latest frame at a fixed FPS and
 * can capture a frame to a Bitmap on request.
 *
 * Created by zx315476228 on 17-3-3.
 */
public class EGLRender implements SurfaceTexture.OnFrameAvailableListener {
    private final int HANDLER_PHOTO_CALLBACK = 0;
    private static final String TAG = "EncodeDecodeSurface";
    private static final boolean VERBOSE = false; // lots of logging

    private STextureRender mTextureRender;
    private SurfaceTexture mSurfaceTexture;

    private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
    private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
    private EGLContext mEGLContextEncoder = EGL14.EGL_NO_CONTEXT;
    private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
    private EGLSurface mEGLSurfaceEncoder = EGL14.EGL_NO_SURFACE;

    private Surface decodeSurface;

    private int mWidth;
    private int mHeight;
    private int fps;
    private int video_interval;          // minimum ms between encoded frames (1000 / fps)
    private boolean mFrameAvailable = true;
    private onFrameCallBack callBack;
    private boolean hasCutScreen = false;

    private boolean start;
    private long time = 0;               // wall-clock time of the last encoded frame
    private long current_time;

    // Delivers captured Bitmaps to the callback on the main thread.
    private Handler handler = new Handler(Looper.getMainLooper()) {
        @Override
        public void handleMessage(Message msg) {
            super.handleMessage(msg);
            switch (msg.what) {
                case HANDLER_PHOTO_CALLBACK:
                    if (callBack != null && msg.obj != null)
                        callBack.onCutScreen((Bitmap) msg.obj);
                    break;
            }
        }
    };
    private ExecutorService singleThreadExecutor = Executors.newSingleThreadExecutor();

    /**
     * Converts raw glReadPixels output into an upright ARGB Bitmap off the GL
     * thread, then posts it to the main-thread handler.
     */
    private class CutScreenTask implements Runnable {
        private int[] modelData;

        public CutScreenTask(int[] modelData) {
            this.modelData = modelData;
        }

        @Override
        public void run() {
            int[] argbData = new int[modelData.length];
            int srcRow, dstRow;
            // glReadPixels(GL_RGBA) lands in the int[] as ABGR on little-endian
            // devices; swap the R and B channels and flip vertically (GL reads
            // bottom-up, Bitmap expects top-down).
            for (int i = 0; i < mHeight; i++) {
                srcRow = i * mWidth;
                dstRow = (mHeight - i - 1) * mWidth;
                for (int j = 0; j < mWidth; j++) {
                    int texturePixel = modelData[srcRow + j];
                    int blue = (texturePixel >> 16) & 0xff;
                    int red = (texturePixel << 16) & 0x00ff0000;
                    int pixel = (texturePixel & 0xff00ff00) | red | blue;
                    argbData[dstRow + j] = pixel;
                }
            }
            Bitmap bitmap = Bitmap.createBitmap(argbData, mWidth, mHeight, Bitmap.Config.ARGB_8888);
            modelData = null;
            argbData = null;

            handler.obtainMessage(HANDLER_PHOTO_CALLBACK, bitmap).sendToTarget();
        }
    }

    public void setCallBack(onFrameCallBack callBack) {
        this.callBack = callBack;
    }

    /** Notifies a listener of rendered frames and completed screenshots. */
    public interface onFrameCallBack {
        void onUpdate();

        void onCutScreen(Bitmap bitmap);
    }

    public EGLRender(Surface surface, int mWidth, int mHeight, int fps) {
        this.mWidth = mWidth;
        this.mHeight = mHeight;
        initFPs(fps);
        eglSetup(surface);
        makeCurrent();
        setup();
    }

    private void initFPs(int fps) {
        this.fps = fps;
        video_interval = 1000 / fps;
    }

    /**
     * Prepares EGL: a GLES 2.0 context plus a pbuffer surface for texture work
     * and a window surface wrapping the encoder's input surface.
     *
     * @param surface MediaCodec input surface to wrap
     */
    private void eglSetup(Surface surface) {
        mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("unable to get EGL14 display");
        }
        int[] version = new int[2];
        if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
            mEGLDisplay = null;
            throw new RuntimeException("unable to initialize EGL14");
        }

        // Configure EGL for pbuffer and OpenGL ES 2.0, 8-bit RGBA.
        int[] attribList = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
                EGL14.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
                numConfigs, 0)) {
            throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
        }

        EGLConfig configEncoder = getConfig(2);

        // Configure context for OpenGL ES 2.0.
        int[] attrib_list = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL14.EGL_NONE
        };
        mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
                attrib_list, 0);
        checkEglError("eglCreateContext");
        if (mEGLContext == null) {
            throw new RuntimeException("null context");
        }

        // Second context shares the first so the decoded texture is visible to both.
        mEGLContextEncoder = EGL14.eglCreateContext(mEGLDisplay, configEncoder, mEGLContext,
                attrib_list, 0);
        checkEglError("eglCreateContext");
        if (mEGLContextEncoder == null) {
            throw new RuntimeException("null context2");
        }

        // Create an offscreen pbuffer surface for the texture-update context.
        int[] surfaceAttribs = {
                EGL14.EGL_WIDTH, mWidth,
                EGL14.EGL_HEIGHT, mHeight,
                EGL14.EGL_NONE
        };
        mEGLSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs, 0);
        checkEglError("eglCreatePbufferSurface");
        if (mEGLSurface == null) {
            throw new RuntimeException("surface was null");
        }

        // Create an EGL window surface over the encoder's input surface.
        int[] surfaceAttribs2 = {
                EGL14.EGL_NONE
        };
        mEGLSurfaceEncoder = EGL14.eglCreateWindowSurface(mEGLDisplay, configEncoder, surface,
                surfaceAttribs2, 0);
        checkEglError("eglCreateWindowSurface");
        if (mEGLSurfaceEncoder == null) {
            throw new RuntimeException("surface was null");
        }
    }

    /**
     * Makes the pbuffer surface and primary context current.
     */
    public void makeCurrent() {
        if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
            throw new RuntimeException("eglMakeCurrent failed");
        }
    }

    /**
     * Creates interconnected instances of TextureRender, SurfaceTexture, and Surface.
     */
    private void setup() {
        mTextureRender = new STextureRender(mWidth, mHeight);
        mTextureRender.surfaceCreated();

        if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
        mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
        mSurfaceTexture.setDefaultBufferSize(mWidth, mHeight);
        mSurfaceTexture.setOnFrameAvailableListener(this);
        decodeSurface = new Surface(mSurfaceTexture);
    }

    /** @return the Surface the VirtualDisplay should render into. */
    public Surface getDecodeSurface() {
        return decodeSurface;
    }

    /**
     * Chooses an RGBA8888 EGLConfig for the requested GLES version.
     *
     * @param version 2 for GLES2, >=3 to also allow GLES3
     * @throws RuntimeException if no matching config exists (the original
     *                          returned null here, deferring the failure to an
     *                          obscure eglCreateContext error later)
     */
    private EGLConfig getConfig(int version) {
        int renderableType = EGL14.EGL_OPENGL_ES2_BIT;
        if (version >= 3) {
            renderableType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR;
        }

        // The actual surface is generally RGBA or RGBX, so situationally omitting alpha
        // doesn't really help. It can also lead to a huge performance hit on glReadPixels()
        // when reading into a GL_RGBA buffer.
        int[] attribList = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, renderableType,
                EGL14.EGL_NONE, 0, // placeholder for recordable [@-3]
                EGL14.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
                numConfigs, 0)) {
            throw new RuntimeException("unable to find RGB8888 / " + version + " EGLConfig");
        }
        return configs[0];
    }

    /** Throws if the previous EGL call left an error on the thread. */
    private void checkEglError(String msg) {
        int error;
        if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
            throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
        }
    }

    /**
     * Makes one of the two context/surface pairs current.
     *
     * @param index 0 = pbuffer/texture context, anything else = encoder context
     */
    public void makeCurrent(int index) {
        if (index == 0) {
            if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
                throw new RuntimeException("eglMakeCurrent failed");
            }
        } else {
            if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurfaceEncoder, mEGLSurfaceEncoder, mEGLContextEncoder)) {
                throw new RuntimeException("eglMakeCurrent failed");
            }
        }
    }

    /** Stamps the presentation time (nanoseconds) on the encoder surface. */
    public void setPresentationTime(long nsecs) {
        EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurfaceEncoder, nsecs);
        checkEglError("eglPresentationTimeANDROID");
    }

    /**
     * Latches the newest frame into the texture if one arrived since the last
     * call. NOTE(review): despite the name this does not block waiting for a
     * frame; when none is pending the previous frame is re-encoded.
     */
    public void awaitNewImage() {
        if (mFrameAvailable) {
            mFrameAvailable = false;
            mSurfaceTexture.updateTexImage();
        }
    }

    /** Submits the encoder surface's back buffer to the encoder. */
    public boolean swapBuffers() {
        boolean result = EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurfaceEncoder);
        checkEglError("eglSwapBuffers");
        return result;
    }

    private int count = 1;   // index of the next encoded frame, drives PTS

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        mFrameAvailable = true;
    }

    /** Generates the PTS for a frame, in nanoseconds, from its index and the FPS. */
    private long computePresentationTimeNsec(int frameIndex) {
        final long ONE_BILLION = 1000000000;
        return frameIndex * ONE_BILLION / fps;
    }

    public void drawImage() {
        mTextureRender.drawFrame();
    }

    /**
     * Blocking render loop: re-draws the latest frame into the encoder surface
     * at the configured FPS until {@link #stop()} is called.
     */
    public void start() {
        start = true;
        while (start) {
            makeCurrent(1);
            awaitNewImage();
            current_time = System.currentTimeMillis();
            if (current_time - time >= video_interval) {
                // Frame-rate gate: enough time has elapsed, emit a frame.
                drawImage();
                callBack.onUpdate();
                setPresentationTime(computePresentationTimeNsec(count++));
                swapBuffers();
                if (hasCutScreen) {
                    getScreen();
                    hasCutScreen = false;
                }
                time = current_time;
            } else {
                // Bug fix: the original busy-spun here, pegging a core at 100%
                // between frames. Yield briefly while waiting for the interval.
                try {
                    Thread.sleep(1);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }
    }

    /**
     * Reads back the pixels of the frame just rendered and hands them to a
     * worker thread for Bitmap conversion.
     */
    private void getScreen() {
        IntBuffer buffer = IntBuffer.allocate(mWidth * mHeight);
        buffer.position(0);
        GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
        int[] modelData = buffer.array();
        buffer.clear();
        singleThreadExecutor.execute(new CutScreenTask(modelData));
    }

    /** Requests that the next rendered frame be captured as a Bitmap. */
    public void cutScreen() {
        hasCutScreen = true;
    }

    /** Asks the render loop in {@link #start()} to exit. */
    public void stop() {
        start = false;
    }
}