36 | * The object wraps an encoder running on a dedicated thread. The various control messages 37 | * may be sent from arbitrary threads (typically the app UI thread). The encoder thread 38 | * manages both sides of the encoder (feeding and draining); the only external input is 39 | * the GL texture. 40 | *
41 | * The design is complicated slightly by the need to create an EGL context that shares state 42 | * with a view that gets restarted if (say) the device orientation changes. When the view 43 | * in question is a GLSurfaceView, we don't have full control over the EGL context creation 44 | * on that side, so we have to bend a bit backwards here. 45 | *
46 | * To use: 47 | *
 * TODO: tweak the API (esp. textureId) so it's less awkward for simple use cases.
 */
public abstract class BaseMovieEncoder implements Runnable {

    // Message codes dispatched to the encoder thread's EncoderHandler.
    static final int MSG_START_RECORDING = 0;
    static final int MSG_STOP_RECORDING = 1;
    static final int MSG_FRAME_AVAILABLE = 2;
    static final int MSG_QUIT = 4;

    // ----- accessed exclusively by encoder thread -----
    private WindowSurface mInputWindowSurface;   // EGL window wrapped around the codec's input surface
    private EglCore mEglCore;                    // EGL display/context owned by the encoder thread

    private VideoEncoderCore mVideoEncoder;

    private AudioEncoderCore mAudioEncoder;

    // ----- accessed by multiple threads -----
    protected volatile EncoderHandler mHandler;  // non-null only while the encoder thread's Looper is alive

    private Object mReadyFence = new Object();   // guards ready/running
    private volatile boolean mReady;             // handler constructed; thread accepts messages
    private volatile boolean mRunning;           // startRecording() called, thread not yet shut down

    protected Context mContext;

    protected int mWidth, mHeight;               // encoded frame dimensions, in pixels

    /**
     * Stores the configuration; no encoder resources are created here.
     * Call {@link #startRecording} to spin up the encoder thread.
     *
     * @param context app context, kept for use by subclasses
     * @param width   encoded frame width in pixels
     * @param height  encoded frame height in pixels
     */
    public BaseMovieEncoder(Context context, int width, int height) {
        mContext = context;
        mWidth = width;
        mHeight = height;
    }

    /**
     * Encoder configuration.
     * <p>
     * Object is immutable, which means we can safely pass it between threads without
     * explicit synchronization (and don't need to worry about it getting tweaked out from
     * under us).
     * <p>
     * TODO: make frame rate and iframe interval configurable?  Maybe use builder pattern
     * with reasonable defaults for those and bit rate.
     */
    public static class EncoderConfig {
        // NOTE(review): not final like the other fields, but it is assigned exactly once
        // in the constructor and never reassigned here.
        AndroidMuxer mMuxer;

        final File mOutputFile;

        final EGLContext mEglContext;

        public EncoderConfig(File outputFile,
                             EGLContext sharedEglContext) {
            mOutputFile = outputFile;
            mEglContext = sharedEglContext;
            // The muxer writes to the same path the caller supplied.
            mMuxer = new AndroidMuxer(outputFile.getPath());
        }

        @Override
        public String toString() {
            return "EncoderConfig: " +
                    " to '" + mOutputFile.toString() + "' ctxt=" + mEglContext;
        }
    }
124 | * Creates a new thread, which will create an encoder using the provided configuration. 125 | *
126 | * Returns after the recorder thread has started and is ready to accept Messages. The 127 | * encoder may not yet be fully configured. 128 | */ 129 | public void startRecording(EncoderConfig config) { 130 | synchronized (mReadyFence) { 131 | if (mRunning) { 132 | return; 133 | } 134 | mRunning = true; 135 | new Thread(this, "TextureMovieEncoder").start(); 136 | while (!mReady) { 137 | try { 138 | mReadyFence.wait(); 139 | } catch (InterruptedException ie) { 140 | // ignore 141 | } 142 | } 143 | } 144 | 145 | LogUtils.v(String.format("startRecording called")); 146 | mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config)); 147 | } 148 | 149 | /** 150 | * Tells the video recorder to stop recording. (Call from non-encoder thread.) 151 | *
152 | * Returns immediately; the encoder/muxer may not yet be finished creating the movie. 153 | *
154 | * TODO: have the encoder thread invoke a callback on the UI thread just before it shuts down 155 | * so we can provide reasonable status UI (and let the caller know that movie encoding 156 | * has completed). 157 | */ 158 | public void stopRecording() { 159 | mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING)); 160 | mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT)); 161 | // We don't know when these will actually finish (or even start). We don't want to 162 | // delay the UI thread though, so we return immediately. 163 | } 164 | 165 | /** 166 | * Returns true if recording has been started. 167 | */ 168 | public boolean isRecording() { 169 | synchronized (mReadyFence) { 170 | return mRunning; 171 | } 172 | } 173 | 174 | public abstract void onPrepareEncoder(); 175 | 176 | public abstract void onFrameAvailable(Object o, long timestamp); 177 | 178 | /** 179 | * Tells the video recorder that a new frame is available. (Call from non-encoder thread.) 180 | *
181 | * This function sends a message and returns immediately. This isn't sufficient -- we 182 | * don't want the caller to latch a new frame until we're done with this one -- but we 183 | * can get away with it so long as the input frame rate is reasonable and the encoder 184 | * thread doesn't stall. 185 | *
186 | * TODO: either block here until the texture has been rendered onto the encoder surface, 187 | * or have a separate "block if still busy" method that the caller can execute immediately 188 | * before it calls updateTexImage(). The latter is preferred because we don't want to 189 | * stall the caller while this thread does work. 190 | */ 191 | public void frameAvailable(Object object, long timestamp) { 192 | synchronized (mReadyFence) { 193 | if (!mReady) { 194 | return; 195 | } 196 | } 197 | 198 | if (timestamp == 0) { 199 | // Seeing this after device is toggled off/on with power button. The 200 | // first frame back has a zero timestamp. 201 | // 202 | // MPEG4Writer thinks this is cause to abort() in native code, so it's very 203 | // important that we just ignore the frame. 204 | return; 205 | } 206 | 207 | onFrameAvailable(object, timestamp); 208 | } 209 | 210 | /** 211 | * Encoder thread entry point. Establishes Looper/Handler and waits for messages. 212 | *
     *
     * @see Thread#run()
     */
    @Override
    public void run() {
        // Establish a Looper for this thread, and define a Handler for it.
        Looper.prepare();
        synchronized (mReadyFence) {
            mHandler = new EncoderHandler(this);
            mReady = true;
            // Wake startRecording(), which blocks until the handler exists.
            mReadyFence.notify();
        }
        // Dispatches messages until MSG_QUIT stops the Looper.
        Looper.loop();

        synchronized (mReadyFence) {
            mReady = mRunning = false;
            mHandler = null;   // handler is bound to the dead Looper; drop it
        }
    }
232 |
233 |
234 | /**
235 | * Handles encoder state change requests. The handler is created on the encoder thread.
236 | */
237 | static class EncoderHandler extends Handler {
238 | private WeakReference
     * The texture is rendered onto the encoder's input surface, along with a moving
     * box (just because we can).
     * <p>
     *
     * @param timestampNanos The frame's timestamp, from SurfaceTexture.
     */
    private void handleFrameAvailable(long timestampNanos) {
        // Called on every frame.  NOTE(review): assumes start() is a no-op once the
        // encoder cores are already running -- confirm against VideoEncoderCore /
        // AudioEncoderCore before relying on this.
        mVideoEncoder.start();
        mAudioEncoder.start();

        // Subclass draws the frame into the currently-bound encoder input surface.
        onFrameAvailable();

        // Timestamp must be set before the swap; swapBuffers() submits the frame to the codec.
        mInputWindowSurface.setPresentationTime(timestampNanos);
        mInputWindowSurface.swapBuffers();
    }

    // Drawing hook invoked on the encoder thread with the encoder's EGL surface current.
    public abstract void onFrameAvailable();
304 |
    /**
     * Handles a request to stop encoding.  Runs on the encoder thread: stops both
     * encoder cores, then releases the EGL/codec resources.
     */
    private void handleStopRecording() {
        mVideoEncoder.stop();
        mAudioEncoder.stop();
        releaseEncoder();
    }
313 |
    /**
     * Creates the encoder cores and the EGL context/surface used to feed the video codec.
     * Runs on the encoder thread.
     *
     * @param muxer         shared muxer both encoder cores write into
     * @param sharedContext EGL context to share state with (e.g. the display-side context)
     * @param width         encoded frame width in pixels
     * @param height        encoded frame height in pixels
     */
    private void prepareEncoder(AndroidMuxer muxer, EGLContext sharedContext, int width, int height) {
        mWidth = width;
        mHeight = height;

        mVideoEncoder = new VideoEncoderCore(muxer, width, height);
        mAudioEncoder = new AudioEncoderCore(muxer);

        // FLAG_RECORDABLE: pick an EGLConfig the video encoder can consume efficiently.
        mEglCore = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);

        // Wrap the codec's input surface and make it current so the subclass can issue GL calls.
        mInputWindowSurface = new WindowSurface(mEglCore, mVideoEncoder.getInputSurface(), true);
        mInputWindowSurface.makeCurrent();
    }
326 |
327 | private void releaseEncoder() {
328 | mVideoEncoder.release();
329 |
330 | if (mInputWindowSurface != null) {
331 | mInputWindowSurface.release();
332 | mInputWindowSurface = null;
333 | }
334 | if (mEglCore != null) {
335 | mEglCore.release();
336 | mEglCore = null;
337 | }
338 | }
339 | }
340 |
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/java/com/inuker/library/encoder/CameraHelper.java:
--------------------------------------------------------------------------------
1 | package com.inuker.library.encoder;
2 |
3 | import android.annotation.TargetApi;
4 | import android.app.Activity;
5 | import android.hardware.Camera;
6 | import android.os.Build;
7 | import android.os.Environment;
8 | import android.view.Surface;
9 |
10 | import java.io.File;
11 | import java.text.SimpleDateFormat;
12 | import java.util.Date;
13 | import java.util.List;
14 | import java.util.Locale;
15 |
/**
 * Camera utility helpers: camera selection, display orientation, and size choice.
 * <p>
 * Created by liwentian on 2017/8/29.
 */

public class CameraHelper {

    // Media types used when building output file names.
    public static final int MEDIA_TYPE_IMAGE = 1;
    public static final int MEDIA_TYPE_VIDEO = 2;
24 |
25 | public static int getFrontCameraId() {
26 | int frontIdx = 0;
27 | Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
28 | for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
29 | Camera.getCameraInfo(i, cameraInfo);
30 |
31 | if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
32 | frontIdx = i;
33 | break;
34 | }
35 | }
36 | return frontIdx;
37 | }
38 |
39 | public static int getDisplayOrientation(Activity activity, int cameraId) {
40 | android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
41 | android.hardware.Camera.getCameraInfo(cameraId, info);
42 | int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
43 |
44 | int degrees = 0;
45 | switch (rotation) {
46 | case Surface.ROTATION_0:
47 | degrees = 0;
48 | break;
49 | case Surface.ROTATION_90:
50 | degrees = 90;
51 | break;
52 | case Surface.ROTATION_180:
53 | degrees = 180;
54 | break;
55 | case Surface.ROTATION_270:
56 | degrees = 270;
57 | break;
58 | }
59 | int result;
60 | if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
61 | result = (info.orientation + degrees) % 360;
62 | result = (360 - result) % 360; // compensate the mirror
63 | } else {
64 | // back-facing
65 | result = (info.orientation - degrees + 360) % 360;
66 | }
67 |
68 | return result;
69 | }
70 |
71 | /**
72 | * Iterate over supported camera video sizes to see which one best fits the
 73 |      * dimensions of the given view while maintaining the aspect ratio.  If none can
 74 |      * match the aspect ratio exactly, be lenient with it.
75 | *
76 | * @param supportedVideoSizes Supported camera video sizes.
77 | * @param previewSizes Supported camera preview sizes.
78 | * @param w The width of the view.
79 | * @param h The height of the view.
80 | * @return Best match camera video size to fit in the view.
81 | */
82 | public static Camera.Size getOptimalVideoSize(List
 * The EGLContext must only be attached to one thread at a time.  This class is not thread-safe.
 */
public final class EglCore {
    private static final String TAG = GlUtil.TAG;

    /**
     * Constructor flag: surface must be recordable.  This discourages EGL from using a
     * pixel format that cannot be converted efficiently to something usable by the video
     * encoder.
     */
    public static final int FLAG_RECORDABLE = 0x01;

    /**
     * Constructor flag: ask for GLES3, fall back to GLES2 if not available.  Without this
     * flag, GLES2 is used.
     */
    public static final int FLAG_TRY_GLES3 = 0x02;

    // Android-specific extension: the EGL_RECORDABLE_ANDROID config attribute key.
    private static final int EGL_RECORDABLE_ANDROID = 0x3142;

    // EGL state; the EGL_NO_* sentinels mean "not initialized" / "released".
    private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
    private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
    private EGLConfig mEGLConfig = null;
    private int mGlVersion = -1;    // 2 or 3 once the context has been created


    /**
     * Prepares EGL display and context.
     * <p>
     * Equivalent to EglCore(null, 0).
     */
    public EglCore() {
        this(null, 0);
    }
67 |
68 | /**
69 | * Prepares EGL display and context.
70 | *
71 | * @param sharedContext The context to share, or null if sharing is not desired.
72 | * @param flags Configuration bit flags, e.g. FLAG_RECORDABLE.
73 | */
74 | public EglCore(EGLContext sharedContext, int flags) {
75 | if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
76 | throw new RuntimeException("EGL already set up");
77 | }
78 |
79 | if (sharedContext == null) {
80 | sharedContext = EGL14.EGL_NO_CONTEXT;
81 | }
82 |
83 | mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
84 | if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
85 | throw new RuntimeException("unable to get EGL14 display");
86 | }
87 | int[] version = new int[2];
88 | if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
89 | mEGLDisplay = null;
90 | throw new RuntimeException("unable to initialize EGL14");
91 | }
92 |
93 | // Try to get a GLES3 context, if requested.
94 | if ((flags & FLAG_TRY_GLES3) != 0) {
95 | //Log.d(TAG, "Trying GLES 3");
96 | EGLConfig config = getConfig(flags, 3);
97 | if (config != null) {
98 | int[] attrib3_list = {
99 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 3,
100 | EGL14.EGL_NONE
101 | };
102 | EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
103 | attrib3_list, 0);
104 |
105 | if (EGL14.eglGetError() == EGL14.EGL_SUCCESS) {
106 | //Log.d(TAG, "Got GLES 3 config");
107 | mEGLConfig = config;
108 | mEGLContext = context;
109 | mGlVersion = 3;
110 | }
111 | }
112 | }
113 | if (mEGLContext == EGL14.EGL_NO_CONTEXT) { // GLES 2 only, or GLES 3 attempt failed
114 | //Log.d(TAG, "Trying GLES 2");
115 | EGLConfig config = getConfig(flags, 2);
116 | if (config == null) {
117 | throw new RuntimeException("Unable to find a suitable EGLConfig");
118 | }
119 | int[] attrib2_list = {
120 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
121 | EGL14.EGL_NONE
122 | };
123 | EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
124 | attrib2_list, 0);
125 | checkEglError("eglCreateContext");
126 | mEGLConfig = config;
127 | mEGLContext = context;
128 | mGlVersion = 2;
129 | }
130 |
131 | // Confirm with query.
132 | int[] values = new int[1];
133 | EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION,
134 | values, 0);
135 | Log.d(TAG, "EGLContext created, client version " + values[0]);
136 | }
137 |
    /**
     * Finds a suitable EGLConfig.
     *
     * @param flags   Bit flags from constructor (FLAG_RECORDABLE is honored here).
     * @param version Must be 2 or 3.
     * @return a matching config, or null if none was found.
     */
    private EGLConfig getConfig(int flags, int version) {
        int renderableType = EGL14.EGL_OPENGL_ES2_BIT;
        if (version >= 3) {
            renderableType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR;
        }

        // The actual surface is generally RGBA or RGBX, so situationally omitting alpha
        // doesn't really help.  It can also lead to a huge performance hit on glReadPixels()
        // when reading into a GL_RGBA buffer.
        int[] attribList = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                //EGL14.EGL_DEPTH_SIZE, 16,
                //EGL14.EGL_STENCIL_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, renderableType,
                EGL14.EGL_NONE, 0,      // placeholder for recordable [@-3]
                EGL14.EGL_NONE
        };
        if ((flags & FLAG_RECORDABLE) != 0) {
            // Overwrite the placeholder pair above with EGL_RECORDABLE_ANDROID = 1.
            attribList[attribList.length - 3] = EGL_RECORDABLE_ANDROID;
            attribList[attribList.length - 2] = 1;
        }
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
                numConfigs, 0)) {
            Log.w(TAG, "unable to find RGB8888 / " + version + " EGLConfig");
            return null;
        }
        return configs[0];
    }
177 |
    /**
     * Discards all resources held by this class, notably the EGL context.  This must be
     * called from the thread where the context was created.
     * <p>
     * On completion, no context will be current.
     */
    public void release() {
        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
            // Android is unusual in that it uses a reference-counted EGLDisplay.  So for
            // every eglInitialize() we need an eglTerminate().
            EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
                    EGL14.EGL_NO_CONTEXT);
            EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
            EGL14.eglReleaseThread();
            EGL14.eglTerminate(mEGLDisplay);
        }

        // Reset to sentinels so a second release() (or finalize()) is a no-op.
        mEGLDisplay = EGL14.EGL_NO_DISPLAY;
        mEGLContext = EGL14.EGL_NO_CONTEXT;
        mEGLConfig = null;
    }

    @Override
    protected void finalize() throws Throwable {
        try {
            if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
                // We're limited here -- finalizers don't run on the thread that holds
                // the EGL state, so if a surface or context is still current on another
                // thread we can't fully release it here.  Exceptions thrown from here
                // are quietly discarded.  Complain in the log file.
                Log.w(TAG, "WARNING: EglCore was not explicitly released -- state may be leaked");
                release();
            }
        } finally {
            super.finalize();
        }
    }
215 |
    /**
     * Destroys the specified surface.  Note the EGLSurface won't actually be destroyed if it's
     * still current in a context.
     */
    public void releaseSurface(EGLSurface eglSurface) {
        EGL14.eglDestroySurface(mEGLDisplay, eglSurface);
    }

    /**
     * Creates an EGL surface associated with a Surface.
     * <p>
     * If this is destined for MediaCodec, the EGLConfig should have the "recordable" attribute.
     *
     * @param surface a Surface or SurfaceTexture; anything else throws.
     */
    public EGLSurface createWindowSurface(Object surface) {
        if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
            throw new RuntimeException("invalid surface: " + surface);
        }

        // Create a window surface, and attach it to the Surface we received.
        int[] surfaceAttribs = {
                EGL14.EGL_NONE
        };
        EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface,
                surfaceAttribs, 0);
        checkEglError("eglCreateWindowSurface");
        if (eglSurface == null) {
            throw new RuntimeException("surface was null");
        }
        return eglSurface;
    }

    /**
     * Creates an EGL surface associated with an offscreen (pbuffer) buffer.
     */
    public EGLSurface createOffscreenSurface(int width, int height) {
        int[] surfaceAttribs = {
                EGL14.EGL_WIDTH, width,
                EGL14.EGL_HEIGHT, height,
                EGL14.EGL_NONE
        };
        EGLSurface eglSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig,
                surfaceAttribs, 0);
        checkEglError("eglCreatePbufferSurface");
        if (eglSurface == null) {
            throw new RuntimeException("surface was null");
        }
        return eglSurface;
    }
264 |
265 | /**
266 | * Makes our EGL context current, using the supplied surface for both "draw" and "read".
267 | */
268 | public void makeCurrent(EGLSurface eglSurface) {
269 | if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
270 | // called makeCurrent() before create?
271 | Log.d(TAG, "NOTE: makeCurrent w/o display");
272 | }
273 | if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
274 | throw new RuntimeException("eglMakeCurrent failed");
275 | }
276 | }
277 |
278 | /**
279 | * Makes our EGL context current, using the supplied "draw" and "read" surfaces.
280 | */
281 | public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) {
282 | if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
283 | // called makeCurrent() before create?
284 | Log.d(TAG, "NOTE: makeCurrent w/o display");
285 | }
286 | if (!EGL14.eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) {
287 | throw new RuntimeException("eglMakeCurrent(draw,read) failed");
288 | }
289 | }
290 |
291 | /**
292 | * Makes no context current.
293 | */
294 | public void makeNothingCurrent() {
295 | if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
296 | EGL14.EGL_NO_CONTEXT)) {
297 | throw new RuntimeException("eglMakeCurrent failed");
298 | }
299 | }
300 |
301 | /**
302 | * Calls eglSwapBuffers. Use this to "publish" the current frame.
303 | *
304 | * @return false on failure
305 | */
306 | public boolean swapBuffers(EGLSurface eglSurface) {
307 | return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface);
308 | }
309 |
310 | /**
311 | * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
312 | */
313 | public void setPresentationTime(EGLSurface eglSurface, long nsecs) {
314 | EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs);
315 | }
316 |
317 | /**
318 | * Returns true if our context and the specified surface are current.
319 | */
320 | public boolean isCurrent(EGLSurface eglSurface) {
321 | return mEGLContext.equals(EGL14.eglGetCurrentContext()) &&
322 | eglSurface.equals(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW));
323 | }
324 |
325 | /**
326 | * Performs a simple surface query.
327 | */
328 | public int querySurface(EGLSurface eglSurface, int what) {
329 | int[] value = new int[1];
330 | EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0);
331 | return value[0];
332 | }
333 |
334 | /**
335 | * Queries a string value.
336 | */
337 | public String queryString(int what) {
338 | return EGL14.eglQueryString(mEGLDisplay, what);
339 | }
340 |
341 | /**
342 | * Returns the GLES version this context is configured for (currently 2 or 3).
343 | */
344 | public int getGlVersion() {
345 | return mGlVersion;
346 | }
347 |
348 | /**
349 | * Writes the current display, context, and surface to the log.
350 | */
351 | public static void logCurrent(String msg) {
352 | EGLDisplay display;
353 | EGLContext context;
354 | EGLSurface surface;
355 |
356 | display = EGL14.eglGetCurrentDisplay();
357 | context = EGL14.eglGetCurrentContext();
358 | surface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
359 | Log.i(TAG, "Current EGL (" + msg + "): display=" + display + ", context=" + context +
360 | ", surface=" + surface);
361 | }
362 |
363 | /**
364 | * Checks for EGL errors. Throws an exception if an error has been raised.
365 | */
366 | private void checkEglError(String msg) {
367 | int error;
368 | if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
369 | throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
370 | }
371 | }
372 |
373 | public EGLContext getEGLContext() {
374 | return mEGLContext;
375 | }
376 | }
377 |
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/java/com/inuker/library/encoder/EglSurfaceBase.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2013 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.inuker.library.encoder;
18 |
19 | import android.graphics.Bitmap;
20 | import android.opengl.EGL14;
21 | import android.opengl.EGLSurface;
22 | import android.opengl.GLES20;
23 | import android.util.Log;
24 |
25 | import java.io.BufferedOutputStream;
26 | import java.io.File;
27 | import java.io.FileOutputStream;
28 | import java.io.IOException;
29 | import java.nio.ByteBuffer;
30 | import java.nio.ByteOrder;
31 |
/**
 * Common base class for EGL surfaces.
 * <p>
 * There can be multiple surfaces associated with a single context.
 */
public class EglSurfaceBase {
    protected static final String TAG = GlUtil.TAG;

    // EglCore object we're associated with.  It may be associated with multiple surfaces.
    protected EglCore mEglCore;

    private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
    // Cached dimensions; -1 means "query EGL on demand" (see getWidth/getHeight).
    private int mWidth = -1;
    private int mHeight = -1;

    protected EglSurfaceBase(EglCore eglCore) {
        mEglCore = eglCore;
    }

    /**
     * Creates a window surface.
     *
     * @param surface May be a Surface or SurfaceTexture.
     * @throws IllegalStateException if a surface was already created for this object.
     */
    public void createWindowSurface(Object surface) {
        if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
            throw new IllegalStateException("surface already created");
        }
        mEGLSurface = mEglCore.createWindowSurface(surface);

        // Don't cache width/height here, because the size of the underlying surface can change
        // out from under us (see e.g. HardwareScalerActivity).
        //mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
        //mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
    }
67 |
68 | /**
69 | * Creates an off-screen surface.
70 | */
71 | public void createOffscreenSurface(int width, int height) {
72 | if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
73 | throw new IllegalStateException("surface already created");
74 | }
75 | mEGLSurface = mEglCore.createOffscreenSurface(width, height);
76 | mWidth = width;
77 | mHeight = height;
78 | }
79 |
80 | /**
81 | * Returns the surface's width, in pixels.
82 | *
83 | * If this is called on a window surface, and the underlying surface is in the process
84 | * of changing size, we may not see the new size right away (e.g. in the "surfaceChanged"
85 | * callback). The size should match after the next buffer swap.
86 | */
87 | public int getWidth() {
88 | if (mWidth < 0) {
89 | return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
90 | } else {
91 | return mWidth;
92 | }
93 | }
94 |
95 | /**
96 | * Returns the surface's height, in pixels.
97 | */
98 | public int getHeight() {
99 | if (mHeight < 0) {
100 | return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
101 | } else {
102 | return mHeight;
103 | }
104 | }
105 |
106 | /**
107 | * Release the EGL surface.
108 | */
109 | public void releaseEglSurface() {
110 | mEglCore.releaseSurface(mEGLSurface);
111 | mEGLSurface = EGL14.EGL_NO_SURFACE;
112 | mWidth = mHeight = -1;
113 | }
114 |
115 | /**
116 | * Makes our EGL context and surface current.
117 | */
118 | public void makeCurrent() {
119 | mEglCore.makeCurrent(mEGLSurface);
120 | }
121 |
122 | /**
123 | * Makes our EGL context and surface current for drawing, using the supplied surface
124 | * for reading.
125 | */
126 | public void makeCurrentReadFrom(EglSurfaceBase readSurface) {
127 | mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface);
128 | }
129 |
130 | /**
131 | * Calls eglSwapBuffers. Use this to "publish" the current frame.
132 | *
133 | * @return false on failure
134 | */
135 | public boolean swapBuffers() {
136 | boolean result = mEglCore.swapBuffers(mEGLSurface);
137 | if (!result) {
138 | Log.d(TAG, "WARNING: swapBuffers() failed");
139 | }
140 | return result;
141 | }
142 |
143 | /**
144 | * Sends the presentation time stamp to EGL.
145 | *
146 | * @param nsecs Timestamp, in nanoseconds.
147 | */
148 | public void setPresentationTime(long nsecs) {
149 | mEglCore.setPresentationTime(mEGLSurface, nsecs);
150 | }
151 |
152 | /**
153 | * Saves the EGL surface to a file.
154 | *
155 | * Expects that this object's EGL surface is current.
156 | */
157 | public void saveFrame(File file) throws IOException {
158 | if (!mEglCore.isCurrent(mEGLSurface)) {
159 | throw new RuntimeException("Expected EGL context/surface is not current");
160 | }
161 |
162 | // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
163 | // data (i.e. a byte of red, followed by a byte of green...). While the Bitmap
164 | // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
165 | // Bitmap "copy pixels" method wants the same format GL provides.
166 | //
167 | // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
168 | // here often.
169 | //
170 | // Making this even more interesting is the upside-down nature of GL, which means
171 | // our output will look upside down relative to what appears on screen if the
172 | // typical GL conventions are used.
173 |
174 | String filename = file.toString();
175 |
176 | int width = getWidth();
177 | int height = getHeight();
178 | ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
179 | buf.order(ByteOrder.LITTLE_ENDIAN);
180 | GLES20.glReadPixels(0, 0, width, height,
181 | GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
182 | GlUtil.checkGlError("glReadPixels");
183 | buf.rewind();
184 |
185 | BufferedOutputStream bos = null;
186 | try {
187 | bos = new BufferedOutputStream(new FileOutputStream(filename));
188 | Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
189 | bmp.copyPixelsFromBuffer(buf);
190 | bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
191 | bmp.recycle();
192 | } finally {
193 | if (bos != null) bos.close();
194 | }
195 | Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
196 | }
197 | }
198 |
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/java/com/inuker/library/encoder/GlUtil.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.inuker.library.encoder;
18 |
19 | import android.opengl.GLES20;
20 | import android.opengl.GLES30;
21 | import android.opengl.Matrix;
22 | import android.util.Log;
23 |
24 | import java.nio.ByteBuffer;
25 | import java.nio.ByteOrder;
26 | import java.nio.FloatBuffer;
27 |
/**
 * Some OpenGL utility functions.
 */
public class GlUtil {
    public static final String TAG = "Grafika";

    /** Identity matrix for general use.  Don't modify or life will get weird. */
    public static final float[] IDENTITY_MATRIX;
    static {
        IDENTITY_MATRIX = new float[16];
        Matrix.setIdentityM(IDENTITY_MATRIX, 0);
    }

    // Size of a float in bytes, for vertex-buffer math.
    private static final int SIZEOF_FLOAT = 4;


    private GlUtil() {}     // do not instantiate
45 |
46 | /**
47 | * Creates a new program from the supplied vertex and fragment shaders.
48 | *
49 | * @return A handle to the program, or 0 on failure.
50 | */
51 | public static int createProgram(String vertexSource, String fragmentSource) {
52 | int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
53 | if (vertexShader == 0) {
54 | return 0;
55 | }
56 | int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
57 | if (pixelShader == 0) {
58 | return 0;
59 | }
60 |
61 | int program = GLES20.glCreateProgram();
62 | checkGlError("glCreateProgram");
63 | if (program == 0) {
64 | Log.e(TAG, "Could not create program");
65 | }
66 | GLES20.glAttachShader(program, vertexShader);
67 | checkGlError("glAttachShader");
68 | GLES20.glAttachShader(program, pixelShader);
69 | checkGlError("glAttachShader");
70 | GLES20.glLinkProgram(program);
71 | int[] linkStatus = new int[1];
72 | GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
73 | if (linkStatus[0] != GLES20.GL_TRUE) {
74 | Log.e(TAG, "Could not link program: ");
75 | Log.e(TAG, GLES20.glGetProgramInfoLog(program));
76 | GLES20.glDeleteProgram(program);
77 | program = 0;
78 | }
79 | return program;
80 | }
81 |
82 | /**
83 | * Compiles the provided shader source.
84 | *
85 | * @return A handle to the shader, or 0 on failure.
86 | */
87 | public static int loadShader(int shaderType, String source) {
88 | int shader = GLES20.glCreateShader(shaderType);
89 | checkGlError("glCreateShader type=" + shaderType);
90 | GLES20.glShaderSource(shader, source);
91 | GLES20.glCompileShader(shader);
92 | int[] compiled = new int[1];
93 | GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
94 | if (compiled[0] == 0) {
95 | Log.e(TAG, "Could not compile shader " + shaderType + ":");
96 | Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
97 | GLES20.glDeleteShader(shader);
98 | shader = 0;
99 | }
100 | return shader;
101 | }
102 |
103 | /**
104 | * Checks to see if a GLES error has been raised.
105 | */
106 | public static void checkGlError(String op) {
107 | int error = GLES20.glGetError();
108 | if (error != GLES20.GL_NO_ERROR) {
109 | String msg = op + ": glError 0x" + Integer.toHexString(error);
110 | Log.e(TAG, msg);
111 | throw new RuntimeException(msg);
112 | }
113 | }
114 |
115 | /**
116 | * Checks to see if the location we obtained is valid. GLES returns -1 if a label
117 | * could not be found, but does not set the GL error.
118 | *
119 | * Throws a RuntimeException if the location is invalid.
120 | */
121 | public static void checkLocation(int location, String label) {
122 | if (location < 0) {
123 | throw new RuntimeException("Unable to locate '" + label + "' in program");
124 | }
125 | }
126 |
127 | /**
128 | * Creates a texture from raw data.
129 | *
130 | * @param data Image data, in a "direct" ByteBuffer.
131 | * @param width Texture width, in pixels (not bytes).
132 | * @param height Texture height, in pixels.
133 | * @param format Image data format (use constant appropriate for glTexImage2D(), e.g. GL_RGBA).
134 | * @return Handle to texture.
135 | */
136 | public static int createImageTexture(ByteBuffer data, int width, int height, int format) {
137 | int[] textureHandles = new int[1];
138 | int textureHandle;
139 |
140 | GLES20.glGenTextures(1, textureHandles, 0);
141 | textureHandle = textureHandles[0];
142 | GlUtil.checkGlError("glGenTextures");
143 |
144 | // Bind the texture handle to the 2D texture target.
145 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);
146 |
147 | // Configure min/mag filtering, i.e. what scaling method do we use if what we're rendering
148 | // is smaller or larger than the source image.
149 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
150 | GLES20.GL_LINEAR);
151 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
152 | GLES20.GL_LINEAR);
153 | GlUtil.checkGlError("loadImageTexture");
154 |
155 | // Load the data from the buffer into the texture handle.
156 | GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, format,
157 | width, height, /*border*/ 0, format, GLES20.GL_UNSIGNED_BYTE, data);
158 | GlUtil.checkGlError("loadImageTexture");
159 |
160 | return textureHandle;
161 | }
162 |
163 | /**
164 | * Allocates a direct float buffer, and populates it with the float array data.
165 | */
166 | public static FloatBuffer createFloatBuffer(float[] coords) {
167 | // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
168 | ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * SIZEOF_FLOAT);
169 | bb.order(ByteOrder.nativeOrder());
170 | FloatBuffer fb = bb.asFloatBuffer();
171 | fb.put(coords);
172 | fb.position(0);
173 | return fb;
174 | }
175 |
176 | /**
177 | * Writes GL version info to the log.
178 | */
179 | public static void logVersionInfo() {
180 | Log.i(TAG, "vendor : " + GLES20.glGetString(GLES20.GL_VENDOR));
181 | Log.i(TAG, "renderer: " + GLES20.glGetString(GLES20.GL_RENDERER));
182 | Log.i(TAG, "version : " + GLES20.glGetString(GLES20.GL_VERSION));
183 |
184 | if (false) {
185 | int[] values = new int[1];
186 | GLES30.glGetIntegerv(GLES30.GL_MAJOR_VERSION, values, 0);
187 | int majorVersion = values[0];
188 | GLES30.glGetIntegerv(GLES30.GL_MINOR_VERSION, values, 0);
189 | int minorVersion = values[0];
190 | if (GLES30.glGetError() == GLES30.GL_NO_ERROR) {
191 | Log.i(TAG, "iversion: " + majorVersion + "." + minorVersion);
192 | }
193 | }
194 | }
195 | }
196 |
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/java/com/inuker/library/encoder/MediaEncoderCore.java:
--------------------------------------------------------------------------------
1 | package com.inuker.library.encoder;
2 |
3 | import android.media.MediaCodec;
4 | import android.media.MediaFormat;
5 | import android.util.Log;
6 |
7 | import com.inuker.library.utils.LogUtils;
8 |
9 | import java.nio.ByteBuffer;
10 |
11 | /**
12 | * Created by liwentian on 17/8/1.
13 | */
14 |
15 | public abstract class MediaEncoderCore {
16 |
17 | protected final String TAG = getClass().getSimpleName();
18 |
19 | protected static final boolean VERBOSE = false;
20 |
21 | protected AndroidMuxer mMuxer;
22 |
23 | protected MediaCodec mEncoder;
24 |
25 | protected int mTrackIndex = -1;
26 |
27 | protected volatile boolean mRecording;
28 |
29 | protected MediaCodec.BufferInfo mBufferInfo;
30 |
31 | public MediaEncoderCore(AndroidMuxer muxer) {
32 | LogUtils.v(String.format("%s
44 | * If endOfStream is not set, this returns when there is no more data to drain. If it
45 | * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
46 | * Calling this with endOfStream set should be done once, right before stopping the muxer.
47 | *
48 | * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're
49 | * not recording audio.
50 | */
    public void drainEncoder(boolean endOfStream) {
        // Poll interval for dequeueOutputBuffer, in microseconds.
        final int TIMEOUT_USEC = 10000;
        if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

        if (endOfStream && isSurfaceInput()) {
            // Surface-input codecs have no input buffer queue to flag, so EOS
            // is signalled directly on the codec.
            if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
            mEncoder.signalEndOfInputStream();
        }

        ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
        while (true) {
            int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);

            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (!endOfStream) {
                    break; // out of while
                } else {
                    // Draining to EOS: keep spinning until the EOS flag shows
                    // up on an output buffer.
                    if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                encoderOutputBuffers = mEncoder.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // should happen before receiving buffers, and should only happen once
                MediaFormat newFormat = mEncoder.getOutputFormat();
                Log.d(TAG, "encoder output format changed: " + newFormat);

                // now that we have the Magic Goodies, start the muxer
                mTrackIndex = mMuxer.addTrack(newFormat);
            } else if (encoderStatus < 0) {
                Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                        encoderStatus);
                // let's ignore it
            } else {
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                            " was null");
                }

                // Muxer not started yet: drop the payload (size = 0 skips the
                // write below) but still release the buffer back to the codec.
                if (!mMuxer.isStarted()) {
                    mBufferInfo.size = 0;
                }

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // The codec config data was pulled out and fed to the muxer when we got
                    // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                    if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                    mBufferInfo.size = 0;
                }

                if (mBufferInfo.size != 0) {
                    // adjust the ByteBuffer values to match BufferInfo (not needed?)
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                    mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                    if (VERBOSE) {
                        Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
                                mBufferInfo.presentationTimeUs);
                    }
                }

                mEncoder.releaseOutputBuffer(encoderStatus, false);

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (!endOfStream) {
                        Log.w(TAG, "reached end of stream unexpectedly");
                    } else {
                        if (VERBOSE) Log.d(TAG, "end of stream reached");
                    }
                    break; // out of while
                }
            }
        }
    }
130 |
131 | public void release() {
132 | LogUtils.v(String.format("%s.release", getClass().getSimpleName()));
133 |
134 | if (mEncoder != null) {
135 | mEncoder.stop();
136 | mEncoder.release();
137 | mEncoder = null;
138 | }
139 |
140 | if (mMuxer != null) {
141 | mMuxer.release();
142 | }
143 | }
144 |
145 | protected abstract boolean isSurfaceInput();
146 |
147 | }
148 |
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/java/com/inuker/library/encoder/MovieEncoder1.java:
--------------------------------------------------------------------------------
1 | package com.inuker.library.encoder;
2 |
3 | import android.content.Context;
4 | import android.graphics.ImageFormat;
5 | import android.graphics.SurfaceTexture;
6 |
7 | import com.inuker.library.RGBProgram;
8 | import com.inuker.library.utils.LogUtils;
9 |
10 | import java.nio.ByteBuffer;
11 | import java.nio.ByteOrder;
12 |
13 | /**
14 | * Created by liwentian on 2017/10/31.
15 | */
16 |
17 | public class MovieEncoder1 extends BaseMovieEncoder {
18 |
19 | private volatile RGBProgram mRGBProgram;
20 | private volatile ByteBuffer mYUVBuffer;
21 |
22 | public MovieEncoder1(Context context, int width, int height) {
23 | super(context, width, height);
24 | }
25 |
26 | @Override
27 | public void onPrepareEncoder() {
28 | LogUtils.v(String.format("onPrepareEncoder width = %d, height = %d", mWidth, mHeight));
29 | mRGBProgram = new RGBProgram(mContext, mWidth, mHeight);
30 | mYUVBuffer = ByteBuffer.allocateDirect(mWidth * mHeight * 4)
31 | .order(ByteOrder.nativeOrder());
32 | }
33 |
34 | @Override
35 | public void onFrameAvailable(Object object, long timestamp) {
36 | byte[] data = (byte[]) object;
37 |
38 | if (mYUVBuffer == null) {
39 | return;
40 | }
41 |
42 | // LogUtils.v(String.format("onFrameAvailable: data = %d, buffer = %d", data.length, mYUVBuffer.capacity()));
43 |
44 | synchronized (mYUVBuffer) {
45 | mYUVBuffer.position(0);
46 | int len = Math.min(mYUVBuffer.capacity(), data.length);
47 | mYUVBuffer.put(data, 0, len);
48 | }
49 | mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
50 | (int) (timestamp >> 32), (int) timestamp));
51 | }
52 |
53 | @Override
54 | public void onFrameAvailable() {
55 | mRGBProgram.useProgram();
56 |
57 | synchronized (mYUVBuffer) {
58 | mRGBProgram.setUniforms(mYUVBuffer.array());
59 | }
60 |
61 | mRGBProgram.draw();
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/java/com/inuker/library/encoder/VideoEncoderCore.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.inuker.library.encoder;
18 |
19 | import android.media.MediaCodec;
20 | import android.media.MediaCodecInfo;
21 | import android.media.MediaFormat;
22 | import android.util.Log;
23 | import android.view.Surface;
24 |
25 | import java.io.IOException;
26 |
27 | /**
28 | * This class wraps up the core components used for surface-input video encoding.
29 | *
30 | * Once created, frames are fed to the input surface. Remember to provide the presentation
31 | * time stamp, and always call drainEncoder() before swapBuffers() to ensure that the
32 | * producer side doesn't get backed up.
33 | *
34 | * This class is not thread-safe, with one exception: it is valid to use the input surface
35 | * on one thread, and drain the output on a different thread.
36 | */
37 | public class VideoEncoderCore extends MediaEncoderCore {
38 |
39 | // TODO: these ought to be configurable as well
40 | private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
41 | private static final int FRAME_RATE = 30; // 30fps
42 | private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames
43 | private static final int BIT_RATE = 4000000;
44 |
45 | private Surface mInputSurface;
46 |
47 | /**
48 | * Configures encoder and muxer state, and prepares the input Surface.
49 | */
50 | public VideoEncoderCore(AndroidMuxer muxer, int width, int height) {
51 | super(muxer);
52 |
53 | prepareEncoder(width, height);
54 |
55 | // Create a MediaMuxer. We can't add the video track and start() the muxer here,
56 | // because our MediaFormat doesn't have the Magic Goodies. These can only be
57 | // obtained from the encoder after it has started processing data.
58 | //
59 | // We're not actually interested in multiplexing audio. We just want to convert
60 | // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
61 | // mMuxer = new MediaMuxer(outputFile.toString(),
62 | // MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
63 | }
64 |
65 | private void prepareEncoder(int width, int height) {
66 | mBufferInfo = new MediaCodec.BufferInfo();
67 |
68 | MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
69 |
70 | // Set some properties. Failing to specify some of these can cause the MediaCodec
71 | // configure() call to throw an unhelpful exception.
72 | format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
73 | MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
74 | format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
75 | format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
76 | format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
77 | if (VERBOSE) Log.d(TAG, "format: " + format);
78 |
79 | // Create a MediaCodec encoder, and configure it with our format. Get a Surface
80 | // we can use for input and wrap it with a class that handles the EGL work.
81 |
82 | try {
83 | mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
84 | } catch (IOException e) {
85 | e.printStackTrace();
86 | }
87 |
88 | mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
89 | mInputSurface = mEncoder.createInputSurface();
90 | mEncoder.start();
91 | }
92 |
93 | /**
94 | * Returns the encoder's input surface.
95 | */
96 | public Surface getInputSurface() {
97 | return mInputSurface;
98 | }
99 |
100 | @Override
101 | public void start() {
102 | drainEncoder(false);
103 | }
104 |
105 | @Override
106 | public void stop() {
107 | drainEncoder(true);
108 | }
109 |
110 | @Override
111 | protected boolean isSurfaceInput() {
112 | return true;
113 | }
114 | }
115 |
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/java/com/inuker/library/encoder/WindowSurface.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2013 Google Inc. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.inuker.library.encoder;
18 |
19 | import android.graphics.SurfaceTexture;
20 | import android.view.Surface;
21 |
22 | /**
23 | * Recordable EGL window surface.
24 | *
25 | * It's good practice to explicitly release() the surface, preferably from a "finally" block.
26 | */
27 | public class WindowSurface extends EglSurfaceBase {
28 | private Surface mSurface;
29 | private boolean mReleaseSurface;
30 |
31 | /**
32 | * Associates an EGL surface with the native window surface.
33 | *
34 | * Set releaseSurface to true if you want the Surface to be released when release() is
35 | * called. This is convenient, but can interfere with framework classes that expect to
36 | * manage the Surface themselves (e.g. if you release a SurfaceView's Surface, the
37 | * surfaceDestroyed() callback won't fire).
38 | */
39 | public WindowSurface(EglCore eglCore, Surface surface, boolean releaseSurface) {
40 | super(eglCore);
41 | createWindowSurface(surface);
42 | mSurface = surface;
43 | mReleaseSurface = releaseSurface;
44 | }
45 |
46 | /**
47 | * Associates an EGL surface with the SurfaceTexture.
48 | */
49 | public WindowSurface(EglCore eglCore, SurfaceTexture surfaceTexture) {
50 | super(eglCore);
51 | createWindowSurface(surfaceTexture);
52 | }
53 |
54 | /**
55 | * Releases any resources associated with the EGL surface (and, if configured to do so,
56 | * with the Surface as well).
57 | *
58 | * Does not require that the surface's EGL context be current.
59 | */
60 | public void release() {
61 | releaseEglSurface();
62 | if (mSurface != null) {
63 | if (mReleaseSurface) {
64 | mSurface.release();
65 | }
66 | mSurface = null;
67 | }
68 | }
69 |
70 | /**
71 | * Recreate the EGLSurface, using the new EglBase. The caller should have already
72 | * freed the old EGLSurface with releaseEglSurface().
73 | *
74 | * This is useful when we want to update the EGLSurface associated with a Surface.
75 | * For example, if we want to share with a different EGLContext, which can only
76 | * be done by tearing down and recreating the context. (That's handled by the caller;
77 | * this just creates a new EGLSurface for the Surface we were handed earlier.)
78 | *
79 | * If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a
80 | * context somewhere, the create call will fail with complaints from the Surface
81 | * about already being connected.
82 | */
83 | public void recreate(EglCore newEglCore) {
84 | if (mSurface == null) {
85 | throw new RuntimeException("not yet implemented for SurfaceTexture");
86 | }
87 | mEglCore = newEglCore; // switch to new context
88 | createWindowSurface(mSurface); // create new surface
89 | }
90 | }
91 |
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/java/com/inuker/library/encoder/YUVProgram.java:
--------------------------------------------------------------------------------
1 | package com.inuker.library.encoder;
2 |
3 | import android.content.Context;
4 | import android.content.res.Configuration;
5 | import android.opengl.GLES20;
6 | import android.opengl.Matrix;
7 |
8 | import com.inuker.library.R;
9 | import com.inuker.library.ShaderProgram;
10 |
11 | import java.nio.ByteBuffer;
12 | import java.nio.ByteOrder;
13 | import java.nio.FloatBuffer;
14 |
15 | import static android.opengl.GLES20.GL_CLAMP_TO_EDGE;
16 | import static android.opengl.GLES20.GL_FLOAT;
17 | import static android.opengl.GLES20.GL_LINEAR;
18 | import static android.opengl.GLES20.GL_LUMINANCE;
19 | import static android.opengl.GLES20.GL_LUMINANCE_ALPHA;
20 | import static android.opengl.GLES20.GL_TEXTURE0;
21 | import static android.opengl.GLES20.GL_TEXTURE1;
22 | import static android.opengl.GLES20.GL_TEXTURE_2D;
23 | import static android.opengl.GLES20.GL_TEXTURE_MAG_FILTER;
24 | import static android.opengl.GLES20.GL_TEXTURE_MIN_FILTER;
25 | import static android.opengl.GLES20.GL_TEXTURE_WRAP_S;
26 | import static android.opengl.GLES20.GL_TEXTURE_WRAP_T;
27 | import static android.opengl.GLES20.GL_TRIANGLE_STRIP;
28 | import static android.opengl.GLES20.GL_UNSIGNED_BYTE;
29 | import static android.opengl.GLES20.glActiveTexture;
30 | import static android.opengl.GLES20.glBindTexture;
31 | import static android.opengl.GLES20.glDrawArrays;
32 | import static android.opengl.GLES20.glEnableVertexAttribArray;
33 | import static android.opengl.GLES20.glGenTextures;
34 | import static android.opengl.GLES20.glGetAttribLocation;
35 | import static android.opengl.GLES20.glGetUniformLocation;
36 | import static android.opengl.GLES20.glTexImage2D;
37 | import static android.opengl.GLES20.glTexParameterf;
38 | import static android.opengl.GLES20.glUniform1i;
39 | import static android.opengl.GLES20.glVertexAttribPointer;
40 |
41 | /**
42 | * Created by liwentian on 17/8/16.
43 | */
44 |
45 | /**
46 | * 输入Camera的预览NV21数据
47 | */
48 | public class YUVProgram extends ShaderProgram {
49 |
50 | protected final int mUniformYTextureLocation;
51 | protected final int mUniformUVTextureLocation;
52 |
53 | static final float CUBE[] = {
54 | -1.0f, -1.0f,
55 | 1.0f, -1.0f,
56 | -1.0f, 1.0f,
57 | 1.0f, 1.0f,
58 | };
59 |
60 | public static final float TEXTURE_UPSIDE_DOWN[] = {
61 | 0.0f, 1.0f,
62 | 1.0f, 1.0f,
63 | 0.0f, 0.0f,
64 | 1.0f, 0.0f,
65 | };
66 |
67 | public static final float TEXTURE_NO_ROTATION[] = {
68 | 0.0f, 0.0f,
69 | 1.0f, 0.0f,
70 | 0.0f, 1.0f,
71 | 1.0f, 1.0f,
72 | };
73 |
74 | // Attribute locations
75 | private final int aPositionLocation;
76 | private final int aTextureCoordinatesLocation;
77 |
78 | private final int uMVPMatrixLocation;
79 |
80 | private final FloatBuffer mGLCubeBuffer;
81 | private final FloatBuffer mGLTextureBuffer;
82 |
83 | private int mYTestureId, mUVTextureId;
84 |
85 | private ByteBuffer mYBuffer, mUVBuffer;
86 |
87 | public YUVProgram(Context context, int width, int height) {
88 | super(context, R.raw.yuv_vertex, R.raw.yuv_fragment, width, height);
89 |
90 | mUniformYTextureLocation = glGetUniformLocation(program, "y_texture");
91 | mUniformUVTextureLocation = glGetUniformLocation(program, "uv_texture");
92 | uMVPMatrixLocation = glGetUniformLocation(program, "uMVPMatrix");
93 |
94 | aPositionLocation = glGetAttribLocation(program, "a_Position");
95 | aTextureCoordinatesLocation = glGetAttribLocation(program, "a_TextureCoordinates");
96 |
97 | mGLCubeBuffer = ByteBuffer.allocateDirect(CUBE.length * 4)
98 | .order(ByteOrder.nativeOrder())
99 | .asFloatBuffer();
100 | mGLCubeBuffer.put(CUBE).position(0);
101 |
102 | mGLTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_NO_ROTATION.length * 4)
103 | .order(ByteOrder.nativeOrder())
104 | .asFloatBuffer();
105 |
106 | mYBuffer = ByteBuffer.allocateDirect(width * height)
107 | .order(ByteOrder.nativeOrder());
108 |
109 | mUVBuffer = ByteBuffer.allocateDirect(width * height / 2)
110 | .order(ByteOrder.nativeOrder());
111 |
112 | int[] textures = new int[2];
113 | glGenTextures(2, textures, 0);
114 |
115 | glActiveTexture(GL_TEXTURE0);
116 | glBindTexture(GL_TEXTURE_2D, textures[0]);
117 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
118 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
119 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
120 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
121 | mYTestureId = textures[0];
122 |
123 | glActiveTexture(GL_TEXTURE1);
124 | glBindTexture(GL_TEXTURE_2D, textures[1]);
125 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
126 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
127 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
128 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
129 | mUVTextureId = textures[1];
130 |
131 | mGLCubeBuffer.clear();
132 | mGLCubeBuffer.put(CUBE).position(0);
133 |
134 | mGLTextureBuffer.clear();
135 | mGLTextureBuffer.put(TEXTURE_NO_ROTATION).position(0);
136 | }
137 |
138 | public void setUpsideDown() {
139 | mGLTextureBuffer.clear();
140 | mGLTextureBuffer.put(TEXTURE_UPSIDE_DOWN).position(0);
141 | }
142 |
143 | public void setUniforms(byte[] data) {
144 | mYBuffer.position(0);
145 | mYBuffer.put(data, 0, width * height);
146 |
147 | mUVBuffer.position(0);
148 | mUVBuffer.put(data, width * height, width * height / 2);
149 |
150 | mYBuffer.position(0);
151 | glActiveTexture(GL_TEXTURE0);
152 | glBindTexture(GL_TEXTURE_2D, mYTestureId);
153 | glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height,
154 | 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, mYBuffer);
155 | glUniform1i(mUniformYTextureLocation, 0);
156 |
157 | GlUtil.checkGlError("init YTexture");
158 |
159 | mUVBuffer.position(0);
160 | glActiveTexture(GL_TEXTURE1);
161 | glBindTexture(GL_TEXTURE_2D, mUVTextureId);
162 | glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, width / 2, height / 2,
163 | 0, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, mUVBuffer);
164 | glUniform1i(mUniformUVTextureLocation, 1);
165 |
166 |
167 | float[] matrix = new float[16];
168 | Matrix.setIdentityM(matrix, 0);
169 | int orientation = context.getResources().getConfiguration().orientation;
170 |
171 | int degrees = orientation == Configuration.ORIENTATION_LANDSCAPE ? 0 : -90;
172 | Matrix.rotateM(matrix, 0, degrees, 0.0f, 0.0f, 1.0f);
173 |
174 | GLES20.glUniformMatrix4fv(uMVPMatrixLocation, 1, false, matrix, 0);
175 |
176 | GlUtil.checkGlError("init UVTexture");
177 |
178 | mGLCubeBuffer.position(0);
179 | glVertexAttribPointer(aPositionLocation, 2, GL_FLOAT, false, 0, mGLCubeBuffer);
180 | glEnableVertexAttribArray(aPositionLocation);
181 |
182 | mGLTextureBuffer.position(0);
183 | glVertexAttribPointer(aTextureCoordinatesLocation, 2, GL_FLOAT, false, 0, mGLTextureBuffer);
184 | glEnableVertexAttribArray(aTextureCoordinatesLocation);
185 | }
186 |
187 | public void draw() {
188 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
189 | }
190 | }
191 |
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/java/com/inuker/library/utils/ImageUtils.java:
--------------------------------------------------------------------------------
1 | package com.inuker.library.utils;
2 |
3 | import com.inuker.library.MyContext;
4 |
5 | import java.io.File;
6 |
7 | /**
8 | * Created by liwentian on 2017/10/12.
9 | */
10 |
11 | public class ImageUtils {
12 |
13 | public static File getNewImageFile() {
14 | File dir = MyContext.getContext().getExternalFilesDir("image");
15 | if (!dir.exists() && !dir.mkdirs()) {
16 | return null;
17 | }
18 | String name = MD5Utils.getMD5(String.format("Image.%d", System.currentTimeMillis()));
19 | File file = new File(dir, name + ".jpg");
20 | if (file.exists() && file.isFile()) {
21 | file.delete();
22 | }
23 | return file;
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/java/com/inuker/library/utils/LogUtils.java:
--------------------------------------------------------------------------------
1 | package com.inuker.library.utils;
2 |
3 | import android.util.Log;
4 |
5 | import java.io.PrintWriter;
6 | import java.io.StringWriter;
7 | import java.io.Writer;
8 |
9 | /**
10 | * Created by liwentian on 17/8/16.
11 | */
12 |
13 | public class LogUtils {
14 |
15 | private static final String TAG = "bush";
16 |
17 | public static void v(String msg) {
18 | Log.v(TAG, msg);
19 | }
20 |
21 | public static void v(String tag, String msg) {
22 | Log.v(tag, msg);
23 | }
24 |
25 | public static void e(String msg) {
26 | Log.e(TAG, msg);
27 | }
28 |
29 | public static void e(String tag, String msg) {
30 | Log.e(tag, msg);
31 | }
32 |
33 | public static void w(String msg) {
34 | Log.w(TAG, msg);
35 | }
36 |
37 | public static void w(String tag, String msg) {
38 | Log.w(tag, msg);
39 | }
40 |
41 | public static void e(Throwable e) {
42 | String s = getThrowableString(e);
43 | e(s);
44 | }
45 |
46 | private static String getThrowableString(Throwable e) {
47 | Writer writer = new StringWriter();
48 | PrintWriter printWriter = new PrintWriter(writer);
49 |
50 | while (e != null) {
51 | e.printStackTrace(printWriter);
52 | e = e.getCause();
53 | }
54 |
55 | String text = writer.toString();
56 |
57 | printWriter.close();
58 |
59 | return text;
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/java/com/inuker/library/utils/MD5Utils.java:
--------------------------------------------------------------------------------
1 | package com.inuker.library.utils;
2 |
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
4 |
5 | /**
6 | * Created by liwentian on 2017/10/12.
7 | */
8 |
public class MD5Utils {

    /**
     * Returns the uppercase hex MD5 digest of {@code val}, or {@code val}
     * itself if the MD5 algorithm is unavailable (effectively never on
     * standard JVMs / Android).
     */
    public static String getMD5(String val) {
        try {
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            // Explicit charset: the no-arg getBytes() uses the platform
            // default, which is UTF-8 on Android but not guaranteed elsewhere.
            md5.update(val.getBytes(StandardCharsets.UTF_8));
            return getString(md5.digest());
        } catch (NoSuchAlgorithmException e) {
            // Narrowed from catch (Throwable): never swallow Errors.
            e.printStackTrace();
        }
        return val;
    }

    /** Converts raw digest bytes to an uppercase hex string. */
    private static String getString(byte[] b) {
        StringBuilder sb = new StringBuilder(b.length * 2);
        for (byte value : b) {
            sb.append(String.format("%02X", value & 0xff));
        }
        return sb.toString();
    }
}
31 |
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/res/raw/rect_fragment.glsl:
--------------------------------------------------------------------------------
precision mediump float;

// Per-vertex color interpolated from rect_vertex.glsl.
varying vec4 v_Color;

// Flat-shaded rectangle: output the interpolated vertex color unchanged.
void main() {
    gl_FragColor = v_Color;
}
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/res/raw/rect_vertex.glsl:
--------------------------------------------------------------------------------
// Forwards the per-vertex color and transforms the position by u_Matrix.
attribute vec4 a_Position;
attribute vec4 a_Color;

varying vec4 v_Color;

uniform mat4 u_Matrix;

void main() {
    v_Color = a_Color;
    gl_Position = u_Matrix * a_Position;
}
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/res/raw/rgb_fragment.glsl:
--------------------------------------------------------------------------------
precision mediump float;

varying vec2 v_TextureCoordinates;

uniform sampler2D s_texture;

// Copies the texture's RGB channels, forcing alpha to 1.
void main() {
    // Single texture fetch instead of one fetch per channel as before --
    // same result, one third of the sampling work.
    vec4 color = texture2D(s_texture, v_TextureCoordinates);
    gl_FragColor = vec4(color.rgb, 1.0);
}
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/res/raw/rgb_vertex.glsl:
--------------------------------------------------------------------------------
// Transforms the position by u_Matrix and forwards texture coordinates
// to the fragment shader.
attribute vec4 a_Position;
attribute vec2 a_TextureCoordinates;

varying vec2 v_TextureCoordinates;

uniform mat4 u_Matrix;

void main() {
    v_TextureCoordinates = a_TextureCoordinates;
    gl_Position = u_Matrix * a_Position;
}
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/res/raw/tex_fragment.glsl:
--------------------------------------------------------------------------------
precision mediump float;

// Interpolated texture coordinate from the vertex stage.
varying vec2 v_TextureCoordinates;

// Source texture to sample.
uniform sampler2D s_texture;

// Plain textured fragment shader: emits the sampled texel unchanged.
// (Removed the commented-out per-channel debug code that was left behind.)
void main() {
    gl_FragColor = texture2D(s_texture, v_TextureCoordinates);
}
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/res/raw/tex_vertex.glsl:
--------------------------------------------------------------------------------
// Per-vertex position; note there is no transform matrix here, so the
// host app must supply positions already in clip space.
attribute vec4 a_Position;
// Per-vertex texture coordinate.
attribute vec2 a_TextureCoordinates;

// Handed to the fragment stage (matches tex_fragment.glsl).
varying vec2 v_TextureCoordinates;

void main() {
    v_TextureCoordinates = a_TextureCoordinates;
    gl_Position = a_Position;
}
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/res/raw/yuv_fragment.glsl:
--------------------------------------------------------------------------------
precision mediump float;

// Interpolated texture coordinate from the vertex stage.
varying vec2 v_TextureCoordinates;

// Luma plane (Y in the .r channel).
uniform sampler2D y_texture;
// Interleaved chroma plane; U is read from .a and V from .r, which
// suggests a GL_LUMINANCE_ALPHA upload of NV21-style UV data —
// NOTE(review): confirm against the host-side texture setup.
uniform sampler2D uv_texture;

// YUV -> RGB conversion. The coefficients (1.13983 / 0.39465 / 0.58060 /
// 2.03211) look like full-range BT.601 — confirm against the decoder's
// output color space.
void main() {
    float luma = texture2D(y_texture, v_TextureCoordinates).r;
    float cb = texture2D(uv_texture, v_TextureCoordinates).a - 0.5;
    float cr = texture2D(uv_texture, v_TextureCoordinates).r - 0.5;

    gl_FragColor = vec4(luma + 1.13983 * cr,
                        luma - 0.39465 * cb - 0.58060 * cr,
                        luma + 2.03211 * cb,
                        1.0);
}
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/res/raw/yuv_vertex.glsl:
--------------------------------------------------------------------------------
// Per-vertex position in object space.
attribute vec4 a_Position;
// Per-vertex texture coordinate.
attribute vec2 a_TextureCoordinates;

// Model-view-projection matrix set by the host app.
// NOTE(review): named uMVPMatrix while the sibling shaders use u_Matrix;
// renaming would break the host's glGetUniformLocation lookup, so the
// inconsistency is only flagged here, not fixed.
uniform mat4 uMVPMatrix;

// Handed to the fragment stage (matches yuv_fragment.glsl).
varying vec2 v_TextureCoordinates;

void main() {
    v_TextureCoordinates = a_TextureCoordinates;
    gl_Position = uMVPMatrix * a_Position;
}
--------------------------------------------------------------------------------
/vlcdemo/library/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |