├── AndroidManifest.xml
├── project.properties
├── res
│   ├── layout
│   │   └── activity_main.xml
│   ├── raw
│   │   └── video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz.mp4
│   ├── values-sw600dp
│   │   └── dimens.xml
│   ├── values-sw720dp-land
│   │   └── dimens.xml
│   ├── values-v11
│   │   └── styles.xml
│   ├── values-v14
│   │   └── styles.xml
│   └── values
│       ├── dimens.xml
│       ├── strings.xml
│       └── styles.xml
└── src
    └── com
        └── example
            └── decodeencodetest
                ├── ExtractDecodeEditEncodeMuxTest.java
                ├── InputSurface.java
                ├── MainActivity.java
                ├── OutputSurface.java
                └── TextureRender.java
/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
It uses MediaExtractor to get frames from a test stream, decodes them to a surface, uses a 46 | * shader to edit them, encodes them from the resulting surface, and then uses MediaMuxer to write 47 | * them into a file. 48 | * 49 | *
It does not currently check whether the result file is correct, but makes sure that nothing 50 | * fails along the way. 51 | * 52 | *
It also tests the way the codec config buffers need to be passed from the MediaCodec to the 53 | * MediaMuxer. 54 | */ 55 | @TargetApi(18) 56 | public class ExtractDecodeEditEncodeMuxTest extends AndroidTestCase { 57 | 58 | private static final String TAG = ExtractDecodeEditEncodeMuxTest.class.getSimpleName(); 59 | private static final boolean VERBOSE = true; // lots of logging 60 | 61 | /** How long to wait for the next buffer to become available. */ 62 | private static final int TIMEOUT_USEC = 10000; 63 | 64 | /** Where to output the test files. */ 65 | private static final File OUTPUT_FILENAME_DIR = Environment.getExternalStorageDirectory(); 66 | 67 | // parameters for the video encoder 68 | private static final String OUTPUT_VIDEO_MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding 69 | private static final int OUTPUT_VIDEO_BIT_RATE = 2000000; // 2Mbps 70 | private static final int OUTPUT_VIDEO_FRAME_RATE = 15; // 15fps 71 | private static final int OUTPUT_VIDEO_IFRAME_INTERVAL = 10; // 10 seconds between I-frames 72 | private static final int OUTPUT_VIDEO_COLOR_FORMAT = 73 | MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface; 74 | 75 | // parameters for the audio encoder 76 | private static final String OUTPUT_AUDIO_MIME_TYPE = "audio/mp4a-latm"; // Advanced Audio Coding 77 | private static final int OUTPUT_AUDIO_CHANNEL_COUNT = 2; // Must match the input stream. 78 | private static final int OUTPUT_AUDIO_BIT_RATE = 128 * 1024; 79 | private static final int OUTPUT_AUDIO_AAC_PROFILE = 80 | MediaCodecInfo.CodecProfileLevel.AACObjectHE; 81 | private static final int OUTPUT_AUDIO_SAMPLE_RATE_HZ = 44100; // Must match the input stream. 82 | 83 | /** 84 | * Used for editing the frames. 85 | * 86 | *
Swaps green and blue channels by storing an RBGA color in an RGBA buffer. 87 | */ 88 | private static final String FRAGMENT_SHADER = 89 | "#extension GL_OES_EGL_image_external : require\n" + 90 | "precision mediump float;\n" + 91 | "varying vec2 vTextureCoord;\n" + 92 | "uniform samplerExternalOES sTexture;\n" + 93 | "void main() {\n" + 94 | " gl_FragColor = texture2D(sTexture, vTextureCoord).rbga;\n" + 95 | "}\n"; 96 | 97 | /** Whether to copy the video from the test video. */ 98 | private boolean mCopyVideo; 99 | /** Whether to copy the audio from the test video. */ 100 | private boolean mCopyAudio; 101 | /** Width of the output frames. */ 102 | private int mWidth = -1; 103 | /** Height of the output frames. */ 104 | private int mHeight = -1; 105 | 106 | /** The raw resource used as the input file. */ 107 | private int mSourceResId; 108 | 109 | /** The destination file for the encoded output. */ 110 | private String mOutputFile; 111 | 112 | public void testExtractDecodeEditEncodeMuxQCIF() throws Throwable { 113 | setSize(176, 144); 114 | setSource(R.raw.video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz); 115 | setCopyVideo(); 116 | TestWrapper.runTest(this); 117 | } 118 | 119 | public void testExtractDecodeEditEncodeMuxQVGA() throws Throwable { 120 | setSize(320, 240); 121 | setSource(R.raw.video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz); 122 | setCopyVideo(); 123 | TestWrapper.runTest(this); 124 | } 125 | 126 | public void testExtractDecodeEditEncodeMux720p() throws Throwable { 127 | setSize(1280, 720); 128 | setSource(R.raw.video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz); 129 | setCopyVideo(); 130 | TestWrapper.runTest(this); 131 | } 132 | 133 | public void testExtractDecodeEditEncodeMuxAudio() throws Throwable { 134 | setSize(1280, 720); 135 | setSource(R.raw.video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz); 136 | setCopyAudio(); 137 | TestWrapper.runTest(this); 138 | } 139 | 140 | public void 
testExtractDecodeEditEncodeMuxAudioVideo() throws Throwable { 141 | setSize(1280, 720); 142 | setSource(R.raw.video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz); 143 | setCopyAudio(); 144 | setCopyVideo(); 145 | TestWrapper.runTest(this); 146 | } 147 | 148 | /** Wraps testExtractDecodeEditEncodeMux() */ 149 | private static class TestWrapper implements Runnable { 150 | private Throwable mThrowable; 151 | private ExtractDecodeEditEncodeMuxTest mTest; 152 | 153 | private TestWrapper(ExtractDecodeEditEncodeMuxTest test) { 154 | mTest = test; 155 | } 156 | 157 | @Override 158 | public void run() { 159 | try { 160 | mTest.extractDecodeEditEncodeMux(); 161 | } catch (Throwable th) { 162 | mThrowable = th; 163 | } 164 | } 165 | 166 | /** 167 | * Entry point. 168 | */ 169 | public static void runTest(ExtractDecodeEditEncodeMuxTest test) throws Throwable { 170 | test.setOutputFile(); 171 | TestWrapper wrapper = new TestWrapper(test); 172 | Thread th = new Thread(wrapper, "codec test"); 173 | th.start(); 174 | th.join(); 175 | if (wrapper.mThrowable != null) { 176 | throw wrapper.mThrowable; 177 | } 178 | } 179 | } 180 | 181 | /** 182 | * Sets the test to copy the video stream. 183 | */ 184 | private void setCopyVideo() { 185 | mCopyVideo = true; 186 | } 187 | 188 | /** 189 | * Sets the test to copy the video stream. 190 | */ 191 | private void setCopyAudio() { 192 | mCopyAudio = true; 193 | } 194 | 195 | /** 196 | * Sets the desired frame size. 197 | */ 198 | private void setSize(int width, int height) { 199 | if ((width % 16) != 0 || (height % 16) != 0) { 200 | Log.w(TAG, "WARNING: width or height not multiple of 16"); 201 | } 202 | mWidth = width; 203 | mHeight = height; 204 | } 205 | 206 | /** 207 | * Sets the raw resource used as the source video. 208 | */ 209 | private void setSource(int resId) { 210 | mSourceResId = resId; 211 | } 212 | 213 | /** 214 | * Sets the name of the output file based on the other parameters. 215 | * 216 | *
Must be called after {@link #setSize(int, int)} and {@link #setSource(int)}. 217 | */ 218 | private void setOutputFile() { 219 | StringBuilder sb = new StringBuilder(); 220 | sb.append(OUTPUT_FILENAME_DIR.getAbsolutePath()); 221 | sb.append("/cts-media-"); 222 | sb.append(getClass().getSimpleName()); 223 | assertTrue("should have called setSource() first", mSourceResId != -1); 224 | sb.append('-'); 225 | sb.append(mSourceResId); 226 | if (mCopyVideo) { 227 | assertTrue("should have called setSize() first", mWidth != -1); 228 | assertTrue("should have called setSize() first", mHeight != -1); 229 | sb.append('-'); 230 | sb.append("video"); 231 | sb.append('-'); 232 | sb.append(mWidth); 233 | sb.append('x'); 234 | sb.append(mHeight); 235 | } 236 | if (mCopyAudio) { 237 | sb.append('-'); 238 | sb.append("audio"); 239 | } 240 | sb.append(".mp4"); 241 | mOutputFile = sb.toString(); 242 | } 243 | 244 | private MediaExtractor mVideoExtractor = null; 245 | private MediaExtractor mAudioExtractor = null; 246 | private InputSurface mInputSurface = null; 247 | private OutputSurface mOutputSurface = null; 248 | private MediaCodec mVideoDecoder = null; 249 | private MediaCodec mAudioDecoder = null; 250 | private MediaCodec mVideoEncoder = null; 251 | private MediaCodec mAudioEncoder = null; 252 | private MediaMuxer mMuxer = null; 253 | 254 | /** 255 | * Tests encoding and subsequently decoding video from frames generated into a buffer. 256 | *
257 | * We encode several frames of a video test pattern using MediaCodec, then decode the output
258 | * with MediaCodec and do some simple checks.
259 | */
260 | private void extractDecodeEditEncodeMux() throws Exception {
261 | // Exception that may be thrown during release.
262 | Exception exception = null;
263 |
264 | mDecoderOutputVideoFormat = null;
265 | mDecoderOutputAudioFormat = null;
266 | mEncoderOutputVideoFormat = null;
267 | mEncoderOutputAudioFormat = null;
268 |
269 | mOutputVideoTrack = -1;
270 | mOutputAudioTrack = -1;
271 | mVideoExtractorDone = false;
272 | mVideoDecoderDone = false;
273 | mVideoEncoderDone = false;
274 | mAudioExtractorDone = false;
275 | mAudioDecoderDone = false;
276 | mAudioEncoderDone = false;
277 | mPendingAudioDecoderOutputBufferIndices = new LinkedList The surface to use as input is stored in the given reference.
665 | *
666 | * @param codecInfo of the codec to use
667 | * @param format of the stream to be produced
668 | * @param surfaceReference to store the surface to use as input
669 | */
670 | private MediaCodec createVideoEncoder(
671 | MediaCodecInfo codecInfo,
672 | MediaFormat format,
673 | AtomicReference The muxer is not started as it needs to be started only after all streams have been added.
962 | */
963 | private MediaMuxer createMuxer() throws IOException {
964 | return new MediaMuxer(mOutputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
965 | }
966 |
967 | private int getAndSelectVideoTrackIndex(MediaExtractor extractor) {
968 | for (int index = 0; index < extractor.getTrackCount(); ++index) {
969 | if (VERBOSE) {
970 | Log.d(TAG, "format for track " + index + " is "
971 | + getMimeTypeFor(extractor.getTrackFormat(index)));
972 | }
973 | if (isVideoFormat(extractor.getTrackFormat(index))) {
974 | extractor.selectTrack(index);
975 | return index;
976 | }
977 | }
978 | return -1;
979 | }
980 |
981 | private int getAndSelectAudioTrackIndex(MediaExtractor extractor) {
982 | for (int index = 0; index < extractor.getTrackCount(); ++index) {
983 | if (VERBOSE) {
984 | Log.d(TAG, "format for track " + index + " is "
985 | + getMimeTypeFor(extractor.getTrackFormat(index)));
986 | }
987 | if (isAudioFormat(extractor.getTrackFormat(index))) {
988 | extractor.selectTrack(index);
989 | return index;
990 | }
991 | }
992 | return -1;
993 | }
994 |
995 | // We will get these from the decoders when notified of a format change.
996 | private MediaFormat mDecoderOutputVideoFormat = null;
997 | private MediaFormat mDecoderOutputAudioFormat = null;
998 | // We will get these from the encoders when notified of a format change.
999 | private MediaFormat mEncoderOutputVideoFormat = null;
1000 | private MediaFormat mEncoderOutputAudioFormat = null;
1001 |
1002 | // We will determine these once we have the output format.
1003 | private int mOutputVideoTrack = -1;
1004 | private int mOutputAudioTrack = -1;
1005 | // Whether things are done on the video side.
1006 | private boolean mVideoExtractorDone = false;
1007 | private boolean mVideoDecoderDone = false;
1008 | private boolean mVideoEncoderDone = false;
1009 | // Whether things are done on the audio side.
1010 | private boolean mAudioExtractorDone = false;
1011 | private boolean mAudioDecoderDone = false;
1012 | private boolean mAudioEncoderDone = false;
1013 | private LinkedList
32 | * The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that
33 | * to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent
34 | * to the video encoder.
35 | */
36 | class InputSurface {
37 | private static final String TAG = "InputSurface";
38 | private static final boolean VERBOSE = false;
39 |
40 | private static final int EGL_RECORDABLE_ANDROID = 0x3142;
41 | private static final int EGL_OPENGL_ES2_BIT = 4;
42 |
43 | private EGLDisplay mEGLDisplay;
44 | private EGLContext mEGLContext;
45 | private EGLSurface mEGLSurface;
46 |
47 | private Surface mSurface;
48 |
49 | /**
50 | * Creates an InputSurface from a Surface.
51 | */
52 | public InputSurface(Surface surface) {
53 | if (surface == null) {
54 | throw new NullPointerException();
55 | }
56 | mSurface = surface;
57 |
58 | eglSetup();
59 | }
60 |
61 | /**
62 | * Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
63 | */
64 | private void eglSetup() {
65 | mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
66 | if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
67 | throw new RuntimeException("unable to get EGL14 display");
68 | }
69 | int[] version = new int[2];
70 | if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
71 | mEGLDisplay = null;
72 | throw new RuntimeException("unable to initialize EGL14");
73 | }
74 |
75 | // Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits
76 | // to be able to tell if the frame is reasonable.
77 | int[] attribList = {
78 | EGL14.EGL_RED_SIZE, 8,
79 | EGL14.EGL_GREEN_SIZE, 8,
80 | EGL14.EGL_BLUE_SIZE, 8,
81 | EGL14.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
82 | EGL_RECORDABLE_ANDROID, 1,
83 | EGL14.EGL_NONE
84 | };
85 | EGLConfig[] configs = new EGLConfig[1];
86 | int[] numConfigs = new int[1];
87 | if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
88 | numConfigs, 0)) {
89 | throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
90 | }
91 |
92 | // Configure context for OpenGL ES 2.0.
93 | int[] attrib_list = {
94 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
95 | EGL14.EGL_NONE
96 | };
97 | mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
98 | attrib_list, 0);
99 | checkEglError("eglCreateContext");
100 | if (mEGLContext == null) {
101 | throw new RuntimeException("null context");
102 | }
103 |
104 | // Create a window surface, and attach it to the Surface we received.
105 | int[] surfaceAttribs = {
106 | EGL14.EGL_NONE
107 | };
108 | mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
109 | surfaceAttribs, 0);
110 | checkEglError("eglCreateWindowSurface");
111 | if (mEGLSurface == null) {
112 | throw new RuntimeException("surface was null");
113 | }
114 | }
115 |
116 | /**
117 | * Discard all resources held by this class, notably the EGL context. Also releases the
118 | * Surface that was passed to our constructor.
119 | */
120 | public void release() {
121 | if (EGL14.eglGetCurrentContext().equals(mEGLContext)) {
122 | // Clear the current context and surface to ensure they are discarded immediately.
123 | EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
124 | EGL14.EGL_NO_CONTEXT);
125 | }
126 | EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
127 | EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
128 | //EGL14.eglTerminate(mEGLDisplay);
129 |
130 | mSurface.release();
131 |
132 | // null everything out so future attempts to use this object will cause an NPE
133 | mEGLDisplay = null;
134 | mEGLContext = null;
135 | mEGLSurface = null;
136 |
137 | mSurface = null;
138 | }
139 |
140 | /**
141 | * Makes our EGL context and surface current.
142 | */
143 | public void makeCurrent() {
144 | if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
145 | throw new RuntimeException("eglMakeCurrent failed");
146 | }
147 | }
148 |
149 | public void releaseEGLContext() {
150 | if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
151 | throw new RuntimeException("eglMakeCurrent failed");
152 | }
153 | }
154 |
155 | /**
156 | * Calls eglSwapBuffers. Use this to "publish" the current frame.
157 | */
158 | public boolean swapBuffers() {
159 | return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
160 | }
161 |
162 | /**
163 | * Returns the Surface that the MediaCodec receives buffers from.
164 | */
165 | public Surface getSurface() {
166 | return mSurface;
167 | }
168 |
169 | /**
170 | * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
171 | */
172 | public void setPresentationTime(long nsecs) {
173 | EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
174 | }
175 |
176 | /**
177 | * Checks for EGL errors.
178 | */
179 | private void checkEglError(String msg) {
180 | boolean failed = false;
181 | int error;
182 | while ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
183 | Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
184 | failed = true;
185 | }
186 | if (failed) {
187 | throw new RuntimeException("EGL error encountered (see log)");
188 | }
189 | }
190 | }
191 |
--------------------------------------------------------------------------------
/src/com/example/decodeencodetest/MainActivity.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2016 Martin Storsjo
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.example.decodeencodetest;
18 |
19 | import android.os.Bundle;
20 | import android.app.Activity;
21 |
22 | public class MainActivity extends Activity {
23 | @Override
24 | protected void onCreate(Bundle savedInstanceState) {
25 | super.onCreate(savedInstanceState);
26 | setContentView(R.layout.activity_main);
27 | new Thread() {
28 | public void run() {
29 | ExtractDecodeEditEncodeMuxTest test = new ExtractDecodeEditEncodeMuxTest();
30 | test.setContext(MainActivity.this);
31 | try {
32 | test.testExtractDecodeEditEncodeMuxAudioVideo();
33 | } catch (Throwable t) {
34 | t.printStackTrace();
35 | }
36 | }
37 | }.start();
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/src/com/example/decodeencodetest/OutputSurface.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2013 The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.example.decodeencodetest;
18 |
19 | import android.graphics.SurfaceTexture;
20 | import android.opengl.EGL14;
21 | import android.opengl.GLES20;
22 | import android.opengl.GLES11Ext;
23 | import android.opengl.GLSurfaceView;
24 | import android.opengl.Matrix;
25 | import android.util.Log;
26 | import android.view.Surface;
27 |
28 | import java.nio.ByteBuffer;
29 |
30 | import javax.microedition.khronos.egl.EGL10;
31 | import javax.microedition.khronos.egl.EGLConfig;
32 | import javax.microedition.khronos.egl.EGLContext;
33 | import javax.microedition.khronos.egl.EGLDisplay;
34 | import javax.microedition.khronos.egl.EGLSurface;
35 | import javax.microedition.khronos.opengles.GL;
36 | import javax.microedition.khronos.opengles.GL10;
37 |
38 |
39 |
40 | /**
41 | * Holds state associated with a Surface used for MediaCodec decoder output.
42 | *
43 | * The (width,height) constructor for this class will prepare GL, create a SurfaceTexture,
44 | * and then create a Surface for that SurfaceTexture. The Surface can be passed to
45 | * MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the
46 | * texture with updateTexImage, then render the texture with GL to a pbuffer.
47 | *
48 | * The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer.
49 | * Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives
50 | * we just draw it on whatever surface is current.
51 | *
52 | * By default, the Surface will be using a BufferQueue in asynchronous mode, so we
53 | * can potentially drop frames.
54 | */
55 | class OutputSurface implements SurfaceTexture.OnFrameAvailableListener {
56 | private static final String TAG = "OutputSurface";
57 | private static final boolean VERBOSE = false;
58 |
59 | private static final int EGL_OPENGL_ES2_BIT = 4;
60 |
61 | private EGL10 mEGL;
62 | private EGLDisplay mEGLDisplay;
63 | private EGLContext mEGLContext;
64 | private EGLSurface mEGLSurface;
65 |
66 | private SurfaceTexture mSurfaceTexture;
67 | private Surface mSurface;
68 |
69 | private Object mFrameSyncObject = new Object(); // guards mFrameAvailable
70 | private boolean mFrameAvailable;
71 |
72 | private TextureRender mTextureRender;
73 |
74 | /**
75 | * Creates an OutputSurface backed by a pbuffer with the specifed dimensions. The new
76 | * EGL context and surface will be made current. Creates a Surface that can be passed
77 | * to MediaCodec.configure().
78 | */
79 | public OutputSurface(int width, int height) {
80 | if (width <= 0 || height <= 0) {
81 | throw new IllegalArgumentException();
82 | }
83 |
84 | eglSetup(width, height);
85 | makeCurrent();
86 |
87 | setup();
88 | }
89 |
90 | /**
91 | * Creates an OutputSurface using the current EGL context. Creates a Surface that can be
92 | * passed to MediaCodec.configure().
93 | */
94 | public OutputSurface() {
95 | setup();
96 | }
97 |
98 | /**
99 | * Creates instances of TextureRender and SurfaceTexture, and a Surface associated
100 | * with the SurfaceTexture.
101 | */
102 | private void setup() {
103 | mTextureRender = new TextureRender();
104 | mTextureRender.surfaceCreated();
105 |
106 | // Even if we don't access the SurfaceTexture after the constructor returns, we
107 | // still need to keep a reference to it. The Surface doesn't retain a reference
108 | // at the Java level, so if we don't either then the object can get GCed, which
109 | // causes the native finalizer to run.
110 | if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
111 | mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
112 |
113 | // This doesn't work if OutputSurface is created on the thread that CTS started for
114 | // these test cases.
115 | //
116 | // The CTS-created thread has a Looper, and the SurfaceTexture constructor will
117 | // create a Handler that uses it. The "frame available" message is delivered
118 | // there, but since we're not a Looper-based thread we'll never see it. For
119 | // this to do anything useful, OutputSurface must be created on a thread without
120 | // a Looper, so that SurfaceTexture uses the main application Looper instead.
121 | //
122 | // Java language note: passing "this" out of a constructor is generally unwise,
123 | // but we should be able to get away with it here.
124 | mSurfaceTexture.setOnFrameAvailableListener(this);
125 |
126 | mSurface = new Surface(mSurfaceTexture);
127 | }
128 |
129 | /**
130 | * Prepares EGL. We want a GLES 2.0 context and a surface that supports pbuffer.
131 | */
132 | private void eglSetup(int width, int height) {
133 | mEGL = (EGL10)EGLContext.getEGL();
134 | mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
135 | if (!mEGL.eglInitialize(mEGLDisplay, null)) {
136 | throw new RuntimeException("unable to initialize EGL10");
137 | }
138 |
139 | // Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits
140 | // to be able to tell if the frame is reasonable.
141 | int[] attribList = {
142 | EGL10.EGL_RED_SIZE, 8,
143 | EGL10.EGL_GREEN_SIZE, 8,
144 | EGL10.EGL_BLUE_SIZE, 8,
145 | EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
146 | EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
147 | EGL10.EGL_NONE
148 | };
149 | EGLConfig[] configs = new EGLConfig[1];
150 | int[] numConfigs = new int[1];
151 | if (!mEGL.eglChooseConfig(mEGLDisplay, attribList, configs, 1, numConfigs)) {
152 | throw new RuntimeException("unable to find RGB888+pbuffer EGL config");
153 | }
154 |
155 | // Configure context for OpenGL ES 2.0.
156 | int[] attrib_list = {
157 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
158 | EGL10.EGL_NONE
159 | };
160 | mEGLContext = mEGL.eglCreateContext(mEGLDisplay, configs[0], EGL10.EGL_NO_CONTEXT,
161 | attrib_list);
162 | checkEglError("eglCreateContext");
163 | if (mEGLContext == null) {
164 | throw new RuntimeException("null context");
165 | }
166 |
167 | // Create a pbuffer surface. By using this for output, we can use glReadPixels
168 | // to test values in the output.
169 | int[] surfaceAttribs = {
170 | EGL10.EGL_WIDTH, width,
171 | EGL10.EGL_HEIGHT, height,
172 | EGL10.EGL_NONE
173 | };
174 | mEGLSurface = mEGL.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs);
175 | checkEglError("eglCreatePbufferSurface");
176 | if (mEGLSurface == null) {
177 | throw new RuntimeException("surface was null");
178 | }
179 | }
180 |
181 | /**
182 | * Discard all resources held by this class, notably the EGL context.
183 | */
184 | public void release() {
185 | if (mEGL != null) {
186 | if (mEGL.eglGetCurrentContext().equals(mEGLContext)) {
187 | // Clear the current context and surface to ensure they are discarded immediately.
188 | mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE,
189 | EGL10.EGL_NO_CONTEXT);
190 | }
191 | mEGL.eglDestroySurface(mEGLDisplay, mEGLSurface);
192 | mEGL.eglDestroyContext(mEGLDisplay, mEGLContext);
193 | //mEGL.eglTerminate(mEGLDisplay);
194 | }
195 |
196 | mSurface.release();
197 |
198 | // this causes a bunch of warnings that appear harmless but might confuse someone:
199 | // W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
200 | //mSurfaceTexture.release();
201 |
202 | // null everything out so future attempts to use this object will cause an NPE
203 | mEGLDisplay = null;
204 | mEGLContext = null;
205 | mEGLSurface = null;
206 | mEGL = null;
207 |
208 | mTextureRender = null;
209 | mSurface = null;
210 | mSurfaceTexture = null;
211 | }
212 |
213 | /**
214 | * Makes our EGL context and surface current.
215 | */
216 | public void makeCurrent() {
217 | if (mEGL == null) {
218 | throw new RuntimeException("not configured for makeCurrent");
219 | }
220 | checkEglError("before makeCurrent");
221 | if (!mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
222 | throw new RuntimeException("eglMakeCurrent failed");
223 | }
224 | }
225 |
226 | /**
227 | * Returns the Surface that we draw onto.
228 | */
229 | public Surface getSurface() {
230 | return mSurface;
231 | }
232 |
233 | /**
234 | * Replaces the fragment shader.
235 | */
236 | public void changeFragmentShader(String fragmentShader) {
237 | mTextureRender.changeFragmentShader(fragmentShader);
238 | }
239 |
240 | /**
241 | * Latches the next buffer into the texture. Must be called from the thread that created
242 | * the OutputSurface object, after the onFrameAvailable callback has signaled that new
243 | * data is available.
244 | */
245 | public void awaitNewImage() {
246 | final int TIMEOUT_MS = 500;
247 |
248 | synchronized (mFrameSyncObject) {
249 | while (!mFrameAvailable) {
250 | try {
251 | // Wait for onFrameAvailable() to signal us. Use a timeout to avoid
252 | // stalling the test if it doesn't arrive.
253 | mFrameSyncObject.wait(TIMEOUT_MS);
254 | if (!mFrameAvailable) {
255 | // TODO: if "spurious wakeup", continue while loop
256 | throw new RuntimeException("Surface frame wait timed out");
257 | }
258 | } catch (InterruptedException ie) {
259 | // shouldn't happen
260 | throw new RuntimeException(ie);
261 | }
262 | }
263 | mFrameAvailable = false;
264 | }
265 |
266 | // Latch the data.
267 | mTextureRender.checkGlError("before updateTexImage");
268 | mSurfaceTexture.updateTexImage();
269 | }
270 |
271 | /**
272 | * Draws the data from SurfaceTexture onto the current EGL surface.
273 | */
274 | public void drawImage() {
275 | mTextureRender.drawFrame(mSurfaceTexture);
276 | }
277 |
278 | @Override
279 | public void onFrameAvailable(SurfaceTexture st) {
280 | if (VERBOSE) Log.d(TAG, "new frame available");
281 | synchronized (mFrameSyncObject) {
282 | if (mFrameAvailable) {
283 | throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
284 | }
285 | mFrameAvailable = true;
286 | mFrameSyncObject.notifyAll();
287 | }
288 | }
289 |
290 | /**
291 | * Checks for EGL errors.
292 | */
293 | private void checkEglError(String msg) {
294 | boolean failed = false;
295 | int error;
296 | while ((error = mEGL.eglGetError()) != EGL10.EGL_SUCCESS) {
297 | Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
298 | failed = true;
299 | }
300 | if (failed) {
301 | throw new RuntimeException("EGL error encountered (see log)");
302 | }
303 | }
304 | }
305 |
--------------------------------------------------------------------------------
/src/com/example/decodeencodetest/TextureRender.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2013 The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.example.decodeencodetest;
18 |
19 | import java.nio.ByteBuffer;
20 | import java.nio.ByteOrder;
21 | import java.nio.FloatBuffer;
22 |
23 | import javax.microedition.khronos.egl.EGLConfig;
24 | import javax.microedition.khronos.opengles.GL10;
25 |
26 | import android.graphics.SurfaceTexture;
27 | import android.opengl.GLES11Ext;
28 | import android.opengl.GLES20;
29 | import android.opengl.GLSurfaceView;
30 | import android.opengl.Matrix;
31 | import android.util.Log;
32 |
33 |
34 | /**
35 | * Code for rendering a texture onto a surface using OpenGL ES 2.0.
36 | */
37 | class TextureRender {
38 | private static final String TAG = "TextureRender";
39 |
40 | private static final int FLOAT_SIZE_BYTES = 4;
41 | private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
42 | private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
43 | private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
44 | private final float[] mTriangleVerticesData = {
45 | // X, Y, Z, U, V
46 | -1.0f, -1.0f, 0, 0.f, 0.f,
47 | 1.0f, -1.0f, 0, 1.f, 0.f,
48 | -1.0f, 1.0f, 0, 0.f, 1.f,
49 | 1.0f, 1.0f, 0, 1.f, 1.f,
50 | };
51 |
52 | private FloatBuffer mTriangleVertices;
53 |
54 | private static final String VERTEX_SHADER =
55 | "uniform mat4 uMVPMatrix;\n" +
56 | "uniform mat4 uSTMatrix;\n" +
57 | "attribute vec4 aPosition;\n" +
58 | "attribute vec4 aTextureCoord;\n" +
59 | "varying vec2 vTextureCoord;\n" +
60 | "void main() {\n" +
61 | " gl_Position = uMVPMatrix * aPosition;\n" +
62 | " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
63 | "}\n";
64 |
65 | private static final String FRAGMENT_SHADER =
66 | "#extension GL_OES_EGL_image_external : require\n" +
67 | "precision mediump float;\n" + // highp here doesn't seem to matter
68 | "varying vec2 vTextureCoord;\n" +
69 | "uniform samplerExternalOES sTexture;\n" +
70 | "void main() {\n" +
71 | " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
72 | "}\n";
73 |
74 | private float[] mMVPMatrix = new float[16];
75 | private float[] mSTMatrix = new float[16];
76 |
77 | private int mProgram;
78 | private int mTextureID = -12345;
79 | private int muMVPMatrixHandle;
80 | private int muSTMatrixHandle;
81 | private int maPositionHandle;
82 | private int maTextureHandle;
83 |
84 | public TextureRender() {
85 | mTriangleVertices = ByteBuffer.allocateDirect(
86 | mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
87 | .order(ByteOrder.nativeOrder()).asFloatBuffer();
88 | mTriangleVertices.put(mTriangleVerticesData).position(0);
89 |
90 | Matrix.setIdentityM(mSTMatrix, 0);
91 | }
92 |
93 | public int getTextureId() {
94 | return mTextureID;
95 | }
96 |
97 | public void drawFrame(SurfaceTexture st) {
98 | checkGlError("onDrawFrame start");
99 | st.getTransformMatrix(mSTMatrix);
100 |
101 | GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
102 | GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
103 |
104 | GLES20.glUseProgram(mProgram);
105 | checkGlError("glUseProgram");
106 |
107 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
108 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
109 |
110 | mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
111 | GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
112 | TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
113 | checkGlError("glVertexAttribPointer maPosition");
114 | GLES20.glEnableVertexAttribArray(maPositionHandle);
115 | checkGlError("glEnableVertexAttribArray maPositionHandle");
116 |
117 | mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
118 | GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
119 | TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
120 | checkGlError("glVertexAttribPointer maTextureHandle");
121 | GLES20.glEnableVertexAttribArray(maTextureHandle);
122 | checkGlError("glEnableVertexAttribArray maTextureHandle");
123 |
124 | Matrix.setIdentityM(mMVPMatrix, 0);
125 | GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
126 | GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
127 |
128 | GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
129 | checkGlError("glDrawArrays");
130 | GLES20.glFinish();
131 | }
132 |
133 | /**
134 | * Initializes GL state. Call this after the EGL surface has been created and made current.
135 | */
136 | public void surfaceCreated() {
137 | mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
138 | if (mProgram == 0) {
139 | throw new RuntimeException("failed creating program");
140 | }
141 | maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
142 | checkGlError("glGetAttribLocation aPosition");
143 | if (maPositionHandle == -1) {
144 | throw new RuntimeException("Could not get attrib location for aPosition");
145 | }
146 | maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
147 | checkGlError("glGetAttribLocation aTextureCoord");
148 | if (maTextureHandle == -1) {
149 | throw new RuntimeException("Could not get attrib location for aTextureCoord");
150 | }
151 |
152 | muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
153 | checkGlError("glGetUniformLocation uMVPMatrix");
154 | if (muMVPMatrixHandle == -1) {
155 | throw new RuntimeException("Could not get attrib location for uMVPMatrix");
156 | }
157 |
158 | muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
159 | checkGlError("glGetUniformLocation uSTMatrix");
160 | if (muSTMatrixHandle == -1) {
161 | throw new RuntimeException("Could not get attrib location for uSTMatrix");
162 | }
163 |
164 |
165 | int[] textures = new int[1];
166 | GLES20.glGenTextures(1, textures, 0);
167 |
168 | mTextureID = textures[0];
169 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
170 | checkGlError("glBindTexture mTextureID");
171 |
172 | GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
173 | GLES20.GL_NEAREST);
174 | GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
175 | GLES20.GL_LINEAR);
176 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
177 | GLES20.GL_CLAMP_TO_EDGE);
178 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
179 | GLES20.GL_CLAMP_TO_EDGE);
180 | checkGlError("glTexParameter");
181 | }
182 |
183 | /**
184 | * Replaces the fragment shader.
185 | */
186 | public void changeFragmentShader(String fragmentShader) {
187 | GLES20.glDeleteProgram(mProgram);
188 | mProgram = createProgram(VERTEX_SHADER, fragmentShader);
189 | if (mProgram == 0) {
190 | throw new RuntimeException("failed creating program");
191 | }
192 | }
193 |
194 | private int loadShader(int shaderType, String source) {
195 | int shader = GLES20.glCreateShader(shaderType);
196 | checkGlError("glCreateShader type=" + shaderType);
197 | GLES20.glShaderSource(shader, source);
198 | GLES20.glCompileShader(shader);
199 | int[] compiled = new int[1];
200 | GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
201 | if (compiled[0] == 0) {
202 | Log.e(TAG, "Could not compile shader " + shaderType + ":");
203 | Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
204 | GLES20.glDeleteShader(shader);
205 | shader = 0;
206 | }
207 | return shader;
208 | }
209 |
210 | private int createProgram(String vertexSource, String fragmentSource) {
211 | int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
212 | if (vertexShader == 0) {
213 | return 0;
214 | }
215 | int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
216 | if (pixelShader == 0) {
217 | return 0;
218 | }
219 |
220 | int program = GLES20.glCreateProgram();
221 | checkGlError("glCreateProgram");
222 | if (program == 0) {
223 | Log.e(TAG, "Could not create program");
224 | }
225 | GLES20.glAttachShader(program, vertexShader);
226 | checkGlError("glAttachShader");
227 | GLES20.glAttachShader(program, pixelShader);
228 | checkGlError("glAttachShader");
229 | GLES20.glLinkProgram(program);
230 | int[] linkStatus = new int[1];
231 | GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
232 | if (linkStatus[0] != GLES20.GL_TRUE) {
233 | Log.e(TAG, "Could not link program: ");
234 | Log.e(TAG, GLES20.glGetProgramInfoLog(program));
235 | GLES20.glDeleteProgram(program);
236 | program = 0;
237 | }
238 | return program;
239 | }
240 |
241 | public void checkGlError(String op) {
242 | int error;
243 | while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
244 | Log.e(TAG, op + ": glError " + error);
245 | throw new RuntimeException(op + ": glError " + error);
246 | }
247 | }
248 | }
249 |
--------------------------------------------------------------------------------