├── AndroidManifest.xml
├── project.properties
├── res
│   ├── layout
│   │   └── activity_main.xml
│   ├── raw
│   │   └── video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz.mp4
│   ├── values-sw600dp
│   │   └── dimens.xml
│   ├── values-sw720dp-land
│   │   └── dimens.xml
│   ├── values-v11
│   │   └── styles.xml
│   ├── values-v14
│   │   └── styles.xml
│   └── values
│       ├── dimens.xml
│       ├── strings.xml
│       └── styles.xml
└── src
    └── com
        └── example
            └── decodeencodetest
                ├── ExtractDecodeEditEncodeMuxTest.java
                ├── InputSurface.java
                ├── MainActivity.java
                ├── OutputSurface.java
                └── TextureRender.java
/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | 10 | 11 | 12 | 16 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 |
-------------------------------------------------------------------------------- /project.properties: -------------------------------------------------------------------------------- 1 | # This file is automatically generated by Android Tools. 2 | # Do not modify this file -- YOUR CHANGES WILL BE ERASED! 3 | # 4 | # This file must be checked in Version Control Systems. 5 | # 6 | # To customize properties used by the Ant build system edit 7 | # "ant.properties", and override values to adapt the script to your 8 | # project structure. 9 | # 10 | # To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home): 11 | #proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt 12 | 13 | # Project target. 14 | target=android-21
-------------------------------------------------------------------------------- /res/layout/activity_main.xml: -------------------------------------------------------------------------------- 1 | 10 | 11 | 12 |
-------------------------------------------------------------------------------- /res/raw/video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mstorsjo/android-decodeencodetest/8e3fa5ac4aba64697db01fcaeaf0ab9c67ee8503/res/raw/video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz.mp4
-------------------------------------------------------------------------------- /res/values-sw600dp/dimens.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 7 | 8 | 9 |
-------------------------------------------------------------------------------- /res/values-sw720dp-land/dimens.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 7 | 128dp 8 | 9 | 10 |
-------------------------------------------------------------------------------- /res/values-v11/styles.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 7 | 10 | 11 | 12 |
-------------------------------------------------------------------------------- /res/values-v14/styles.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 11 | 12 | 13 |
-------------------------------------------------------------------------------- /res/values/dimens.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 16dp 5 | 16dp 6 | 7 | 8 |
-------------------------------------------------------------------------------- /res/values/strings.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | DecodeEncodeTest 5 | 6 | 7 |
-------------------------------------------------------------------------------- /res/values/styles.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 7 | 14 | 15 | 16 | 19 | 20 | 21 | -------------------------------------------------------------------------------- /src/com/example/decodeencodetest/ExtractDecodeEditEncodeMuxTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2013 The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.example.decodeencodetest; 18 | 19 | import android.annotation.TargetApi; 20 | import android.content.res.AssetFileDescriptor; 21 | import android.media.MediaCodec; 22 | import android.media.MediaCodecInfo; 23 | import android.media.MediaCodecList; 24 | import android.media.MediaExtractor; 25 | import android.media.MediaFormat; 26 | import android.media.MediaMuxer; 27 | import android.os.Environment; 28 | import android.os.Handler; 29 | import android.os.HandlerThread; 30 | import android.os.Looper; 31 | import android.os.Message; 32 | import android.test.AndroidTestCase; 33 | import android.util.Log; 34 | import android.view.Surface; 35 | 36 | import java.io.File; 37 | import java.io.IOException; 38 | import java.nio.ByteBuffer; 39 | import java.util.concurrent.atomic.AtomicReference; 40 | import java.util.LinkedList; 41 | 42 | /** 43 | * Test for the integration of MediaMuxer and MediaCodec's encoder. 44 | * 45 | *
<p>
It uses MediaExtractor to get frames from a test stream, decodes them to a surface, uses a 46 | * shader to edit them, encodes them from the resulting surface, and then uses MediaMuxer to write 47 | * them into a file. 48 | * 49 | *
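 * (Overall pipeline: MediaExtractor → MediaCodec decoder → OutputSurface/TextureRender (shader edit) → InputSurface → MediaCodec encoder → MediaMuxer.)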
<p>
It does not currently check whether the result file is correct, but makes sure that nothing 50 | * fails along the way. 51 | * 52 | *
<p>
It also tests the way the codec config buffers need to be passed from the MediaCodec to the 53 | * MediaMuxer. 54 | */ 55 | @TargetApi(18) 56 | public class ExtractDecodeEditEncodeMuxTest extends AndroidTestCase { 57 | 58 | private static final String TAG = ExtractDecodeEditEncodeMuxTest.class.getSimpleName(); 59 | private static final boolean VERBOSE = true; // lots of logging 60 | 61 | /** How long to wait for the next buffer to become available. */ 62 | private static final int TIMEOUT_USEC = 10000; 63 | 64 | /** Where to output the test files. */ 65 | private static final File OUTPUT_FILENAME_DIR = Environment.getExternalStorageDirectory(); 66 | 67 | // parameters for the video encoder 68 | private static final String OUTPUT_VIDEO_MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding 69 | private static final int OUTPUT_VIDEO_BIT_RATE = 2000000; // 2Mbps 70 | private static final int OUTPUT_VIDEO_FRAME_RATE = 15; // 15fps 71 | private static final int OUTPUT_VIDEO_IFRAME_INTERVAL = 10; // 10 seconds between I-frames 72 | private static final int OUTPUT_VIDEO_COLOR_FORMAT = 73 | MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface; 74 | 75 | // parameters for the audio encoder 76 | private static final String OUTPUT_AUDIO_MIME_TYPE = "audio/mp4a-latm"; // Advanced Audio Coding 77 | private static final int OUTPUT_AUDIO_CHANNEL_COUNT = 2; // Must match the input stream. 78 | private static final int OUTPUT_AUDIO_BIT_RATE = 128 * 1024; 79 | private static final int OUTPUT_AUDIO_AAC_PROFILE = 80 | MediaCodecInfo.CodecProfileLevel.AACObjectHE; 81 | private static final int OUTPUT_AUDIO_SAMPLE_RATE_HZ = 44100; // Must match the input stream. 82 | 83 | /** 84 | * Used for editing the frames. 85 | * 86 | *
<p>
Swaps green and blue channels by storing an RBGA color in an RGBA buffer. 87 | */ 88 | private static final String FRAGMENT_SHADER = 89 | "#extension GL_OES_EGL_image_external : require\n" + 90 | "precision mediump float;\n" + 91 | "varying vec2 vTextureCoord;\n" + 92 | "uniform samplerExternalOES sTexture;\n" + 93 | "void main() {\n" + 94 | " gl_FragColor = texture2D(sTexture, vTextureCoord).rbga;\n" + 95 | "}\n"; 96 | 97 | /** Whether to copy the video from the test video. */ 98 | private boolean mCopyVideo; 99 | /** Whether to copy the audio from the test video. */ 100 | private boolean mCopyAudio; 101 | /** Width of the output frames. */ 102 | private int mWidth = -1; 103 | /** Height of the output frames. */ 104 | private int mHeight = -1; 105 | 106 | /** The raw resource used as the input file. */ 107 | private int mSourceResId; 108 | 109 | /** The destination file for the encoded output. */ 110 | private String mOutputFile; 111 | 112 | public void testExtractDecodeEditEncodeMuxQCIF() throws Throwable { 113 | setSize(176, 144); 114 | setSource(R.raw.video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz); 115 | setCopyVideo(); 116 | TestWrapper.runTest(this); 117 | } 118 | 119 | public void testExtractDecodeEditEncodeMuxQVGA() throws Throwable { 120 | setSize(320, 240); 121 | setSource(R.raw.video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz); 122 | setCopyVideo(); 123 | TestWrapper.runTest(this); 124 | } 125 | 126 | public void testExtractDecodeEditEncodeMux720p() throws Throwable { 127 | setSize(1280, 720); 128 | setSource(R.raw.video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz); 129 | setCopyVideo(); 130 | TestWrapper.runTest(this); 131 | } 132 | 133 | public void testExtractDecodeEditEncodeMuxAudio() throws Throwable { 134 | setSize(1280, 720); 135 | setSource(R.raw.video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz); 136 | setCopyAudio(); 137 | TestWrapper.runTest(this); 138 | } 139 | 140 | public void testExtractDecodeEditEncodeMuxAudioVideo() throws Throwable { 141 | setSize(1280, 720); 142 | setSource(R.raw.video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz); 143 | setCopyAudio(); 144 | setCopyVideo(); 145 | TestWrapper.runTest(this); 146 | } 147 | 148 | /** Wraps testExtractDecodeEditEncodeMux() */ 149 | private static class TestWrapper implements Runnable { 150 | private Throwable mThrowable; 151 | private ExtractDecodeEditEncodeMuxTest mTest; 152 | 153 | private TestWrapper(ExtractDecodeEditEncodeMuxTest test) { 154 | mTest = test; 155 | } 156 | 157 | @Override 158 | public void run() { 159 | try { 160 | mTest.extractDecodeEditEncodeMux(); 161 | } catch (Throwable th) { 162 | mThrowable = th; 163 | } 164 | } 165 | 166 | /** 167 | * Entry point. 168 | */ 169 | public static void runTest(ExtractDecodeEditEncodeMuxTest test) throws Throwable { 170 | test.setOutputFile(); 171 | TestWrapper wrapper = new TestWrapper(test); 172 | Thread th = new Thread(wrapper, "codec test"); 173 | th.start(); 174 | th.join(); 175 | if (wrapper.mThrowable != null) { 176 | throw wrapper.mThrowable; 177 | } 178 | } 179 | } 180 | 181 | /** 182 | * Sets the test to copy the video stream. 183 | */ 184 | private void setCopyVideo() { 185 | mCopyVideo = true; 186 | } 187 | 188 | /** 189 | * Sets the test to copy the audio stream. 190 | */ 191 | private void setCopyAudio() { 192 | mCopyAudio = true; 193 | } 194 | 195 | /** 196 | * Sets the desired frame size. 
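 * (H.264 works on 16x16 macroblocks, which is why the check below warns about dimensions that are not multiples of 16.)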
197 | */ 198 | private void setSize(int width, int height) { 199 | if ((width % 16) != 0 || (height % 16) != 0) { 200 | Log.w(TAG, "WARNING: width or height not multiple of 16"); 201 | } 202 | mWidth = width; 203 | mHeight = height; 204 | } 205 | 206 | /** 207 | * Sets the raw resource used as the source video. 208 | */ 209 | private void setSource(int resId) { 210 | mSourceResId = resId; 211 | } 212 | 213 | /** 214 | * Sets the name of the output file based on the other parameters. 215 | * 216 | *
<p>
Must be called after {@link #setSize(int, int)} and {@link #setSource(int)}. 217 | */ 218 | private void setOutputFile() { 219 | StringBuilder sb = new StringBuilder(); 220 | sb.append(OUTPUT_FILENAME_DIR.getAbsolutePath()); 221 | sb.append("/cts-media-"); 222 | sb.append(getClass().getSimpleName()); 223 | assertTrue("should have called setSource() first", mSourceResId != -1); 224 | sb.append('-'); 225 | sb.append(mSourceResId); 226 | if (mCopyVideo) { 227 | assertTrue("should have called setSize() first", mWidth != -1); 228 | assertTrue("should have called setSize() first", mHeight != -1); 229 | sb.append('-'); 230 | sb.append("video"); 231 | sb.append('-'); 232 | sb.append(mWidth); 233 | sb.append('x'); 234 | sb.append(mHeight); 235 | } 236 | if (mCopyAudio) { 237 | sb.append('-'); 238 | sb.append("audio"); 239 | } 240 | sb.append(".mp4"); 241 | mOutputFile = sb.toString(); 242 | } 243 | 244 | private MediaExtractor mVideoExtractor = null; 245 | private MediaExtractor mAudioExtractor = null; 246 | private InputSurface mInputSurface = null; 247 | private OutputSurface mOutputSurface = null; 248 | private MediaCodec mVideoDecoder = null; 249 | private MediaCodec mAudioDecoder = null; 250 | private MediaCodec mVideoEncoder = null; 251 | private MediaCodec mAudioEncoder = null; 252 | private MediaMuxer mMuxer = null; 253 | 254 | /** 255 | * Tests encoding and subsequently decoding video from frames generated into a buffer. 256 | *
<p>
257 | We encode several frames of a video test pattern using MediaCodec, then decode the output 258 | * with MediaCodec and do some simple checks. 259 | */ 260 | private void extractDecodeEditEncodeMux() throws Exception { 261 | // Exception that may be thrown during release. 262 | Exception exception = null; 263 | 264 | mDecoderOutputVideoFormat = null; 265 | mDecoderOutputAudioFormat = null; 266 | mEncoderOutputVideoFormat = null; 267 | mEncoderOutputAudioFormat = null; 268 | 269 | mOutputVideoTrack = -1; 270 | mOutputAudioTrack = -1; 271 | mVideoExtractorDone = false; 272 | mVideoDecoderDone = false; 273 | mVideoEncoderDone = false; 274 | mAudioExtractorDone = false; 275 | mAudioDecoderDone = false; 276 | mAudioEncoderDone = false; 277 | mPendingAudioDecoderOutputBufferIndices = new LinkedList<Integer>(); 278 | mPendingAudioDecoderOutputBufferInfos = new LinkedList<MediaCodec.BufferInfo>(); 279 | mPendingAudioEncoderInputBufferIndices = new LinkedList<Integer>(); 280 | mPendingVideoEncoderOutputBufferIndices = new LinkedList<Integer>(); 281 | mPendingVideoEncoderOutputBufferInfos = new LinkedList<MediaCodec.BufferInfo>(); 282 | mPendingAudioEncoderOutputBufferIndices = new LinkedList<Integer>(); 283 | mPendingAudioEncoderOutputBufferInfos = new LinkedList<MediaCodec.BufferInfo>(); 284 | mMuxing = false; 285 | mVideoExtractedFrameCount = 0; 286 | mVideoDecodedFrameCount = 0; 287 | mVideoEncodedFrameCount = 0; 288 | mAudioExtractedFrameCount = 0; 289 | mAudioDecodedFrameCount = 0; 290 | mAudioEncodedFrameCount = 0; 291 | 292 | MediaCodecInfo videoCodecInfo = selectCodec(OUTPUT_VIDEO_MIME_TYPE); 293 | if (videoCodecInfo == null) { 294 | // Don't fail CTS if they don't have an AVC codec (not here, anyway). 295 | Log.e(TAG, "Unable to find an appropriate codec for " + OUTPUT_VIDEO_MIME_TYPE); 296 | return; 297 | } 298 | if (VERBOSE) Log.d(TAG, "video found codec: " + videoCodecInfo.getName()); 299 | 300 | MediaCodecInfo audioCodecInfo = selectCodec(OUTPUT_AUDIO_MIME_TYPE); 301 | if (audioCodecInfo == null) { 302 | // Don't fail CTS if they don't have an AAC codec (not here, anyway). 303 | Log.e(TAG, "Unable to find an appropriate codec for " + OUTPUT_AUDIO_MIME_TYPE); 304 | return; 305 | } 306 | if (VERBOSE) Log.d(TAG, "audio found codec: " + audioCodecInfo.getName()); 307 | 308 | try { 309 | // Create a muxer, but do not start it or add tracks just yet. 310 | mMuxer = createMuxer(); 311 | 312 | if (mCopyVideo) { 313 | mVideoExtractor = createExtractor(); 314 | int videoInputTrack = getAndSelectVideoTrackIndex(mVideoExtractor); 315 | assertTrue("missing video track in test video", videoInputTrack != -1); 316 | MediaFormat inputFormat = mVideoExtractor.getTrackFormat(videoInputTrack); 317 | 318 | // We avoid the device-specific limitations on width and height by using values 319 | // that are multiples of 16, which all tested devices seem to be able to handle. 320 | MediaFormat outputVideoFormat = 321 | MediaFormat.createVideoFormat(OUTPUT_VIDEO_MIME_TYPE, mWidth, mHeight); 322 | 323 | // Set some properties. Failing to specify some of these can cause the MediaCodec 324 | // configure() call to throw an unhelpful exception. 
325 | outputVideoFormat.setInteger( 326 | MediaFormat.KEY_COLOR_FORMAT, OUTPUT_VIDEO_COLOR_FORMAT); 327 | outputVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_VIDEO_BIT_RATE); 328 | outputVideoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, OUTPUT_VIDEO_FRAME_RATE); 329 | outputVideoFormat.setInteger( 330 | MediaFormat.KEY_I_FRAME_INTERVAL, OUTPUT_VIDEO_IFRAME_INTERVAL); 331 | if (VERBOSE) Log.d(TAG, "video format: " + outputVideoFormat); 332 | 333 | // Create a MediaCodec for the desired codec, then configure it as an encoder with 334 | // our desired properties. Request a Surface to use for input. 335 | AtomicReference<Surface> inputSurfaceReference = new AtomicReference<Surface>(); 336 | mVideoEncoder = createVideoEncoder( 337 | videoCodecInfo, outputVideoFormat, inputSurfaceReference); 338 | mInputSurface = new InputSurface(inputSurfaceReference.get()); 339 | mInputSurface.makeCurrent(); 340 | // Create a MediaCodec for the decoder, based on the extractor's format. 341 | mOutputSurface = new OutputSurface(); 342 | mOutputSurface.changeFragmentShader(FRAGMENT_SHADER); 343 | mVideoDecoder = createVideoDecoder(inputFormat, mOutputSurface.getSurface()); 344 | mInputSurface.releaseEGLContext(); 345 | } 346 | 347 | if (mCopyAudio) { 348 | mAudioExtractor = createExtractor(); 349 | int audioInputTrack = getAndSelectAudioTrackIndex(mAudioExtractor); 350 | assertTrue("missing audio track in test video", audioInputTrack != -1); 351 | MediaFormat inputFormat = mAudioExtractor.getTrackFormat(audioInputTrack); 352 | 353 | MediaFormat outputAudioFormat = 354 | MediaFormat.createAudioFormat( 355 | OUTPUT_AUDIO_MIME_TYPE, OUTPUT_AUDIO_SAMPLE_RATE_HZ, 356 | OUTPUT_AUDIO_CHANNEL_COUNT); 357 | outputAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_AUDIO_BIT_RATE); 358 | outputAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, OUTPUT_AUDIO_AAC_PROFILE); 359 | 360 | // Create a MediaCodec for the desired codec, then configure it as an encoder with 361 | // our desired properties. 362 | mAudioEncoder = createAudioEncoder(audioCodecInfo, outputAudioFormat); 363 | // Create a MediaCodec for the decoder, based on the extractor's format. 364 | mAudioDecoder = createAudioDecoder(inputFormat); 365 | } 366 | 367 | awaitEncode(); 368 | } finally { 369 | if (VERBOSE) Log.d(TAG, "releasing extractor, decoder, encoder, and muxer"); 370 | // Try to release everything we acquired, even if one of the releases fails, in which 371 | // case we save the first exception we got and re-throw at the end (unless some 372 | // other exception has already been thrown). This guarantees that the first exception thrown 373 | // is reported as the cause of the error, that every release is attempted, and that 374 | // all other exceptions appear in the logs. 
375 | try { 376 | if (mVideoExtractor != null) { 377 | mVideoExtractor.release(); 378 | } 379 | } catch(Exception e) { 380 | Log.e(TAG, "error while releasing videoExtractor", e); 381 | if (exception == null) { 382 | exception = e; 383 | } 384 | } 385 | try { 386 | if (mAudioExtractor != null) { 387 | mAudioExtractor.release(); 388 | } 389 | } catch(Exception e) { 390 | Log.e(TAG, "error while releasing audioExtractor", e); 391 | if (exception == null) { 392 | exception = e; 393 | } 394 | } 395 | try { 396 | if (mVideoDecoder != null) { 397 | mVideoDecoder.stop(); 398 | mVideoDecoder.release(); 399 | } 400 | } catch(Exception e) { 401 | Log.e(TAG, "error while releasing videoDecoder", e); 402 | if (exception == null) { 403 | exception = e; 404 | } 405 | } 406 | try { 407 | if (mOutputSurface != null) { 408 | mOutputSurface.release(); 409 | } 410 | } catch(Exception e) { 411 | Log.e(TAG, "error while releasing outputSurface", e); 412 | if (exception == null) { 413 | exception = e; 414 | } 415 | } 416 | try { 417 | if (mVideoEncoder != null) { 418 | mVideoEncoder.stop(); 419 | mVideoEncoder.release(); 420 | } 421 | } catch(Exception e) { 422 | Log.e(TAG, "error while releasing videoEncoder", e); 423 | if (exception == null) { 424 | exception = e; 425 | } 426 | } 427 | try { 428 | if (mAudioDecoder != null) { 429 | mAudioDecoder.stop(); 430 | mAudioDecoder.release(); 431 | } 432 | } catch(Exception e) { 433 | Log.e(TAG, "error while releasing audioDecoder", e); 434 | if (exception == null) { 435 | exception = e; 436 | } 437 | } 438 | try { 439 | if (mAudioEncoder != null) { 440 | mAudioEncoder.stop(); 441 | mAudioEncoder.release(); 442 | } 443 | } catch(Exception e) { 444 | Log.e(TAG, "error while releasing audioEncoder", e); 445 | if (exception == null) { 446 | exception = e; 447 | } 448 | } 449 | try { 450 | if (mMuxer != null) { 451 | mMuxer.stop(); 452 | mMuxer.release(); 453 | } 454 | } catch(Exception e) { 455 | Log.e(TAG, "error while releasing muxer", e); 456 | if (exception == null) { 457 | exception = e; 458 | } 459 | } 460 | try { 461 | if (mInputSurface != null) { 462 | mInputSurface.release(); 463 | } 464 | } catch(Exception e) { 465 | Log.e(TAG, "error while releasing inputSurface", e); 466 | if (exception == null) { 467 | exception = e; 468 | } 469 | } 470 | if (mVideoDecoderHandlerThread != null) { 471 | mVideoDecoderHandlerThread.quitSafely(); 472 | } 473 | mVideoExtractor = null; 474 | mAudioExtractor = null; 475 | mOutputSurface = null; 476 | mInputSurface = null; 477 | mVideoDecoder = null; 478 | mAudioDecoder = null; 479 | mVideoEncoder = null; 480 | mAudioEncoder = null; 481 | mMuxer = null; 482 | mVideoDecoderHandlerThread = null; 483 | } 484 | if (exception != null) { 485 | throw exception; 486 | } 487 | } 488 | 489 | /** 490 | * Creates an extractor that reads its frames from {@link #mSourceResId}. 
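 * (openRawResourceFd() returns an AssetFileDescriptor describing the offset and length of the resource inside the APK, which is why setDataSource() below is given all three values.)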
491 | */ 492 | private MediaExtractor createExtractor() throws IOException { 493 | MediaExtractor extractor; 494 | AssetFileDescriptor srcFd = getContext().getResources().openRawResourceFd(mSourceResId); 495 | extractor = new MediaExtractor(); 496 | extractor.setDataSource(srcFd.getFileDescriptor(), srcFd.getStartOffset(), 497 | srcFd.getLength()); 498 | return extractor; 499 | } 500 | 501 | static class CallbackHandler extends Handler { 502 | CallbackHandler(Looper l) { 503 | super(l); 504 | } 505 | private MediaCodec mCodec; 506 | private boolean mEncoder; 507 | private MediaCodec.Callback mCallback; 508 | private String mMime; 509 | private boolean mSetDone; 510 | @Override 511 | public void handleMessage(Message msg) { 512 | try { 513 | mCodec = mEncoder ? MediaCodec.createEncoderByType(mMime) : MediaCodec.createDecoderByType(mMime); 514 | } catch (IOException ioe) { 515 | } 516 | mCodec.setCallback(mCallback); 517 | synchronized (this) { 518 | mSetDone = true; 519 | notifyAll(); 520 | } 521 | } 522 | void create(boolean encoder, String mime, MediaCodec.Callback callback) { 523 | mEncoder = encoder; 524 | mMime = mime; 525 | mCallback = callback; 526 | mSetDone = false; 527 | sendEmptyMessage(0); 528 | synchronized (this) { 529 | while (!mSetDone) { 530 | try { 531 | wait(); 532 | } catch (InterruptedException ie) { 533 | } 534 | } 535 | } 536 | } 537 | MediaCodec getCodec() { 538 | return mCodec; 539 | } 540 | } 541 | private HandlerThread mVideoDecoderHandlerThread; 542 | private CallbackHandler mVideoDecoderHandler; 543 | 544 | /** 545 | * Creates a decoder for the given format, which outputs to the given surface. 546 | * 547 | * @param inputFormat the format of the stream to decode 548 | * @param surface into which to decode the frames 549 | */ 550 | private MediaCodec createVideoDecoder(MediaFormat inputFormat, Surface surface) throws IOException { 551 | mVideoDecoderHandlerThread = new HandlerThread("DecoderThread"); 552 | mVideoDecoderHandlerThread.start(); 553 | mVideoDecoderHandler = new CallbackHandler(mVideoDecoderHandlerThread.getLooper()); 554 | MediaCodec.Callback callback = new MediaCodec.Callback() { 555 | public void onError(MediaCodec codec, MediaCodec.CodecException exception) { 556 | } 557 | public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) { 558 | mDecoderOutputVideoFormat = codec.getOutputFormat(); 559 | if (VERBOSE) { 560 | Log.d(TAG, "video decoder: output format changed: " 561 | + mDecoderOutputVideoFormat); 562 | } 563 | } 564 | public void onInputBufferAvailable(MediaCodec codec, int index) { 565 | // Extract video from file and feed to decoder. 566 | // We feed packets regardless of whether the muxer is set up or not. 567 | // If the muxer isn't set up yet, the encoder output will be queued up, 568 | // finally blocking the decoder as well. 
569 | ByteBuffer decoderInputBuffer = codec.getInputBuffer(index); 570 | while (!mVideoExtractorDone) { 571 | int size = mVideoExtractor.readSampleData(decoderInputBuffer, 0); 572 | long presentationTime = mVideoExtractor.getSampleTime(); 573 | if (VERBOSE) { 574 | Log.d(TAG, "video extractor: returned buffer of size " + size); 575 | Log.d(TAG, "video extractor: returned buffer for time " + presentationTime); 576 | } 577 | if (size >= 0) { 578 | codec.queueInputBuffer( 579 | index, 580 | 0, 581 | size, 582 | presentationTime, 583 | mVideoExtractor.getSampleFlags()); 584 | } 585 | mVideoExtractorDone = !mVideoExtractor.advance(); 586 | if (mVideoExtractorDone) { 587 | if (VERBOSE) Log.d(TAG, "video extractor: EOS"); 588 | codec.queueInputBuffer( 589 | index, 590 | 0, 591 | 0, 592 | 0, 593 | MediaCodec.BUFFER_FLAG_END_OF_STREAM); 594 | } 595 | mVideoExtractedFrameCount++; 596 | logState(); 597 | if (size >= 0) 598 | break; 599 | } 600 | } 601 | public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) { 602 | if (VERBOSE) { 603 | Log.d(TAG, "video decoder: returned output buffer: " + index); 604 | Log.d(TAG, "video decoder: returned buffer of size " + info.size); 605 | } 606 | if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 607 | if (VERBOSE) Log.d(TAG, "video decoder: codec config buffer"); 608 | codec.releaseOutputBuffer(index, false); 609 | return; 610 | } 611 | if (VERBOSE) { 612 | Log.d(TAG, "video decoder: returned buffer for time " 613 | + info.presentationTimeUs); 614 | } 615 | boolean render = info.size != 0; 616 | codec.releaseOutputBuffer(index, render); 617 | if (render) { 618 | mInputSurface.makeCurrent(); 619 | if (VERBOSE) Log.d(TAG, "output surface: await new image"); 620 | mOutputSurface.awaitNewImage(); 621 | // Edit the frame and send it to the encoder. 622 | if (VERBOSE) Log.d(TAG, "output surface: draw image"); 623 | mOutputSurface.drawImage(); 624 | mInputSurface.setPresentationTime( 625 | info.presentationTimeUs * 1000); 626 | if (VERBOSE) Log.d(TAG, "input surface: swap buffers"); 627 | mInputSurface.swapBuffers(); 628 | if (VERBOSE) Log.d(TAG, "video encoder: notified of new frame"); 629 | mInputSurface.releaseEGLContext(); 630 | } 631 | if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 632 | if (VERBOSE) Log.d(TAG, "video decoder: EOS"); 633 | mVideoDecoderDone = true; 634 | mVideoEncoder.signalEndOfInputStream(); 635 | } 636 | mVideoDecodedFrameCount++; 637 | logState(); 638 | } 639 | }; 640 | // Create the decoder on a different thread, in order to have the callbacks there. 641 | // This makes sure that the blocking waiting and rendering in onOutputBufferAvailable 642 | // won't block other callbacks (e.g. blocking encoder output callbacks), which 643 | // would otherwise lead to the transcoding pipeline to lock up. 644 | 645 | // Since API 23, we could just do setCallback(callback, mVideoDecoderHandler) instead 646 | // of using a custom Handler and passing a message to create the MediaCodec there. 647 | 648 | // When the callbacks are received on a different thread, the updating of the variables 649 | // that are used for state logging (mVideoExtractedFrameCount, mVideoDecodedFrameCount, 650 | // mVideoExtractorDone and mVideoDecoderDone) should ideally be synchronized properly 651 | // against accesses from other threads, but that is left out for brevity since it's 652 | // not essential to the actual transcoding. 
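// A minimal sketch of that API 23+ variant (not used here; it relies on the Handler overload of setCallback(), which only exists from API 23):
//   MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat));
//   decoder.setCallback(callback, mVideoDecoderHandler);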
653 | mVideoDecoderHandler.create(false, getMimeTypeFor(inputFormat), callback); 654 | MediaCodec decoder = mVideoDecoderHandler.getCodec(); 655 | decoder.configure(inputFormat, surface, null, 0); 656 | decoder.start(); 657 | return decoder; 658 | } 659 | 660 | /** 661 | * Creates an encoder for the given format using the specified codec, taking input from a 662 | * surface. 663 | * 664 | *
<p>
The surface to use as input is stored in the given reference. 665 | * 666 | * @param codecInfo of the codec to use 667 | * @param format of the stream to be produced 668 | * @param surfaceReference to store the surface to use as input 669 | */ 670 | private MediaCodec createVideoEncoder( 671 | MediaCodecInfo codecInfo, 672 | MediaFormat format, 673 | AtomicReference<Surface> surfaceReference) throws IOException { 674 | MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName()); 675 | encoder.setCallback(new MediaCodec.Callback() { 676 | public void onError(MediaCodec codec, MediaCodec.CodecException exception) { 677 | } 678 | public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) { 679 | if (VERBOSE) Log.d(TAG, "video encoder: output format changed"); 680 | if (mOutputVideoTrack >= 0) { 681 | fail("video encoder changed its output format again?"); 682 | } 683 | mEncoderOutputVideoFormat = codec.getOutputFormat(); 684 | setupMuxer(); 685 | } 686 | public void onInputBufferAvailable(MediaCodec codec, int index) { 687 | } 688 | public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) { 689 | if (VERBOSE) { 690 | Log.d(TAG, "video encoder: returned output buffer: " + index); 691 | Log.d(TAG, "video encoder: returned buffer of size " + info.size); 692 | } 693 | muxVideo(index, info); 694 | } 695 | }); 696 | encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); 697 | // Must be called before start(). 698 | surfaceReference.set(encoder.createInputSurface()); 699 | encoder.start(); 700 | return encoder; 701 | } 702 | 703 | /** 704 | * Creates a decoder for the given format. 705 | * 706 | * @param inputFormat the format of the stream to decode 707 | */ 708 | private MediaCodec createAudioDecoder(MediaFormat inputFormat) throws IOException { 709 | MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat)); 710 | decoder.setCallback(new MediaCodec.Callback() { 711 | public void onError(MediaCodec codec, MediaCodec.CodecException exception) { 712 | } 713 | public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) { 714 | mDecoderOutputAudioFormat = codec.getOutputFormat(); 715 | if (VERBOSE) { 716 | Log.d(TAG, "audio decoder: output format changed: " 717 | + mDecoderOutputAudioFormat); 718 | } 719 | } 720 | public void onInputBufferAvailable(MediaCodec codec, int index) { 721 | ByteBuffer decoderInputBuffer = codec.getInputBuffer(index); 722 | while (!mAudioExtractorDone) { 723 | int size = mAudioExtractor.readSampleData(decoderInputBuffer, 0); 724 | long presentationTime = mAudioExtractor.getSampleTime(); 725 | if (VERBOSE) { 726 | Log.d(TAG, "audio extractor: returned buffer of size " + size); 727 | Log.d(TAG, "audio extractor: returned buffer for time " + presentationTime); 728 | } 729 | if (size >= 0) { 730 | codec.queueInputBuffer( 731 | index, 732 | 0, 733 | size, 734 | presentationTime, 735 | mAudioExtractor.getSampleFlags()); 736 | } 737 | mAudioExtractorDone = !mAudioExtractor.advance(); 738 | if (mAudioExtractorDone) { 739 | if (VERBOSE) Log.d(TAG, "audio extractor: EOS"); 740 | codec.queueInputBuffer( 741 | index, 742 | 0, 743 | 0, 744 | 0, 745 | MediaCodec.BUFFER_FLAG_END_OF_STREAM); 746 | } 747 | mAudioExtractedFrameCount++; 748 | logState(); 749 | if (size >= 0) 750 | break; 751 | } 752 | } 753 | public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) { 754 | if (VERBOSE) { 755 | Log.d(TAG, "audio decoder: returned output buffer: " + 
index); 756 | } 757 | if (VERBOSE) { 758 | Log.d(TAG, "audio decoder: returned buffer of size " + info.size); 759 | } 760 | ByteBuffer decoderOutputBuffer = codec.getOutputBuffer(index); 761 | if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 762 | if (VERBOSE) Log.d(TAG, "audio decoder: codec config buffer"); 763 | codec.releaseOutputBuffer(index, false); 764 | return; 765 | } 766 | if (VERBOSE) { 767 | Log.d(TAG, "audio decoder: returned buffer for time " 768 | + info.presentationTimeUs); 769 | } 770 | mPendingAudioDecoderOutputBufferIndices.add(index); 771 | mPendingAudioDecoderOutputBufferInfos.add(info); 772 | mAudioDecodedFrameCount++; 773 | logState(); 774 | tryEncodeAudio(); 775 | } 776 | }); 777 | decoder.configure(inputFormat, null, null, 0); 778 | decoder.start(); 779 | return decoder; 780 | } 781 | 782 | /** 783 | * Creates an encoder for the given format using the specified codec. 784 | * 785 | * @param codecInfo of the codec to use 786 | * @param format of the stream to be produced 787 | */ 788 | private MediaCodec createAudioEncoder(MediaCodecInfo codecInfo, MediaFormat format) throws IOException { 789 | MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName()); 790 | encoder.setCallback(new MediaCodec.Callback() { 791 | public void onError(MediaCodec codec, MediaCodec.CodecException exception) { 792 | } 793 | public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) { 794 | if (VERBOSE) Log.d(TAG, "audio encoder: output format changed"); 795 | if (mOutputAudioTrack >= 0) { 796 | fail("audio encoder changed its output format again?"); 797 | } 798 | 799 | mEncoderOutputAudioFormat = codec.getOutputFormat(); 800 | setupMuxer(); 801 | } 802 | public void onInputBufferAvailable(MediaCodec codec, int index) { 803 | if (VERBOSE) { 804 | Log.d(TAG, "audio encoder: returned input buffer: " + index); 805 | } 806 | mPendingAudioEncoderInputBufferIndices.add(index); 807 | tryEncodeAudio(); 808 | } 809 | public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) { 810 | if (VERBOSE) { 811 | Log.d(TAG, "audio encoder: returned output buffer: " + index); 812 | Log.d(TAG, "audio encoder: returned buffer of size " + info.size); 813 | } 814 | muxAudio(index, info); 815 | } 816 | }); 817 | encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); 818 | encoder.start(); 819 | return encoder; 820 | } 821 | 822 | // No need to have synchronization around this, since both audio encoder and 823 | // decoder callbacks are on the same thread. 
824 | private void tryEncodeAudio() { 825 | if (mPendingAudioEncoderInputBufferIndices.size() == 0 || 826 | mPendingAudioDecoderOutputBufferIndices.size() == 0) 827 | return; 828 | int decoderIndex = mPendingAudioDecoderOutputBufferIndices.poll(); 829 | int encoderIndex = mPendingAudioEncoderInputBufferIndices.poll(); 830 | MediaCodec.BufferInfo info = mPendingAudioDecoderOutputBufferInfos.poll(); 831 | 832 | ByteBuffer encoderInputBuffer = mAudioEncoder.getInputBuffer(encoderIndex); 833 | int size = info.size; 834 | long presentationTime = info.presentationTimeUs; 835 | if (VERBOSE) { 836 | Log.d(TAG, "audio decoder: processing pending buffer: " 837 | + decoderIndex); 838 | } 839 | if (VERBOSE) { 840 | Log.d(TAG, "audio decoder: pending buffer of size " + size); 841 | Log.d(TAG, "audio decoder: pending buffer for time " + presentationTime); 842 | } 843 | if (size >= 0) { 844 | ByteBuffer decoderOutputBuffer = mAudioDecoder.getOutputBuffer(decoderIndex).duplicate(); 845 | decoderOutputBuffer.position(info.offset); 846 | decoderOutputBuffer.limit(info.offset + size); 847 | encoderInputBuffer.position(0); 848 | encoderInputBuffer.put(decoderOutputBuffer); 849 | 850 | mAudioEncoder.queueInputBuffer( 851 | encoderIndex, 852 | 0, 853 | size, 854 | presentationTime, 855 | info.flags); 856 | } 857 | mAudioDecoder.releaseOutputBuffer(decoderIndex, false); 858 | if ((info.flags 859 | & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 860 | if (VERBOSE) Log.d(TAG, "audio decoder: EOS"); 861 | mAudioDecoderDone = true; 862 | } 863 | logState(); 864 | } 865 | 866 | private void setupMuxer() { 867 | if (!mMuxing 868 | && (!mCopyAudio || mEncoderOutputAudioFormat != null) 869 | && (!mCopyVideo || mEncoderOutputVideoFormat != null)) { 870 | if (mCopyVideo) { 871 | Log.d(TAG, "muxer: adding video track."); 872 | mOutputVideoTrack = mMuxer.addTrack(mEncoderOutputVideoFormat); 873 | } 874 | if (mCopyAudio) { 875 | Log.d(TAG, "muxer: adding audio track."); 876 | mOutputAudioTrack = mMuxer.addTrack(mEncoderOutputAudioFormat); 877 | } 878 | Log.d(TAG, "muxer: starting"); 879 | mMuxer.start(); 880 | mMuxing = true; 881 | 882 | MediaCodec.BufferInfo info; 883 | while ((info = mPendingVideoEncoderOutputBufferInfos.poll()) != null) { 884 | int index = mPendingVideoEncoderOutputBufferIndices.poll().intValue(); 885 | muxVideo(index, info); 886 | } 887 | while ((info = mPendingAudioEncoderOutputBufferInfos.poll()) != null) { 888 | int index = mPendingAudioEncoderOutputBufferIndices.poll().intValue(); 889 | muxAudio(index, info); 890 | } 891 | } 892 | } 893 | 894 | private void muxVideo(int index, MediaCodec.BufferInfo info) { 895 | if (!mMuxing) { 896 | mPendingVideoEncoderOutputBufferIndices.add(new Integer(index)); 897 | mPendingVideoEncoderOutputBufferInfos.add(info); 898 | return; 899 | } 900 | ByteBuffer encoderOutputBuffer = mVideoEncoder.getOutputBuffer(index); 901 | if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 902 | if (VERBOSE) Log.d(TAG, "video encoder: codec config buffer"); 903 | // Simply ignore codec config buffers. 
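// (The muxer already received the codec-specific data via the MediaFormat passed to addTrack() in setupMuxer(), so the config buffer itself is redundant here.)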
904 | mVideoEncoder.releaseOutputBuffer(index, false); 905 | return; 906 | } 907 | if (VERBOSE) { 908 | Log.d(TAG, "video encoder: returned buffer for time " 909 | + info.presentationTimeUs); 910 | } 911 | if (info.size != 0) { 912 | mMuxer.writeSampleData( 913 | mOutputVideoTrack, encoderOutputBuffer, info); 914 | } 915 | mVideoEncoder.releaseOutputBuffer(index, false); 916 | mVideoEncodedFrameCount++; 917 | if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 918 | if (VERBOSE) Log.d(TAG, "video encoder: EOS"); 919 | synchronized (this) { 920 | mVideoEncoderDone = true; 921 | notifyAll(); 922 | } 923 | } 924 | logState(); 925 | } 926 | private void muxAudio(int index, MediaCodec.BufferInfo info) { 927 | if (!mMuxing) { 928 | mPendingAudioEncoderOutputBufferIndices.add(new Integer(index)); 929 | mPendingAudioEncoderOutputBufferInfos.add(info); 930 | return; 931 | } 932 | ByteBuffer encoderOutputBuffer = mAudioEncoder.getOutputBuffer(index); 933 | if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 934 | if (VERBOSE) Log.d(TAG, "audio encoder: codec config buffer"); 935 | // Simply ignore codec config buffers. 936 | mAudioEncoder.releaseOutputBuffer(index, false); 937 | return; 938 | } 939 | if (VERBOSE) { 940 | Log.d(TAG, "audio encoder: returned buffer for time " + info.presentationTimeUs); 941 | } 942 | if (info.size != 0) { 943 | mMuxer.writeSampleData( 944 | mOutputAudioTrack, encoderOutputBuffer, info); 945 | } 946 | mAudioEncoder.releaseOutputBuffer(index, false); 947 | mAudioEncodedFrameCount++; 948 | if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 949 | if (VERBOSE) Log.d(TAG, "audio encoder: EOS"); 950 | synchronized (this) { 951 | mAudioEncoderDone = true; 952 | notifyAll(); 953 | } 954 | } 955 | logState(); 956 | } 957 | 958 | /** 959 | * Creates a muxer to write the encoded frames. 960 | * 961 | *
<p>
The muxer is not started as it needs to be started only after all streams have been added. 962 | */ 963 | private MediaMuxer createMuxer() throws IOException { 964 | return new MediaMuxer(mOutputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); 965 | } 966 | 967 | private int getAndSelectVideoTrackIndex(MediaExtractor extractor) { 968 | for (int index = 0; index < extractor.getTrackCount(); ++index) { 969 | if (VERBOSE) { 970 | Log.d(TAG, "format for track " + index + " is " 971 | + getMimeTypeFor(extractor.getTrackFormat(index))); 972 | } 973 | if (isVideoFormat(extractor.getTrackFormat(index))) { 974 | extractor.selectTrack(index); 975 | return index; 976 | } 977 | } 978 | return -1; 979 | } 980 | 981 | private int getAndSelectAudioTrackIndex(MediaExtractor extractor) { 982 | for (int index = 0; index < extractor.getTrackCount(); ++index) { 983 | if (VERBOSE) { 984 | Log.d(TAG, "format for track " + index + " is " 985 | + getMimeTypeFor(extractor.getTrackFormat(index))); 986 | } 987 | if (isAudioFormat(extractor.getTrackFormat(index))) { 988 | extractor.selectTrack(index); 989 | return index; 990 | } 991 | } 992 | return -1; 993 | } 994 | 995 | // We will get these from the decoders when notified of a format change. 996 | private MediaFormat mDecoderOutputVideoFormat = null; 997 | private MediaFormat mDecoderOutputAudioFormat = null; 998 | // We will get these from the encoders when notified of a format change. 999 | private MediaFormat mEncoderOutputVideoFormat = null; 1000 | private MediaFormat mEncoderOutputAudioFormat = null; 1001 | 1002 | // We will determine these once we have the output format. 1003 | private int mOutputVideoTrack = -1; 1004 | private int mOutputAudioTrack = -1; 1005 | // Whether things are done on the video side. 1006 | private boolean mVideoExtractorDone = false; 1007 | private boolean mVideoDecoderDone = false; 1008 | private boolean mVideoEncoderDone = false; 1009 | // Whether things are done on the audio side. 
1010 | private boolean mAudioExtractorDone = false; 1011 | private boolean mAudioDecoderDone = false; 1012 | private boolean mAudioEncoderDone = false; 1013 | private LinkedList<Integer> mPendingAudioDecoderOutputBufferIndices; 1014 | private LinkedList<MediaCodec.BufferInfo> mPendingAudioDecoderOutputBufferInfos; 1015 | private LinkedList<Integer> mPendingAudioEncoderInputBufferIndices; 1016 | 1017 | private LinkedList<Integer> mPendingVideoEncoderOutputBufferIndices; 1018 | private LinkedList<MediaCodec.BufferInfo> mPendingVideoEncoderOutputBufferInfos; 1019 | private LinkedList<Integer> mPendingAudioEncoderOutputBufferIndices; 1020 | private LinkedList<MediaCodec.BufferInfo> mPendingAudioEncoderOutputBufferInfos; 1021 | 1022 | private boolean mMuxing = false; 1023 | 1024 | private int mVideoExtractedFrameCount = 0; 1025 | private int mVideoDecodedFrameCount = 0; 1026 | private int mVideoEncodedFrameCount = 0; 1027 | 1028 | private int mAudioExtractedFrameCount = 0; 1029 | private int mAudioDecodedFrameCount = 0; 1030 | private int mAudioEncodedFrameCount = 0; 1031 | 1032 | private void logState() { 1033 | if (VERBOSE) { 1034 | Log.d(TAG, String.format( 1035 | "loop: " 1036 | 1037 | + "V(%b){" 1038 | + "extracted:%d(done:%b) " 1039 | + "decoded:%d(done:%b) " 1040 | + "encoded:%d(done:%b)} " 1041 | 1042 | + "A(%b){" 1043 | + "extracted:%d(done:%b) " 1044 | + "decoded:%d(done:%b) " 1045 | + "encoded:%d(done:%b)} " 1046 | 1047 | + "muxing:%b(V:%d,A:%d)", 1048 | 1049 | mCopyVideo, 1050 | mVideoExtractedFrameCount, mVideoExtractorDone, 1051 | mVideoDecodedFrameCount, mVideoDecoderDone, 1052 | mVideoEncodedFrameCount, mVideoEncoderDone, 1053 | 1054 | mCopyAudio, 1055 | mAudioExtractedFrameCount, mAudioExtractorDone, 1056 | mAudioDecodedFrameCount, mAudioDecoderDone, 1057 | mAudioEncodedFrameCount, mAudioEncoderDone, 1058 | 1059 | mMuxing, mOutputVideoTrack, mOutputAudioTrack)); 1060 | } 1061 | } 1062 | 1063 | private void awaitEncode() { 1064 | synchronized (this) { 1065 | while ((mCopyVideo && !mVideoEncoderDone) || (mCopyAudio && !mAudioEncoderDone)) { 1066 | try { 1067 | wait(); 1068 | } catch (InterruptedException ie) { 1069 | } 1070 | } 1071 | } 1072 | 1073 | // Basic sanity checks. 1074 | if (mCopyVideo) { 1075 | assertEquals("encoded and decoded video frame counts should match", 1076 | mVideoDecodedFrameCount, mVideoEncodedFrameCount); 1077 | assertTrue("decoded frame count should be at most the extracted frame count", 1078 | mVideoDecodedFrameCount <= mVideoExtractedFrameCount); 1079 | } 1080 | if (mCopyAudio) { 1081 | assertEquals("no frame should be pending", 0, mPendingAudioDecoderOutputBufferIndices.size()); 1082 | } 1083 | 1084 | // TODO: Check the generated output file. 1085 | } 1086 | 1087 | private static boolean isVideoFormat(MediaFormat format) { 1088 | return getMimeTypeFor(format).startsWith("video/"); 1089 | } 1090 | 1091 | private static boolean isAudioFormat(MediaFormat format) { 1092 | return getMimeTypeFor(format).startsWith("audio/"); 1093 | } 1094 | 1095 | private static String getMimeTypeFor(MediaFormat format) { 1096 | return format.getString(MediaFormat.KEY_MIME); 1097 | } 1098 | 1099 | /** 1100 | * Returns the first codec capable of encoding the specified MIME type, or null if no match was 1101 | * found. 
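 * (On API 21+, new MediaCodecList(MediaCodecList.REGULAR_CODECS).findEncoderForFormat(format) offers an alternative to this manual scan.)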
1102 | */ 1103 | private static MediaCodecInfo selectCodec(String mimeType) { 1104 | int numCodecs = MediaCodecList.getCodecCount(); 1105 | for (int i = 0; i < numCodecs; i++) { 1106 | MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i); 1107 | 1108 | if (!codecInfo.isEncoder()) { 1109 | continue; 1110 | } 1111 | 1112 | String[] types = codecInfo.getSupportedTypes(); 1113 | for (int j = 0; j < types.length; j++) { 1114 | if (types[j].equalsIgnoreCase(mimeType)) { 1115 | return codecInfo; 1116 | } 1117 | } 1118 | } 1119 | return null; 1120 | } 1121 | 1122 | } 1123 | -------------------------------------------------------------------------------- /src/com/example/decodeencodetest/InputSurface.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2013 The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.example.decodeencodetest; 18 | 19 | import android.opengl.EGL14; 20 | import android.opengl.EGLExt; 21 | import android.opengl.EGLConfig; 22 | import android.opengl.EGLContext; 23 | import android.opengl.EGLDisplay; 24 | import android.opengl.EGLSurface; 25 | import android.util.Log; 26 | import android.view.Surface; 27 | 28 | 29 | /** 30 | * Holds state associated with a Surface used for MediaCodec encoder input. 31 | *
<p>
32 | The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that 33 | * to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent 34 | * to the video encoder. 35 | */ 36 | class InputSurface { 37 | private static final String TAG = "InputSurface"; 38 | private static final boolean VERBOSE = false; 39 | 40 | private static final int EGL_RECORDABLE_ANDROID = 0x3142; 41 | private static final int EGL_OPENGL_ES2_BIT = 4; 42 | 43 | private EGLDisplay mEGLDisplay; 44 | private EGLContext mEGLContext; 45 | private EGLSurface mEGLSurface; 46 | 47 | private Surface mSurface; 48 | 49 | /** 50 | * Creates an InputSurface from a Surface. 51 | */ 52 | public InputSurface(Surface surface) { 53 | if (surface == null) { 54 | throw new NullPointerException(); 55 | } 56 | mSurface = surface; 57 | 58 | eglSetup(); 59 | } 60 | 61 | /** 62 | * Prepares EGL. We want a GLES 2.0 context and a surface that supports recording. 63 | */ 64 | private void eglSetup() { 65 | mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); 66 | if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) { 67 | throw new RuntimeException("unable to get EGL14 display"); 68 | } 69 | int[] version = new int[2]; 70 | if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) { 71 | mEGLDisplay = null; 72 | throw new RuntimeException("unable to initialize EGL14"); 73 | } 74 | 75 | // Configure EGL for recording and OpenGL ES 2.0. We want enough RGB bits 76 | // to be able to tell if the frame is reasonable. 77 | int[] attribList = { 78 | EGL14.EGL_RED_SIZE, 8, 79 | EGL14.EGL_GREEN_SIZE, 8, 80 | EGL14.EGL_BLUE_SIZE, 8, 81 | EGL14.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, 82 | EGL_RECORDABLE_ANDROID, 1, 83 | EGL14.EGL_NONE 84 | }; 85 | EGLConfig[] configs = new EGLConfig[1]; 86 | int[] numConfigs = new int[1]; 87 | if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length, 88 | numConfigs, 0)) { 89 | throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config"); 90 | } 91 | 92 | // Configure context for OpenGL ES 2.0. 93 | int[] attrib_list = { 94 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, 95 | EGL14.EGL_NONE 96 | }; 97 | mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT, 98 | attrib_list, 0); 99 | checkEglError("eglCreateContext"); 100 | if (mEGLContext == null) { 101 | throw new RuntimeException("null context"); 102 | } 103 | 104 | // Create a window surface, and attach it to the Surface we received. 105 | int[] surfaceAttribs = { 106 | EGL14.EGL_NONE 107 | }; 108 | mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface, 109 | surfaceAttribs, 0); 110 | checkEglError("eglCreateWindowSurface"); 111 | if (mEGLSurface == null) { 112 | throw new RuntimeException("surface was null"); 113 | } 114 | } 115 | 116 | /** 117 | * Discard all resources held by this class, notably the EGL context. Also releases the 118 | * Surface that was passed to our constructor. 119 | */ 120 | public void release() { 121 | if (EGL14.eglGetCurrentContext().equals(mEGLContext)) { 122 | // Clear the current context and surface to ensure they are discarded immediately. 
123 | EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, 124 | EGL14.EGL_NO_CONTEXT); 125 | } 126 | EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface); 127 | EGL14.eglDestroyContext(mEGLDisplay, mEGLContext); 128 | //EGL14.eglTerminate(mEGLDisplay); 129 | 130 | mSurface.release(); 131 | 132 | // null everything out so future attempts to use this object will cause an NPE 133 | mEGLDisplay = null; 134 | mEGLContext = null; 135 | mEGLSurface = null; 136 | 137 | mSurface = null; 138 | } 139 | 140 | /** 141 | * Makes our EGL context and surface current. 142 | */ 143 | public void makeCurrent() { 144 | if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) { 145 | throw new RuntimeException("eglMakeCurrent failed"); 146 | } 147 | } 148 | 149 | public void releaseEGLContext() { 150 | if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) { 151 | throw new RuntimeException("eglMakeCurrent failed"); 152 | } 153 | } 154 | 155 | /** 156 | * Calls eglSwapBuffers. Use this to "publish" the current frame. 157 | */ 158 | public boolean swapBuffers() { 159 | return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface); 160 | } 161 | 162 | /** 163 | * Returns the Surface that the MediaCodec receives buffers from. 164 | */ 165 | public Surface getSurface() { 166 | return mSurface; 167 | } 168 | 169 | /** 170 | * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds. 171 | */ 172 | public void setPresentationTime(long nsecs) { 173 | EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs); 174 | } 175 | 176 | /** 177 | * Checks for EGL errors. 178 | */ 179 | private void checkEglError(String msg) { 180 | boolean failed = false; 181 | int error; 182 | while ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) { 183 | Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error)); 184 | failed = true; 185 | } 186 | if (failed) { 187 | throw new RuntimeException("EGL error encountered (see log)"); 188 | } 189 | } 190 | } 191 | -------------------------------------------------------------------------------- /src/com/example/decodeencodetest/MainActivity.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2016 Martin Storsjo 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package com.example.decodeencodetest; 18 | 19 | import android.os.Bundle; 20 | import android.app.Activity; 21 | 22 | public class MainActivity extends Activity { 23 | @Override 24 | protected void onCreate(Bundle savedInstanceState) { 25 | super.onCreate(savedInstanceState); 26 | setContentView(R.layout.activity_main); 27 | new Thread() { 28 | public void run() { 29 | ExtractDecodeEditEncodeMuxTest test = new ExtractDecodeEditEncodeMuxTest(); 30 | test.setContext(MainActivity.this); 31 | try { 32 | test.testExtractDecodeEditEncodeMuxAudioVideo(); 33 | } catch (Throwable t) { 34 | t.printStackTrace(); 35 | } 36 | } 37 | }.start(); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/com/example/decodeencodetest/OutputSurface.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (C) 2013 The Android Open Source Project 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.example.decodeencodetest; 18 | 19 | import android.graphics.SurfaceTexture; 20 | import android.opengl.EGL14; 21 | import android.opengl.GLES20; 22 | import android.opengl.GLES11Ext; 23 | import android.opengl.GLSurfaceView; 24 | import android.opengl.Matrix; 25 | import android.util.Log; 26 | import android.view.Surface; 27 | 28 | import java.nio.ByteBuffer; 29 | 30 | import javax.microedition.khronos.egl.EGL10; 31 | import javax.microedition.khronos.egl.EGLConfig; 32 | import javax.microedition.khronos.egl.EGLContext; 33 | import javax.microedition.khronos.egl.EGLDisplay; 34 | import javax.microedition.khronos.egl.EGLSurface; 35 | import javax.microedition.khronos.opengles.GL; 36 | import javax.microedition.khronos.opengles.GL10; 37 | 38 | 39 | 40 | /** 41 | * Holds state associated with a Surface used for MediaCodec decoder output. 42 | *
<p>
43 | * The (width,height) constructor for this class will prepare GL, create a SurfaceTexture, 44 | * and then create a Surface for that SurfaceTexture. The Surface can be passed to 45 | * MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the 46 | * texture with updateTexImage, then render the texture with GL to a pbuffer. 47 | *
<p>
48 | * The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer. 49 | * Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives 50 | * we just draw it on whatever surface is current. 51 | *
<p>
52 | * By default, the Surface will be using a BufferQueue in asynchronous mode, so we 53 | * can potentially drop frames. 54 | */ 55 | class OutputSurface implements SurfaceTexture.OnFrameAvailableListener { 56 | private static final String TAG = "OutputSurface"; 57 | private static final boolean VERBOSE = false; 58 | 59 | private static final int EGL_OPENGL_ES2_BIT = 4; 60 | 61 | private EGL10 mEGL; 62 | private EGLDisplay mEGLDisplay; 63 | private EGLContext mEGLContext; 64 | private EGLSurface mEGLSurface; 65 | 66 | private SurfaceTexture mSurfaceTexture; 67 | private Surface mSurface; 68 | 69 | private Object mFrameSyncObject = new Object(); // guards mFrameAvailable 70 | private boolean mFrameAvailable; 71 | 72 | private TextureRender mTextureRender; 73 | 74 | /** 75 | * Creates an OutputSurface backed by a pbuffer with the specifed dimensions. The new 76 | * EGL context and surface will be made current. Creates a Surface that can be passed 77 | * to MediaCodec.configure(). 78 | */ 79 | public OutputSurface(int width, int height) { 80 | if (width <= 0 || height <= 0) { 81 | throw new IllegalArgumentException(); 82 | } 83 | 84 | eglSetup(width, height); 85 | makeCurrent(); 86 | 87 | setup(); 88 | } 89 | 90 | /** 91 | * Creates an OutputSurface using the current EGL context. Creates a Surface that can be 92 | * passed to MediaCodec.configure(). 93 | */ 94 | public OutputSurface() { 95 | setup(); 96 | } 97 | 98 | /** 99 | * Creates instances of TextureRender and SurfaceTexture, and a Surface associated 100 | * with the SurfaceTexture. 101 | */ 102 | private void setup() { 103 | mTextureRender = new TextureRender(); 104 | mTextureRender.surfaceCreated(); 105 | 106 | // Even if we don't access the SurfaceTexture after the constructor returns, we 107 | // still need to keep a reference to it. The Surface doesn't retain a reference 108 | // at the Java level, so if we don't either then the object can get GCed, which 109 | // causes the native finalizer to run. 110 | if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId()); 111 | mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId()); 112 | 113 | // This doesn't work if OutputSurface is created on the thread that CTS started for 114 | // these test cases. 115 | // 116 | // The CTS-created thread has a Looper, and the SurfaceTexture constructor will 117 | // create a Handler that uses it. The "frame available" message is delivered 118 | // there, but since we're not a Looper-based thread we'll never see it. For 119 | // this to do anything useful, OutputSurface must be created on a thread without 120 | // a Looper, so that SurfaceTexture uses the main application Looper instead. 121 | // 122 | // Java language note: passing "this" out of a constructor is generally unwise, 123 | // but we should be able to get away with it here. 124 | mSurfaceTexture.setOnFrameAvailableListener(this); 125 | 126 | mSurface = new Surface(mSurfaceTexture); 127 | } 128 | 129 | /** 130 | * Prepares EGL. We want a GLES 2.0 context and a surface that supports pbuffer. 131 | */ 132 | private void eglSetup(int width, int height) { 133 | mEGL = (EGL10)EGLContext.getEGL(); 134 | mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); 135 | if (!mEGL.eglInitialize(mEGLDisplay, null)) { 136 | throw new RuntimeException("unable to initialize EGL10"); 137 | } 138 | 139 | // Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits 140 | // to be able to tell if the frame is reasonable. 
141 |         int[] attribList = {
142 |                 EGL10.EGL_RED_SIZE, 8,
143 |                 EGL10.EGL_GREEN_SIZE, 8,
144 |                 EGL10.EGL_BLUE_SIZE, 8,
145 |                 EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
146 |                 EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
147 |                 EGL10.EGL_NONE
148 |         };
149 |         EGLConfig[] configs = new EGLConfig[1];
150 |         int[] numConfigs = new int[1];
151 |         if (!mEGL.eglChooseConfig(mEGLDisplay, attribList, configs, 1, numConfigs)) {
152 |             throw new RuntimeException("unable to find RGB888+pbuffer EGL config");
153 |         }
154 | 
155 |         // Configure context for OpenGL ES 2.0.
156 |         int[] attrib_list = {
157 |                 EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
158 |                 EGL10.EGL_NONE
159 |         };
160 |         mEGLContext = mEGL.eglCreateContext(mEGLDisplay, configs[0], EGL10.EGL_NO_CONTEXT,
161 |                 attrib_list);
162 |         checkEglError("eglCreateContext");
163 |         if (mEGLContext == null || mEGLContext == EGL10.EGL_NO_CONTEXT) {
164 |             throw new RuntimeException("failed to create EGL context");
165 |         }
166 | 
167 |         // Create a pbuffer surface.  By using this for output, we can use glReadPixels
168 |         // to test values in the output.
169 |         int[] surfaceAttribs = {
170 |                 EGL10.EGL_WIDTH, width,
171 |                 EGL10.EGL_HEIGHT, height,
172 |                 EGL10.EGL_NONE
173 |         };
174 |         mEGLSurface = mEGL.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs);
175 |         checkEglError("eglCreatePbufferSurface");
176 |         if (mEGLSurface == null || mEGLSurface == EGL10.EGL_NO_SURFACE) {
177 |             throw new RuntimeException("failed to create pbuffer surface");
178 |         }
179 |     }
180 | 
181 |     /**
182 |      * Discard all resources held by this class, notably the EGL context.
183 |      */
184 |     public void release() {
185 |         if (mEGL != null) {
186 |             if (mEGL.eglGetCurrentContext().equals(mEGLContext)) {
187 |                 // Clear the current context and surface to ensure they are discarded immediately.
188 |                 mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE,
189 |                         EGL10.EGL_NO_CONTEXT);
190 |             }
191 |             mEGL.eglDestroySurface(mEGLDisplay, mEGLSurface);
192 |             mEGL.eglDestroyContext(mEGLDisplay, mEGLContext);
193 |             //mEGL.eglTerminate(mEGLDisplay);
194 |         }
195 | 
196 |         mSurface.release();
197 | 
198 |         // this causes a bunch of warnings that appear harmless but might confuse someone:
199 |         //  W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
200 |         //mSurfaceTexture.release();
201 | 
202 |         // null everything out so future attempts to use this object will cause an NPE
203 |         mEGLDisplay = null;
204 |         mEGLContext = null;
205 |         mEGLSurface = null;
206 |         mEGL = null;
207 | 
208 |         mTextureRender = null;
209 |         mSurface = null;
210 |         mSurfaceTexture = null;
211 |     }
212 | 
213 |     /**
214 |      * Makes our EGL context and surface current.
215 |      */
216 |     public void makeCurrent() {
217 |         if (mEGL == null) {
218 |             throw new RuntimeException("not configured for makeCurrent");
219 |         }
220 |         checkEglError("before makeCurrent");
221 |         if (!mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
222 |             throw new RuntimeException("eglMakeCurrent failed");
223 |         }
224 |     }
225 | 
226 |     /**
227 |      * Returns the Surface that we draw onto.
228 |      */
229 |     public Surface getSurface() {
230 |         return mSurface;
231 |     }
232 | 
233 |     /**
234 |      * Replaces the fragment shader.
235 |      */
236 |     public void changeFragmentShader(String fragmentShader) {
237 |         mTextureRender.changeFragmentShader(fragmentShader);
238 |     }
239 | 
240 |     /**
241 |      * Latches the next buffer into the texture.  Must be called from the thread that created
242 |      * the OutputSurface object, after the onFrameAvailable callback has signaled that new
243 |      * data is available.
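     *
     * The handoff is a guarded wait: onFrameAvailable() sets mFrameAvailable and calls
     * notifyAll() under mFrameSyncObject, possibly from a different thread, while this
     * method loops on the flag under the same lock, so spurious wakeups are harmless.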
244 |      */
245 |     public void awaitNewImage() {
246 |         final int TIMEOUT_MS = 500;
247 |         final long deadline = System.currentTimeMillis() + TIMEOUT_MS;
248 |         synchronized (mFrameSyncObject) {
249 |             while (!mFrameAvailable) {
250 |                 long remaining = deadline - System.currentTimeMillis();
251 |                 if (remaining <= 0) {
252 |                     throw new RuntimeException("Surface frame wait timed out");
253 |                 }
254 |                 try {
255 |                     // Wait for onFrameAvailable() to signal us.  Use a timeout so a
256 |                     // missing frame can't stall the test; spurious wakeups just loop.
257 |                     mFrameSyncObject.wait(remaining);
258 |                 } catch (InterruptedException ie) {
259 |                     // shouldn't happen
260 |                     throw new RuntimeException(ie);
261 |                 }
262 |             }
263 |             mFrameAvailable = false;
264 |         }
265 | 
266 |         // Latch the data.
267 |         mTextureRender.checkGlError("before updateTexImage");
268 |         mSurfaceTexture.updateTexImage();
269 |     }
270 | 
271 |     /**
272 |      * Draws the data from SurfaceTexture onto the current EGL surface.
273 |      */
274 |     public void drawImage() {
275 |         mTextureRender.drawFrame(mSurfaceTexture);
276 |     }
277 | 
278 |     @Override
279 |     public void onFrameAvailable(SurfaceTexture st) {
280 |         if (VERBOSE) Log.d(TAG, "new frame available");
281 |         synchronized (mFrameSyncObject) {
282 |             if (mFrameAvailable) {
283 |                 throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
284 |             }
285 |             mFrameAvailable = true;
286 |             mFrameSyncObject.notifyAll();
287 |         }
288 |     }
289 | 
290 |     /**
291 |      * Checks for EGL errors.
292 |      */
293 |     private void checkEglError(String msg) {
294 |         boolean failed = false;
295 |         int error;
296 |         while ((error = mEGL.eglGetError()) != EGL10.EGL_SUCCESS) {
297 |             Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
298 |             failed = true;
299 |         }
300 |         if (failed) {
301 |             throw new RuntimeException("EGL error encountered (see log)");
302 |         }
303 |     }
304 | }
305 | 
--------------------------------------------------------------------------------
/src/com/example/decodeencodetest/TextureRender.java:
--------------------------------------------------------------------------------
1 | /*
2 |  * Copyright (C) 2013 The Android Open Source Project
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  *      http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  */
16 | 
17 | package com.example.decodeencodetest;
18 | 
19 | import java.nio.ByteBuffer;
20 | import java.nio.ByteOrder;
21 | import java.nio.FloatBuffer;
22 | 
23 | import javax.microedition.khronos.egl.EGLConfig;
24 | import javax.microedition.khronos.opengles.GL10;
25 | 
26 | import android.graphics.SurfaceTexture;
27 | import android.opengl.GLES11Ext;
28 | import android.opengl.GLES20;
29 | import android.opengl.GLSurfaceView;
30 | import android.opengl.Matrix;
31 | import android.util.Log;
32 | 
33 | 
34 | /**
35 |  * Code for rendering a texture onto a surface using OpenGL ES 2.0.
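 * The source frame arrives via a SurfaceTexture bound as a GL_TEXTURE_EXTERNAL_OES
 * texture, which is why the fragment shader samples it through the
 * GL_OES_EGL_image_external extension.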
36 |  */
37 | class TextureRender {
38 |     private static final String TAG = "TextureRender";
39 | 
40 |     private static final int FLOAT_SIZE_BYTES = 4;
41 |     private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
42 |     private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
43 |     private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
44 |     private final float[] mTriangleVerticesData = {
45 |         // X, Y, Z, U, V
46 |         -1.0f, -1.0f, 0, 0.f, 0.f,
47 |          1.0f, -1.0f, 0, 1.f, 0.f,
48 |         -1.0f,  1.0f, 0, 0.f, 1.f,
49 |          1.0f,  1.0f, 0, 1.f, 1.f,
50 |     };
51 | 
52 |     private FloatBuffer mTriangleVertices;
53 | 
54 |     private static final String VERTEX_SHADER =
55 |             "uniform mat4 uMVPMatrix;\n" +
56 |             "uniform mat4 uSTMatrix;\n" +
57 |             "attribute vec4 aPosition;\n" +
58 |             "attribute vec4 aTextureCoord;\n" +
59 |             "varying vec2 vTextureCoord;\n" +
60 |             "void main() {\n" +
61 |             "  gl_Position = uMVPMatrix * aPosition;\n" +
62 |             "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
63 |             "}\n";
64 | 
65 |     private static final String FRAGMENT_SHADER =
66 |             "#extension GL_OES_EGL_image_external : require\n" +
67 |             "precision mediump float;\n" +      // highp here doesn't seem to matter
68 |             "varying vec2 vTextureCoord;\n" +
69 |             "uniform samplerExternalOES sTexture;\n" +
70 |             "void main() {\n" +
71 |             "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
72 |             "}\n";
73 | 
74 |     private float[] mMVPMatrix = new float[16];
75 |     private float[] mSTMatrix = new float[16];
76 | 
77 |     private int mProgram;
78 |     private int mTextureID = -12345;
79 |     private int muMVPMatrixHandle;
80 |     private int muSTMatrixHandle;
81 |     private int maPositionHandle;
82 |     private int maTextureHandle;
83 | 
84 |     public TextureRender() {
85 |         mTriangleVertices = ByteBuffer.allocateDirect(
86 |                 mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
87 |                 .order(ByteOrder.nativeOrder()).asFloatBuffer();
88 |         mTriangleVertices.put(mTriangleVerticesData).position(0);
89 | 
90 |         Matrix.setIdentityM(mSTMatrix, 0);
91 |     }
92 | 
93 |     public int getTextureId() {
94 |         return mTextureID;
95 |     }
96 | 
97 |     public void drawFrame(SurfaceTexture st) {
98 |         checkGlError("onDrawFrame start");
99 |         st.getTransformMatrix(mSTMatrix);
100 | 
101 |         GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
102 |         GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
103 | 
104 |         GLES20.glUseProgram(mProgram);
105 |         checkGlError("glUseProgram");
106 | 
107 |         GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
108 |         GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
109 | 
110 |         mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
111 |         GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
112 |                 TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
113 |         checkGlError("glVertexAttribPointer maPosition");
114 |         GLES20.glEnableVertexAttribArray(maPositionHandle);
115 |         checkGlError("glEnableVertexAttribArray maPositionHandle");
116 | 
117 |         mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
118 |         GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
119 |                 TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
120 |         checkGlError("glVertexAttribPointer maTextureHandle");
121 |         GLES20.glEnableVertexAttribArray(maTextureHandle);
122 |         checkGlError("glEnableVertexAttribArray maTextureHandle");
123 | 
124 |         Matrix.setIdentityM(mMVPMatrix, 0);
125 |         GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
126 |         GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
127 | 
128 |         GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
129 |         checkGlError("glDrawArrays");
130 |         GLES20.glFinish();
131 |     }
132 | 
133 |     /**
134 |      * Initializes GL state.  Call this after the EGL surface has been created and made current.
135 |      */
136 |     public void surfaceCreated() {
137 |         mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
138 |         if (mProgram == 0) {
139 |             throw new RuntimeException("failed creating program");
140 |         }
141 |         maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
142 |         checkGlError("glGetAttribLocation aPosition");
143 |         if (maPositionHandle == -1) {
144 |             throw new RuntimeException("Could not get attrib location for aPosition");
145 |         }
146 |         maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
147 |         checkGlError("glGetAttribLocation aTextureCoord");
148 |         if (maTextureHandle == -1) {
149 |             throw new RuntimeException("Could not get attrib location for aTextureCoord");
150 |         }
151 | 
152 |         muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
153 |         checkGlError("glGetUniformLocation uMVPMatrix");
154 |         if (muMVPMatrixHandle == -1) {
155 |             throw new RuntimeException("Could not get uniform location for uMVPMatrix");
156 |         }
157 | 
158 |         muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
159 |         checkGlError("glGetUniformLocation uSTMatrix");
160 |         if (muSTMatrixHandle == -1) {
161 |             throw new RuntimeException("Could not get uniform location for uSTMatrix");
162 |         }
163 | 
164 | 
165 |         int[] textures = new int[1];
166 |         GLES20.glGenTextures(1, textures, 0);
167 | 
168 |         mTextureID = textures[0];
169 |         GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
170 |         checkGlError("glBindTexture mTextureID");
171 | 
172 |         GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
173 |                 GLES20.GL_NEAREST);
174 |         GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
175 |                 GLES20.GL_LINEAR);
176 |         GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
177 |                 GLES20.GL_CLAMP_TO_EDGE);
178 |         GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
179 |                 GLES20.GL_CLAMP_TO_EDGE);
180 |         checkGlError("glTexParameter");
181 |     }
182 | 
183 |     /**
184 |      * Replaces the fragment shader.
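     * Re-queries the cached attribute/uniform handles afterwards, since locations are
     * assigned per program at link time and may differ in the newly linked program.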
185 |      */
186 |     public void changeFragmentShader(String fragmentShader) {
187 |         GLES20.glDeleteProgram(mProgram);
188 |         mProgram = createProgram(VERTEX_SHADER, fragmentShader);
189 |         if (mProgram == 0) {
190 |             throw new RuntimeException("failed creating program");
191 |         }
192 |         // Attribute/uniform locations belong to the program and are assigned at link
193 |         // time, so refresh the cached handles for the newly linked program.
194 |         maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
195 |         maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
196 |         muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
197 |         muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
198 |     }
199 | 
200 |     private int loadShader(int shaderType, String source) {
201 |         int shader = GLES20.glCreateShader(shaderType);
202 |         checkGlError("glCreateShader type=" + shaderType);
203 |         GLES20.glShaderSource(shader, source);
204 |         GLES20.glCompileShader(shader);
205 |         int[] compiled = new int[1];
206 |         GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
207 |         if (compiled[0] == 0) {
208 |             Log.e(TAG, "Could not compile shader " + shaderType + ":");
209 |             Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
210 |             GLES20.glDeleteShader(shader);
211 |             shader = 0;
212 |         }
213 |         return shader;
214 |     }
215 | 
216 |     private int createProgram(String vertexSource, String fragmentSource) {
217 |         int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
218 |         if (vertexShader == 0) {
219 |             return 0;
220 |         }
221 |         int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
222 |         if (pixelShader == 0) {
223 |             return 0;
224 |         }
225 | 
226 |         int program = GLES20.glCreateProgram();
227 |         checkGlError("glCreateProgram");
228 |         if (program == 0) {
229 |             Log.e(TAG, "Could not create program");
230 |             return 0;
231 |         }
232 |         GLES20.glAttachShader(program, vertexShader);
233 |         checkGlError("glAttachShader");
234 |         GLES20.glAttachShader(program, pixelShader);
235 |         checkGlError("glAttachShader");
236 |         GLES20.glLinkProgram(program);
237 |         int[] linkStatus = new int[1];
238 |         GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
239 |         if (linkStatus[0] != GLES20.GL_TRUE) {
240 |             Log.e(TAG, "Could not link program: ");
241 |             Log.e(TAG, GLES20.glGetProgramInfoLog(program));
242 |             GLES20.glDeleteProgram(program);
243 |             program = 0;
244 |         }
245 |         return program;
246 |     }
247 | 
248 |     public void checkGlError(String op) {
249 |         int error;
250 |         while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
251 |             Log.e(TAG, op + ": glError " + error);
252 |             throw new RuntimeException(op + ": glError " + error);
253 |         }
254 |     }
255 | }
256 | 
--------------------------------------------------------------------------------
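
For orientation, here is a minimal sketch (not part of the repository) of how OutputSurface
and TextureRender are typically driven from the decode side of a decode-edit-encode loop,
roughly as the feed loop in ExtractDecodeEditEncodeMuxTest.java does. It assumes a decoder
configured with outputSurface.getSurface() and an InputSurface wrapping the encoder's input
surface with makeCurrent()/setPresentationTime()/swapBuffers() as in the companion
InputSurface.java; the method name handleDecodedFrame is illustrative only.

    // Sketch only: hand one decoded frame from the decoder to the GL edit/encode path.
    static void handleDecodedFrame(MediaCodec decoder, int outputIndex,
            MediaCodec.BufferInfo info, OutputSurface outputSurface,
            InputSurface inputSurface) {
        boolean render = info.size != 0;
        // render == true queues the buffer to the Surface backing our SurfaceTexture.
        decoder.releaseOutputBuffer(outputIndex, render);
        if (render) {
            outputSurface.awaitNewImage();  // blocks until onFrameAvailable() fires
            outputSurface.drawImage();      // draws (and optionally edits) the frame
            // setPresentationTime() takes nanoseconds; BufferInfo carries microseconds.
            inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
            inputSurface.swapBuffers();     // submits the frame to the encoder
        }
    }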