├── .gitignore ├── README.md ├── Sync-One2-Test-1080p-24-H_264_V.mp4 ├── app ├── .gitignore ├── build.gradle ├── proguard-rules.pro └── src │ ├── androidTest │ └── java │ │ └── com │ │ └── example │ │ └── zhanghui │ │ └── avplayer │ │ └── ApplicationTest.java │ ├── main │ ├── AndroidManifest.xml │ ├── java │ │ └── com │ │ │ └── example │ │ │ └── zhanghui │ │ │ └── avplayer │ │ │ ├── CodecState.java │ │ │ ├── MainActivity.java │ │ │ ├── MediaCodecPlayer.java │ │ │ ├── MediaTimeProvider.java │ │ │ ├── NonBlockingAudioTrack.java │ │ │ ├── PlayerActivity.java │ │ │ └── VideoFrameReleaseTimeHelper.java │ └── res │ │ ├── layout │ │ ├── activity_main.xml │ │ └── activity_player.xml │ │ ├── mipmap-hdpi │ │ └── ic_launcher.png │ │ ├── mipmap-mdpi │ │ └── ic_launcher.png │ │ ├── mipmap-xhdpi │ │ └── ic_launcher.png │ │ ├── mipmap-xxhdpi │ │ └── ic_launcher.png │ │ ├── mipmap-xxxhdpi │ │ └── ic_launcher.png │ │ ├── values-w820dp │ │ └── dimens.xml │ │ └── values │ │ ├── colors.xml │ │ ├── dimens.xml │ │ ├── strings.xml │ │ └── styles.xml │ └── test │ └── java │ └── com │ └── example │ └── zhanghui │ └── avplayer │ └── ExampleUnitTest.java ├── build.gradle ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat └── settings.gradle /.gitignore: -------------------------------------------------------------------------------- 1 | *.iml 2 | .gradle 3 | /local.properties 4 | /.idea/workspace.xml 5 | /.idea/libraries 6 | .DS_Store 7 | /build 8 | /captures 9 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # simplest_android_avplayer 2 | This sample shows how to use MediaCodec and AudioTrack to build an Android player, with A/V sync optimization. 3 | 4 | The simplest Android MediaCodec + AudioTrack demo, with audio/video synchronization optimization added. 5 | 6 | Author: 7 | 8 | zhang hui 9 | 10 | LeEco BSP Multimedia / Communication University of China 11 | 12 | You are welcome to follow my WeChat official account 灰度五十, where I share all kinds of audio/video and mobile development knowledge, as well as referral information for well-known companies. 13 | 14 | ![official account image](https://img-blog.csdnimg.cn/20181222184847599.jpg) 15 | -------------------------------------------------------------------------------- /Sync-One2-Test-1080p-24-H_264_V.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zhanghuicuc/simplest_android_avplayer/06b8d5bfb956ad6fd3c72db66fa97afcfa62d0be/Sync-One2-Test-1080p-24-H_264_V.mp4 -------------------------------------------------------------------------------- /app/.gitignore: -------------------------------------------------------------------------------- 1 | /build 2 | -------------------------------------------------------------------------------- /app/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'com.android.application' 2 | 3 | android { 4 | compileSdkVersion 25 5 | buildToolsVersion "26.0.0" 6 | 7 | defaultConfig { 8 | applicationId "com.example.zhanghui.avplayer" 9 | minSdkVersion 23 10 | targetSdkVersion 25 11 | versionCode 1 12 | versionName "1.0" 13 | } 14 | buildTypes { 15 | release { 16 | minifyEnabled false 17 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' 18 | } 19 | } 20 | } 21 | 22 | dependencies { 23 | compile fileTree(dir: 'libs', include: ['*.jar']) 24 | testCompile 'junit:junit:4.12' 25 | compile 'com.android.support:appcompat-v7:25.3.1' 26 | } 27 | 
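Before the remaining source listings, here is the end-to-end call sequence the classes below are built around — a minimal sketch condensed from PlayerActivity.initializePlayer() further down; it assumes a valid SurfaceHolder and Context, and the hard-coded path simply mirrors DEFAULT_FILE_URL from MainActivity:

    // Sketch only: assumes surfaceHolder/context are valid; the path is illustrative.
    Uri uri = Uri.parse("/sdcard/Sync-One2-Test-1080p-24-H_264_V.mp4");
    MediaCodecPlayer player = new MediaCodecPlayer(surfaceHolder, context);
    player.setAudioDataSource(uri, null);   // the same file supplies the audio track...
    player.setVideoDataSource(uri, null);   // ...and the video track
    player.start();                         // state machine: IDLE -> PREPARING
    try {
        player.prepare();                   // creates extractors, selects tracks, configures decoders
    } catch (IOException e) {
        e.printStackTrace();
    }
    player.startThread();                   // enables vsync sampling and runs the 5 ms decode loop
    // ...later, player.reset() releases the codecs, extractors and the AudioTrack.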
-------------------------------------------------------------------------------- /app/proguard-rules.pro: -------------------------------------------------------------------------------- 1 | # Add project specific ProGuard rules here. 2 | # By default, the flags in this file are appended to flags specified 3 | # in D:\Android\Android_sdk/tools/proguard/proguard-android.txt 4 | # You can edit the include path and order by changing the proguardFiles 5 | # directive in build.gradle. 6 | # 7 | # For more details, see 8 | # http://developer.android.com/guide/developing/tools/proguard.html 9 | 10 | # Add any project specific keep options here: 11 | 12 | # If your project uses WebView with JS, uncomment the following 13 | # and specify the fully qualified class name to the JavaScript interface 14 | # class: 15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview { 16 | # public *; 17 | #} 18 | -------------------------------------------------------------------------------- /app/src/androidTest/java/com/example/zhanghui/avplayer/ApplicationTest.java: -------------------------------------------------------------------------------- 1 | package com.example.zhanghui.avplayer; 2 | 3 | import android.app.Application; 4 | import android.test.ApplicationTestCase; 5 | 6 | /** 7 | * Testing Fundamentals 8 | */ 9 | public class ApplicationTest extends ApplicationTestCase<Application> { 10 | public ApplicationTest() { 11 | super(Application.class); 12 | } 13 | } -------------------------------------------------------------------------------- /app/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="utf-8"?> 2 | <!-- NOTE: the original markup of this file was lost during extraction; the tags below are a minimal reconstruction consistent with the sources in this repository. --> 3 | <manifest xmlns:android="http://schemas.android.com/apk/res/android" 4 |     package="com.example.zhanghui.avplayer"> 5 |     <application 6 |         android:allowBackup="true" 7 |         android:icon="@mipmap/ic_launcher" 8 |         android:label="@string/app_name" 9 |         android:theme="@style/AppTheme"> 10 |         <activity android:name=".MainActivity"> 11 |             <intent-filter> 12 |                 <action android:name="android.intent.action.MAIN" /> 13 |                 <category android:name="android.intent.category.LAUNCHER" /> 14 |             </intent-filter> 15 |         </activity> 16 |         <activity android:name=".PlayerActivity" /> 17 |     </application> 18 | </manifest> -------------------------------------------------------------------------------- /app/src/main/java/com/example/zhanghui/avplayer/CodecState.java: -------------------------------------------------------------------------------- 1 | package com.example.zhanghui.avplayer; 2 | 3 | import android.media.AudioTrack; 4 | import android.media.MediaCodec; 5 | import android.media.MediaExtractor; 6 | import android.media.MediaFormat; 7 | import android.util.Log; 8 | 9 | import java.nio.ByteBuffer; 10 | import java.util.LinkedList; 11 | 12 | /** 13 | * Class for directly managing both audio and video playback by 14 | * using {@link MediaCodec} and {@link AudioTrack}. 15 | */ 16 | public class CodecState { 17 | private static final String TAG = CodecState.class.getSimpleName(); 18 | 19 | private boolean mSawInputEOS, mSawOutputEOS; 20 | private boolean mLimitQueueDepth; 21 | private boolean mIsAudio; 22 | private ByteBuffer[] mCodecInputBuffers; 23 | private ByteBuffer[] mCodecOutputBuffers; 24 | private int mTrackIndex; 25 | private LinkedList<Integer> mAvailableInputBufferIndices; 26 | private LinkedList<Integer> mAvailableOutputBufferIndices; 27 | private LinkedList<MediaCodec.BufferInfo> mAvailableOutputBufferInfos; 28 | private long mPresentationTimeUs; 29 | private long mSampleBaseTimeUs; 30 | private MediaCodec mCodec; 31 | private MediaTimeProvider mMediaTimeProvider; 32 | private MediaExtractor mExtractor; 33 | private MediaFormat mFormat; 34 | private MediaFormat mOutputFormat; 35 | private NonBlockingAudioTrack mAudioTrack; 36 | 37 | /** 38 | * Manages audio and video playback using MediaCodec and AudioTrack. 
39 | */ 40 | public CodecState( 41 | MediaTimeProvider mediaTimeProvider, 42 | MediaExtractor extractor, 43 | int trackIndex, 44 | MediaFormat format, 45 | MediaCodec codec, 46 | boolean limitQueueDepth) { 47 | mMediaTimeProvider = mediaTimeProvider; 48 | mExtractor = extractor; 49 | mTrackIndex = trackIndex; 50 | mFormat = format; 51 | mSawInputEOS = mSawOutputEOS = false; 52 | mLimitQueueDepth = limitQueueDepth; 53 | mSampleBaseTimeUs = -1; 54 | 55 | mCodec = codec; 56 | 57 | mAvailableInputBufferIndices = new LinkedList<Integer>(); 58 | mAvailableOutputBufferIndices = new LinkedList<Integer>(); 59 | mAvailableOutputBufferInfos = new LinkedList<MediaCodec.BufferInfo>(); 60 | 61 | mPresentationTimeUs = 0; 62 | 63 | String mime = mFormat.getString(MediaFormat.KEY_MIME); 64 | Log.d(TAG, "CodecState::CodecState " + mime); 65 | mIsAudio = mime.startsWith("audio/"); 66 | } 67 | 68 | public void release() { 69 | mCodec.stop(); 70 | mCodecInputBuffers = null; 71 | mCodecOutputBuffers = null; 72 | mOutputFormat = null; 73 | 74 | mAvailableInputBufferIndices.clear(); 75 | mAvailableOutputBufferIndices.clear(); 76 | mAvailableOutputBufferInfos.clear(); 77 | 78 | mAvailableInputBufferIndices = null; 79 | mAvailableOutputBufferIndices = null; 80 | mAvailableOutputBufferInfos = null; 81 | 82 | mCodec.release(); 83 | mCodec = null; 84 | 85 | if (mAudioTrack != null) { 86 | mAudioTrack.release(); 87 | mAudioTrack = null; 88 | } 89 | } 90 | 91 | public void start() { 92 | mCodec.start(); 93 | mCodecInputBuffers = mCodec.getInputBuffers(); 94 | mCodecOutputBuffers = mCodec.getOutputBuffers(); 95 | 96 | if (mAudioTrack != null) { 97 | mAudioTrack.play(); 98 | } 99 | } 100 | 101 | public void pause() { 102 | if (mAudioTrack != null) { 103 | mAudioTrack.pause(); 104 | } 105 | } 106 | 107 | public long getCurrentPositionUs() { 108 | return mPresentationTimeUs; 109 | } 110 | 111 | public void flush() { 112 | mAvailableInputBufferIndices.clear(); 113 | mAvailableOutputBufferIndices.clear(); 114 | mAvailableOutputBufferInfos.clear(); 115 | 116 | mSawInputEOS = false; 117 | mSawOutputEOS = false; 118 | 119 | if (mAudioTrack != null 120 | && mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) { 121 | mAudioTrack.flush(); 122 | } 123 | 124 | mCodec.flush(); 125 | } 126 | 127 | public boolean isEnded() { 128 | return mSawInputEOS && mSawOutputEOS; 129 | } 130 | 131 | /** 132 | * doSomeWork() is the worker function that does all buffer handling and decoding work. 133 | * It first reads data from {@link MediaExtractor} and pushes it into {@link MediaCodec}; 134 | * it then dequeues output buffers from {@link MediaCodec}, consumes them, and recycles the 135 | * buffer indices into its own queues before the next round of reading from {@link MediaExtractor}. 
136 | */ 137 | public void doSomeWork() { 138 | int indexInput = mCodec.dequeueInputBuffer(0 /* timeoutUs */); 139 | 140 | if (indexInput != MediaCodec.INFO_TRY_AGAIN_LATER) { 141 | mAvailableInputBufferIndices.add(indexInput); 142 | } 143 | 144 | while (feedInputBuffer()) { 145 | } 146 | 147 | MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); 148 | int indexOutput = mCodec.dequeueOutputBuffer(info, 0 /* timeoutUs */); 149 | 150 | if (indexOutput == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 151 | mOutputFormat = mCodec.getOutputFormat(); 152 | onOutputFormatChanged(); 153 | } else if (indexOutput == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { 154 | mCodecOutputBuffers = mCodec.getOutputBuffers(); 155 | } else if (indexOutput != MediaCodec.INFO_TRY_AGAIN_LATER) { 156 | mAvailableOutputBufferIndices.add(indexOutput); 157 | mAvailableOutputBufferInfos.add(info); 158 | } 159 | 160 | while (drainOutputBuffer()) { 161 | } 162 | } 163 | 164 | /** Returns true if more input data could be fed. */ 165 | private boolean feedInputBuffer() throws MediaCodec.CryptoException, IllegalStateException { 166 | if (mSawInputEOS || mAvailableInputBufferIndices.isEmpty()) { 167 | return false; 168 | } 169 | 170 | // stalls read if audio queue is larger than 2MB full so we will not occupy too much heap 171 | if (mLimitQueueDepth && mAudioTrack != null && 172 | mAudioTrack.getNumBytesQueued() > 2 * 1024 * 1024) { 173 | return false; 174 | } 175 | 176 | int index = mAvailableInputBufferIndices.peekFirst().intValue(); 177 | 178 | ByteBuffer codecData = mCodecInputBuffers[index]; 179 | 180 | int trackIndex = mExtractor.getSampleTrackIndex(); 181 | 182 | if (trackIndex == mTrackIndex) { 183 | int sampleSize = 184 | mExtractor.readSampleData(codecData, 0 /* offset */); 185 | 186 | long sampleTime = mExtractor.getSampleTime(); 187 | 188 | int sampleFlags = mExtractor.getSampleFlags(); 189 | 190 | if (sampleSize <= 0) { 191 | Log.d(TAG, "sampleSize: " + sampleSize + " trackIndex:" + trackIndex + 192 | " sampleTime:" + sampleTime + " sampleFlags:" + sampleFlags); 193 | mSawInputEOS = true; 194 | return false; 195 | } 196 | 197 | if (!mIsAudio) { 198 | if (mSampleBaseTimeUs == -1) { 199 | mSampleBaseTimeUs = sampleTime; 200 | } 201 | sampleTime -= mSampleBaseTimeUs; 202 | // this is just used for getCurrentPosition, not used for avsync 203 | mPresentationTimeUs = sampleTime; 204 | } 205 | 206 | if ((sampleFlags & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0) { 207 | MediaCodec.CryptoInfo info = new MediaCodec.CryptoInfo(); 208 | mExtractor.getSampleCryptoInfo(info); 209 | 210 | mCodec.queueSecureInputBuffer( 211 | index, 0 /* offset */, info, sampleTime, 0 /* flags */); 212 | } else { 213 | mCodec.queueInputBuffer( 214 | index, 0 /* offset */, sampleSize, sampleTime, 0 /* flags */); 215 | } 216 | 217 | mAvailableInputBufferIndices.removeFirst(); 218 | mExtractor.advance(); 219 | 220 | return true; 221 | } else if (trackIndex < 0) { 222 | Log.d(TAG, "saw input EOS on track " + mTrackIndex); 223 | 224 | mSawInputEOS = true; 225 | 226 | mCodec.queueInputBuffer( 227 | index, 0 /* offset */, 0 /* sampleSize */, 228 | 0 /* sampleTime */, MediaCodec.BUFFER_FLAG_END_OF_STREAM); 229 | 230 | mAvailableInputBufferIndices.removeFirst(); 231 | } 232 | 233 | return false; 234 | } 235 | 236 | private void onOutputFormatChanged() { 237 | String mime = mOutputFormat.getString(MediaFormat.KEY_MIME); 238 | Log.d(TAG, "CodecState::onOutputFormatChanged " + mime); 239 | 240 | mIsAudio = false; 241 | if (mime.startsWith("audio/")) { 242 | 
mIsAudio = true; 243 | int sampleRate = 244 | mOutputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE); 245 | 246 | int channelCount = 247 | mOutputFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT); 248 | 249 | Log.d(TAG, "CodecState::onOutputFormatChanged Audio" + 250 | " sampleRate:" + sampleRate + " channels:" + channelCount); 251 | // We do a sanity check here after we receive data from MediaExtractor and before 252 | // we pass it down to AudioTrack. If MediaExtractor works properly, this 253 | // sanity check is not necessary; however, in our tests we found a few cases 254 | // where MediaExtractor returned channelCount=0 and sampleRate=0. 255 | if (channelCount < 1 || channelCount > 8 || 256 | sampleRate < 8000 || sampleRate > 128000) { 257 | return; 258 | } 259 | mAudioTrack = new NonBlockingAudioTrack(sampleRate, channelCount); 260 | mAudioTrack.play(); 261 | } 262 | 263 | if (mime.startsWith("video/")) { 264 | int width = mOutputFormat.getInteger(MediaFormat.KEY_WIDTH); 265 | int height = mOutputFormat.getInteger(MediaFormat.KEY_HEIGHT); 266 | Log.d(TAG, "CodecState::onOutputFormatChanged Video" + 267 | " width:" + width + " height:" + height); 268 | } 269 | } 270 | 271 | /** Returns true if more output data could be drained. */ 272 | // Audio and video belong to different CodecState instances: one has an AudioTrack, the other does not, 273 | // so there exist two mPresentationTimeUs values, one for audio and one for video. 274 | // Audio and video draining, however, run on the same thread. 275 | private boolean drainOutputBuffer() { 276 | if (mSawOutputEOS || mAvailableOutputBufferIndices.isEmpty()) { 277 | return false; 278 | } 279 | 280 | int index = mAvailableOutputBufferIndices.peekFirst().intValue(); 281 | MediaCodec.BufferInfo info = mAvailableOutputBufferInfos.peekFirst(); 282 | 283 | if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 284 | Log.d(TAG, "saw output EOS on track " + mTrackIndex); 285 | 286 | mSawOutputEOS = true; 287 | 288 | return false; 289 | } 290 | 291 | if (mAudioTrack != null) { 292 | ByteBuffer buffer = mCodecOutputBuffers[index]; 293 | buffer.clear(); 294 | ByteBuffer audioBuffer = ByteBuffer.allocate(buffer.remaining()); 295 | audioBuffer.put(buffer); 296 | audioBuffer.clear(); 297 | 298 | mAudioTrack.write(audioBuffer, info.size, info.presentationTimeUs*1000); 299 | 300 | mCodec.releaseOutputBuffer(index, false /* render */); 301 | 302 | mPresentationTimeUs = info.presentationTimeUs; 303 | 304 | mAvailableOutputBufferIndices.removeFirst(); 305 | mAvailableOutputBufferInfos.removeFirst(); 306 | return true; 307 | } else { 308 | // video 309 | boolean render; 310 | long twiceVsyncDurationUs = 2 * mMediaTimeProvider.getVsyncDurationNs()/1000; 311 | 312 | long realTimeUs = 313 | mMediaTimeProvider.getRealTimeUsForMediaTime(info.presentationTimeUs); // mapped onto the nowUs timeline 314 | long nowUs = mMediaTimeProvider.getNowUs(); // audio playback time 315 | //String streamType = mAudioTrack == null ? 
"video:":"audio:"; 316 | //Log.d("avsync", streamType + " presentationUs is " + info.presentationTimeUs + ",realTimeUs is " + realTimeUs + ",nowUs is " + nowUs); 317 | long lateUs = System.nanoTime()/1000 - realTimeUs; 318 | 319 | if (lateUs < -twiceVsyncDurationUs) { 320 | // too early; 321 | return false; 322 | } else if (lateUs > 30000) { 323 | Log.d(TAG, "video late by " + lateUs + " us."); 324 | render = false; 325 | } else { 326 | render = true; 327 | mPresentationTimeUs = info.presentationTimeUs; 328 | } 329 | 330 | //mCodec.releaseOutputBuffer(index, render); 331 | mCodec.releaseOutputBuffer(index, realTimeUs*1000); 332 | mAvailableOutputBufferIndices.removeFirst(); 333 | mAvailableOutputBufferInfos.removeFirst(); 334 | return true; 335 | } 336 | } 337 | 338 | public long getAudioTimeUs() { 339 | if (mAudioTrack == null) { 340 | return 0; 341 | } 342 | 343 | return mAudioTrack.getAudioTimeUs(); 344 | } 345 | 346 | public void process() { 347 | if (mAudioTrack != null) { 348 | mAudioTrack.process(); 349 | } 350 | } 351 | } 352 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/zhanghui/avplayer/MainActivity.java: -------------------------------------------------------------------------------- 1 | /** 2 | * this samples shows how to use MediaCodec and AudioTrack to build an android player, with avsync optimization 3 | * Author: 4 | * zhang hui 5 | * LeEco BSP Multimedia / Communication University of China 6 | */ 7 | package com.example.zhanghui.avplayer; 8 | 9 | import android.app.Activity; 10 | import android.content.Intent; 11 | import android.net.Uri; 12 | import android.os.Bundle; 13 | import android.view.View; 14 | import android.widget.Button; 15 | import android.widget.EditText; 16 | import android.widget.Toast; 17 | 18 | public class MainActivity extends Activity { 19 | 20 | private Button mPlayButton; 21 | private EditText mUrlEditText; 22 | private static final String DEFAULT_FILE_URL = "/sdcard/Sync-One2-Test-1080p-24-H_264_V.mp4"; 23 | @Override 24 | protected void onCreate(Bundle savedInstanceState) { 25 | super.onCreate(savedInstanceState); 26 | setContentView(R.layout.activity_main); 27 | mUrlEditText = (EditText) findViewById(R.id.input_url_editText); 28 | mPlayButton = (Button) findViewById(R.id.play_button); 29 | mPlayButton.setOnClickListener(new View.OnClickListener(){ 30 | @Override 31 | public void onClick(View v) { 32 | String fileUrl = mUrlEditText.getText().toString().trim(); 33 | if (fileUrl.equals("")) { 34 | Toast.makeText(MainActivity.this, "file url is null, will use default url", Toast.LENGTH_SHORT).show(); 35 | Intent intent = new Intent(MainActivity.this, PlayerActivity.class).setData(Uri.parse(DEFAULT_FILE_URL)); 36 | startActivity(intent); 37 | } else { 38 | Intent intent = new Intent(getApplicationContext(), PlayerActivity.class).setData(Uri.parse(fileUrl)); 39 | startActivity(intent); 40 | } 41 | } 42 | }); 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/zhanghui/avplayer/MediaCodecPlayer.java: -------------------------------------------------------------------------------- 1 | package com.example.zhanghui.avplayer; 2 | 3 | import android.content.Context; 4 | import android.media.MediaCodec; 5 | import android.media.MediaExtractor; 6 | import android.media.MediaFormat; 7 | import android.net.Uri; 8 | import android.util.Log; 9 | import android.view.SurfaceHolder; 10 | 11 | import java.io.IOException; 12 
| import java.util.HashMap; 13 | import java.util.Map; 14 | 15 | public class MediaCodecPlayer implements MediaTimeProvider { 16 | private static final String TAG = MediaCodecPlayer.class.getSimpleName(); 17 | 18 | private static final int STATE_IDLE = 1; 19 | private static final int STATE_PREPARING = 2; 20 | private static final int STATE_PLAYING = 3; 21 | private static final int STATE_PAUSED = 4; 22 | 23 | private Boolean mThreadStarted = false; 24 | private CodecState mAudioTrackState; 25 | private int mMediaFormatHeight; 26 | private int mMediaFormatWidth; 27 | private Integer mState; 28 | private long mDeltaTimeUs; 29 | private long mDurationUs; 30 | private Map<Integer, CodecState> mAudioCodecStates; 31 | private Map<Integer, CodecState> mVideoCodecStates; 32 | private Map<String, String> mAudioHeaders; 33 | private Map<String, String> mVideoHeaders; 34 | private MediaExtractor mAudioExtractor; 35 | private MediaExtractor mVideoExtractor; 36 | private SurfaceHolder mSurfaceHolder; 37 | private Thread mThread; 38 | private Uri mAudioUri; 39 | private Uri mVideoUri; 40 | private VideoFrameReleaseTimeHelper mFrameReleaseTimeHelper; 41 | 42 | /* 43 | * Media player class to play back video using MediaCodec. 44 | */ 45 | public MediaCodecPlayer(SurfaceHolder holder, Context context) { 46 | mSurfaceHolder = holder; 47 | mFrameReleaseTimeHelper = new VideoFrameReleaseTimeHelper(context); 48 | mAudioTrackState = null; 49 | mState = STATE_IDLE; 50 | mThread = new Thread(new Runnable() { 51 | @Override 52 | public void run() { 53 | while (true) { 54 | synchronized (mThreadStarted) { 55 | if (mThreadStarted == false) { 56 | break; 57 | } 58 | } 59 | synchronized (mState) { 60 | if (mState == STATE_PLAYING) { 61 | doSomeWork(); 62 | if (mAudioTrackState != null) { 63 | mAudioTrackState.process(); 64 | } 65 | } 66 | } 67 | try { 68 | Thread.sleep(5); // 5 ms loop 69 | } catch (InterruptedException ex) { 70 | Log.d(TAG, "Thread interrupted"); 71 | } 72 | } 73 | } 74 | }); 75 | } 76 | 77 | public void setAudioDataSource(Uri uri, Map<String, String> headers) { 78 | mAudioUri = uri; 79 | mAudioHeaders = headers; 80 | } 81 | 82 | public void setVideoDataSource(Uri uri, Map<String, String> headers) { 83 | mVideoUri = uri; 84 | mVideoHeaders = headers; 85 | } 86 | 87 | private boolean prepareAudio() throws IOException { 88 | for (int i = mAudioExtractor.getTrackCount(); i-- > 0;) { 89 | MediaFormat format = mAudioExtractor.getTrackFormat(i); 90 | String mime = format.getString(MediaFormat.KEY_MIME); 91 | 92 | if (!mime.startsWith("audio/")) { 93 | continue; 94 | } 95 | 96 | Log.d(TAG, "audio track #" + i + " " + format + " " + mime + 97 | " Is ADTS:" + getMediaFormatInteger(format, MediaFormat.KEY_IS_ADTS) + 98 | " Sample rate:" + getMediaFormatInteger(format, MediaFormat.KEY_SAMPLE_RATE) + 99 | " Channel count:" + 100 | getMediaFormatInteger(format, MediaFormat.KEY_CHANNEL_COUNT)); 101 | 102 | mAudioExtractor.selectTrack(i); 103 | if (!addTrack(i, format)) { 104 | Log.e(TAG, "prepareAudio - addTrack() failed!"); 105 | return false; 106 | } 107 | 108 | if (format.containsKey(MediaFormat.KEY_DURATION)) { 109 | long durationUs = format.getLong(MediaFormat.KEY_DURATION); 110 | 111 | if (durationUs > mDurationUs) { 112 | mDurationUs = durationUs; 113 | } 114 | Log.d(TAG, "audio track format #" + i + 115 | " Duration:" + mDurationUs + " microseconds"); 116 | } 117 | } 118 | return true; 119 | } 120 | 121 | private boolean prepareVideo() throws IOException { 122 | for (int i = mVideoExtractor.getTrackCount(); i-- > 0;) { 123 | MediaFormat format = mVideoExtractor.getTrackFormat(i); 124 | String mime = 
format.getString(MediaFormat.KEY_MIME); 125 | 126 | if (!mime.startsWith("video")) { 127 | continue; 128 | } 129 | 130 | mMediaFormatHeight = getMediaFormatInteger(format, MediaFormat.KEY_HEIGHT); 131 | mMediaFormatWidth = getMediaFormatInteger(format, MediaFormat.KEY_WIDTH); 132 | Log.d(TAG, "video track #" + i + " " + format + " " + mime + 133 | " Width:" + mMediaFormatWidth + ", Height:" + mMediaFormatHeight); 134 | 135 | mVideoExtractor.selectTrack(i); 136 | if (!addTrack(i, format)) { 137 | Log.e(TAG, "prepareVideo - addTrack() failed!"); 138 | return false; 139 | } 140 | 141 | if (format.containsKey(MediaFormat.KEY_DURATION)) { 142 | long durationUs = format.getLong(MediaFormat.KEY_DURATION); 143 | 144 | if (durationUs > mDurationUs) { 145 | mDurationUs = durationUs; 146 | } 147 | Log.d(TAG, "track format #" + i + " Duration:" + 148 | mDurationUs + " microseconds"); 149 | } 150 | } 151 | return true; 152 | } 153 | 154 | public boolean prepare() throws IOException { 155 | if (null == mAudioExtractor) { 156 | mAudioExtractor = new MediaExtractor(); 157 | if (null == mAudioExtractor) { 158 | Log.e(TAG, "prepare - Cannot create Audio extractor."); 159 | return false; 160 | } 161 | } 162 | 163 | if (null == mVideoExtractor){ 164 | mVideoExtractor = new MediaExtractor(); 165 | if (null == mVideoExtractor) { 166 | Log.e(TAG, "prepare - Cannot create Video extractor."); 167 | return false; 168 | } 169 | } 170 | 171 | mAudioExtractor.setDataSource(mAudioUri.toString(), mAudioHeaders); 172 | mVideoExtractor.setDataSource(mVideoUri.toString(), mVideoHeaders); 173 | 174 | if (null == mVideoCodecStates) { 175 | mVideoCodecStates = new HashMap(); 176 | } else { 177 | mVideoCodecStates.clear(); 178 | } 179 | 180 | if (null == mAudioCodecStates) { 181 | mAudioCodecStates = new HashMap(); 182 | } else { 183 | mAudioCodecStates.clear(); 184 | } 185 | 186 | if (!prepareAudio()) { 187 | Log.e(TAG,"prepare - prepareAudio() failed!"); 188 | return false; 189 | } 190 | if (!prepareVideo()) { 191 | Log.e(TAG,"prepare - prepareVideo() failed!"); 192 | return false; 193 | } 194 | 195 | synchronized (mState) { 196 | mState = STATE_PAUSED; 197 | } 198 | return true; 199 | } 200 | 201 | private boolean addTrack(int trackIndex, MediaFormat format) throws IOException { 202 | String mime = format.getString(MediaFormat.KEY_MIME); 203 | boolean isVideo = mime.startsWith("video/"); 204 | boolean isAudio = mime.startsWith("audio/"); 205 | MediaCodec codec; 206 | 207 | codec = MediaCodec.createDecoderByType(mime); 208 | if (codec == null) { 209 | Log.e(TAG, "addTrack - Could not create regular playback codec for mime "+ 210 | mime+"!"); 211 | return false; 212 | } 213 | codec.configure( 214 | format, 215 | isVideo ? mSurfaceHolder.getSurface() : null, null, 0); 216 | 217 | CodecState state; 218 | if (isVideo) { 219 | state = new CodecState((MediaTimeProvider)this, mVideoExtractor, 220 | trackIndex, format, codec, true); 221 | mVideoCodecStates.put(Integer.valueOf(trackIndex), state); 222 | } else { 223 | state = new CodecState((MediaTimeProvider)this, mAudioExtractor, 224 | trackIndex, format, codec, true); 225 | mAudioCodecStates.put(Integer.valueOf(trackIndex), state); 226 | } 227 | 228 | if (isAudio) { 229 | mAudioTrackState = state; 230 | } 231 | 232 | return true; 233 | } 234 | 235 | protected int getMediaFormatInteger(MediaFormat format, String key) { 236 | return format.containsKey(key) ? 
format.getInteger(key) : 0; 237 | } 238 | 239 | public boolean start() { 240 | Log.d(TAG, "start"); 241 | 242 | synchronized (mState) { 243 | if (mState == STATE_PLAYING || mState == STATE_PREPARING) { 244 | return true; 245 | } else if (mState == STATE_IDLE) { 246 | mState = STATE_PREPARING; 247 | return true; 248 | } else if (mState != STATE_PAUSED) { 249 | throw new IllegalStateException(); 250 | } 251 | 252 | for (CodecState state : mVideoCodecStates.values()) { 253 | state.start(); 254 | } 255 | 256 | for (CodecState state : mAudioCodecStates.values()) { 257 | state.start(); 258 | } 259 | 260 | mDeltaTimeUs = -1; 261 | mState = STATE_PLAYING; 262 | } 263 | return false; 264 | } 265 | 266 | public void startThread() { 267 | mFrameReleaseTimeHelper.enable(); 268 | start(); 269 | synchronized (mThreadStarted) { 270 | mThreadStarted = true; 271 | mThread.start(); 272 | } 273 | } 274 | 275 | public void pause() { 276 | Log.d(TAG, "pause"); 277 | 278 | synchronized (mState) { 279 | if (mState == STATE_PAUSED) { 280 | return; 281 | } else if (mState != STATE_PLAYING) { 282 | throw new IllegalStateException(); 283 | } 284 | 285 | for (CodecState state : mVideoCodecStates.values()) { 286 | state.pause(); 287 | } 288 | 289 | for (CodecState state : mAudioCodecStates.values()) { 290 | state.pause(); 291 | } 292 | 293 | mState = STATE_PAUSED; 294 | } 295 | } 296 | 297 | public void flush() { 298 | Log.d(TAG, "flush"); 299 | 300 | synchronized (mState) { 301 | if (mState == STATE_PLAYING || mState == STATE_PREPARING) { 302 | return; 303 | } 304 | 305 | for (CodecState state : mAudioCodecStates.values()) { 306 | state.flush(); 307 | } 308 | 309 | for (CodecState state : mVideoCodecStates.values()) { 310 | state.flush(); 311 | } 312 | } 313 | } 314 | 315 | public void reset() { 316 | synchronized (mState) { 317 | if (mState == STATE_PLAYING) { 318 | pause(); 319 | } 320 | if (mVideoCodecStates != null) { 321 | for (CodecState state : mVideoCodecStates.values()) { 322 | state.release(); 323 | } 324 | mVideoCodecStates = null; 325 | } 326 | 327 | if (mAudioCodecStates != null) { 328 | for (CodecState state : mAudioCodecStates.values()) { 329 | state.release(); 330 | } 331 | mAudioCodecStates = null; 332 | } 333 | 334 | if (mAudioExtractor != null) { 335 | mAudioExtractor.release(); 336 | mAudioExtractor = null; 337 | } 338 | 339 | if (mVideoExtractor != null) { 340 | mVideoExtractor.release(); 341 | mVideoExtractor = null; 342 | } 343 | 344 | if (mFrameReleaseTimeHelper != null) { 345 | mFrameReleaseTimeHelper.disable(); 346 | mFrameReleaseTimeHelper = null; 347 | } 348 | 349 | mDurationUs = -1; 350 | mState = STATE_IDLE; 351 | } 352 | synchronized (mThreadStarted) { 353 | mThreadStarted = false; 354 | } 355 | try { 356 | mThread.join(); 357 | } catch (InterruptedException ex) { 358 | Log.d(TAG, "mThread.join " + ex); 359 | } 360 | } 361 | 362 | public boolean isEnded() { 363 | for (CodecState state : mVideoCodecStates.values()) { 364 | if (!state.isEnded()) { 365 | return false; 366 | } 367 | } 368 | 369 | for (CodecState state : mAudioCodecStates.values()) { 370 | if (!state.isEnded()) { 371 | return false; 372 | } 373 | } 374 | 375 | return true; 376 | } 377 | 378 | private void doSomeWork() { 379 | try { 380 | for (CodecState state : mVideoCodecStates.values()) { 381 | state.doSomeWork(); 382 | } 383 | } catch (IllegalStateException e) { 384 | throw new Error("Video CodecState.doSomeWork" + e); 385 | } 386 | 387 | try { 388 | for (CodecState state : mAudioCodecStates.values()) { 389 | 
state.doSomeWork(); 390 | } 391 | } catch (IllegalStateException e) { 392 | throw new Error("Audio CodecState.doSomeWork" + e); 393 | } 394 | 395 | } 396 | 397 | public long getNowUs() { 398 | // returns the audio playback time 399 | if (mAudioTrackState == null) { 400 | return System.currentTimeMillis() * 1000; 401 | } 402 | 403 | return mAudioTrackState.getAudioTimeUs(); 404 | } 405 | 406 | public long getRealTimeUsForMediaTime(long mediaTimeUs) { 407 | if (mDeltaTimeUs == -1) { 408 | long nowUs = getNowUs(); 409 | mDeltaTimeUs = nowUs - mediaTimeUs; 410 | } 411 | long earlyUs = mDeltaTimeUs + mediaTimeUs - getNowUs(); 412 | long unadjustedFrameReleaseTimeNs = System.nanoTime() + (earlyUs * 1000); 413 | long adjustedReleaseTimeNs = mFrameReleaseTimeHelper.adjustReleaseTime( 414 | mDeltaTimeUs + mediaTimeUs, unadjustedFrameReleaseTimeNs); 415 | return adjustedReleaseTimeNs / 1000; 416 | } 417 | 418 | public long getVsyncDurationNs() { 419 | if (mFrameReleaseTimeHelper != null) { 420 | return mFrameReleaseTimeHelper.getVsyncDurationNs(); 421 | } else { 422 | return -1; 423 | } 424 | } 425 | 426 | public int getDuration() { 427 | return (int)((mDurationUs + 500) / 1000); 428 | } 429 | 430 | public int getCurrentPosition() { 431 | if (mVideoCodecStates == null) { 432 | return 0; 433 | } 434 | 435 | long positionUs = 0; 436 | 437 | for (CodecState state : mVideoCodecStates.values()) { 438 | long trackPositionUs = state.getCurrentPositionUs(); 439 | 440 | if (trackPositionUs > positionUs) { 441 | positionUs = trackPositionUs; 442 | } 443 | } 444 | return (int)((positionUs + 500) / 1000); 445 | } 446 | 447 | } 448 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/zhanghui/avplayer/MediaTimeProvider.java: -------------------------------------------------------------------------------- 1 | package com.example.zhanghui.avplayer; 2 | 3 | /* 4 | * Interface used by CodecState to retrieve media timing info from the parent player 5 | */ 6 | public interface MediaTimeProvider { 7 | public long getNowUs(); 8 | public long getRealTimeUsForMediaTime(long mediaTimeUs); 9 | public long getVsyncDurationNs(); 10 | } 11 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/zhanghui/avplayer/NonBlockingAudioTrack.java: -------------------------------------------------------------------------------- 1 | package com.example.zhanghui.avplayer; 2 | 3 | import android.media.AudioFormat; 4 | import android.media.AudioManager; 5 | import android.media.AudioTimestamp; 6 | import android.media.AudioTrack; 7 | import android.media.AudioAttributes; 8 | import android.util.Log; 9 | 10 | import java.lang.reflect.Method; 11 | import java.nio.ByteBuffer; 12 | import java.util.LinkedList; 13 | 14 | /** 15 | * Class for playing audio using an AudioTrack. 16 | * AudioTrack.write() will 17 | * block until all data has been written to the system. In order to avoid blocking, this class 18 | * calculates the available buffer size first, then writes to the audio sink. 
19 | */ 20 | public class NonBlockingAudioTrack { 21 | private static final String TAG = NonBlockingAudioTrack.class.getSimpleName(); 22 | private static final int MIN_TIMESTAMP_SAMPLE_INTERVAL_US = 250000; 23 | 24 | class QueueElement { 25 | ByteBuffer data; 26 | int size; 27 | long pts; 28 | } 29 | 30 | private AudioTrack mAudioTrack; 31 | private int mSampleRate; 32 | private int mNumBytesQueued = 0; 33 | private LinkedList<QueueElement> mQueue = new LinkedList<QueueElement>(); 34 | private boolean mStopped; 35 | private Method getLatencyMethod; 36 | private long mLatencyUs; 37 | private long mLastTimestampSampleTimeUs; 38 | private boolean mAudioTimestampSet; 39 | private final AudioTimestamp mAudioTimestamp; 40 | 41 | public NonBlockingAudioTrack(int sampleRate, int channelCount) { 42 | int channelConfig; 43 | switch (channelCount) { 44 | case 1: 45 | channelConfig = AudioFormat.CHANNEL_OUT_MONO; 46 | break; 47 | case 2: 48 | channelConfig = AudioFormat.CHANNEL_OUT_STEREO; 49 | break; 50 | case 6: 51 | channelConfig = AudioFormat.CHANNEL_OUT_5POINT1; 52 | break; 53 | default: 54 | throw new IllegalArgumentException(); 55 | } 56 | 57 | int minBufferSize = 58 | AudioTrack.getMinBufferSize( 59 | sampleRate, 60 | channelConfig, 61 | AudioFormat.ENCODING_PCM_16BIT); 62 | 63 | int bufferSize = 2 * minBufferSize; 64 | 65 | mAudioTrack = new AudioTrack( 66 | AudioManager.STREAM_MUSIC, 67 | sampleRate, 68 | channelConfig, 69 | AudioFormat.ENCODING_PCM_16BIT, 70 | bufferSize, 71 | AudioTrack.MODE_STREAM); 72 | 73 | mSampleRate = sampleRate; 74 | 75 | try { 76 | getLatencyMethod = 77 | android.media.AudioTrack.class.getMethod("getLatency", (Class[]) null); 78 | } catch (NoSuchMethodException e) {} 79 | mLatencyUs = 0; 80 | mLastTimestampSampleTimeUs = 0; 81 | mAudioTimestamp = new AudioTimestamp(); 82 | } 83 | 84 | public long getAudioTimeUs() { 85 | long systemClockUs = System.nanoTime() / 1000; 86 | int numFramesPlayed = mAudioTrack.getPlaybackHeadPosition(); 87 | if (systemClockUs - mLastTimestampSampleTimeUs >= MIN_TIMESTAMP_SAMPLE_INTERVAL_US) { 88 | mAudioTimestampSet = mAudioTrack.getTimestamp(mAudioTimestamp); 89 | if (getLatencyMethod != null) { 90 | try { 91 | mLatencyUs = (Integer) getLatencyMethod.invoke(mAudioTrack, (Object[]) null) * 1000L / 2; 92 | mLatencyUs = Math.max(mLatencyUs, 0); 93 | } catch (Exception e) { 94 | getLatencyMethod = null; 95 | } 96 | } 97 | mLastTimestampSampleTimeUs = systemClockUs; 98 | } 99 | 100 | if (mAudioTimestampSet) { 101 | // Calculate the speed-adjusted position using the timestamp (which may be in the future). 
102 | long elapsedSinceTimestampUs = System.nanoTime() / 1000 - (mAudioTimestamp.nanoTime / 1000); 103 | long elapsedSinceTimestampFrames = elapsedSinceTimestampUs * mSampleRate / 1000000L; 104 | long elapsedFrames = mAudioTimestamp.framePosition + elapsedSinceTimestampFrames; 105 | long durationUs = (elapsedFrames * 1000000L) / mSampleRate; 106 | return durationUs; 107 | } else { 108 | long durationUs = (numFramesPlayed * 1000000L) / mSampleRate - mLatencyUs; 109 | return durationUs; 110 | } 111 | } 112 | 113 | public int getNumBytesQueued() { 114 | return mNumBytesQueued; 115 | } 116 | 117 | public void play() { 118 | mStopped = false; 119 | mAudioTrack.play(); 120 | } 121 | 122 | public void stop() { 123 | if (mQueue.isEmpty()) { 124 | mAudioTrack.stop(); 125 | mNumBytesQueued = 0; 126 | } else { 127 | mStopped = true; 128 | } 129 | } 130 | 131 | public void pause() { 132 | mAudioTrack.pause(); 133 | } 134 | 135 | public void flush() { 136 | if (mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) { 137 | return; 138 | } 139 | mAudioTrack.flush(); 140 | mQueue.clear(); 141 | mNumBytesQueued = 0; 142 | mStopped = false; 143 | } 144 | 145 | public void release() { 146 | mQueue.clear(); 147 | mNumBytesQueued = 0; 148 | mLatencyUs = 0; 149 | mLastTimestampSampleTimeUs = 0; 150 | mAudioTrack.release(); 151 | mAudioTrack = null; 152 | mStopped = false; 153 | mAudioTimestampSet = false; 154 | } 155 | 156 | public void process() { 157 | while (!mQueue.isEmpty()) { 158 | QueueElement element = mQueue.peekFirst(); 159 | int written = mAudioTrack.write(element.data, element.size, 160 | AudioTrack.WRITE_NON_BLOCKING, element.pts); 161 | if (written < 0) { 162 | throw new RuntimeException("Audiotrack.write() failed."); 163 | } 164 | 165 | mNumBytesQueued -= written; 166 | element.size -= written; 167 | if (element.size != 0) { 168 | break; 169 | } 170 | mQueue.removeFirst(); 171 | } 172 | if (mStopped) { 173 | mAudioTrack.stop(); 174 | mNumBytesQueued = 0; 175 | mStopped = false; 176 | } 177 | } 178 | 179 | public int getPlayState() { 180 | return mAudioTrack.getPlayState(); 181 | } 182 | 183 | public void write(ByteBuffer data, int size, long pts) { 184 | QueueElement element = new QueueElement(); 185 | element.data = data; 186 | element.size = size; 187 | element.pts = pts; 188 | 189 | // accumulate size written to queue 190 | mNumBytesQueued += size; 191 | mQueue.add(element); 192 | } 193 | } 194 | 195 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/zhanghui/avplayer/PlayerActivity.java: -------------------------------------------------------------------------------- 1 | package com.example.zhanghui.avplayer; 2 | 3 | import android.app.Activity; 4 | import android.content.Intent; 5 | import android.view.KeyEvent; 6 | import android.widget.MediaController; 7 | import android.net.Uri; 8 | import android.os.AsyncTask; 9 | import android.os.Bundle; 10 | import android.util.Log; 11 | import android.view.SurfaceHolder; 12 | import android.view.SurfaceView; 13 | import android.view.View; 14 | import android.view.Window; 15 | import android.view.WindowManager; 16 | 17 | import java.io.IOException; 18 | import java.util.concurrent.TimeUnit; 19 | 20 | public class PlayerActivity extends Activity implements SurfaceHolder.Callback{ 21 | 22 | private static final String TAG = "PlayerActivity"; 23 | private MediaCodecPlayer mMediaCodecPlayer; 24 | private SurfaceView mSurfaceV; 25 | private SurfaceHolder mSurfaceHolder; 26 | private 
MediaController mediaController; 27 | private Uri mFileUrl; 28 | private static final int SLEEP_TIME_MS = 1000; 29 | private static final long PLAY_TIME_MS = TimeUnit.MILLISECONDS.convert(4, TimeUnit.MINUTES); 30 | 31 | @Override 32 | protected void onCreate(Bundle savedInstanceState) { 33 | super.onCreate(savedInstanceState); 34 | requestWindowFeature(Window.FEATURE_NO_TITLE); 35 | getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, 36 | WindowManager.LayoutParams.FLAG_FULLSCREEN); 37 | setContentView(R.layout.activity_player); 38 | mSurfaceV = (SurfaceView) findViewById(R.id.surfaceView); 39 | mSurfaceV.getHolder().addCallback(this); 40 | View root = findViewById(R.id.root); 41 | mediaController = new MediaController(this); 42 | mediaController.setAnchorView(root); 43 | root.setOnKeyListener(new View.OnKeyListener() { 44 | @Override 45 | public boolean onKey(View v, int keyCode, KeyEvent event) { 46 | return mediaController.dispatchKeyEvent(event); 47 | } 48 | }); 49 | Intent intent = getIntent(); 50 | mFileUrl = intent.getData(); 51 | Intent i = new Intent("com.android.music.musicservicecommand"); 52 | i.putExtra("command", "pause"); 53 | this.sendBroadcast(i); 54 | } 55 | 56 | @Override 57 | public void onStart() { 58 | super.onStart(); 59 | } 60 | 61 | @Override 62 | public void onStop() { 63 | super.onStop(); 64 | if (mMediaCodecPlayer != null) { 65 | mMediaCodecPlayer.reset(); 66 | } 67 | } 68 | 69 | @Override 70 | public void onDestroy() { 71 | super.onDestroy(); 72 | if (mMediaCodecPlayer != null) { 73 | mMediaCodecPlayer.reset(); 74 | } 75 | } 76 | 77 | @Override 78 | public void surfaceCreated(SurfaceHolder holder) { 79 | this.mSurfaceHolder = holder; 80 | mSurfaceHolder.setKeepScreenOn(true); 81 | new DecodeTask().execute(); 82 | } 83 | 84 | @Override 85 | public void surfaceChanged(SurfaceHolder holder, int format, int width, 86 | int height) { 87 | 88 | } 89 | 90 | @Override 91 | public void surfaceDestroyed(SurfaceHolder holder) { 92 | if (mMediaCodecPlayer != null) { 93 | mMediaCodecPlayer.reset(); 94 | } 95 | } 96 | 97 | public class DecodeTask extends AsyncTask<Void, Void, Boolean> { 98 | 99 | @Override 100 | protected Boolean doInBackground(Void... 
params) { 101 | // this runs on a background thread 102 | initializePlayer(); 103 | return true; 104 | } 105 | 106 | @Override 107 | protected void onPostExecute(Boolean result) { 108 | // this runs on the UI thread 109 | } 110 | } 111 | 112 | private void initializePlayer() { 113 | mMediaCodecPlayer = new MediaCodecPlayer(mSurfaceHolder, getApplicationContext()); 114 | 115 | mMediaCodecPlayer.setAudioDataSource(mFileUrl, null); 116 | mMediaCodecPlayer.setVideoDataSource(mFileUrl, null); 117 | mMediaCodecPlayer.start(); //from IDLE to PREPARING 118 | try { 119 | mMediaCodecPlayer.prepare(); 120 | } catch (IOException e) { 121 | e.printStackTrace(); 122 | } 123 | 124 | // starts video playback 125 | mMediaCodecPlayer.startThread(); 126 | 127 | long timeOut = System.currentTimeMillis() + 4*PLAY_TIME_MS; 128 | while (timeOut > System.currentTimeMillis() && !mMediaCodecPlayer.isEnded()) { 129 | try { 130 | Thread.sleep(SLEEP_TIME_MS); 131 | } catch (InterruptedException e) { 132 | e.printStackTrace(); 133 | } 134 | if (mMediaCodecPlayer.getCurrentPosition() >= mMediaCodecPlayer.getDuration()) { 135 | Log.d(TAG, "testVideoPlayback -- current pos = " + 136 | mMediaCodecPlayer.getCurrentPosition() + 137 | " >= duration = " + mMediaCodecPlayer.getDuration()); 138 | break; 139 | } 140 | } 141 | 142 | if (System.currentTimeMillis() >= timeOut) { 143 | Log.e(TAG, "video playback timeout exceeded!"); 144 | return; 145 | } 146 | 147 | Log.d(TAG, "playVideo player.reset()"); 148 | mMediaCodecPlayer.reset(); 149 | } 150 | } 151 | -------------------------------------------------------------------------------- /app/src/main/java/com/example/zhanghui/avplayer/VideoFrameReleaseTimeHelper.java: -------------------------------------------------------------------------------- 1 | package com.example.zhanghui.avplayer; 2 | 3 | import android.annotation.TargetApi; 4 | import android.content.Context; 5 | import android.os.Handler; 6 | import android.os.HandlerThread; 7 | import android.os.Message; 8 | import android.util.Log; 9 | import android.view.Choreographer; 10 | import android.view.Choreographer.FrameCallback; 11 | import android.view.WindowManager; 12 | 13 | /** 14 | * Makes a best effort to adjust frame release timestamps for a smoother visual result. 15 | */ 16 | @TargetApi(16) 17 | public final class VideoFrameReleaseTimeHelper { 18 | 19 | private static final double DISPLAY_REFRESH_RATE_UNKNOWN = -1; 20 | private static final long CHOREOGRAPHER_SAMPLE_DELAY_MILLIS = 500; 21 | private static final long MAX_ALLOWED_DRIFT_NS = 20000000; 22 | 23 | private static final long VSYNC_OFFSET_PERCENTAGE = 80; 24 | private static final int MIN_FRAMES_FOR_ADJUSTMENT = 6; 25 | 26 | private final VSyncSampler vsyncSampler; 27 | private final boolean useDefaultDisplayVsync; 28 | private final long vsyncDurationNs; 29 | private final long vsyncOffsetNs; 30 | 31 | private long lastFramePresentationTimeUs; 32 | private long adjustedLastFrameTimeNs; 33 | private long pendingAdjustedFrameTimeNs; 34 | 35 | private boolean haveSync; 36 | private long syncUnadjustedReleaseTimeNs; 37 | private long syncFramePresentationTimeNs; 38 | private long frameCount; 39 | 40 | /** 41 | * Constructs an instance that smooths frame release timestamps but does not align them with 42 | * the default display's vsync signal. 
43 | */ 44 | public VideoFrameReleaseTimeHelper() { 45 | this(DISPLAY_REFRESH_RATE_UNKNOWN); 46 | } 47 | 48 | /** 49 | * Constructs an instance that smooths frame release timestamps and aligns them with the default 50 | * display's vsync signal. 51 | * 52 | * @param context A context from which information about the default display can be retrieved. 53 | */ 54 | public VideoFrameReleaseTimeHelper(Context context) { 55 | this(getDefaultDisplayRefreshRate(context)); 56 | } 57 | 58 | private VideoFrameReleaseTimeHelper(double defaultDisplayRefreshRate) { 59 | useDefaultDisplayVsync = defaultDisplayRefreshRate != DISPLAY_REFRESH_RATE_UNKNOWN; 60 | if (useDefaultDisplayVsync) { 61 | vsyncSampler = VSyncSampler.getInstance(); 62 | vsyncDurationNs = (long) (1000000000L / defaultDisplayRefreshRate); 63 | vsyncOffsetNs = (vsyncDurationNs * VSYNC_OFFSET_PERCENTAGE) / 100; 64 | } else { 65 | vsyncSampler = null; 66 | vsyncDurationNs = -1; // Value unused. 67 | vsyncOffsetNs = -1; // Value unused. 68 | } 69 | } 70 | 71 | /** 72 | * Enables the helper. 73 | */ 74 | public void enable() { 75 | haveSync = false; 76 | if (useDefaultDisplayVsync) { 77 | vsyncSampler.addObserver(); 78 | } 79 | } 80 | 81 | /** 82 | * Disables the helper. 83 | */ 84 | public void disable() { 85 | if (useDefaultDisplayVsync) { 86 | vsyncSampler.removeObserver(); 87 | } 88 | } 89 | 90 | public long getVsyncDurationNs(){ 91 | return vsyncDurationNs; 92 | } 93 | /** 94 | * Adjusts a frame release timestamp. 95 | * 96 | * @param framePresentationTimeUs The frame's presentation time, in microseconds. 97 | * @param unadjustedReleaseTimeNs The frame's unadjusted release time, in nanoseconds and in 98 | * the same time base as {@link System#nanoTime()}. 99 | * @return The adjusted frame release timestamp, in nanoseconds and in the same time base as 100 | * {@link System#nanoTime()}. 101 | */ 102 | public long adjustReleaseTime(long framePresentationTimeUs, long unadjustedReleaseTimeNs) { 103 | long framePresentationTimeNs = framePresentationTimeUs * 1000; 104 | 105 | // Until we know better, the adjustment will be a no-op. 106 | long adjustedFrameTimeNs = framePresentationTimeNs; 107 | long adjustedReleaseTimeNs = unadjustedReleaseTimeNs; 108 | 109 | if (haveSync) { 110 | // See if we've advanced to the next frame. 111 | if (framePresentationTimeUs != lastFramePresentationTimeUs) { 112 | frameCount++; 113 | adjustedLastFrameTimeNs = pendingAdjustedFrameTimeNs; 114 | } 115 | if (frameCount >= MIN_FRAMES_FOR_ADJUSTMENT) { 116 | // We're synced and have waited the required number of frames to apply an adjustment. 117 | // Calculate the average frame time across all the frames we've seen since the last sync. 118 | // This will typically give us a frame rate at a finer granularity than the frame times 119 | // themselves (which often only have millisecond granularity). 120 | long averageFrameDurationNs = (framePresentationTimeNs - syncFramePresentationTimeNs) 121 | / frameCount; 122 | // Project the adjusted frame time forward using the average. 
123 | long candidateAdjustedFrameTimeNs = adjustedLastFrameTimeNs + averageFrameDurationNs; 124 | 125 | if (isDriftTooLarge(candidateAdjustedFrameTimeNs, unadjustedReleaseTimeNs)) { 126 | haveSync = false; 127 | } else { 128 | adjustedFrameTimeNs = candidateAdjustedFrameTimeNs; 129 | adjustedReleaseTimeNs = syncUnadjustedReleaseTimeNs + adjustedFrameTimeNs 130 | - syncFramePresentationTimeNs; 131 | } 132 | } else { 133 | // We're synced but haven't waited the required number of frames to apply an adjustment. 134 | // Check drift anyway. 135 | if (isDriftTooLarge(framePresentationTimeNs, unadjustedReleaseTimeNs)) { 136 | haveSync = false; 137 | } 138 | } 139 | } 140 | 141 | // If we need to sync, do so now. 142 | if (!haveSync) { 143 | syncFramePresentationTimeNs = framePresentationTimeNs; 144 | syncUnadjustedReleaseTimeNs = unadjustedReleaseTimeNs; 145 | frameCount = 0; 146 | haveSync = true; 147 | onSynced(); 148 | } 149 | 150 | lastFramePresentationTimeUs = framePresentationTimeUs; 151 | pendingAdjustedFrameTimeNs = adjustedFrameTimeNs; 152 | 153 | if (vsyncSampler == null || vsyncSampler.sampledVsyncTimeNs == 0) { 154 | //if we forgot to enable VideoFrameReleaseTimeHelper, then it will go into here 155 | return adjustedReleaseTimeNs; 156 | } 157 | 158 | // Find the timestamp of the closest vsync. This is the vsync that we're targeting. 159 | long snappedTimeNs = closestVsync(adjustedReleaseTimeNs, 160 | vsyncSampler.sampledVsyncTimeNs, vsyncDurationNs); 161 | // Apply an offset so that we release before the target vsync, but after the previous one. 162 | return snappedTimeNs;// - vsyncOffsetNs; 163 | } 164 | 165 | protected void onSynced() { 166 | // Do nothing. 167 | } 168 | 169 | private boolean isDriftTooLarge(long frameTimeNs, long releaseTimeNs) { 170 | long elapsedFrameTimeNs = frameTimeNs - syncFramePresentationTimeNs; 171 | long elapsedReleaseTimeNs = releaseTimeNs - syncUnadjustedReleaseTimeNs; 172 | return Math.abs(elapsedReleaseTimeNs - elapsedFrameTimeNs) > MAX_ALLOWED_DRIFT_NS; 173 | } 174 | 175 | private static long closestVsync(long releaseTime, long sampledVsyncTime, long vsyncDuration) { 176 | long vsyncCount = (releaseTime - sampledVsyncTime) / vsyncDuration; 177 | long snappedTimeNs = sampledVsyncTime + (vsyncDuration * vsyncCount); 178 | long snappedBeforeNs; 179 | long snappedAfterNs; 180 | if (releaseTime <= snappedTimeNs) { 181 | snappedBeforeNs = snappedTimeNs - vsyncDuration; 182 | snappedAfterNs = snappedTimeNs; 183 | } else { 184 | snappedBeforeNs = snappedTimeNs; 185 | snappedAfterNs = snappedTimeNs + vsyncDuration; 186 | } 187 | long snappedAfterDiff = snappedAfterNs - releaseTime; 188 | long snappedBeforeDiff = releaseTime - snappedBeforeNs; 189 | return snappedAfterDiff < snappedBeforeDiff ? snappedAfterNs : snappedBeforeNs; 190 | } 191 | 192 | private static double getDefaultDisplayRefreshRate(Context context) { 193 | WindowManager manager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); 194 | return manager.getDefaultDisplay() != null ? manager.getDefaultDisplay().getRefreshRate() 195 | : DISPLAY_REFRESH_RATE_UNKNOWN; 196 | } 197 | 198 | /** 199 | * Samples display vsync timestamps. A single instance using a single {@link Choreographer} is 200 | * shared by all {@link VideoFrameReleaseTimeHelper} instances. This is done to avoid a resource 201 | * leak in the platform on API levels prior to 23. See [Internal: b/12455729]. 
202 | */ 203 | private static final class VSyncSampler implements FrameCallback, Handler.Callback { 204 | 205 | public volatile long sampledVsyncTimeNs; 206 | 207 | private static final int CREATE_CHOREOGRAPHER = 0; 208 | private static final int MSG_ADD_OBSERVER = 1; 209 | private static final int MSG_REMOVE_OBSERVER = 2; 210 | 211 | private static final VSyncSampler INSTANCE = new VSyncSampler(); 212 | 213 | private final Handler handler; 214 | private final HandlerThread choreographerOwnerThread; 215 | private Choreographer choreographer; 216 | private int observerCount; 217 | 218 | public static VSyncSampler getInstance() { 219 | return INSTANCE; 220 | } 221 | 222 | private VSyncSampler() { 223 | choreographerOwnerThread = new HandlerThread("ChoreographerOwner:Handler"); 224 | choreographerOwnerThread.start(); 225 | handler = new Handler(choreographerOwnerThread.getLooper(), this); 226 | handler.sendEmptyMessage(CREATE_CHOREOGRAPHER); 227 | } 228 | 229 | /** 230 | * Notifies the sampler that a {@link VideoFrameReleaseTimeHelper} is observing 231 | * {@link #sampledVsyncTimeNs}, and hence that the value should be periodically updated. 232 | */ 233 | public void addObserver() { 234 | handler.sendEmptyMessage(MSG_ADD_OBSERVER); 235 | } 236 | 237 | /** 238 | * Notifies the sampler that a {@link VideoFrameReleaseTimeHelper} is no longer observing 239 | * {@link #sampledVsyncTimeNs}. 240 | */ 241 | public void removeObserver() { 242 | handler.sendEmptyMessage(MSG_REMOVE_OBSERVER); 243 | } 244 | 245 | @Override 246 | public void doFrame(long vsyncTimeNs) { 247 | sampledVsyncTimeNs = vsyncTimeNs; 248 | choreographer.postFrameCallbackDelayed(this, CHOREOGRAPHER_SAMPLE_DELAY_MILLIS); 249 | } 250 | 251 | @Override 252 | public boolean handleMessage(Message message) { 253 | switch (message.what) { 254 | case CREATE_CHOREOGRAPHER: { 255 | createChoreographerInstanceInternal(); 256 | return true; 257 | } 258 | case MSG_ADD_OBSERVER: { 259 | addObserverInternal(); 260 | return true; 261 | } 262 | case MSG_REMOVE_OBSERVER: { 263 | removeObserverInternal(); 264 | return true; 265 | } 266 | default: { 267 | return false; 268 | } 269 | } 270 | } 271 | 272 | private void createChoreographerInstanceInternal() { 273 | choreographer = Choreographer.getInstance(); 274 | } 275 | 276 | private void addObserverInternal() { 277 | observerCount++; 278 | if (observerCount == 1) { 279 | choreographer.postFrameCallback(this); 280 | } 281 | } 282 | 283 | private void removeObserverInternal() { 284 | observerCount--; 285 | if (observerCount == 0) { 286 | choreographer.removeFrameCallback(this); 287 | sampledVsyncTimeNs = 0; 288 | } 289 | } 290 | 291 | } 292 | 293 | } 294 | -------------------------------------------------------------------------------- /app/src/main/res/layout/activity_main.xml: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="utf-8"?> 2 | <!-- NOTE: the original markup of this file was lost during extraction; the tags below are a minimal reconstruction consistent with the view IDs used in MainActivity. --> 3 | <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" 4 |     android:layout_width="match_parent" 5 |     android:layout_height="match_parent" 6 |     android:orientation="vertical"> 7 |     <EditText 8 |         android:id="@+id/input_url_editText" 9 |         android:layout_width="match_parent" 10 |         android:layout_height="wrap_content" 11 |         android:hint="input file url" /> 12 |     <Button 13 |         android:id="@+id/play_button" 14 |         android:layout_width="wrap_content" 15 |         android:layout_height="wrap_content" 16 |         android:text="play" /> 17 | </LinearLayout>
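A quick numeric check of the vsync snapping done by VideoFrameReleaseTimeHelper.closestVsync() above, with assumed values (60 Hz display, last sampled vsync at t = 0, desired release at 20 ms):

    // Assumed numbers for illustration only; the arithmetic mirrors closestVsync().
    long vsyncDurationNs = 16_666_667L;                      // one frame period at 60 Hz
    long releaseTimeNs   = 20_000_000L;                      // requested release time
    long vsyncCount      = releaseTimeNs / vsyncDurationNs;  // = 1
    long snappedNs       = vsyncDurationNs * vsyncCount;     // = 16,666,667
    // releaseTime > snapped, so the candidates are 16,666,667 (before) and 33,333,334 (after);
    // the release is 3,333,333 ns after the earlier vsync but 13,333,334 ns before the later one,
    // so closestVsync() returns 16,666,667 and the frame is queued for the earlier vsync.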