├── .gitignore
├── HWEncoderExperiments
├── .gitignore
├── build.gradle
└── src
│ └── main
│ ├── AndroidManifest.xml
│ ├── ic_launcher-web.png
│ ├── java
│ └── net
│ │ └── openwatch
│ │ └── hwencoderexperiments
│ │ ├── ChunkedHWRecorder.java
│ │ ├── FileUtils.java
│ │ ├── HWRecorderActivity.java
│ │ └── MediaRecorderWrapper.java
│ └── res
│ ├── drawable-hdpi
│ └── ic_launcher.png
│ ├── drawable-mdpi
│ └── ic_launcher.png
│ ├── drawable-xhdpi
│ └── ic_launcher.png
│ ├── drawable-xxhdpi
│ └── ic_launcher.png
│ ├── layout
│ └── activity_hwrecorder.xml
│ ├── menu
│ └── main.xml
│ ├── values-sw600dp
│ └── dimens.xml
│ ├── values-sw720dp-land
│ └── dimens.xml
│ ├── values-v11
│ └── styles.xml
│ └── values
│ ├── dimens.xml
│ ├── strings.xml
│ └── styles.xml
├── README.md
├── build.gradle
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | local.properties
2 | *.iml
3 | gen
4 | .gradle/
5 | lint.xml
6 | .DS_Store
7 | .idea/
8 | *.class
9 |
--------------------------------------------------------------------------------
/HWEncoderExperiments/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/HWEncoderExperiments/build.gradle:
--------------------------------------------------------------------------------
1 | buildscript {
2 | repositories {
3 | mavenCentral()
4 | }
5 | dependencies {
6 | classpath 'com.android.tools.build:gradle:0.5.+'
7 | }
8 | }
9 | apply plugin: 'android'
10 |
11 | repositories {
12 | mavenCentral()
13 | }
14 |
15 | android {
16 | compileSdkVersion 18
17 | buildToolsVersion "18.0.1"
18 |
19 | defaultConfig {
20 | minSdkVersion 18
21 | targetSdkVersion 18
22 | }
23 | }
24 |
25 | dependencies {
26 | compile 'com.android.support:appcompat-v7:18.0.0'
27 | }
28 |
--------------------------------------------------------------------------------
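(Aside: this build script uses the 2013-era Android Gradle DSL: AGP 0.5.+, apply plugin: 'android', and the 'compile' configuration. As a rough, hypothetical sketch, with illustrative version numbers only, the same module configuration in the modern DSL would look like:

plugins {
    id 'com.android.application'
}

android {
    namespace 'net.openwatch.hwencoderexperiments' // replaces the manifest package attribute
    compileSdk 18
    defaultConfig {
        minSdk 18
        targetSdk 18
    }
}

dependencies {
    implementation 'com.android.support:appcompat-v7:18.0.0'
}
)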
/HWEncoderExperiments/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <!-- Note: the manifest's tags were stripped when this dump was extracted; the elements
3 |      below are a reconstruction inferred from the project's code and resources. -->
4 | <manifest xmlns:android="http://schemas.android.com/apk/res/android"
5 |     package="net.openwatch.hwencoderexperiments">
6 |
7 |     <uses-sdk
8 |         android:minSdkVersion="18"
9 |         android:targetSdkVersion="18" />
10 |
11 |     <uses-permission android:name="android.permission.CAMERA" />
12 |     <uses-permission android:name="android.permission.RECORD_AUDIO" />
13 |     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
14 |
15 |     <application
16 |         android:icon="@drawable/ic_launcher"
17 |         android:label="@string/app_name"
18 |         android:theme="@style/AppTheme">
19 |         <activity
20 |             android:name=".HWRecorderActivity"
21 |             android:label="@string/app_name">
22 |             <intent-filter>
23 |                 <action android:name="android.intent.action.MAIN" />
24 |                 <category android:name="android.intent.category.LAUNCHER" />
25 |             </intent-filter>
26 |         </activity>
27 |     </application>
28 | </manifest>
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/ic_launcher-web.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OnlyInAmerica/HWEncoderExperiments/4506ca1a1b96ca6890eebddd12f1b6be2fbc6200/HWEncoderExperiments/src/main/ic_launcher-web.png
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/java/net/openwatch/hwencoderexperiments/ChunkedHWRecorder.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2013 The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | // Enormous thanks to Andrew McFadden for his MediaCodec examples!
18 | // Adapted from http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
19 |
20 | package net.openwatch.hwencoderexperiments;
21 |
22 | import android.content.Context;
23 | import android.graphics.SurfaceTexture;
24 | import android.hardware.Camera;
25 | import android.media.*;
26 | import android.opengl.*;
27 | import android.os.Trace;
28 | import android.util.Log;
29 | import android.view.Surface;
30 |
31 | import java.io.File;
32 | import java.io.IOException;
33 | import java.nio.ByteBuffer;
34 | import java.nio.ByteOrder;
35 | import java.nio.FloatBuffer;
36 | import java.util.List;
37 |
38 | /**
39 |  * Record video from the camera preview and encode it as an MP4 file. Demonstrates the use
40 |  * of MediaMuxer and MediaCodec with Camera input. Also records audio: an AudioRecord feeds
41 |  * microphone PCM to a second (AAC) MediaCodec, and both tracks are muxed into each chunk.
42 |  *
43 |  * Generally speaking, it's better to use MediaRecorder for this sort of thing. This example
44 |  * demonstrates one possible advantage: editing video as it's encoded (a custom GLES 2.0 fragment shader can be installed via changeFragmentShader()).
45 | *
46 | * This uses various features first available in Android "Jellybean" 4.3 (API 18). There is
47 | * no equivalent functionality in previous releases. (You can send the Camera preview to a
48 | * byte buffer with a fully-specified format, but MediaCodec encoders want different input
49 | * formats on different devices, and this use case wasn't well exercised in CTS pre-4.3.)
50 | *
51 | * (This was derived from bits and pieces of CTS tests, and is packaged as such, but is not
52 | * currently part of CTS.)
53 | */
54 | public class ChunkedHWRecorder {
55 |     private static final String TAG = "ChunkedHWRecorder";
56 | private static final boolean VERBOSE = false; // lots of logging
57 | private static final boolean TRACE = true; // systrace
58 | // where to put the output file (note: /sdcard requires WRITE_EXTERNAL_STORAGE permission)
59 | private static String OUTPUT_DIR = "/sdcard/HWEncodingExperiments/";
60 | // parameters for the encoder
61 | private static final String VIDEO_MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
62 |     private static final String AUDIO_MIME_TYPE = "audio/mp4a-latm";    // AAC LC (MPEG-4 audio)
63 | private static final int OUTPUT_FORMAT = MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4;
64 | private static final int VIDEO_WIDTH = 640;
65 | private static final int VIDEO_HEIGHT = 480;
66 | private static final int FRAME_RATE = 30; // 30fps
67 | private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames
68 | private static final long CHUNK_DURATION_SEC = 5; // Duration of video chunks
69 |
70 | // Display Surface
71 | private GLSurfaceView displaySurface;
72 | // encoder / muxer state
73 | private MediaCodec mVideoEncoder;
74 | private MediaCodec mAudioEncoder;
75 | private CodecInputSurface mInputSurface;
76 | private MediaMuxerWrapper mMuxerWrapper;
77 | private MediaMuxerWrapper mMuxerWrapper2;
78 | private TrackInfo mVideoTrackInfo;
79 | private TrackInfo mAudioTrackInfo;
80 | // camera state
81 | private Camera mCamera;
82 | private SurfaceTextureManager mStManager;
83 | // allocate one of these up front so we don't need to do it every time
84 | private MediaCodec.BufferInfo mVideoBufferInfo;
85 | private MediaCodec.BufferInfo mAudioBufferInfo;
86 | // The following formats are fed to MediaCodec.configure
87 | private MediaFormat mVideoFormat;
88 | private MediaFormat mAudioFormat;
89 | // The following are returned when encoder OUTPUT_FORMAT_CHANGED signaled
90 | private MediaFormat mVideoOutputFormat;
91 | private MediaFormat mAudioOutputFormat;
92 |
93 | // recording state
94 | private int leadingChunk = 1;
95 | long startWhen;
96 | int frameCount = 0;
97 | boolean eosSentToAudioEncoder = false;
98 | boolean audioEosRequested = false;
99 | boolean eosSentToVideoEncoder = false;
100 | boolean fullStopReceived = false;
101 | boolean fullStopPerformed = false;
102 |
103 | // debug state
104 | int totalFrameCount = 0;
105 | long startTime;
106 |
107 |
108 | // Audio
109 | public static final int SAMPLE_RATE = 44100;
110 | public static final int SAMPLES_PER_FRAME = 1024; // AAC
111 | public static final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_MONO;
112 | public static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
113 | private AudioRecord audioRecord;
114 | private long lastEncodedAudioTimeStamp = 0;
115 |
116 | // MediaRecorder
117 | boolean useMediaRecorder = false;
118 | MediaRecorderWrapper mMediaRecorderWrapper;
119 |
120 | Context c;
121 |
122 |
123 | class TrackInfo {
124 | int index = 0;
125 | MediaMuxerWrapper muxerWrapper;
126 | }
127 |
128 | class MediaMuxerWrapper {
129 | MediaMuxer muxer;
130 | final int TOTAL_NUM_TRACKS = 2;
131 | boolean started = false;
132 | int chunk;
133 | int numTracksAdded = 0;
134 | int numTracksFinished = 0;
135 |
136 | Object sync = new Object();
137 |
138 | public MediaMuxerWrapper(int format, int chunk){
139 | this.chunk = chunk;
140 | restart(format, chunk);
141 | }
142 |
143 | public int addTrack(MediaFormat format){
144 | numTracksAdded++;
145 | int trackIndex = muxer.addTrack(format);
146 | if(numTracksAdded == TOTAL_NUM_TRACKS){
147 | if (VERBOSE) Log.i(TAG, "All tracks added, starting " + ((this == mMuxerWrapper) ? "muxer1" : "muxer2") +"!");
148 | muxer.start();
149 | started = true;
150 | }
151 | return trackIndex;
152 | }
153 |
154 | public void finishTrack(){
155 | numTracksFinished++;
156 | if(numTracksFinished == TOTAL_NUM_TRACKS){
157 | if (VERBOSE) Log.i(TAG, "All tracks finished, stopping " + ((this == mMuxerWrapper) ? "muxer1" : "muxer2") + "!");
158 | stop();
159 | }
160 |
161 | }
162 |
163 | public boolean allTracksAdded(){
164 | return (numTracksAdded == TOTAL_NUM_TRACKS);
165 | }
166 |
167 | public boolean allTracksFinished(){
168 | return (numTracksFinished == TOTAL_NUM_TRACKS);
169 | }
170 |
171 |
172 | public void stop(){
173 | if(muxer != null){
174 |             if(!allTracksFinished()) Log.e(TAG, "Stopping Muxer before all tracks finished!");
175 | if(!started) Log.e(TAG, "Stopping Muxer before it was started");
176 | muxer.stop();
177 | muxer.release();
178 | muxer = null;
179 | started = false;
180 | chunk = 0;
181 | numTracksAdded = 0;
182 | numTracksFinished = 0;
183 | }
184 | }
185 |
186 | private String outputPathForChunk(int chunk){
187 | return OUTPUT_DIR + VIDEO_WIDTH + "x" + VIDEO_HEIGHT + "_" + chunk + ".mp4";
188 | }
189 |
190 | private void restart(int format, int chunk){
191 | stop();
192 | try {
193 | muxer = new MediaMuxer(outputPathForChunk(chunk), format);
194 | } catch (IOException e) {
195 | throw new RuntimeException("MediaMuxer creation failed", e);
196 | }
197 | }
198 | }
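    // How the chunking works: two MediaMuxerWrappers ping-pong. While chunk N is written
    // to one wrapper, the wrapper for chunk N+1 is already constructed. At a chunk boundary
    // each encoder drains to end-of-stream, and advanceVideoMediaMuxer() /
    // advanceAudioMediaMuxer() moves it to the other wrapper; the wrapper both tracks have
    // left is restart()ed for the chunk after next. leadingChunk tracks the newest chunk index.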
199 |
200 | public ChunkedHWRecorder(Context c){
201 | this.c = c;
202 | }
203 |
204 | public void setDisplaySurface(GLSurfaceView displaySurface){
205 | this.displaySurface = displaySurface;
206 | }
207 |
208 |     public void setDisplayEGLContext(EGLContext context){
209 |         CodecInputSurface.mEGLDisplayContext = context; // static field; mInputSurface may not exist yet
210 |     }
211 |
212 | boolean firstFrameReady = false;
213 | boolean eosReceived = false;
214 | public void startRecording(String outputDir){
215 | if(outputDir != null)
216 | OUTPUT_DIR = outputDir;
217 |
218 | int encBitRate = 1000000; // bps
219 |         int framesPerChunk = (int) (CHUNK_DURATION_SEC * FRAME_RATE);
220 | Log.d(TAG, VIDEO_MIME_TYPE + " output " + VIDEO_WIDTH + "x" + VIDEO_HEIGHT + " @" + encBitRate);
221 |
222 | try {
223 | if (TRACE) Trace.beginSection("prepare");
224 | prepareCamera(VIDEO_WIDTH, VIDEO_HEIGHT, Camera.CameraInfo.CAMERA_FACING_BACK);
225 | prepareEncoder(VIDEO_WIDTH, VIDEO_HEIGHT, encBitRate);
226 | mInputSurface.makeEncodeContextCurrent();
227 | prepareSurfaceTexture();
228 | setupAudioRecord();
229 | if (TRACE) Trace.endSection();
230 |
231 |
232 | File outputHq = FileUtils.createTempFileInRootAppStorage(c, "hq.mp4");
233 | if (TRACE) Trace.beginSection("startMediaRecorder");
234 | if (useMediaRecorder) mMediaRecorderWrapper = new MediaRecorderWrapper(c, outputHq.getAbsolutePath(), mCamera);
235 | startAudioRecord();
236 | if (useMediaRecorder) mMediaRecorderWrapper.startRecording();
237 | if (TRACE) Trace.endSection();
238 | startWhen = System.nanoTime();
239 |
240 | mCamera.startPreview();
241 | SurfaceTexture st = mStManager.getSurfaceTexture();
242 | eosReceived = false;
243 |
244 | while (!(fullStopReceived && eosSentToVideoEncoder)) {
245 | // Feed any pending encoder output into the muxer.
246 | // Chunk encoding
247 | eosReceived = ((frameCount % framesPerChunk) == 0 && frameCount != 0);
248 | if (eosReceived) Log.i(TAG, "Chunkpoint on frame " + frameCount);
249 | audioEosRequested = eosReceived; // test
250 | synchronized (mVideoTrackInfo.muxerWrapper.sync){
251 | if (TRACE) Trace.beginSection("drainVideo");
252 | drainEncoder(mVideoEncoder, mVideoBufferInfo, mVideoTrackInfo, eosReceived || fullStopReceived);
253 | if (TRACE) Trace.endSection();
254 | }
255 | if (fullStopReceived){
256 | break;
257 | }
258 | frameCount++;
259 | totalFrameCount++;
260 |
261 | // Acquire a new frame of input, and render it to the Surface. If we had a
262 | // GLSurfaceView we could switch EGL contexts and call drawImage() a second
263 | // time to render it on screen. The texture can be shared between contexts by
264 | // passing the GLSurfaceView's EGLContext as eglCreateContext()'s share_context
265 | // argument.
266 | if (TRACE) Trace.beginSection("awaitImage");
267 | mStManager.awaitNewImage();
268 | if (TRACE) Trace.endSection();
269 | if (TRACE) Trace.beginSection("drawImage");
270 | mStManager.drawImage();
271 | if (TRACE) Trace.endSection();
272 |
273 |
274 | // Set the presentation time stamp from the SurfaceTexture's time stamp. This
275 | // will be used by MediaMuxer to set the PTS in the video.
276 | mInputSurface.setPresentationTime(st.getTimestamp() - startWhen);
277 |
278 | // Submit it to the encoder. The eglSwapBuffers call will block if the input
279 | // is full, which would be bad if it stayed full until we dequeued an output
280 | // buffer (which we can't do, since we're stuck here). So long as we fully drain
281 | // the encoder before supplying additional input, the system guarantees that we
282 | // can supply another frame without blocking.
283 | if (VERBOSE) Log.d(TAG, "sending frame to encoder");
284 | if (TRACE) Trace.beginSection("swapBuffers");
285 | mInputSurface.swapBuffers();
286 | if (TRACE) Trace.endSection();
287 | if (!firstFrameReady) startTime = System.nanoTime();
288 | firstFrameReady = true;
289 |
290 | /*
291 | if (TRACE) Trace.beginSection("sendAudio");
292 | sendAudioToEncoder(false);
293 | if (TRACE) Trace.endSection();
294 | */
295 | }
296 | Log.i(TAG, "Exiting video encode loop");
297 |
298 | } catch (Exception e){
299 | Log.e(TAG, "Encoding loop exception!");
300 | e.printStackTrace();
301 | } finally {
302 | }
303 | }
304 |
305 | public void stopRecording(){
306 | Log.i(TAG, "stopRecording");
307 | fullStopReceived = true;
308 | if (useMediaRecorder) mMediaRecorderWrapper.stopRecording();
309 | double recordingDurationSec = (System.nanoTime() - startTime) / 1000000000.0;
310 | Log.i(TAG, "Recorded " + recordingDurationSec + " s. Expected " + (FRAME_RATE * recordingDurationSec) + " frames. Got " + totalFrameCount + " for " + (totalFrameCount / recordingDurationSec) + " fps");
311 | }
312 |
313 | /**
314 | * Called internally to finalize HQ and last chunk
315 | */
316 | public void _stopRecording(){
317 | fullStopPerformed = true;
318 |         if (useMediaRecorder && mMediaRecorderWrapper != null) mMediaRecorderWrapper.stopRecording();
319 | releaseCamera();
320 | releaseEncodersAndMuxer();
321 | releaseSurfaceTexture();
322 | if (mInputSurface != null) {
323 | mInputSurface.release();
324 | mInputSurface = null;
325 | }
326 | }
327 |
328 | private void setupAudioRecord(){
329 | int min_buffer_size = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT);
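        // Sizes here are in bytes (16-bit mono PCM is 2 bytes per sample). If the requested
        // size is below the hardware minimum, round the minimum up to a whole number of
        // SAMPLES_PER_FRAME chunks and double it for headroom.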
330 | int buffer_size = SAMPLES_PER_FRAME * 10;
331 | if (buffer_size < min_buffer_size)
332 | buffer_size = ((min_buffer_size / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;
333 |
334 | audioRecord = new AudioRecord(
335 | MediaRecorder.AudioSource.MIC, // source
336 | SAMPLE_RATE, // sample rate, hz
337 | CHANNEL_CONFIG, // channels
338 | AUDIO_FORMAT, // audio format
339 | buffer_size); // buffer size (bytes)
340 | }
341 |
342 | private void startAudioRecord(){
343 | if(audioRecord != null){
344 |
345 | new Thread(new Runnable(){
346 |
347 | @Override
348 | public void run() {
349 | audioRecord.startRecording();
350 | boolean audioEosRequestedCopy = false;
351 | while(true){
352 |
353 | if(!firstFrameReady)
354 | continue;
355 | audioEosRequestedCopy = audioEosRequested; // make sure audioEosRequested doesn't change value mid loop
356 | if (audioEosRequestedCopy || fullStopReceived){ // TODO post eosReceived message with Handler?
357 | Log.i(TAG, "Audio loop caught audioEosRequested / fullStopReceived " + audioEosRequestedCopy + " " + fullStopReceived);
358 | if (TRACE) Trace.beginSection("sendAudio");
359 | sendAudioToEncoder(true);
360 | if (TRACE) Trace.endSection();
361 | }
362 | if (fullStopReceived){
363 | Log.i(TAG, "Stopping AudioRecord");
364 | audioRecord.stop();
365 | }
366 |
367 | synchronized (mAudioTrackInfo.muxerWrapper.sync){
368 | if (TRACE) Trace.beginSection("drainAudio");
369 | drainEncoder(mAudioEncoder, mAudioBufferInfo, mAudioTrackInfo, audioEosRequestedCopy || fullStopReceived);
370 | if (TRACE) Trace.endSection();
371 | }
372 |
373 | if (audioEosRequestedCopy) audioEosRequested = false;
374 |
375 | if (!fullStopReceived){
376 | if (TRACE) Trace.beginSection("sendAudio");
377 | sendAudioToEncoder(false);
378 | if (TRACE) Trace.endSection();
379 | }else{
380 | break;
381 | }
382 | } // end while
383 | }
384 | }).start();
385 |
386 | }
387 |
388 | }
389 |
390 | public void sendAudioToEncoder(boolean endOfStream) {
391 | // send current frame data to encoder
392 | try {
393 | ByteBuffer[] inputBuffers = mAudioEncoder.getInputBuffers();
394 | int inputBufferIndex = mAudioEncoder.dequeueInputBuffer(-1);
395 | if (inputBufferIndex >= 0) {
396 | ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
397 | inputBuffer.clear();
398 |                 long presentationTimeNs = System.nanoTime();
399 |                 int inputLength = audioRecord.read(inputBuffer, SAMPLES_PER_FRAME);
400 |                 if (inputLength == AudioRecord.ERROR_INVALID_OPERATION)
401 |                     Log.e(TAG, "Audio read error");
402 |                 else // back-date the timestamp by the duration of the PCM just read
403 |                      // (inputLength is in bytes; 16-bit mono PCM is 2 bytes per sample)
404 |                     presentationTimeNs -= ((inputLength / 2) * 1000000000L) / SAMPLE_RATE;
405 |                 long presentationTimeUs = (presentationTimeNs - startWhen) / 1000;
406 | if (VERBOSE) Log.i(TAG, "queueing " + inputLength + " audio bytes with pts " + presentationTimeUs);
407 | if (endOfStream) {
408 | Log.i(TAG, "EOS received in sendAudioToEncoder");
409 | mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, inputLength, presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
410 | eosSentToAudioEncoder = true;
411 | } else {
412 | mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, inputLength, presentationTimeUs, 0);
413 | }
414 | }
415 | } catch (Throwable t) {
416 |             Log.e(TAG, "sendAudioToEncoder exception");
417 | t.printStackTrace();
418 | }
419 | }
420 |
421 |
422 | /**
423 | * Attempts to find a preview size that matches the provided width and height (which
424 | * specify the dimensions of the encoded video). If it fails to find a match it just
425 | * uses the default preview size.
426 | *
427 | * TODO: should do a best-fit match.
428 | */
429 | private static void choosePreviewSize(Camera.Parameters parms, int width, int height) {
430 | // We should make sure that the requested MPEG size is less than the preferred
431 | // size, and has the same aspect ratio.
432 | Camera.Size ppsfv = parms.getPreferredPreviewSizeForVideo();
433 | if (VERBOSE && ppsfv != null) {
434 | Log.d(TAG, "Camera preferred preview size for video is " +
435 | ppsfv.width + "x" + ppsfv.height);
436 | }
437 |
438 | for (Camera.Size size : parms.getSupportedPreviewSizes()) {
439 | if (size.width == width && size.height == height) {
440 | parms.setPreviewSize(width, height);
441 | return;
442 | }
443 | }
444 |
445 | Log.w(TAG, "Unable to set preview size to " + width + "x" + height);
446 | if (ppsfv != null) {
447 | parms.setPreviewSize(ppsfv.width, ppsfv.height);
448 | }
449 | }
450 |
451 | /**
452 | * Configures Camera for video capture. Sets mCamera.
453 | *
454 | * Opens a Camera and sets parameters. Does not start preview.
455 | */
456 | private void prepareCamera(int encWidth, int encHeight, int cameraType) {
457 | if (cameraType != Camera.CameraInfo.CAMERA_FACING_FRONT && cameraType != Camera.CameraInfo.CAMERA_FACING_BACK) {
458 | throw new RuntimeException("Invalid cameraType");
459 | }
460 |
461 | Camera.CameraInfo info = new Camera.CameraInfo();
462 |
463 |         // Try to find a camera that matches the requested facing (cameraType).
464 | int numCameras = Camera.getNumberOfCameras();
465 | for (int i = 0; i < numCameras; i++) {
466 | Camera.getCameraInfo(i, info);
467 | if (info.facing == cameraType) {
468 | mCamera = Camera.open(i);
469 | break;
470 | }
471 | }
472 | if (mCamera == null && cameraType == Camera.CameraInfo.CAMERA_FACING_FRONT) {
473 | Log.d(TAG, "No front-facing camera found; opening default");
474 | mCamera = Camera.open(); // opens first back-facing camera
475 | }
476 | if (mCamera == null) {
477 | throw new RuntimeException("Unable to open camera");
478 | }
479 |
480 | Camera.Parameters parms = mCamera.getParameters();
481 |         List<int[]> fpsRanges = parms.getSupportedPreviewFpsRange();
482 | int[] maxFpsRange = fpsRanges.get(fpsRanges.size() - 1);
483 | parms.setPreviewFpsRange(maxFpsRange[0], maxFpsRange[1]);
484 |
485 | choosePreviewSize(parms, encWidth, encHeight);
486 | // leave the frame rate set to default
487 | mCamera.setParameters(parms);
488 |
489 | Camera.Size size = parms.getPreviewSize();
490 | Log.d(TAG, "Camera preview size is " + size.width + "x" + size.height);
491 | }
492 |
493 | /**
494 | * Stops camera preview, and releases the camera to the system.
495 | */
496 | private void releaseCamera() {
497 | if (VERBOSE) Log.d(TAG, "releasing camera");
498 | if (mCamera != null) {
499 | mCamera.stopPreview();
500 | mCamera.release();
501 | mCamera = null;
502 | }
503 | }
504 |
505 | /**
506 | * Configures SurfaceTexture for camera preview. Initializes mStManager, and sets the
507 | * associated SurfaceTexture as the Camera's "preview texture".
508 | *
509 | * Configure the EGL surface that will be used for output before calling here.
510 | */
511 | private void prepareSurfaceTexture() {
512 | mStManager = new SurfaceTextureManager();
513 | SurfaceTexture st = mStManager.getSurfaceTexture();
514 | try {
515 | mCamera.setPreviewTexture(st);
516 | } catch (IOException ioe) {
517 | throw new RuntimeException("setPreviewTexture failed", ioe);
518 | }
519 | }
520 |
521 | /**
522 | * Releases the SurfaceTexture.
523 | */
524 | private void releaseSurfaceTexture() {
525 | if (mStManager != null) {
526 | mStManager.release();
527 | mStManager = null;
528 | }
529 | }
530 |
531 | /**
532 | * Configures encoder and muxer state, and prepares the input Surface. Initializes
533 | * mVideoEncoder, mMuxerWrapper, mInputSurface, mVideoBufferInfo, mVideoTrackInfo, and mMuxerStarted.
534 | */
535 | private void prepareEncoder(int width, int height, int bitRate) {
536 | eosSentToAudioEncoder = false;
537 | eosSentToVideoEncoder = false;
538 | fullStopReceived = false;
539 | mVideoBufferInfo = new MediaCodec.BufferInfo();
540 | mVideoTrackInfo = new TrackInfo();
541 |
542 | mVideoFormat = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, width, height);
543 |
544 | // Set some properties. Failing to specify some of these can cause the MediaCodec
545 | // configure() call to throw an unhelpful exception.
546 | mVideoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
547 | MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
548 | mVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
549 | mVideoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
550 | mVideoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
551 | if (VERBOSE) Log.d(TAG, "format: " + mVideoFormat);
552 |
553 | // Create a MediaCodec encoder, and configure it with our format. Get a Surface
554 | // we can use for input and wrap it with a class that handles the EGL work.
555 | //
556 | // If you want to have two EGL contexts -- one for display, one for recording --
557 | // you will likely want to defer instantiation of CodecInputSurface until after the
558 | // "display" EGL context is created, then modify the eglCreateContext call to
559 | // take eglGetCurrentContext() as the share_context argument.
560 | mVideoEncoder = MediaCodec.createEncoderByType(VIDEO_MIME_TYPE);
561 | mVideoEncoder.configure(mVideoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
562 | mInputSurface = new CodecInputSurface(mVideoEncoder.createInputSurface());
563 | mVideoEncoder.start();
564 |
565 | mAudioBufferInfo = new MediaCodec.BufferInfo();
566 | mAudioTrackInfo = new TrackInfo();
567 |
568 | mAudioFormat = new MediaFormat();
569 | mAudioFormat.setString(MediaFormat.KEY_MIME, AUDIO_MIME_TYPE);
570 | mAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
571 | mAudioFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, 44100);
572 | mAudioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
573 | mAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 128000);
574 | mAudioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 16384);
575 |
576 | mAudioEncoder = MediaCodec.createEncoderByType(AUDIO_MIME_TYPE);
577 | mAudioEncoder.configure(mAudioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
578 | mAudioEncoder.start();
579 |
580 |         // For logging only: the real chunk files are named by the MediaMuxerWrappers via
581 |         // outputPathForChunk(). Ideally output would use Context.getFilesDir(), not a hard-coded dir.
582 |         String outputPath = OUTPUT_DIR + "chunktest." + width + "x" + height + "_" + leadingChunk + ".mp4";
583 |         Log.i(TAG, "Output file is " + outputPath);
584 |
585 |
586 | // Create a MediaMuxer. We can't add the video track and start() the muxer here,
587 | // because our MediaFormat doesn't have the Magic Goodies. These can only be
588 | // obtained from the encoder after it has started processing data.
589 | //
590 |         // Unlike the original CameraToMpegTest, we do want audio: both the H.264 and AAC
591 |         // elementary streams from the two MediaCodecs are muxed into each .mp4 chunk.
592 | //resetMediaMuxer(outputPath);
593 | mMuxerWrapper = new MediaMuxerWrapper(OUTPUT_FORMAT, leadingChunk);
594 | mMuxerWrapper2 = new MediaMuxerWrapper(OUTPUT_FORMAT, leadingChunk + 1); // prepared for next chunk
595 |
596 |
597 | mVideoTrackInfo.index = -1;
598 | mVideoTrackInfo.muxerWrapper = mMuxerWrapper;
599 | mAudioTrackInfo.index = -1;
600 | mAudioTrackInfo.muxerWrapper = mMuxerWrapper;
601 | }
602 |
603 | private void stopAndReleaseVideoEncoder(){
604 | eosSentToVideoEncoder = false;
605 | frameCount = 0;
606 | if (mVideoEncoder != null) {
607 | mVideoEncoder.stop();
608 | mVideoEncoder.release();
609 | mVideoEncoder = null;
610 | }
611 | }
612 |
613 |
614 | private void stopAndReleaseAudioEncoder(){
615 | lastEncodedAudioTimeStamp = 0;
616 | eosSentToAudioEncoder = false;
617 |
618 | if (mAudioEncoder != null) {
619 | mAudioEncoder.stop();
620 | mAudioEncoder.release();
621 | mAudioEncoder = null;
622 | }
623 | }
624 |
625 | private void stopAndReleaseEncoders(){
626 | stopAndReleaseVideoEncoder();
627 | stopAndReleaseAudioEncoder();
628 | }
629 |
630 | /**
631 | * This can be called within drainEncoder, when the end of stream is reached
632 | */
633 | private void chunkVideoEncoder(){
634 | stopAndReleaseVideoEncoder();
635 | // Start Encoder
636 | mVideoBufferInfo = new MediaCodec.BufferInfo();
637 | //mVideoTrackInfo = new TrackInfo();
638 | advanceVideoMediaMuxer();
639 | mVideoEncoder = MediaCodec.createEncoderByType(VIDEO_MIME_TYPE);
640 | mVideoEncoder.configure(mVideoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
641 | mInputSurface.updateSurface(mVideoEncoder.createInputSurface());
642 | mVideoEncoder.start();
643 | mInputSurface.makeEncodeContextCurrent();
644 | }
645 |
646 | private void advanceVideoMediaMuxer(){
647 | MediaMuxerWrapper videoMuxer = (mVideoTrackInfo.muxerWrapper == mMuxerWrapper) ? mMuxerWrapper : mMuxerWrapper2;
648 | MediaMuxerWrapper audioMuxer = (mAudioTrackInfo.muxerWrapper == mMuxerWrapper) ? mMuxerWrapper : mMuxerWrapper2;
649 | Log.i("advanceVideo", "video on " + ((mVideoTrackInfo.muxerWrapper == mMuxerWrapper) ? "muxer1" : "muxer2"));
650 | if(videoMuxer == audioMuxer){
651 | // if both encoders are on same muxer, switch to other muxer
652 | leadingChunk++;
653 | if(videoMuxer == mMuxerWrapper){
654 | Log.i("advanceVideo", "encoders on same muxer. swapping.");
655 | mVideoTrackInfo.muxerWrapper = mMuxerWrapper2;
656 | // testing: can we start next muxer immediately given MediaCodec.getOutputFormat() values?
657 |
658 | }else if(videoMuxer == mMuxerWrapper2){
659 | Log.i("advanceVideo", "encoders on same muxer. swapping.");
660 | mVideoTrackInfo.muxerWrapper = mMuxerWrapper;
661 | // testing: can we start next muxer immediately given MediaCodec.getOutputFormat() values?
662 | }
663 | if(mVideoOutputFormat != null && mAudioOutputFormat != null){
664 | mVideoTrackInfo.muxerWrapper.addTrack(mVideoOutputFormat);
665 | mVideoTrackInfo.muxerWrapper.addTrack(mAudioOutputFormat);
666 | }else{
667 | Log.e(TAG, "mVideoOutputFormat or mAudioOutputFormat is null!");
668 | }
669 | }else{
670 | // if encoders are separate, finalize this muxer, and switch to others
671 | Log.i("advanceVideo", "encoders on diff muxers. restarting");
672 | mVideoTrackInfo.muxerWrapper.restart(OUTPUT_FORMAT, leadingChunk + 1); // prepare muxer for next chunk, but don't alter leadingChunk
673 | mVideoTrackInfo.muxerWrapper = mAudioTrackInfo.muxerWrapper;
674 | }
675 | }
676 |
677 | /**
678 | * This can be called within drainEncoder, when the end of stream is reached
679 | */
680 | private void chunkAudioEncoder(){
681 | stopAndReleaseAudioEncoder();
682 |
683 | // Start Encoder
684 | mAudioBufferInfo = new MediaCodec.BufferInfo();
685 | //mVideoTrackInfo = new TrackInfo();
686 | advanceAudioMediaMuxer();
687 | mAudioEncoder = MediaCodec.createEncoderByType(AUDIO_MIME_TYPE);
688 | mAudioEncoder.configure(mAudioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
689 | mAudioEncoder.start();
690 | }
691 |
692 | private void advanceAudioMediaMuxer(){
693 | MediaMuxerWrapper videoMuxer = (mVideoTrackInfo.muxerWrapper == mMuxerWrapper) ? mMuxerWrapper : mMuxerWrapper2;
694 | MediaMuxerWrapper audioMuxer = (mAudioTrackInfo.muxerWrapper == mMuxerWrapper) ? mMuxerWrapper : mMuxerWrapper2;
695 | Log.i("advanceAudio", "audio on " + ((mAudioTrackInfo.muxerWrapper == mMuxerWrapper) ? "muxer1" : "muxer2"));
696 | if(videoMuxer == audioMuxer){
697 | // If both encoders are on same muxer, switch to other muxer
698 | Log.i("advanceAudio", "encoders on same muxer. swapping.");
699 | leadingChunk++;
700 | if(videoMuxer == mMuxerWrapper){
701 | mAudioTrackInfo.muxerWrapper = mMuxerWrapper2;
702 | }else if(videoMuxer == mMuxerWrapper2){
703 | mAudioTrackInfo.muxerWrapper = mMuxerWrapper;
704 | }
705 | if(mVideoOutputFormat != null && mAudioOutputFormat != null){
706 | mAudioTrackInfo.muxerWrapper.addTrack(mVideoOutputFormat);
707 | mAudioTrackInfo.muxerWrapper.addTrack(mAudioOutputFormat);
708 | }else{
709 | Log.e(TAG, "mVideoOutputFormat or mAudioOutputFormat is null!");
710 | }
711 | }else{
712 | // if encoders are separate, finalize this muxer, and switch to others
713 | Log.i("advanceAudio", "encoders on diff muxers. restarting");
714 | mAudioTrackInfo.muxerWrapper.restart(OUTPUT_FORMAT, leadingChunk + 1); // prepare muxer for next chunk, but don't alter leadingChunk
715 | mAudioTrackInfo.muxerWrapper = mVideoTrackInfo.muxerWrapper;
716 | }
717 | }
718 |
719 | /**
720 | * Releases encoder resources.
721 | */
722 | private void releaseEncodersAndMuxer() {
723 | if (VERBOSE) Log.d(TAG, "releasing encoder objects");
724 | stopAndReleaseEncoders();
725 | if (mMuxerWrapper != null) {
726 | synchronized (mMuxerWrapper.sync){
727 | mMuxerWrapper.stop();
728 | mMuxerWrapper = null;
729 | }
730 | }
731 | if (mMuxerWrapper2 != null) {
732 | synchronized (mMuxerWrapper2.sync){
733 | mMuxerWrapper2.stop();
734 | mMuxerWrapper2 = null;
735 | }
736 | }
737 | }
738 |
739 | /**
740 | * Extracts all pending data from the encoder and forwards it to the muxer.
741 | *
742 | * If endOfStream is not set, this returns when there is no more data to drain. If it
743 | * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
744 | * Calling this with endOfStream set should be done once, right before stopping the muxer.
745 | *
746 |      * This method is shared by the video and audio encoders; trackInfo identifies the
747 |      * muxer and track that drained samples are written to.
748 | */
749 | private void drainEncoder(MediaCodec encoder, MediaCodec.BufferInfo bufferInfo, TrackInfo trackInfo, boolean endOfStream) {
750 | final int TIMEOUT_USEC = 100;
751 |
752 | //TODO: Get Muxer from trackInfo
753 | MediaMuxerWrapper muxerWrapper = trackInfo.muxerWrapper;
754 |
755 | if (VERBOSE) Log.d(TAG, "drain" + ((encoder == mVideoEncoder) ? "Video" : "Audio") + "Encoder(" + endOfStream + ")");
756 | if (endOfStream && encoder == mVideoEncoder) {
757 | if (VERBOSE) Log.d(TAG, "sending EOS to " + ((encoder == mVideoEncoder) ? "video" : "audio") + " encoder");
758 | encoder.signalEndOfInputStream();
759 | eosSentToVideoEncoder = true;
760 | }
761 | //testing
762 | ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
763 |
764 | while (true) {
765 | int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
766 | if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
767 | // no output available yet
768 | if (!endOfStream) {
769 | if (VERBOSE) Log.d(TAG, "no output available. aborting drain");
770 | break; // out of while
771 | } else {
772 | if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
773 | }
774 | } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
775 | // not expected for an encoder
776 | encoderOutputBuffers = encoder.getOutputBuffers();
777 | } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
778 | // should happen before receiving buffers, and should only happen once
779 |
780 | if (muxerWrapper.started) {
781 | //Log.e(TAG, "format changed after muxer start! Can we ignore?");
782 | //throw new RuntimeException("format changed after muxer start");
783 | }else{
784 | MediaFormat newFormat = encoder.getOutputFormat();
785 | if(encoder == mVideoEncoder)
786 | mVideoOutputFormat = newFormat;
787 | else if(encoder == mAudioEncoder)
788 | mAudioOutputFormat = newFormat;
789 |
790 | // now that we have the Magic Goodies, start the muxer
791 | trackInfo.index = muxerWrapper.addTrack(newFormat);
792 | if(!muxerWrapper.allTracksAdded())
793 | break; // Allow both encoders to send output format changed before attempting to write samples
794 | }
795 |
796 | } else if (encoderStatus < 0) {
797 | Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
798 | encoderStatus);
799 | // let's ignore it
800 | } else {
801 | ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
802 | if (encodedData == null) {
803 | throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
804 | " was null");
805 | }
806 |
807 | if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
808 | // The codec config data was pulled out and fed to the muxer when we got
809 | // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
810 | if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
811 | bufferInfo.size = 0;
812 | }
813 |
814 |
815 | if (bufferInfo.size != 0) {
816 | if (!trackInfo.muxerWrapper.started) {
817 | Log.e(TAG, "Muxer not started. dropping " + ((encoder == mVideoEncoder) ? " video" : " audio") + " frames");
818 | //throw new RuntimeException("muxer hasn't started");
819 | } else{
820 | // adjust the ByteBuffer values to match BufferInfo (not needed?)
821 | encodedData.position(bufferInfo.offset);
822 | encodedData.limit(bufferInfo.offset + bufferInfo.size);
823 | if(encoder == mAudioEncoder){
824 | if(bufferInfo.presentationTimeUs < lastEncodedAudioTimeStamp)
825 |                             bufferInfo.presentationTimeUs = lastEncodedAudioTimeStamp += 23219; // one AAC frame: 1024 samples / 44100 Hz ≈ 23219 us
826 | lastEncodedAudioTimeStamp = bufferInfo.presentationTimeUs;
827 | }
828 | if(bufferInfo.presentationTimeUs < 0){
829 | bufferInfo.presentationTimeUs = 0;
830 | }
831 | muxerWrapper.muxer.writeSampleData(trackInfo.index, encodedData, bufferInfo);
832 |
833 | if (VERBOSE)
834 | Log.d(TAG, "sent " + bufferInfo.size + ((encoder == mVideoEncoder) ? " video" : " audio") + " bytes to muxer with pts " + bufferInfo.presentationTimeUs);
835 |
836 | }
837 | }
838 |
839 | encoder.releaseOutputBuffer(encoderStatus, false);
840 |
841 | if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
842 | if (!endOfStream) {
843 | Log.w(TAG, "reached end of stream unexpectedly");
844 | } else {
845 | muxerWrapper.finishTrack();
846 | if (VERBOSE) Log.d(TAG, "end of " + ((encoder == mVideoEncoder) ? " video" : " audio") + " stream reached. ");
847 | if(!fullStopReceived){
848 | if(encoder == mVideoEncoder){
849 | Log.i(TAG, "Chunking video encoder");
850 | if (TRACE) Trace.beginSection("chunkVideoEncoder");
851 | chunkVideoEncoder();
852 | if (TRACE) Trace.endSection();
853 | }else if(encoder == mAudioEncoder){
854 | Log.i(TAG, "Chunking audio encoder");
855 | if (TRACE) Trace.beginSection("chunkAudioEncoder");
856 | chunkAudioEncoder();
857 | if (TRACE) Trace.endSection();
858 | }else
859 | Log.e(TAG, "Unknown encoder passed to drainEncoder!");
860 | }else{
861 |
862 | if(encoder == mVideoEncoder){
863 | Log.i(TAG, "Stopping and releasing video encoder");
864 | stopAndReleaseVideoEncoder();
865 | } else if(encoder == mAudioEncoder){
866 | Log.i(TAG, "Stopping and releasing audio encoder");
867 | stopAndReleaseAudioEncoder();
868 | }
869 | //stopAndReleaseEncoders();
870 | }
871 | }
872 | break; // out of while
873 | }
874 | }
875 | }
876 | long endTime = System.nanoTime();
877 | }
878 |
879 |
880 | /**
881 | * Holds state associated with a Surface used for MediaCodec encoder input.
882 | *
883 | * The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses
884 | * that to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to
885 | * be sent to the video encoder.
886 | *
887 | * This object owns the Surface -- releasing this will release the Surface too.
888 | */
889 | private static class CodecInputSurface {
890 | private static final int EGL_RECORDABLE_ANDROID = 0x3142;
891 | private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
892 | private EGLContext mEGLEncodeContext = EGL14.EGL_NO_CONTEXT;
893 | public static EGLContext mEGLDisplayContext = EGL14.EGL_NO_CONTEXT;
894 | private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
895 | private Surface mSurface;
896 |
897 | EGLConfig[] configs;
898 | int[] surfaceAttribs = {
899 | EGL14.EGL_NONE
900 | };
901 |
902 | /**
903 | * Creates a CodecInputSurface from a Surface.
904 | */
905 | public CodecInputSurface(Surface surface) {
906 | if (surface == null) {
907 | throw new NullPointerException();
908 | }
909 | mSurface = surface;
910 |
911 | eglSetup();
912 | }
913 |
914 | public void updateSurface(Surface newSurface){
915 | // Destroy old EglSurface
916 | EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
917 | mSurface = newSurface;
918 | // create new EglSurface
919 | mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
920 | surfaceAttribs, 0);
921 | checkEglError("eglCreateWindowSurface");
922 | // eglMakeCurrent called in chunkRecording() after mVideoEncoder.start()
923 | }
924 |
925 | /**
926 | * Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
927 | */
928 | private void eglSetup() {
929 | if(VERBOSE) Log.i(TAG, "Creating EGL14 Surface");
930 | mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
931 | if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
932 | throw new RuntimeException("unable to get EGL14 display");
933 | }
934 | int[] version = new int[2];
935 | if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
936 | throw new RuntimeException("unable to initialize EGL14");
937 | }
938 |
939 | // Configure EGL for recording and OpenGL ES 2.0.
940 | int[] attribList = {
941 | EGL14.EGL_RED_SIZE, 8,
942 | EGL14.EGL_GREEN_SIZE, 8,
943 | EGL14.EGL_BLUE_SIZE, 8,
944 | EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
945 | EGL_RECORDABLE_ANDROID, 1,
946 | EGL14.EGL_NONE
947 | };
948 | configs = new EGLConfig[1];
949 | int[] numConfigs = new int[1];
950 | EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
951 | numConfigs, 0);
952 | checkEglError("eglCreateContext RGB888+recordable ES2");
953 |
954 | // Configure context for OpenGL ES 2.0.
955 | int[] attrib_list = {
956 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
957 | EGL14.EGL_NONE
958 | };
959 | if(mEGLDisplayContext == EGL14.EGL_NO_CONTEXT) Log.e(TAG, "mEGLDisplayContext not set properly");
960 | mEGLEncodeContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.eglGetCurrentContext(),
961 | attrib_list, 0);
962 | checkEglError("eglCreateContext");
963 |
964 | // Create a window surface, and attach it to the Surface we received.
965 | mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
966 | surfaceAttribs, 0);
967 | checkEglError("eglCreateWindowSurface");
968 | }
969 |
970 | /**
971 | * Discards all resources held by this class, notably the EGL context. Also releases the
972 | * Surface that was passed to our constructor.
973 | */
974 | public void release() {
975 | if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
976 | EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
977 | EGL14.EGL_NO_CONTEXT);
978 | EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
979 | EGL14.eglDestroyContext(mEGLDisplay, mEGLEncodeContext);
980 | EGL14.eglReleaseThread();
981 | EGL14.eglTerminate(mEGLDisplay);
982 | }
983 | mSurface.release();
984 |
985 | mEGLDisplay = EGL14.EGL_NO_DISPLAY;
986 | mEGLEncodeContext = EGL14.EGL_NO_CONTEXT;
987 | mEGLSurface = EGL14.EGL_NO_SURFACE;
988 |
989 | mSurface = null;
990 | }
991 |
992 | public void makeDisplayContextCurrent(){
993 | makeCurrent(mEGLDisplayContext);
994 | }
995 | public void makeEncodeContextCurrent(){
996 | makeCurrent(mEGLEncodeContext);
997 | }
998 |
999 | /**
1000 | * Makes our EGL context and surface current.
1001 | */
1002 | private void makeCurrent(EGLContext context) {
1003 | EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, context);
1004 | checkEglError("eglMakeCurrent");
1005 | }
1006 |
1007 | /**
1008 | * Calls eglSwapBuffers. Use this to "publish" the current frame.
1009 | */
1010 | public boolean swapBuffers() {
1011 | boolean result = EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
1012 | checkEglError("eglSwapBuffers");
1013 | return result;
1014 | }
1015 |
1016 | /**
1017 | * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
1018 | */
1019 | public void setPresentationTime(long nsecs) {
1020 | EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
1021 | checkEglError("eglPresentationTimeANDROID");
1022 | }
1023 |
1024 | /**
1025 | * Checks for EGL errors. Throws an exception if one is found.
1026 | */
1027 | private void checkEglError(String msg) {
1028 | int error;
1029 | if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
1030 | throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
1031 | }
1032 | }
1033 | }
1034 |
1035 | /**
1036 | * Manages a SurfaceTexture. Creates SurfaceTexture and TextureRender objects, and provides
1037 | * functions that wait for frames and render them to the current EGL surface.
1038 | *
1039 | * The SurfaceTexture can be passed to Camera.setPreviewTexture() to receive camera output.
1040 | */
1041 | private static class SurfaceTextureManager
1042 | implements SurfaceTexture.OnFrameAvailableListener {
1043 | private SurfaceTexture mSurfaceTexture;
1044 | private ChunkedHWRecorder.STextureRender mTextureRender;
1045 | private Object mFrameSyncObject = new Object(); // guards mFrameAvailable
1046 | private boolean mFrameAvailable;
1047 |
1048 | /**
1049 | * Creates instances of TextureRender and SurfaceTexture.
1050 | */
1051 | public SurfaceTextureManager() {
1052 | mTextureRender = new ChunkedHWRecorder.STextureRender();
1053 | mTextureRender.surfaceCreated();
1054 |
1055 | if (VERBOSE) Log.d(TAG, String.format("textureID=%d", mTextureRender.getTextureId()) );
1056 | mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
1057 |
1058 | // This doesn't work if this object is created on the thread that CTS started for
1059 | // these test cases.
1060 | //
1061 | // The CTS-created thread has a Looper, and the SurfaceTexture constructor will
1062 | // create a Handler that uses it. The "frame available" message is delivered
1063 | // there, but since we're not a Looper-based thread we'll never see it. For
1064 | // this to do anything useful, OutputSurface must be created on a thread without
1065 | // a Looper, so that SurfaceTexture uses the main application Looper instead.
1066 | //
1067 | // Java language note: passing "this" out of a constructor is generally unwise,
1068 | // but we should be able to get away with it here.
1069 | mSurfaceTexture.setOnFrameAvailableListener(this);
1070 | }
1071 |
1072 | public void release() {
1073 | // this causes a bunch of warnings that appear harmless but might confuse someone:
1074 | // W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
1075 | //mSurfaceTexture.release();
1076 |
1077 | mTextureRender = null;
1078 | mSurfaceTexture = null;
1079 | }
1080 |
1081 | /**
1082 | * Returns the SurfaceTexture.
1083 | */
1084 | public SurfaceTexture getSurfaceTexture() {
1085 | return mSurfaceTexture;
1086 | }
1087 |
1088 | /**
1089 | * Replaces the fragment shader.
1090 | */
1091 | public void changeFragmentShader(String fragmentShader) {
1092 | mTextureRender.changeFragmentShader(fragmentShader);
1093 | }
1094 |
1095 | /**
1096 | * Latches the next buffer into the texture. Must be called from the thread that created
1097 | * the OutputSurface object.
1098 | */
1099 | public void awaitNewImage() {
1100 | final int TIMEOUT_MS = 4500;
1101 | synchronized (mFrameSyncObject) {
1102 | while (!mFrameAvailable) {
1103 | try {
1104 | // Wait for onFrameAvailable() to signal us. Use a timeout to avoid
1105 | // stalling the test if it doesn't arrive.
1106 | if(VERBOSE) Log.i(TAG, "Waiting for Frame in Thread");
1107 | mFrameSyncObject.wait(TIMEOUT_MS);
1108 | if (!mFrameAvailable) {
1109 | // TODO: if "spurious wakeup", continue while loop
1110 | throw new RuntimeException("Camera frame wait timed out");
1111 | }
1112 | } catch (InterruptedException ie) {
1113 | // shouldn't happen
1114 | throw new RuntimeException(ie);
1115 | }
1116 | }
1117 | mFrameAvailable = false;
1118 | }
1119 |
1120 | // Latch the data.
1121 | mTextureRender.checkGlError("before updateTexImage");
1122 | mSurfaceTexture.updateTexImage();
1123 |
1124 | }
1125 |
1126 | /**
1127 | * Draws the data from SurfaceTexture onto the current EGL surface.
1128 | */
1129 | public void drawImage() {
1130 | mTextureRender.drawFrame(mSurfaceTexture);
1131 | }
1132 |
1133 | @Override
1134 | public void onFrameAvailable(SurfaceTexture st) {
1135 | if (VERBOSE) Log.d(TAG, "new frame available");
1136 | synchronized (mFrameSyncObject) {
1137 | if (mFrameAvailable) {
1138 | throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
1139 | }
1140 | mFrameAvailable = true;
1141 | mFrameSyncObject.notifyAll();
1142 | }
1143 | }
1144 | }
1145 |
1146 | /**
1147 | * Code for rendering a texture onto a surface using OpenGL ES 2.0.
1148 | */
1149 | private static class STextureRender {
1150 | private static final int FLOAT_SIZE_BYTES = 4;
1151 | private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
1152 | private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
1153 | private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
1154 | private static final String VERTEX_SHADER =
1155 | "uniform mat4 uMVPMatrix;\n" +
1156 | "uniform mat4 uSTMatrix;\n" +
1157 | "attribute vec4 aPosition;\n" +
1158 | "attribute vec4 aTextureCoord;\n" +
1159 | "varying vec2 vTextureCoord;\n" +
1160 | "void main() {\n" +
1161 | " gl_Position = uMVPMatrix * aPosition;\n" +
1162 | " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
1163 | "}\n";
1164 | private static final String FRAGMENT_SHADER =
1165 | "#extension GL_OES_EGL_image_external : require\n" +
1166 | "precision mediump float;\n" + // highp here doesn't seem to matter
1167 | "varying vec2 vTextureCoord;\n" +
1168 | "uniform samplerExternalOES sTexture;\n" +
1169 | "void main() {\n" +
1170 | " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
1171 | "}\n";
1172 | private final float[] mTriangleVerticesData = {
1173 | // X, Y, Z, U, V
1174 | -1.0f, -1.0f, 0, 0.f, 0.f,
1175 | 1.0f, -1.0f, 0, 1.f, 0.f,
1176 | -1.0f, 1.0f, 0, 0.f, 1.f,
1177 | 1.0f, 1.0f, 0, 1.f, 1.f,
1178 | };
1179 | private FloatBuffer mTriangleVertices;
1180 | private float[] mMVPMatrix = new float[16];
1181 | private float[] mSTMatrix = new float[16];
1182 | private int mProgram;
1183 | private int mTextureID = -12345;
1184 | private int muMVPMatrixHandle;
1185 | private int muSTMatrixHandle;
1186 | private int maPositionHandle;
1187 | private int maTextureHandle;
1188 |
1189 | public STextureRender() {
1190 | mTriangleVertices = ByteBuffer.allocateDirect(
1191 | mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
1192 | .order(ByteOrder.nativeOrder()).asFloatBuffer();
1193 | mTriangleVertices.put(mTriangleVerticesData).position(0);
1194 |
1195 | Matrix.setIdentityM(mSTMatrix, 0);
1196 | }
1197 |
1198 | public int getTextureId() {
1199 | return mTextureID;
1200 | }
1201 |
1202 | public void drawFrame(SurfaceTexture st) {
1203 | checkGlError("onDrawFrame start");
1204 | st.getTransformMatrix(mSTMatrix);
1205 |
1206 | GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
1207 | GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
1208 |
1209 | GLES20.glUseProgram(mProgram);
1210 | checkGlError("glUseProgram");
1211 |
1212 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
1213 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
1214 |
1215 | mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
1216 | GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
1217 | TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
1218 | checkGlError("glVertexAttribPointer maPosition");
1219 | GLES20.glEnableVertexAttribArray(maPositionHandle);
1220 | checkGlError("glEnableVertexAttribArray maPositionHandle");
1221 |
1222 | mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
1223 | GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
1224 | TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
1225 | checkGlError("glVertexAttribPointer maTextureHandle");
1226 | GLES20.glEnableVertexAttribArray(maTextureHandle);
1227 | checkGlError("glEnableVertexAttribArray maTextureHandle");
1228 |
1229 | Matrix.setIdentityM(mMVPMatrix, 0);
1230 | GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
1231 | GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
1232 |
1233 | GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
1234 | checkGlError("glDrawArrays");
1235 | GLES20.glFinish();
1236 | }
1237 |
1238 | /**
1239 | * Initializes GL state. Call this after the EGL surface has been created and made current.
1240 | */
1241 | public void surfaceCreated() {
1242 | mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
1243 | if (mProgram == 0) {
1244 | throw new RuntimeException("failed creating program");
1245 | }
1246 | maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
1247 | checkGlError("glGetAttribLocation aPosition");
1248 | if (maPositionHandle == -1) {
1249 | throw new RuntimeException("Could not get attrib location for aPosition");
1250 | }
1251 | maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
1252 | checkGlError("glGetAttribLocation aTextureCoord");
1253 | if (maTextureHandle == -1) {
1254 | throw new RuntimeException("Could not get attrib location for aTextureCoord");
1255 | }
1256 |
1257 | muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
1258 | checkGlError("glGetUniformLocation uMVPMatrix");
1259 | if (muMVPMatrixHandle == -1) {
1260 | throw new RuntimeException("Could not get attrib location for uMVPMatrix");
1261 | }
1262 |
1263 | muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
1264 | checkGlError("glGetUniformLocation uSTMatrix");
1265 | if (muSTMatrixHandle == -1) {
1266 | throw new RuntimeException("Could not get attrib location for uSTMatrix");
1267 | }
1268 |
1269 | int[] textures = new int[1];
1270 | GLES20.glGenTextures(1, textures, 0);
1271 |
1272 | mTextureID = textures[0];
1273 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
1274 | checkGlError("glBindTexture mTextureID");
1275 |
1276 | GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
1277 | GLES20.GL_NEAREST);
1278 | GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
1279 | GLES20.GL_LINEAR);
1280 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
1281 | GLES20.GL_CLAMP_TO_EDGE);
1282 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
1283 | GLES20.GL_CLAMP_TO_EDGE);
1284 | checkGlError("glTexParameter");
1285 | }
1286 |
1287 | /**
1288 |          * Replaces the fragment shader. Pass in null to reset to the default shader.
1289 | */
1290 | public void changeFragmentShader(String fragmentShader) {
1291 | if (fragmentShader == null) {
1292 | fragmentShader = FRAGMENT_SHADER;
1293 | }
1294 | GLES20.glDeleteProgram(mProgram);
1295 | mProgram = createProgram(VERTEX_SHADER, fragmentShader);
1296 | if (mProgram == 0) {
1297 | throw new RuntimeException("failed creating program");
1298 | }
1299 | }
1300 |
1301 | private int loadShader(int shaderType, String source) {
1302 | int shader = GLES20.glCreateShader(shaderType);
1303 | checkGlError("glCreateShader type=" + shaderType);
1304 | GLES20.glShaderSource(shader, source);
1305 | GLES20.glCompileShader(shader);
1306 | int[] compiled = new int[1];
1307 | GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
1308 | if (compiled[0] == 0) {
1309 | Log.e(TAG, "Could not compile shader " + shaderType + ":");
1310 | Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
1311 | GLES20.glDeleteShader(shader);
1312 | shader = 0;
1313 | }
1314 | return shader;
1315 | }
1316 |
1317 | private int createProgram(String vertexSource, String fragmentSource) {
1318 | int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
1319 | if (vertexShader == 0) {
1320 | return 0;
1321 | }
1322 | int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
1323 | if (pixelShader == 0) {
1324 | return 0;
1325 | }
1326 |
1327 | int program = GLES20.glCreateProgram();
1328 | checkGlError("glCreateProgram");
1329 | if (program == 0) {
1330 | Log.e(TAG, "Could not create program");
1331 | }
1332 | GLES20.glAttachShader(program, vertexShader);
1333 | checkGlError("glAttachShader");
1334 | GLES20.glAttachShader(program, pixelShader);
1335 | checkGlError("glAttachShader");
1336 | GLES20.glLinkProgram(program);
1337 | int[] linkStatus = new int[1];
1338 | GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
1339 | if (linkStatus[0] != GLES20.GL_TRUE) {
1340 | Log.e(TAG, "Could not link program: ");
1341 | Log.e(TAG, GLES20.glGetProgramInfoLog(program));
1342 | GLES20.glDeleteProgram(program);
1343 | program = 0;
1344 | }
1345 | return program;
1346 | }
1347 |
1348 | public void checkGlError(String op) {
1349 | int error;
1350 | while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
1351 | Log.e(TAG, op + ": glError " + error);
1352 | throw new RuntimeException(op + ": glError " + error);
1353 | }
1354 | }
1355 | }
1356 | }
--------------------------------------------------------------------------------
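(Usage note: a minimal, hypothetical sketch of how a host Activity might drive ChunkedHWRecorder. The RecorderHost name and wiring below are illustrative assumptions, not code from this repo; see HWRecorderActivity.java for the project's actual entry point.)

// Hypothetical host, for illustration only. startRecording() blocks in its encode loop,
// so it must run off the UI thread; stopRecording() only sets a flag that the loop
// observes, so calling it from the UI thread is safe.
public class RecorderHost extends android.app.Activity {
    private ChunkedHWRecorder chunkedHWRecorder;

    void beginRecording() {
        chunkedHWRecorder = new ChunkedHWRecorder(this); // Context is used for file paths
        // A plain Thread (no Looper), so SurfaceTexture's "frame available" callbacks are
        // delivered on the main application Looper, per SurfaceTextureManager's notes.
        new Thread(new Runnable() {
            @Override
            public void run() {
                chunkedHWRecorder.startRecording(null); // null selects the default OUTPUT_DIR
            }
        }).start();
    }

    void endRecording() {
        chunkedHWRecorder.stopRecording();
    }
}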
/HWEncoderExperiments/src/main/java/net/openwatch/hwencoderexperiments/FileUtils.java:
--------------------------------------------------------------------------------
1 | package net.openwatch.hwencoderexperiments;
2 |
3 | import android.content.Context;
4 | import android.os.Environment;
5 | import android.util.Log;
6 |
7 | import java.io.File;
8 | import java.io.IOException;
9 |
10 | public class FileUtils {
11 |
12 | static final String TAG = "FileUtils";
13 |
14 | static final String OUTPUT_DIR = "HWEncodingExperiments"; // Directory relative to External or Internal (fallback) Storage
15 |
16 | /**
17 | * Returns a Java File initialized to a directory of the given name
18 | * at the root storage location, preferring external storage.
19 | * If the directory does not exist, it is created before this call returns.
20 | * If a file with a conflicting name exists, this method returns null.
21 | *
22 | * @param c the context used to determine the internal storage location when external storage is unavailable
23 | * @param directory_name the name of the directory desired at the storage location
24 | * @return a File pointing to the storage directory, or null if a file with a conflicting
25 | * name exists
26 | */
27 | public static File getRootStorageDirectory(Context c, String directory_name){
28 | File result;
29 | // First, try getting access to the sdcard partition
30 | if(Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)){
31 | Log.d(TAG,"Using sdcard");
32 | result = new File(Environment.getExternalStorageDirectory(), directory_name);
33 | } else {
34 | // Else, use the internal storage directory for this application
35 | Log.d(TAG,"Using internal storage");
36 | result = new File(c.getApplicationContext().getFilesDir(), directory_name);
37 | }
38 |
39 | if (!result.exists()) {
40 | result.mkdir();
41 | } else if (result.isFile()) {
42 | return null;
43 | }
44 | Log.d("getRootStorageDirectory", result.getAbsolutePath());
45 | return result;
46 | }
47 |
48 | /**
49 | * Returns a Java File initialized to a directory of the given name
50 | * within the given parent directory.
51 | * @param parent_directory a File representing the directory in which the new child will reside
52 | * @param new_child_directory_name the name of the desired child directory
53 | * @return a File pointing to the desired directory, or null if a file with a conflicting
54 | * name exists or the directory could not be created
55 | */
56 | public static File getStorageDirectory(File parent_directory, String new_child_directory_name){
57 |
58 | File result = new File(parent_directory, new_child_directory_name);
59 | if (!result.exists()) {
60 | if (result.mkdir()) {
61 | return result;
62 | } else {
63 | Log.e("getStorageDirectory", "Error creating " + result.getAbsolutePath());
64 | return null;
65 | }
66 | } else if (result.isFile()) {
67 | return null;
68 | }
69 |
70 | Log.d("getStorageDirectory", "directory ready: " + result.getAbsolutePath());
71 | return result;
72 | }
73 |
74 | /**
75 | * Returns a File with the given root directory, filename, and extension.
76 | * The resulting file is safe for use with Android's MediaRecorder.
77 | * @param c currently unused
78 | * @param root the directory that will contain the new file
79 | * @param filename the desired name, without extension
80 | * @param extension the desired extension, with or without a leading "."
81 | * @return the newly created File, or null if creation failed
82 | */
83 | public static File createTempFile(Context c, File root, String filename, String extension){
84 | File output = null;
85 | try {
86 | if(filename != null){
87 | if(!extension.contains("."))
88 | extension = "." + extension;
89 | output = new File(root, filename + extension);
90 | output.createNewFile();
91 | //output = File.createTempFile(filename, extension, root);
92 | Log.i(TAG, "Created temp file: " + output.getAbsolutePath());
93 | }
94 | return output;
95 | } catch (IOException e) {
96 | e.printStackTrace();
97 | return null;
98 | }
99 | }
100 |
101 |
102 | public static File createTempFileInRootAppStorage(Context c, String filename){
103 | File recordingDir = FileUtils.getRootStorageDirectory(c, OUTPUT_DIR);
104 | return createTempFile(c, recordingDir, filename.split("\\.")[0], filename.split("\\.")[1]);
105 | }
106 |
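// A minimal usage sketch (hypothetical caller code; assumes a valid Context and
// the WRITE_EXTERNAL_STORAGE permission). Note that createTempFileInRootAppStorage
// expects "filename" to contain exactly one '.' separating name from extension:
//
//   File movie = FileUtils.createTempFileInRootAppStorage(context, "test.mp4");
//   // -> /sdcard/HWEncodingExperiments/test.mp4 (or the internal-storage fallback)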
107 | }
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/java/net/openwatch/hwencoderexperiments/HWRecorderActivity.java:
--------------------------------------------------------------------------------
1 | package net.openwatch.hwencoderexperiments;
2 |
3 | import android.app.Activity;
4 | import android.os.Bundle;
5 | import android.view.View;
6 | import android.widget.Button;
7 |
8 | public class HWRecorderActivity extends Activity {
9 | private static final String TAG = "CameraToMpegTest";
10 | boolean recording = false;
11 | ChunkedHWRecorder chunkedHWRecorder;
12 |
13 | //GLSurfaceView glSurfaceView;
14 | //GlSurfaceViewRenderer glSurfaceViewRenderer = new GlSurfaceViewRenderer();
15 | @Override
16 | protected void onCreate(Bundle savedInstanceState){
17 | super.onCreate(savedInstanceState);
18 | setContentView(R.layout.activity_hwrecorder);
19 | //glSurfaceView = (GLSurfaceView) findViewById(R.id.glSurfaceView);
20 | //glSurfaceView.setRenderer(glSurfaceViewRenderer);
21 | }
22 |
23 | @Override
24 | public void onPause(){
25 | super.onPause();
26 | //glSurfaceView.onPause();
27 | }
28 |
29 | @Override
30 | public void onResume(){
31 | super.onResume();
32 | //glSurfaceView.onResume();
33 | }
34 |
35 | public void onRunTestButtonClicked(View v){
36 | if(!recording){
37 | try {
38 | startChunkedHWRecorder();
39 | recording = true;
40 | ((Button) v).setText("Stop Recording");
41 | } catch (Throwable throwable) {
42 | throwable.printStackTrace();
43 | }
44 | }else{
45 | chunkedHWRecorder.stopRecording();
46 | recording = false;
47 | ((Button) v).setText("Start Recording");
48 | }
49 | }
50 |
51 | /**
52 | * test entry point
53 | */
54 | public void startChunkedHWRecorder() throws Throwable {
55 | chunkedHWRecorder = new ChunkedHWRecorder(getApplicationContext());
56 | //chunkedHWRecorder.setDisplayEGLContext(context);
57 | ChunkedHWRecorderWrapper.runTest(chunkedHWRecorder);
58 | }
59 |
60 |
61 | /**
62 | * Wraps ChunkedHWRecorder.startRecording(). This is necessary because SurfaceTexture will try to use
63 | * the looper in the current thread if one exists, and the CTS tests create one on the
64 | * test thread.
65 | *
66 | * The wrapper is intended to propagate exceptions thrown by the worker thread back to the caller; see the note in runTest().
67 | */
68 | private static class ChunkedHWRecorderWrapper implements Runnable {
69 | private Throwable mThrowable;
70 | private ChunkedHWRecorder chunkedHwRecorder;
71 |
72 | private ChunkedHWRecorderWrapper(ChunkedHWRecorder recorder) {
73 | chunkedHwRecorder = recorder;
74 | }
75 |
76 | /**
77 | * Entry point.
78 | */
79 | public static void runTest(ChunkedHWRecorder obj) throws Throwable {
80 | ChunkedHWRecorderWrapper wrapper = new ChunkedHWRecorderWrapper(obj);
81 | Thread th = new Thread(wrapper, "codec test");
82 | th.start();
83 | // th.join() is deliberately not called: joining here would block the thread
84 | // that receives the onFrameAvailable callbacks. As a consequence, the check
85 | // below runs before the worker has done any work, so errors from the recorder
86 | // thread are caught in run() rather than rethrown here.
87 | //th.join();
85 | if (wrapper.mThrowable != null) {
86 | throw wrapper.mThrowable;
87 | }
88 | }
89 |
90 | @Override
91 | public void run() {
92 | try {
93 | chunkedHwRecorder.startRecording(null);
94 | } catch (Throwable th) {
95 | mThrowable = th;
96 | }
97 | }
98 | }
99 |
100 | /*
101 | static EGLContext context;
102 |
103 | public class GlSurfaceViewRenderer implements GLSurfaceView.Renderer{
104 |
105 | @Override
106 | public void onSurfaceCreated(GL10 gl, EGLConfig config) {
107 | Log.i(TAG, "GLSurfaceView created");
108 | context = EGL14.eglGetCurrentContext();
109 | if(context == EGL14.EGL_NO_CONTEXT)
110 | Log.e(TAG, "failed to get valid EGLContext");
111 |
112 | EGL14.eglMakeCurrent(EGL14.eglGetCurrentDisplay(), EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
113 | }
114 |
115 | @Override
116 | public void onSurfaceChanged(GL10 gl, int width, int height) {
117 |
118 | }
119 |
120 | @Override
121 | public void onDrawFrame(GL10 gl) {
122 | }
123 | }
124 | */
125 |
126 | }
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/java/net/openwatch/hwencoderexperiments/MediaRecorderWrapper.java:
--------------------------------------------------------------------------------
1 | package net.openwatch.hwencoderexperiments;
2 |
3 | import android.content.Context;
4 | import android.hardware.Camera;
5 | import android.media.CamcorderProfile;
6 | import android.media.MediaRecorder;
7 | import android.util.Log;
8 |
9 | import java.io.IOException;
10 |
11 | /**
12 | * Created by davidbrodsky on 9/23/13.
13 | */
14 | public class MediaRecorderWrapper {
15 | static final String TAG = "MediaRecorderWrapper";
16 |
17 | Camera mCamera;
18 | MediaRecorder mMediaRecorder;
19 | Context c;
20 | String outputLocation;
21 |
22 | boolean recordAudio = false;
23 | boolean isRecording = false;
24 |
25 | public MediaRecorderWrapper(Context c, String outputLocation, Camera camera){
26 | mCamera = camera;
27 | this.c = c;
28 | this.outputLocation = outputLocation;
29 | }
30 |
31 | public MediaRecorderWrapper recordAudio(boolean recordAudio){
32 | this.recordAudio = recordAudio;
33 | return this;
34 | }
35 |
36 | private boolean prepareVideoRecorder(){
37 | if(mCamera == null)
38 | return false;
39 |
40 | mMediaRecorder = new MediaRecorder();
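// Step 1: Unlock and set camera to MediaRecorder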
41 | mCamera.unlock();
42 | mMediaRecorder.setCamera(mCamera);
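// Step 2: Set sources (audio only when requested)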
43 | if(recordAudio) mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
44 | mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
45 |
46 | // Step 3: Set a CamcorderProfile (requires API Level 8 or higher)
47 | if(recordAudio)
48 | mMediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_720P));
49 | else{
50 | mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
51 | mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
52 | mMediaRecorder.setVideoEncodingBitRate(2500000);
53 | mMediaRecorder.setVideoSize(640, 480);
54 | mMediaRecorder.setVideoFrameRate(30);
55 | }
56 |
57 | // Step 4: Set output file
58 | mMediaRecorder.setOutputFile(outputLocation);
59 |
60 | // Step 6: Prepare configured MediaRecorder (Step 5, setting a preview display, is not needed here)
61 | try {
62 | mMediaRecorder.prepare();
63 | } catch (IllegalStateException e) {
64 | Log.d(TAG, "IllegalStateException preparing MediaRecorder: " + e.getMessage());
65 | releaseMediaRecorder();
66 | return false;
67 | } catch (IOException e) {
68 | Log.d(TAG, "IOException preparing MediaRecorder: " + e.getMessage());
69 | releaseMediaRecorder();
70 | return false;
71 | }
72 | return true;
73 | }
74 |
75 | public boolean startRecording(){
76 | if (prepareVideoRecorder()) {
77 | // Camera is available and unlocked, MediaRecorder is prepared,
78 | // now you can start recording
79 | mMediaRecorder.start();
80 |
81 | // inform the user that recording has started
82 | isRecording = true;
83 | }else
84 | isRecording = false;
85 | return isRecording;
86 | }
87 |
88 | public boolean stopRecording(){
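// Note: MediaRecorder.stop() throws a RuntimeException if it is called before
// any valid audio/video data has been captured, so avoid stopping immediately
// after start().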
89 | mMediaRecorder.stop();
90 | releaseMediaRecorder();
91 | isRecording = false;
92 | return isRecording;
93 | }
94 |
95 | private void releaseMediaRecorder(){
96 | if (mMediaRecorder != null) {
97 | mMediaRecorder.reset(); // clear recorder configuration
98 | mMediaRecorder.release(); // release the recorder object
99 | mMediaRecorder = null;
100 | mCamera.lock(); // lock camera for later use
101 | }
102 | }
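// A minimal usage sketch (hypothetical caller code; "camera" must be an
// already-opened android.hardware.Camera and "context" a valid Context):
//
//   MediaRecorderWrapper recorder = new MediaRecorderWrapper(
//           context, "/sdcard/HWEncodingExperiments/reference.mp4", camera)
//           .recordAudio(true);
//   if (recorder.startRecording()) {
//       // ... record ...
//       recorder.stopRecording();
//   }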
103 | }
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/res/drawable-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OnlyInAmerica/HWEncoderExperiments/4506ca1a1b96ca6890eebddd12f1b6be2fbc6200/HWEncoderExperiments/src/main/res/drawable-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/res/drawable-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OnlyInAmerica/HWEncoderExperiments/4506ca1a1b96ca6890eebddd12f1b6be2fbc6200/HWEncoderExperiments/src/main/res/drawable-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/res/drawable-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OnlyInAmerica/HWEncoderExperiments/4506ca1a1b96ca6890eebddd12f1b6be2fbc6200/HWEncoderExperiments/src/main/res/drawable-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/res/drawable-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OnlyInAmerica/HWEncoderExperiments/4506ca1a1b96ca6890eebddd12f1b6be2fbc6200/HWEncoderExperiments/src/main/res/drawable-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/res/layout/activity_hwrecorder.xml:
--------------------------------------------------------------------------------
1 | <!-- Original markup lost in extraction. The 29-line layout defines the recorder UI,
2 |      including the Button whose android:onClick is wired to
3 |      HWRecorderActivity.onRunTestButtonClicked(), and (referenced only from
4 |      commented-out Activity code) a GLSurfaceView with id glSurfaceView. -->
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/res/menu/main.xml:
--------------------------------------------------------------------------------
1 | <menu xmlns:android="http://schemas.android.com/apk/res/android" >
2 |     <item
3 |         android:id="@+id/action_settings"
4 |         android:orderInCategory="100"
5 |         android:showAsAction="never"
6 |         android:title="@string/action_settings"/>
7 | </menu>
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/res/values-sw600dp/dimens.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 | 
3 |     <!-- Customize dimensions originally defined in res/values/dimens.xml
4 |          (such as screen margins) for sw600dp devices (e.g. 7" tablets) here. -->
5 | </resources>
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/res/values-sw720dp-land/dimens.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 | 
3 |     <!-- Customize dimensions for sw720dp devices (e.g. 10" tablets) in landscape here. -->
4 |     <dimen name="activity_horizontal_margin">128dp</dimen>
5 | 
6 | </resources>
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/res/values-v11/styles.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 | 
3 |     <!--
4 |         Base application theme for API 11+. This theme completely replaces
5 |         AppBaseTheme from res/values/styles.xml on API 11+ devices.
6 |     -->
7 |     <style name="AppBaseTheme" parent="android:Theme.Holo.Light">
8 |         <!-- API 11 theme customizations can go here. -->
9 |     </style>
10 | 
11 | </resources>
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <!-- Default screen margins, per the Android Design guidelines. -->
3 |     <dimen name="activity_horizontal_margin">16dp</dimen>
4 |     <dimen name="activity_vertical_margin">16dp</dimen>
5 | 
6 | </resources>
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 | 
4 |     <string name="app_name">HWEncoderExperiments</string>
5 |     <string name="action_settings">Settings</string>
6 |     <string name="hello_world">Hello world!</string>
7 |     <string name="recording">Recording...</string>
8 | 
9 | </resources>
10 | 
--------------------------------------------------------------------------------
/HWEncoderExperiments/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 | 
3 |     <!--
4 |         Base application theme, dependent on API level. This theme is replaced
5 |         by AppBaseTheme from res/values-vXX/styles.xml on newer devices.
6 |     -->
7 |     <style name="AppBaseTheme" parent="android:Theme.Light">
8 |         <!--
9 |             Theme customizations available in newer API levels can go in
10 |             res/values-vXX/styles.xml, while customizations related to
11 |             backward-compatibility can go here.
12 |         -->
13 |     </style>
14 | 
15 |     <!-- Application theme. -->
16 |     <style name="AppTheme" parent="AppBaseTheme">
17 |         <!-- All customizations that are NOT specific to a particular
18 |              API-level can go here. -->
19 |     </style>
20 | 
21 | </resources>
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # HWEncoderExperiments
2 |
3 | Herein lie experiments with Android 4.3's [`MediaCodec`](http://developer.android.com/reference/android/media/MediaCodec.html) and [`MediaMuxer`](http://developer.android.com/reference/android/media/MediaMuxer.html) APIs.
4 |
5 | The master branch is concerned with simultaneously producing a single high-quality .mp4 as well as gapless 5-second .mp4 chunks. The end goal is to allow an Android device to act as an [HLS](http://en.wikipedia.org/wiki/HTTP_Live_Streaming) / [MPEG-DASH](http://en.wikipedia.org/wiki/Dynamic_Adaptive_Streaming_over_HTTP) server.
6 |
7 | The audioonly branch shows a barebones example encoding AAC audio with Android's [`AudioRecord`](http://developer.android.com/reference/android/media/AudioRecord.html) class.
8 |
9 | ## Output
10 | Output is stored in '/sdcard/HWEncodingExperiments', or in internal storage if /sdcard isn't available.
11 |
12 | The output location can be changed via `FileUtils.OUTPUT_DIR` and `FileUtils.createTempFileInRootAppStorage`.
13 |
14 |
15 | ## Note on ColorFormats
16 | This example doesn't yet intelligently check for available color formats. If you experience a crash on `MediaCodec.configure`, try changing the appropriate part of `ChunkedAvcEncoder.prepare()`:
17 |
18 |
19 | private void prepare() {
20 | …
21 | videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar);
22 | // On devices without a TI SOC, try:
23 | // COLOR_FormatYUV420PackedSemiPlanar
24 | …
25 | }
26 |
27 |
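A more robust approach (a sketch, not yet part of this project) is to ask the device which color formats its AVC encoder actually supports before calling `configure`. The snippet below uses only `android.media.MediaCodecList` and `android.media.MediaCodecInfo` (available since API 16); the method name `selectColorFormat` is illustrative:

    // Return the first YUV420 semi-planar color format advertised by an AVC encoder.
    private static int selectColorFormat() {
        for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            if (!info.isEncoder()) continue;
            for (String type : info.getSupportedTypes()) {
                if (!type.equalsIgnoreCase("video/avc")) continue;
                MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(type);
                for (int colorFormat : caps.colorFormats) {
                    if (colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar
                            || colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar) {
                        return colorFormat;
                    }
                }
            }
        }
        throw new RuntimeException("No suitable color format found for video/avc");
    }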
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OnlyInAmerica/HWEncoderExperiments/4506ca1a1b96ca6890eebddd12f1b6be2fbc6200/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Wed Sep 11 14:37:30 PDT 2013
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=http\://services.gradle.org/distributions/gradle-1.7-bin.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # For Cygwin, ensure paths are in UNIX format before anything is touched.
46 | if $cygwin ; then
47 | [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
48 | fi
49 |
50 | # Attempt to set APP_HOME
51 | # Resolve links: $0 may be a link
52 | PRG="$0"
53 | # Need this for relative symlinks.
54 | while [ -h "$PRG" ] ; do
55 | ls=`ls -ld "$PRG"`
56 | link=`expr "$ls" : '.*-> \(.*\)$'`
57 | if expr "$link" : '/.*' > /dev/null; then
58 | PRG="$link"
59 | else
60 | PRG=`dirname "$PRG"`"/$link"
61 | fi
62 | done
63 | SAVED="`pwd`"
64 | cd "`dirname \"$PRG\"`/" >&-
65 | APP_HOME="`pwd -P`"
66 | cd "$SAVED" >&-
67 |
68 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
69 |
70 | # Determine the Java command to use to start the JVM.
71 | if [ -n "$JAVA_HOME" ] ; then
72 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
73 | # IBM's JDK on AIX uses strange locations for the executables
74 | JAVACMD="$JAVA_HOME/jre/sh/java"
75 | else
76 | JAVACMD="$JAVA_HOME/bin/java"
77 | fi
78 | if [ ! -x "$JAVACMD" ] ; then
79 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
80 |
81 | Please set the JAVA_HOME variable in your environment to match the
82 | location of your Java installation."
83 | fi
84 | else
85 | JAVACMD="java"
86 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
87 |
88 | Please set the JAVA_HOME variable in your environment to match the
89 | location of your Java installation."
90 | fi
91 |
92 | # Increase the maximum file descriptors if we can.
93 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
94 | MAX_FD_LIMIT=`ulimit -H -n`
95 | if [ $? -eq 0 ] ; then
96 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
97 | MAX_FD="$MAX_FD_LIMIT"
98 | fi
99 | ulimit -n $MAX_FD
100 | if [ $? -ne 0 ] ; then
101 | warn "Could not set maximum file descriptor limit: $MAX_FD"
102 | fi
103 | else
104 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
105 | fi
106 | fi
107 |
108 | # For Darwin, add options to specify how the application appears in the dock
109 | if $darwin; then
110 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
111 | fi
112 |
113 | # For Cygwin, switch paths to Windows format before running java
114 | if $cygwin ; then
115 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
116 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
117 |
118 | # We build the pattern for arguments to be converted via cygpath
119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 | SEP=""
121 | for dir in $ROOTDIRSRAW ; do
122 | ROOTDIRS="$ROOTDIRS$SEP$dir"
123 | SEP="|"
124 | done
125 | OURCYGPATTERN="(^($ROOTDIRS))"
126 | # Add a user-defined pattern to the cygpath arguments
127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 | fi
130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 | i=0
132 | for arg in "$@" ; do
133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 |
136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 | else
139 | eval `echo args$i`="\"$arg\""
140 | fi
141 | i=$((i+1))
142 | done
143 | case $i in
144 | (0) set -- ;;
145 | (1) set -- "$args0" ;;
146 | (2) set -- "$args0" "$args1" ;;
147 | (3) set -- "$args0" "$args1" "$args2" ;;
148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 | esac
155 | fi
156 |
157 | # Split up the JVM_OPTS and GRADLE_OPTS values into an array, following the shell quoting and substitution rules
158 | function splitJvmOpts() {
159 | JVM_OPTS=("$@")
160 | }
161 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
162 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
163 |
164 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
165 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':HWEncoderExperiments'
2 |
--------------------------------------------------------------------------------