├── .gitignore
├── AndroidManifest.xml
├── README.md
├── ant.properties
├── build.xml
├── local.properties
├── proguard-project.txt
├── project.properties
├── res
│   ├── drawable-hdpi
│   │   └── ic_launcher.png
│   ├── drawable-ldpi
│   │   └── ic_launcher.png
│   ├── drawable-mdpi
│   │   └── ic_launcher.png
│   ├── drawable-xhdpi
│   │   └── ic_launcher.png
│   ├── layout
│   │   └── main.xml
│   └── values
│       └── strings.xml
└── src
    └── jp
        └── saka
            └── mediacodectest
                └── MediaCodecTest.java
/.gitignore:
--------------------------------------------------------------------------------
1 | obj/*
2 | .*.swp
3 | *.*~
4 | bin/*
5 | gen/*
--------------------------------------------------------------------------------
/AndroidManifest.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="utf-8"?>
<!-- Reconstructed: the original element markup was stripped in extraction. The package name,
     target SDK, camera permission and resource references are taken from the rest of this
     repository; version numbers and attribute details are assumptions. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="jp.saka.mediacodectest"
    android:versionCode="1"
    android:versionName="1.0">

    <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="16" />
    <uses-permission android:name="android.permission.CAMERA" />

    <application
        android:icon="@drawable/ic_launcher"
        android:label="@string/app_name">
        <activity android:name=".MediaCodecTest" android:label="@string/app_name">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | android-app-mediacodectest
2 | ==========================
3 |
4 | # Overview
5 |
6 | A sample app that uses [android.media.MediaCodec](http://developer.android.com/reference/android/media/MediaCodec.html) to H.264-encode and then decode pictures captured from the camera.
7 |
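The source below drives `MediaCodec` through the API 16-era buffer-array calls (`getInputBuffers()` / `getOutputBuffers()`). As a rough, hypothetical sketch of one encoder iteration (not code from this repository; the class name is illustrative, and `encoder` is assumed to be already configured and started):

```java
import android.media.MediaCodec;
import java.nio.ByteBuffer;

// Hypothetical helper, not part of this repository: one iteration of the
// buffer-array handshake that MediaCodecTest.java runs in its main loop.
class EncodeStepSketch {
    static void feedAndDrain(MediaCodec encoder, byte[] yuvFrame, long ptsUs) {
        // Input side: borrow an input buffer from the codec, fill it, hand it back.
        int inIndex = encoder.dequeueInputBuffer(10000 /* timeout, us */);
        if (inIndex >= 0) {
            ByteBuffer in = encoder.getInputBuffers()[inIndex];
            in.clear();
            in.put(yuvFrame);
            encoder.queueInputBuffer(inIndex, 0, yuvFrame.length, ptsUs, 0);
        }
        // Output side: drain one encoded buffer if available, then return it to the codec.
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int outIndex = encoder.dequeueOutputBuffer(info, 10000 /* timeout, us */);
        if (outIndex >= 0) {
            ByteBuffer out = encoder.getOutputBuffers()[outIndex];
            out.position(info.offset);
            out.limit(info.offset + info.size);
            // ... consume the H.264 data here (the app forwards it to a decoder) ...
            encoder.releaseOutputBuffer(outIndex, false /* render */);
        }
        // INFO_OUTPUT_BUFFERS_CHANGED / INFO_OUTPUT_FORMAT_CHANGED handling omitted for brevity.
    }
}
```

MediaCodecTest.java runs this same handshake for both the encoder and the decoder inside a single loop, rendering the decoder output to a Surface.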
--------------------------------------------------------------------------------
/ant.properties:
--------------------------------------------------------------------------------
1 | # This file is used to override default values used by the Ant build system.
2 | #
3 | # This file must be checked into Version Control Systems, as it is
4 | # integral to the build system of your project.
5 |
6 | # This file is only used by the Ant script.
7 |
8 | # You can use this to override default values such as
9 | # 'source.dir' for the location of your java source folder and
10 | # 'out.dir' for the location of your output folder.
11 |
12 | # You can also use it define how the release builds are signed by declaring
13 | # the following properties:
14 | # 'key.store' for the location of your keystore and
15 | # 'key.alias' for the name of the key to use.
16 | # The password will be asked during the build when you use the 'release' target.
17 |
18 |
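# For example (hypothetical values; point these at your own keystore and key):
#key.store=/path/to/release.keystore
#key.alias=releasekey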
--------------------------------------------------------------------------------
/build.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!-- Reconstructed: the original markup was stripped in extraction. This is the stock
     SDK-generated Ant build script with its template comments shortened; the project
     name attribute is an assumption. -->
<project name="MediaCodecTest" default="help">

    <!-- local.properties is generated by the 'android' tool and holds sdk.dir;
         it must not be checked into version control. -->
    <property file="local.properties" />

    <!-- ant.properties can be created by the user to override build properties. -->
    <property file="ant.properties" />

    <!-- if sdk.dir was not set from one of the property files, try the ANDROID_HOME env var -->
    <property environment="env" />
    <condition property="sdk.dir" value="${env.ANDROID_HOME}">
        <isset property="env.ANDROID_HOME" />
    </condition>

    <loadproperties srcFile="project.properties" />

    <!-- quick check on sdk.dir -->
    <fail
            message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
            unless="sdk.dir"
    />

    <import file="custom_rules.xml" optional="true" />

    <!-- Import the actual build targets from the SDK. -->
    <import file="${sdk.dir}/tools/ant/build.xml" />

</project>
--------------------------------------------------------------------------------
/local.properties:
--------------------------------------------------------------------------------
1 | # This file is automatically generated by Android Tools.
2 | # Do not modify this file -- YOUR CHANGES WILL BE ERASED!
3 | #
4 | # This file must *NOT* be checked into Version Control Systems,
5 | # as it contains information specific to your local configuration.
6 |
7 | # location of the SDK. This is only used by Ant
8 | # For customization when using a Version Control System, please read the
9 | # header note.
10 | sdk.dir=/Users/sakamotomasakiyo/Dropbox/Develop/Android/Mac-OS-X/v1/Android SDK
11 |
--------------------------------------------------------------------------------
/proguard-project.txt:
--------------------------------------------------------------------------------
1 | # To enable ProGuard in your project, edit project.properties
2 | # to define the proguard.config property as described in that file.
3 | #
4 | # Add project specific ProGuard rules here.
5 | # By default, the flags in this file are appended to flags specified
6 | # in ${sdk.dir}/tools/proguard/proguard-android.txt
7 | # You can edit the include path and order by changing the ProGuard
8 | # include property in project.properties.
9 | #
10 | # For more details, see
11 | # http://developer.android.com/guide/developing/tools/proguard.html
12 |
13 | # Add any project specific keep options here:
14 |
15 | # If your project uses WebView with JS, uncomment the following
16 | # and specify the fully qualified class name to the JavaScript interface
17 | # class:
18 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
19 | # public *;
20 | #}
21 |
--------------------------------------------------------------------------------
/project.properties:
--------------------------------------------------------------------------------
1 | # This file is automatically generated by Android Tools.
2 | # Do not modify this file -- YOUR CHANGES WILL BE ERASED!
3 | #
4 | # This file must be checked in Version Control Systems.
5 | #
6 | # To customize properties used by the Ant build system edit
7 | # "ant.properties", and override values to adapt the script to your
8 | # project structure.
9 | #
10 | # To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
11 | #proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
12 |
13 | # Project target.
14 | target=android-16
15 |
--------------------------------------------------------------------------------
/res/drawable-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pingu342/android-app-mediacodectest/fa808febda209d8ab847d62dd2dc83960f0908a4/res/drawable-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/res/drawable-ldpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pingu342/android-app-mediacodectest/fa808febda209d8ab847d62dd2dc83960f0908a4/res/drawable-ldpi/ic_launcher.png
--------------------------------------------------------------------------------
/res/drawable-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pingu342/android-app-mediacodectest/fa808febda209d8ab847d62dd2dc83960f0908a4/res/drawable-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/res/drawable-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pingu342/android-app-mediacodectest/fa808febda209d8ab847d62dd2dc83960f0908a4/res/drawable-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/res/layout/main.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="utf-8"?>
<!-- Reconstructed: the original markup was stripped in extraction. The two SurfaceView IDs
     come from MediaCodecTest.java; the other layout attributes are assumptions. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:orientation="vertical"
    android:layout_width="fill_parent"
    android:layout_height="fill_parent">
    <SurfaceView android:id="@+id/CameraSurface"
        android:layout_width="fill_parent" android:layout_height="0dp" android:layout_weight="1" />
    <SurfaceView android:id="@+id/DecodeSurface"
        android:layout_width="fill_parent" android:layout_height="0dp" android:layout_weight="1" />
</LinearLayout>
--------------------------------------------------------------------------------
/res/values/strings.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <!-- Only the string value survived extraction; the resource name "app_name" is assumed. -->
    <string name="app_name">MediaCodecList</string>
</resources>
--------------------------------------------------------------------------------
/src/jp/saka/mediacodectest/MediaCodecTest.java:
--------------------------------------------------------------------------------
1 | package jp.saka.mediacodectest;
2 |
3 | import android.app.Activity;
4 | import android.os.Bundle;
5 | import android.util.Log;
6 |
7 | import android.media.MediaCodec;
8 | import android.media.MediaCodecInfo;
9 | import android.media.MediaFormat;
10 | import android.media.MediaCodecList;
11 | import android.view.Surface;
12 | import android.view.SurfaceView;
13 | import android.view.SurfaceHolder;
14 | import android.hardware.Camera;
15 | import android.hardware.Camera.Parameters;
16 | import android.graphics.ImageFormat;
17 | import android.view.Window;
18 |
19 | import java.nio.ByteBuffer;
20 |
21 | public class MediaCodecTest extends Activity
22 | {
23 | private static final int UseCamera = Camera.CameraInfo.CAMERA_FACING_FRONT;
24 | //private static final int UseCamera = Camera.CameraInfo.CAMERA_FACING_BACK;
25 | //private static final int UseCamera = -1;
26 | private static final int Width = 1280;
27 | private static final int Height = 720;
28 | private static final int Bitrate = 100000;
29 | private static final int Framerate = 15;
30 |
31 | private SurfaceView mCameraSurface = null;
32 | private SurfaceView mDecodeSurface = null;
33 | private Camera mCamera = null;
34 | private RingBuffer mRingBuffer = new RingBuffer();
35 | private EncDecThread mEncDecThread = null;
36 | private boolean mResumed = false;
37 |
38 | /** Called when the activity is first created. */
39 | @Override
40 | public void onCreate(Bundle savedInstanceState) {
41 | Log.d("sakalog", "onCreate");
42 |
43 | super.onCreate(savedInstanceState);
44 | requestWindowFeature(Window.FEATURE_NO_TITLE);
45 | setContentView(R.layout.main);
46 |
47 | // Prepare the camera-preview SurfaceView used to capture encoder input frames from the camera
48 | mCameraSurface = (SurfaceView)findViewById(R.id.CameraSurface);
49 | mCameraSurface.getHolder().addCallback(new SurfaceHolder.Callback() {
50 | @Override
51 | public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
52 | Log.d("sakalog", "mCameraSurface surfaceChanged");
53 |
54 | // Start the camera
55 | if (mResumed) {
56 | startCamera();
57 | }
58 | }
59 | @Override
60 | public void surfaceCreated(SurfaceHolder holder) {
61 | Log.d("sakalog", "mCameraSurface surfaceCreated");
62 | }
63 | @Override
64 | public void surfaceDestroyed(SurfaceHolder holder) {
65 | Log.d("sakalog", "mCameraSurface surfaceDestroyed");
66 | }
67 | });
68 |
69 | // Prepare the SurfaceView on which the decoder output is rendered
70 | mDecodeSurface = (SurfaceView)findViewById(R.id.DecodeSurface);
71 | mDecodeSurface.getHolder().addCallback(new SurfaceHolder.Callback() {
72 | @Override
73 | public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
74 | Log.d("sakalog", "mDecodeSurface surfaceChanged");
75 |
76 | // Start encoding/decoding
77 | if (mEncDecThread != null) {
78 | mEncDecThread.finish();
79 | }
80 | if (mResumed) {
81 | mEncDecThread = new EncDecThread();
82 | mEncDecThread.start();
83 | }
84 | }
85 | @Override
86 | public void surfaceCreated(SurfaceHolder holder) {
87 | Log.d("sakalog", "mDecodeSurface surfaceCreated");
88 | }
89 | @Override
90 | public void surfaceDestroyed(SurfaceHolder holder) {
91 | Log.d("sakalog", "mDecodeSurface surfaceDestroyed");
92 | }
93 | });
94 |
95 | }
96 |
97 | @Override
98 | public void onResume() {
99 | Log.d("sakalog", "onResume");
100 | super.onResume();
101 | mResumed = true;
102 | if (mCameraSurface.getHolder().getSurface().isValid()) {
103 | Log.d("sakalog", "mCameraSurface is valid");
104 |
105 | // Start the camera
106 | startCamera();
107 | } else {
108 | Log.d("sakalog", "mCameraSurface is invalid");
109 | }
110 | if (mDecodeSurface.getHolder().getSurface().isValid()) {
111 | Log.d("sakalog", "mDecodeSurface is valid");
112 |
113 | // Start encoding/decoding
114 | if (mEncDecThread != null) {
115 | mEncDecThread.finish();
116 | }
117 | mEncDecThread = new EncDecThread();
118 | mEncDecThread.start();
119 | } else {
120 | Log.d("sakalog", "mDecodeSurface is invalid");
121 | }
122 | }
123 |
124 | @Override
125 | public void onPause() {
126 | Log.d("sakalog", "onPause");
127 | super.onPause();
128 | mResumed = false;
129 |
130 | // Stop encoding/decoding
131 | if (mEncDecThread != null) {
132 | mEncDecThread.finish();
133 | mEncDecThread = null;
134 | }
135 |
136 | // Stop the camera
137 | stopCamera();
138 | }
139 |
140 | @Override
141 | public void onDestroy() {
142 | Log.d("sakalog", "onDestroy");
143 | super.onDestroy();
144 | }
145 |
146 | private String colorFormatName(int format) {
147 | String name;
148 | switch (format) {
149 | case MediaCodecInfo.CodecCapabilities.COLOR_Format12bitRGB444:
150 | name = "COLOR_Format12bitRGB444";
151 | break;
152 | case MediaCodecInfo.CodecCapabilities.COLOR_Format16bitARGB1555:
153 | name = "COLOR_Format16bitARGB1555";
154 | break;
155 | case MediaCodecInfo.CodecCapabilities.COLOR_Format16bitARGB4444:
156 | name = "COLOR_Format16bitARGB4444";
157 | break;
158 | case MediaCodecInfo.CodecCapabilities.COLOR_Format16bitBGR565:
159 | name = "COLOR_Format16bitBGR565";
160 | break;
161 | case MediaCodecInfo.CodecCapabilities.COLOR_Format16bitRGB565:
162 | name = "COLOR_Format16bitRGB565";
163 | break;
164 | case MediaCodecInfo.CodecCapabilities.COLOR_Format18BitBGR666:
165 | name = "COLOR_Format18BitBGR666";
166 | break;
167 | case MediaCodecInfo.CodecCapabilities.COLOR_Format18bitARGB1665:
168 | name = "COLOR_Format18bitARGB1665";
169 | break;
170 | case MediaCodecInfo.CodecCapabilities.COLOR_Format18bitRGB666:
171 | name = "COLOR_Format18bitRGB666";
172 | break;
173 | case MediaCodecInfo.CodecCapabilities.COLOR_Format19bitARGB1666:
174 | name = "COLOR_Format19bitARGB1666";
175 | break;
176 | case MediaCodecInfo.CodecCapabilities.COLOR_Format24BitABGR6666:
177 | name = "COLOR_Format24BitABGR6666";
178 | break;
179 | case MediaCodecInfo.CodecCapabilities.COLOR_Format24BitARGB6666:
180 | name = "COLOR_Format24BitARGB6666";
181 | break;
182 | case MediaCodecInfo.CodecCapabilities.COLOR_Format24bitARGB1887:
183 | name = "COLOR_Format24bitARGB1887";
184 | break;
185 | case MediaCodecInfo.CodecCapabilities.COLOR_Format24bitBGR888:
186 | name = "COLOR_Format24bitBGR888";
187 | break;
188 | case MediaCodecInfo.CodecCapabilities.COLOR_Format24bitRGB888:
189 | name = "COLOR_Format24bitRGB888";
190 | break;
191 | case MediaCodecInfo.CodecCapabilities.COLOR_Format25bitARGB1888:
192 | name = "COLOR_Format25bitARGB1888";
193 | break;
194 | case MediaCodecInfo.CodecCapabilities.COLOR_Format32bitARGB8888:
195 | name = "COLOR_Format32bitARGB8888";
196 | break;
197 | case MediaCodecInfo.CodecCapabilities.COLOR_Format32bitBGRA8888:
198 | name = "COLOR_Format32bitBGRA8888";
199 | break;
200 | case MediaCodecInfo.CodecCapabilities.COLOR_Format8bitRGB332:
201 | name = "COLOR_Format8bitRGB332";
202 | break;
203 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatCbYCrY:
204 | name = "COLOR_FormatCbYCrY";
205 | break;
206 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatCrYCbY:
207 | name = "COLOR_FormatCrYCbY";
208 | break;
209 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatL16:
210 | name = "COLOR_FormatL16";
211 | break;
212 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatL2:
213 | name = "COLOR_FormatL2";
214 | break;
215 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatL24:
216 | name = "COLOR_FormatL24";
217 | break;
218 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatL32:
219 | name = "COLOR_FormatL32";
220 | break;
221 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatL4:
222 | name = "COLOR_FormatL4";
223 | break;
224 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatL8:
225 | name = "COLOR_FormatL8";
226 | break;
227 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatMonochrome:
228 | name = "COLOR_FormatMonochrome";
229 | break;
230 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatRawBayer10bit:
231 | name = "COLOR_FormatRawBayer10bit";
232 | break;
233 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatRawBayer8bit:
234 | name = "COLOR_FormatRawBayer8bit";
235 | break;
236 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatRawBayer8bitcompressed:
237 | name = "COLOR_FormatRawBayer8bitcompressed";
238 | break;
239 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYCbYCr:
240 | name = "COLOR_FormatYCbYCr";
241 | break;
242 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYCrYCb:
243 | name = "COLOR_FormatYCrYCb";
244 | break;
245 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV411PackedPlanar:
246 | name = "COLOR_FormatYUV411PackedPlanar";
247 | break;
248 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV411Planar:
249 | name = "COLOR_FormatYUV411Planar";
250 | break;
251 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
252 | name = "COLOR_FormatYUV420PackedPlanar";
253 | break;
254 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
255 | name = "COLOR_FormatYUV420PackedSemiPlanar";
256 | break;
257 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
258 | name = "COLOR_FormatYUV420Planar";
259 | break;
260 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
261 | name = "COLOR_FormatYUV420SemiPlanar";
262 | break;
263 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV422PackedPlanar:
264 | name = "COLOR_FormatYUV422PackedPlanar";
265 | break;
266 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV422PackedSemiPlanar:
267 | name = "COLOR_FormatYUV422PackedSemiPlanar";
268 | break;
269 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV422Planar:
270 | name = "COLOR_FormatYUV422Planar";
271 | break;
272 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV422SemiPlanar:
273 | name = "COLOR_FormatYUV422SemiPlanar";
274 | break;
275 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV444Interleaved:
276 | name = "COLOR_FormatYUV444Interleaved";
277 | break;
278 | case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
279 | name = "COLOR_QCOM_FormatYUV420SemiPlanar";
280 | break;
281 | case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
282 | name = "COLOR_TI_FormatYUV420PackedSemiPlanar";
283 | break;
284 | default:
285 | name = "???";
286 | }
287 | name += "(" + format + ")";
288 | return name;
289 | }
290 |
291 | private void startCamera() {
292 |
293 | if (mCamera != null || mRingBuffer == null) {
294 | return;
295 | }
296 |
297 | if (!(UseCamera == Camera.CameraInfo.CAMERA_FACING_FRONT || UseCamera == Camera.CameraInfo.CAMERA_FACING_BACK)) {
298 | return;
299 | }
300 |
301 | int facing = UseCamera;
302 | int width = Width;
303 | int height = Height;
304 |
305 | try {
306 | int cameraId = -1;
307 | Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
308 |
309 | for (int i=0; i<Camera.getNumberOfCameras(); i++) { // loop bound reconstructed; original lines 310-404 (camera setup, preview callback feeding mRingBuffer, stopCamera(), most of RingBuffer) were lost in extraction
405 | if (mGetPos >= mSetPos) {
406 | Log.d("sakalog", "set is slower than get.");
407 | return null;
408 | }
409 | int index = mGetPos % Length;
410 | byte[] buffer = mArray[index];
411 | mArray[index] = null;
412 | mGetPos++;
413 | return buffer;
414 | }
415 | public synchronized void release(byte[] buffer) {
416 | if (mCallback != null && buffer != null) {
417 | mCallback.onBufferRelease(buffer);
418 | }
419 | }
420 | public abstract static class Callback {
421 | public abstract void onBufferRelease(byte[] buffer);
422 | }
423 | }
424 |
425 | private class EncDecThread extends Thread {
426 |
427 | private boolean mForceInputEOS = false;
428 |
429 | @Override
430 | public void run() {
431 | startEncodeDecodeVideo();
432 | }
433 |
434 | public void finish() {
435 | // Stop encoding/decoding
436 | mForceInputEOS = true;
437 | try {
438 | join();
439 | } catch (Exception e) {
440 | }
441 | }
442 |
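// Overall flow of startEncodeDecodeVideo() below:
//   1. find an H.264 ("video/avc") encoder via MediaCodecList and pick a supported YUV420 input color format,
//   2. configure and start the encoder,
//   3. loop: feed camera frames from mRingBuffer (or the generated still frame) into the encoder,
//   4. when the encoder emits a BUFFER_FLAG_CODEC_CONFIG buffer, create the decoder and configure it with that buffer as "csd-0",
//   5. feed subsequent encoded buffers to the decoder and render its output onto mDecodeSurface,
//   6. stop once EOS (forced by finish()) has propagated through both codecs.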
443 | private void startEncodeDecodeVideo() {
444 | int width = Width, height = Height;
445 | int bitRate = Bitrate;
446 | int frameRate = Framerate;
447 | String mimeType = "video/avc";
448 | int threshold = 50;
449 | int maxerror = 50;
450 | Surface surface = mDecodeSurface.getHolder().getSurface();
451 | //Surface surface = null;
452 |
453 | MediaCodec encoder, decoder = null;
454 | ByteBuffer[] encoderInputBuffers;
455 | ByteBuffer[] encoderOutputBuffers;
456 | ByteBuffer[] decoderInputBuffers = null;
457 | ByteBuffer[] decoderOutputBuffers = null;
458 |
459 | // Search for an H.264 (video/avc) codec (encoder)
460 | int numCodecs = MediaCodecList.getCodecCount();
461 | MediaCodecInfo codecInfo = null;
462 | for (int i = 0; i < numCodecs && codecInfo == null; i++) {
463 | MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
464 | Log.d("sakalog", "Codec : " + info.getName());
465 | if (!info.isEncoder()) {
466 | Log.d("sakalog", "not encoder");
467 | continue;
468 | }
469 | String[] types = info.getSupportedTypes();
470 | boolean found = false;
471 | for (int j = 0; j < types.length && !found; j++) {
472 | if (types[j].equals(mimeType)) {
473 | Log.d("sakalog", types[j] + " found!");
474 | found = true;
475 | } else {
476 | Log.d("sakalog", types[j]);
477 | }
478 | }
479 | if (!found)
480 | continue;
481 | codecInfo = info;
482 | }
483 | if (codecInfo == null) {
484 | Log.d("sakalog", "Encoder not found");
485 | return;
486 | }
487 | Log.d("sakalog", "Using codec : " + codecInfo.getName() + " supporting " + mimeType);
488 |
489 | // Decide the color format used as input to the codec (encoder)
490 | int colorFormat = 0;
491 | MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
492 | for (int i = 0; i < capabilities.colorFormats.length /*&& colorFormat == 0*/; i++) {
493 | int format = capabilities.colorFormats[i];
494 | Log.d("sakalog", "Color format : " + colorFormatName(format));
495 | switch (format) {
496 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
497 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
498 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
499 | case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
500 | case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
501 | if (colorFormat == 0)
502 | colorFormat = format;
503 | break;
504 | default:
505 | break;
506 | }
507 | }
508 | if (colorFormat == 0) {
509 | Log.d("sakalog", "No supported color format");
510 | return;
511 | }
512 | Log.d("sakalog", "Using color format : " + colorFormatName(colorFormat));
513 |
514 | // The intent of this code is unclear
515 | if (codecInfo.getName().equals("OMX.TI.DUCATI1.VIDEO.H264E")) {
516 | // This codec doesn't support a width not a multiple of 16,
517 | // so round down.
518 | width &= ~15;
519 | }
520 | int stride = width;
521 | int sliceHeight = height;
522 | if (codecInfo.getName().startsWith("OMX.Nvidia.")) {
523 | stride = (stride + 15)/16*16;
524 | sliceHeight = (sliceHeight + 15)/16*16;
525 | }
526 |
527 | // Create the codec (encoder) instance
528 | encoder = MediaCodec.createByCodecName(codecInfo.getName());
529 |
530 | // Configure the codec (encoder)
531 | MediaFormat outputFormat = MediaFormat.createVideoFormat(mimeType, width, height);
532 | outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
533 | outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
534 | outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
535 | outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 75);
536 | outputFormat.setInteger("stride", stride);
537 | outputFormat.setInteger("slice-height", sliceHeight);
538 | Log.d("sakalog", "Configuring encoder with output format : " + outputFormat);
539 | encoder.configure(
540 | outputFormat, //the desired format of the output data (encoder).
541 | null, //a surface on which to render the output of this decoder (null here because this component is an encoder).
542 | null, //a crypto object to facilitate secure decryption of the media data.
543 | MediaCodec.CONFIGURE_FLAG_ENCODE //configure the component as an encoder.
544 | );
545 |
546 | // The codec's input and output buffers are accessed with getInputBuffers() and getOutputBuffers()
547 | encoder.start();
548 | encoderInputBuffers = encoder.getInputBuffers();
549 | encoderOutputBuffers = encoder.getOutputBuffers();
550 |
551 | // Generate a still test frame for when no camera is used (UseCamera == -1): planar formats get separate Cb/Cr planes (I420-style), semi-planar formats get interleaved Cb/Cr (NV12-style)
552 | int chromaStride = stride/2;
553 | int frameSize = stride*sliceHeight + 2*chromaStride*sliceHeight/2;
554 | byte[] stillImageFrame = new byte[frameSize];
555 | if (colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar ||
556 | colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar) {
557 | for (int y = 0; y < height; y++) {
558 | for (int x = 0; x < width; x++) {
559 | int Y = (x + y) & 255;
560 | int Cb = 255*x/width;
561 | int Cr = 255*y/height;
562 | stillImageFrame[y*stride + x] = (byte) Y;
563 | stillImageFrame[stride*sliceHeight + (y/2)*chromaStride + (x/2)] = (byte) Cb;
564 | stillImageFrame[stride*sliceHeight + chromaStride*(sliceHeight/2) + (y/2)*chromaStride + (x/2)] = (byte) Cr;
565 | }
566 | }
567 | } else {
568 | for (int y = 0; y < height; y++) {
569 | for (int x = 0; x < width; x++) {
570 | int Y = (x + y) & 255;
571 | int Cb = 255*x/width;
572 | int Cr = 255*y/height;
573 | stillImageFrame[y*stride + x] = (byte) Y;
574 | stillImageFrame[stride*sliceHeight + 2*(y/2)*chromaStride + 2*(x/2)] = (byte) Cb;
575 | stillImageFrame[stride*sliceHeight + 2*(y/2)*chromaStride + 2*(x/2) + 1] = (byte) Cr;
576 | }
577 | }
578 | }
579 |
580 | // Start encoding and decoding
581 | // The frame rate is 15 fps
582 | // The frame interval is 1000000/15 = 66666 us
583 | final long kTimeOutUs = 5000;
584 | MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
585 | boolean sawInputEOS = false;
586 | boolean sawOutputEOS = false;
587 | MediaFormat oformat = null;
588 | int errors = -1;
589 | int numInputFrames = 0;
590 | int maxInputFrames = -1;
591 | int numOutputFrames = 0;
592 | float lap, lapavg = 100.0f;
593 | long lap0 = 0;
594 | int actualOutputFrame = 0;
595 | long actualOutputFrameLap = 0;
596 | actualOutputFrame = 0;
597 | actualOutputFrameLap = System.currentTimeMillis();
598 | while (!sawOutputEOS && errors < 0) {
599 |
600 | if (!sawInputEOS) {
601 | lap = (float)(System.currentTimeMillis() - lap0); // time spent on the previous frame (folded into the running average lapavg below)
602 | lapavg += lap;
603 | lapavg /= 2.0f;
604 | float interval = 1000.0f / (float)frameRate;
605 | if (interval > lapavg) {
606 | try {
607 | long sleep = (long)(interval - lapavg);
608 | //Log.d("sakalog", "lap0 " + lap0 + " lap " + lap + " lapavg " + lapavg + " sleep " + sleep);
609 | if (sleep > 0) {
610 | Thread.sleep(sleep);
611 | }
612 | } catch (Exception e) {
613 | Log.d("sakalog", "sleep error.");
614 | }
615 | }
616 |
617 | byte[] inputFrame = null;
618 | boolean release = false;
619 | if (UseCamera == Camera.CameraInfo.CAMERA_FACING_FRONT || UseCamera == Camera.CameraInfo.CAMERA_FACING_BACK) {
620 | inputFrame = mRingBuffer.get();
621 | }
622 | if (inputFrame == null) {
623 | inputFrame = stillImageFrame;
624 | } else {
625 | release = true;
626 | }
627 | if (inputFrame != null) {
628 |
629 | // After the codec (encoder) has been start()ed, calling dequeueInputBuffer()/dequeueOutputBuffer() transfers ownership of a buffer from the codec to the client
630 | // dequeueInputBuffer() and dequeueOutputBuffer() return an index used to access the input/output buffer arrays
631 | // They wait up to kTimeOutUs for a buffer to become available
632 | // If no buffer is available, -1 is returned
633 | int inputBufIndex = encoder.dequeueInputBuffer(kTimeOutUs);
634 |
635 | if (inputBufIndex >= 0) {
636 | //Log.d("sakalog", "encoder input buf index " + inputBufIndex);
637 | ByteBuffer dstBuf = encoderInputBuffers[inputBufIndex];
638 |
639 | int sampleSize = frameSize;
640 | long presentationTimeUs = 0;
641 |
642 | // Submit one frame of data to the codec (encoder) with queueInputBuffer()
643 | // Once maxInputFrames frames have been submitted, submit the BUFFER_FLAG_END_OF_STREAM flag instead of data
644 | if ((maxInputFrames > 0 && numInputFrames >= maxInputFrames) || mForceInputEOS) {
645 | Log.d("sakalog", "saw input EOS.");
646 | sawInputEOS = true;
647 | sampleSize = 0;
648 | } else {
649 | dstBuf.clear();
650 | dstBuf.put(inputFrame);
651 | presentationTimeUs = numInputFrames*1000000/frameRate;
652 | numInputFrames++;
653 | lap0 = System.currentTimeMillis();
654 | }
655 |
656 | encoder.queueInputBuffer(
657 | inputBufIndex,
658 | 0 /* offset */,
659 | sampleSize,
660 | presentationTimeUs,
661 | sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
662 | }
663 | }
664 |
665 | if (release) {
666 | mRingBuffer.release(inputFrame);
667 | }
668 | }
669 |
670 | // Get the index of an output buffer (H.264 encoded data) from the codec (encoder)
671 | // BufferInfo (the info argument) receives flags, offset, presentationTimeUs and size
672 | int res = encoder.dequeueOutputBuffer(info, kTimeOutUs);
673 | if (res >= 0) {
674 | //Log.d("sakalog", "encoder output buf index " + res);
675 | int outputBufIndex = res;
676 | ByteBuffer buf = encoderOutputBuffers[outputBufIndex];
677 |
678 | // Set the buffer's read position and limit according to BufferInfo
679 | buf.position(info.offset);
680 | buf.limit(info.offset + info.size);
681 |
682 | if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
683 |
684 | // If the output buffer from the codec (encoder) carries the BUFFER_FLAG_CODEC_CONFIG flag rather than H.264 frame data, set up the codec (decoder) instance with it
685 | Log.d("sakalog", "create decoder.");
686 | decoder = MediaCodec.createDecoderByType(mimeType);
687 | MediaFormat format = MediaFormat.createVideoFormat(mimeType, width, height);
688 | format.setByteBuffer("csd-0", buf);
689 | Log.d("sakalog", "Configuring decoder with input format : " + format);
690 | decoder.configure(
691 | format, //The format of the input data (decoder)
692 | surface, //a surface on which to render the output of this decoder.
693 | null, //a crypto object to facilitate secure decryption of the media data.
694 | 0 //configure the component as a decoder.
695 | );
696 | decoder.start();
697 | decoderInputBuffers = decoder.getInputBuffers();
698 | decoderOutputBuffers = decoder.getOutputBuffers();
699 | } else {
700 |
701 | // If the output buffer from the codec (encoder) is H.264 encoded data, feed it into the codec (decoder)
702 | int decIndex = decoder.dequeueInputBuffer(-1);
703 | //Log.d("sakalog", "decoder input buf index " + decIndex);
704 | decoderInputBuffers[decIndex].clear();
705 | decoderInputBuffers[decIndex].put(buf);
706 | decoder.queueInputBuffer(decIndex, 0, info.size, info.presentationTimeUs, info.flags);
707 | }
708 |
709 | // Once the encoder's output buffer (H.264 encoded data) has been processed, return it to the codec (encoder)
710 | encoder.releaseOutputBuffer(outputBufIndex, false /* render */);
711 | } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
712 | encoderOutputBuffers = encoder.getOutputBuffers();
713 |
714 | Log.d("sakalog", "encoder output buffers have changed.");
715 | } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
716 | MediaFormat encformat = encoder.getOutputFormat();
717 |
718 | Log.d("sakalog", "encoder output format has changed to " + encformat);
719 | }
720 |
721 | // Get the index of an output buffer (raw frame data) from the codec (decoder)
722 | if (decoder == null)
723 | res = MediaCodec.INFO_TRY_AGAIN_LATER;
724 | else
725 | res = decoder.dequeueOutputBuffer(info, kTimeOutUs);
726 |
727 | if (res >= 0) {
728 | //Log.d("sakalog", "decoder output buf index " + outputBufIndex);
729 | int outputBufIndex = res;
730 | ByteBuffer buf = decoderOutputBuffers[outputBufIndex];
731 |
732 | buf.position(info.offset);
733 | buf.limit(info.offset + info.size);
734 |
735 | if (info.size > 0) {
736 | //errors = checkFrame(buf, info, oformat, width, height, threshold);
737 | }
738 |
739 | // Return the used output buffer to the codec (decoder)
740 | decoder.releaseOutputBuffer(outputBufIndex, (surface != null) /* render */);
741 |
742 | numOutputFrames++;
743 | if ((numOutputFrames % (frameRate*3))==0) {
744 | Log.d("sakalog", "numInputFrames " + numInputFrames + " numOutputFrames " + numOutputFrames + " actualFrameRate " + (float)(numOutputFrames-actualOutputFrame)/(float)(System.currentTimeMillis()-actualOutputFrameLap)*1000.0f + " lapavg " + lapavg);
745 | actualOutputFrame = numOutputFrames;
746 | actualOutputFrameLap = System.currentTimeMillis();
747 | }
748 |
749 | if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
750 | Log.d("sakalog", "saw output EOS.");
751 | sawOutputEOS = true;
752 | }
753 | } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
754 | decoderOutputBuffers = decoder.getOutputBuffers();
755 |
756 | Log.d("sakalog", "decoder output buffers have changed.");
757 | } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
758 | oformat = decoder.getOutputFormat();
759 |
760 | Log.d("sakalog", "decoder output format has changed to " + oformat);
761 | }
762 |
763 | }
764 |
765 | encoder.stop();
766 | encoder.release();
767 | decoder.stop();
768 | decoder.release();
769 |
770 | Log.d("sakalog", "complete.");
771 | }
772 | }
773 | }
774 |
--------------------------------------------------------------------------------