├── settings.gradle
├── README.md
├── .idea
├── copyright
│ └── profiles_settings.xml
├── encodings.xml
├── vcs.xml
├── modules.xml
├── gradle.xml
├── compiler.xml
├── misc.xml
└── inspectionProfiles
│ └── Project_Default.xml
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── app
├── src
│ ├── main
│ │ ├── res
│ │ │ ├── mipmap-hdpi
│ │ │ │ └── ic_launcher.png
│ │ │ ├── mipmap-mdpi
│ │ │ │ └── ic_launcher.png
│ │ │ ├── mipmap-xhdpi
│ │ │ │ └── ic_launcher.png
│ │ │ ├── mipmap-xxhdpi
│ │ │ │ └── ic_launcher.png
│ │ │ ├── values
│ │ │ │ ├── styles.xml
│ │ │ │ ├── dimens.xml
│ │ │ │ └── strings.xml
│ │ │ ├── values-ja
│ │ │ │ └── strings.xml
│ │ │ ├── values-w820dp
│ │ │ │ └── dimens.xml
│ │ │ ├── layout
│ │ │ │ ├── activity_main.xml
│ │ │ │ └── fragment_camera.xml
│ │ │ └── menu
│ │ │ │ └── menu_main.xml
│ │ ├── java
│ │ │ └── com
│ │ │ │ └── serenegiant
│ │ │ │ ├── timelapserecordingsample
│ │ │ │ ├── MainActivity.java
│ │ │ │ ├── CameraFragment.java
│ │ │ │ └── CameraGLView.java
│ │ │ │ ├── media
│ │ │ │ ├── TLMediaAudioEncoder.java
│ │ │ │ ├── AbstractTLMediaAudioEncoder.java
│ │ │ │ ├── TLMediaMovieBuilder.java
│ │ │ │ ├── TLMediaVideoEncoder.java
│ │ │ │ └── TLMediaEncoder.java
│ │ │ │ └── glutils
│ │ │ │ ├── RenderHandler.java
│ │ │ │ ├── GLDrawer2D.java
│ │ │ │ └── EGLBase.java
│ │ └── AndroidManifest.xml
│ └── androidTest
│ │ └── java
│ │ └── com
│ │ └── serenegiant
│ │ └── timelapserecordingsample
│ │ └── ApplicationTest.java
├── proguard-rules.pro
├── build.gradle
└── app.iml
├── .gitignore
├── gradle.properties
├── TimeLapseRecordingSample.iml
├── gradlew.bat
├── gradlew
└── LICENSE
/settings.gradle:
--------------------------------------------------------------------------------
// Modules that make up this Gradle build (single application module).
include ':app'

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # TimeLapseRecordingSample
2 | Time lapse recording sample using the MediaCodec API for Android
3 |
--------------------------------------------------------------------------------
/.idea/copyright/profiles_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saki4510t/TimeLapseRecordingSample/HEAD/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saki4510t/TimeLapseRecordingSample/HEAD/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saki4510t/TimeLapseRecordingSample/HEAD/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saki4510t/TimeLapseRecordingSample/HEAD/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saki4510t/TimeLapseRecordingSample/HEAD/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/.idea/encodings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 16dp
4 | 16dp
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/values-ja/strings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | TimeLapseRecordingSample
4 | 録画ボタン以外の画面にタッチしている間だけ録画します
5 | 設定
6 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Thu Dec 07 13:31:39 JST 2017
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | TimeLapseRecordingSample
3 | Settings
4 | Record movie only while touching screen(without record button)
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/values-w820dp/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 | 64dp
6 |
7 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/.idea/gradle.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in /Users/saki/android-sdks/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | # http://developer.android.com/guide/developing/tools/proguard.html
9 |
10 | # Add any project specific keep options here:
11 |
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | # public *;
17 | #}
18 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | /captures
3 | *.so
4 |
5 | /.idea/*.xml
6 | /.idea/libraries
7 | !.idea/codeStyleSettings.xml
8 | !.idea/copyright/*.xml
9 | !.idea/fileColors.xml
10 | !.idea/encodings.xml
11 | !.idea/gradle.xml
12 | !.idea/runConfigurations/*.xml
13 |
14 | !.idea/inspectionProfiles/*.xml
15 | .idea/inspectionProfiles/profiles_settings.xml
16 |
17 | !.idea/scopes/*.xml
18 | .idea/scopes/scope_settings.xml
19 |
20 | !.idea/templateLanguages.xml
21 | !.idea/vcs.xml
22 |
23 | # Gradle
24 | .gradle
25 | build
26 | .externalNativeBuild
27 | /local.properties
28 | *.iml
29 |
30 | # files for the dex VM
31 | *.dex
32 |
33 | # Java class files
34 | *.class
35 |
36 | bugreport.txt
37 | traces.txt
38 | /aandusb-aar/fataar-release.aar
39 | crash.txt
40 | error.txt
41 |
--------------------------------------------------------------------------------
/.idea/compiler.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
// Module-level build script for the sample application.
apply plugin: 'com.android.application'

android {
    // versionCompiler / versionBuildTool / versionTarget / supportLibVersion and the
    // java*Compatibility values are expected to be declared in the root project's
    // ext block — not visible from this file; TODO confirm against root build.gradle.
    compileSdkVersion versionCompiler
    buildToolsVersion versionBuildTool

    compileOptions {
        sourceCompatibility javaSourceCompatibility
        targetCompatibility javaTargetCompatibility
    }

    defaultConfig {
        // NOTE(review): applicationId is "timeLapserecordingsample" (capital L) while
        // the Java package is "timelapserecordingsample" — legal (applicationId need
        // not match the package) but looks accidental; changing it now would change
        // the installed app's identity, so leaving as-is.
        applicationId "com.serenegiant.timeLapserecordingsample"
        minSdkVersion 18    // MediaCodec surface-input recording needs API 18+
        targetSdkVersion versionTarget
        versionCode 1
        versionName "1.0"
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
}

dependencies {
    // NOTE(review): 'compile' is deprecated in favor of 'implementation', but the
    // project pins Gradle 3.3 via the wrapper, which predates the new configurations.
    compile fileTree(dir: 'libs', include: ['*.jar'])
    compile "com.android.support:appcompat-v7:${supportLibVersion}"
}

--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 |
3 | # IDE (e.g. Android Studio) users:
4 | # Gradle settings configured through the IDE *will override*
5 | # any settings specified in this file.
6 |
7 | # For more details on how to configure your build environment visit
8 | # http://www.gradle.org/docs/current/userguide/build_environment.html
9 |
10 | # Specifies the JVM arguments used for the daemon process.
11 | # The setting is particularly useful for tweaking memory settings.
12 | # Default value: -Xmx10248m -XX:MaxPermSize=256m
13 | # org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
14 |
15 | # When configured, Gradle will run in incubating parallel mode.
16 | # This option should only be used with decoupled projects. More details, visit
17 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
18 | # org.gradle.parallel=true
--------------------------------------------------------------------------------
/TimeLapseRecordingSample.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
26 |
33 |
--------------------------------------------------------------------------------
/app/src/main/res/menu/menu_main.xml:
--------------------------------------------------------------------------------
1 |
26 |
30 |
34 |
35 |
--------------------------------------------------------------------------------
/app/src/androidTest/java/com/serenegiant/timelapserecordingsample/ApplicationTest.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.timelapserecordingsample;
2 |
3 | /*
4 | * TimeLapseRecordingSample
5 | * Sample project to capture audio and video periodically from internal mic/camera
6 | * and save as time lapsed MPEG4 file.
7 | *
8 | * Copyright (c) 2015 saki t_saki@serenegiant.com
9 | *
10 | * File name: ApplicationTest.java
11 | *
12 | * Licensed under the Apache License, Version 2.0 (the "License");
13 | * you may not use this file except in compliance with the License.
14 | * You may obtain a copy of the License at
15 | *
16 | * http://www.apache.org/licenses/LICENSE-2.0
17 | *
18 | * Unless required by applicable law or agreed to in writing, software
19 | * distributed under the License is distributed on an "AS IS" BASIS,
20 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 | * See the License for the specific language governing permissions and
22 | * limitations under the License.
23 | *
24 | * All files in the folder are under this Apache License, Version 2.0.
25 | */
26 |
27 | import android.app.Application;
28 | import android.test.ApplicationTestCase;
29 |
30 | /**
31 | * Testing Fundamentals
32 | */
33 | public class ApplicationTest extends ApplicationTestCase {
34 | public ApplicationTest() {
35 | super(Application.class);
36 | // do nothing now
37 | }
38 | }
--------------------------------------------------------------------------------
/app/src/main/res/layout/fragment_camera.xml:
--------------------------------------------------------------------------------
1 |
26 |
31 |
32 |
37 |
38 |
48 |
49 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/timelapserecordingsample/MainActivity.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.timelapserecordingsample;
2 |
3 | /*
4 | * TimeLapseRecordingSample
5 | * Sample project to capture audio and video periodically from internal mic/camera
6 | * and save as time lapsed MPEG4 file.
7 | *
8 | * Copyright (c) 2015 saki t_saki@serenegiant.com
9 | *
10 | * File name: MainActivity.java
11 | *
12 | * Licensed under the Apache License, Version 2.0 (the "License");
13 | * you may not use this file except in compliance with the License.
14 | * You may obtain a copy of the License at
15 | *
16 | * http://www.apache.org/licenses/LICENSE-2.0
17 | *
18 | * Unless required by applicable law or agreed to in writing, software
19 | * distributed under the License is distributed on an "AS IS" BASIS,
20 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 | * See the License for the specific language governing permissions and
22 | * limitations under the License.
23 | *
24 | * All files in the folder are under this Apache License, Version 2.0.
25 | */
26 |
27 | import android.app.Activity;
28 | import android.os.Bundle;
29 | import android.view.Menu;
30 | import android.view.MenuItem;
31 |
32 | public class MainActivity extends Activity {
33 |
34 | @Override
35 | protected void onCreate(Bundle savedInstanceState) {
36 | super.onCreate(savedInstanceState);
37 | setContentView(R.layout.activity_main);
38 |
39 | if (savedInstanceState == null) {
40 | getFragmentManager().beginTransaction()
41 | .add(R.id.container, new CameraFragment()).commit();
42 | }
43 | }
44 |
45 | @Override
46 | public boolean onCreateOptionsMenu(Menu menu) {
47 |
48 | // Inflate the menu; this adds items to the action bar if it is present.
49 | getMenuInflater().inflate(R.menu.menu_main, menu);
50 | return true;
51 | }
52 |
53 | @Override
54 | public boolean onOptionsItemSelected(MenuItem item) {
55 | // Handle action bar item clicks here. The action bar will
56 | // automatically handle clicks on the Home/Up button, so long
57 | // as you specify a parent activity in AndroidManifest.xml.
58 | int id = item.getItemId();
59 | if (id == R.id.action_settings) {
60 | return true;
61 | }
62 | return super.onOptionsItemSelected(item);
63 | }
64 |
65 | }
66 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
26 |
27 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
41 |
42 |
47 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants

if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

set CMD_LINE_ARGS=%*
goto execute

:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if  not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega

--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 | 1.7
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/media/TLMediaAudioEncoder.java:
--------------------------------------------------------------------------------
package com.serenegiant.media;

/*
 * TimeLapseRecordingSample
 * Sample project to capture audio and video periodically from internal mic/camera
 * and save as time lapsed MPEG4 file.
 *
 * Copyright (c) 2015 saki t_saki@serenegiant.com
 *
 * File name: TLMediaAudioEncoder.java
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * All files in the folder are under this Apache License, Version 2.0.
 */

import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.util.Log;

import java.nio.ByteBuffer;

/**
 * Encoder class to encode audio data with AAC encoder and save into intermediate files
 */
public final class TLMediaAudioEncoder extends AbstractTLMediaAudioEncoder {
	private static final boolean DEBUG = false;
	private static final String TAG = "TLMediaAudioEncoder";

	private static final int SAMPLES_PER_FRAME = 1024;	// AAC, bytes/frame/channel
	private static final int FRAMES_PER_BUFFER = 25; 	// AAC, frame/buffer/sec

	/**
	 * Constructor(this class only support monaural audio source)
	 * Uses DEFAULT_SAMPLE_RATE / DEFAULT_BIT_RATE declared in the superclass.
	 * @param context
	 * @param base_path directory for the intermediate files
	 * @param listener encoder state callback
	 */
	public TLMediaAudioEncoder(final Context context, final String base_path, final MediaEncoderListener listener) {
		super(context, base_path, listener, DEFAULT_SAMPLE_RATE, DEFAULT_BIT_RATE);
	}

	/**
	 * Constructor(this class only support monaural audio source)
	 * @param context
	 * @param base_path directory for the intermediate files
	 * @param listener encoder state callback
	 * @param sample_rate default value is 44100(44.1kHz, 44.1KHz is only guarantee value on all devices)
	 * @param bit_rate default value is 64000(64kbps)
	 */
	public TLMediaAudioEncoder(final Context context, final String base_path, final MediaEncoderListener listener,
		final int sample_rate, final int bit_rate) {
		super(context, base_path, listener, sample_rate, bit_rate);
	}

	/**
	 * Capture loop run on the recording thread: reads 16-bit monaural PCM from
	 * the internal mic via AudioRecord and feeds each chunk to the encoder
	 * (with a presentation timestamp) until recording stops.
	 */
	@Override
	protected void recordingLoop() {
		// raise thread priority while capturing to reduce the risk of audio drop-outs
		android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
		try {
			final int min_buffer_size = AudioRecord.getMinBufferSize(
				mSampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
			int buffer_size = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
			// AudioRecord requires at least min_buffer_size; if our preferred size is
			// too small, round up to a whole number of frames (doubled for headroom)
			if (buffer_size < min_buffer_size)
				buffer_size = ((min_buffer_size / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;

			final AudioRecord audioRecord = new AudioRecord(
				MediaRecorder.AudioSource.MIC, mSampleRate,
				AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffer_size);
			try {
				if ((audioRecord.getState() == AudioRecord.STATE_INITIALIZED) && (mIsRunning)) {
					if (DEBUG) Log.v(TAG, "AudioThread:start_from_encoder audio recording");
					// direct buffer sized to exactly one AAC frame of samples
					final ByteBuffer buf = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);
					int readBytes;
					audioRecord.startRecording();
					try {
						while (mIsRunning && isRecording()) {
							// read audio data from internal mic
							buf.clear();
							readBytes = audioRecord.read(buf, SAMPLES_PER_FRAME);
							if (readBytes > 0) {
								// set audio data to encoder
								encode(buf, readBytes, getPTSUs());
								frameAvailableSoon();
							}
						}
						// nudge the encoder once more so it drains any pending data
						frameAvailableSoon();
					} finally {
						audioRecord.stop();
					}
				}
			} finally {
				audioRecord.release();
			}
		} catch (Exception e) {
			// NOTE(review): broad catch keeps the capture thread from crashing the app,
			// but it swallows the root cause beyond this log line.
			Log.e(TAG, "AudioThread#run", e);
		} finally {
			// restore normal priority once capture is finished
			android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_DEFAULT);
		}
	}

}
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

##############################################################################
##
##  Gradle start up script for UN*X
##
##############################################################################

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

warn ( ) {
    echo "$*"
}

die ( ) {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
esac

# For Cygwin, ensure paths are in UNIX format before anything is touched.
if $cygwin ; then
    [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
fi

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
SAVED="`pwd`"
# Fix: redirect to /dev/null instead of closing stdout with '>&-', which is
# rejected by some POSIX shells and breaks later output (matches the change
# made in newer upstream Gradle wrapper scripts).
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option

        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
    JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"

exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"

--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/glutils/RenderHandler.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.glutils;
2 |
3 | /*
4 | * TimeLapseRecordingSample
5 | * Sample project to capture audio and video periodically from internal mic/camera
6 | * and save as time lapsed MPEG4 file.
7 | *
8 | * Copyright (c) 2015 saki t_saki@serenegiant.com
9 | *
10 | * File name: RenderHandler.java
11 | *
12 | * Licensed under the Apache License, Version 2.0 (the "License");
13 | * you may not use this file except in compliance with the License.
14 | * You may obtain a copy of the License at
15 | *
16 | * http://www.apache.org/licenses/LICENSE-2.0
17 | *
18 | * Unless required by applicable law or agreed to in writing, software
19 | * distributed under the License is distributed on an "AS IS" BASIS,
20 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 | * See the License for the specific language governing permissions and
22 | * limitations under the License.
23 | *
24 | * All files in the folder are under this Apache License, Version 2.0.
25 | */
26 |
27 | import android.graphics.SurfaceTexture;
28 | import android.opengl.EGLContext;
29 | import android.text.TextUtils;
30 | import android.util.Log;
31 | import android.view.Surface;
32 | import android.view.SurfaceHolder;
33 |
34 | /**
35 | * Helper class to draw texture to whole view on private thread
36 | */
37 | public final class RenderHandler implements Runnable {
38 | private static final boolean DEBUG = false; // TODO set false on releasing
39 | private static final String TAG = "RenderHandler";
40 |
41 | private final Object mSync = new Object();
42 | private EGLContext mShard_context;
43 | private boolean mIsRecordable;
44 | private Object mSurface;
45 | private int mTexId = -1;
46 | private float[] mTexMatrix;
47 |
48 | private boolean mRequestSetEglContext;
49 | private boolean mRequestRelease;
50 | private int mRequestDraw;
51 |
	/**
	 * Factory method: spins up a dedicated worker thread running this handler
	 * and blocks until that thread signals readiness via mSync.
	 * @param name worker thread name; falls back to TAG when null or empty
	 * @return the handler whose worker thread has been started
	 */
	public static final RenderHandler createHandler(String name) {
		if (DEBUG) Log.v(TAG, "createHandler:");
		final RenderHandler handler = new RenderHandler();
		synchronized (handler.mSync) {
			new Thread(handler, !TextUtils.isEmpty(name) ? name : TAG).start();
			try {
				handler.mSync.wait();	// wait until the worker thread notifies start-up
			} catch (InterruptedException e) {
				// NOTE(review): interruption is swallowed — the handler may be returned
				// before its worker thread has finished initializing.
			}
		}
		return handler;
	}
64 |
65 | public final void setEglContext(EGLContext shared_context, int tex_id, Object surface, boolean isRecordable) {
66 | if (DEBUG) Log.i(TAG, "setEglContext:");
67 | if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture) && !(surface instanceof SurfaceHolder))
68 | throw new RuntimeException("unsupported window type:" + surface);
69 | synchronized (mSync) {
70 | if (mRequestRelease) return;
71 | mShard_context = shared_context;
72 | mTexId = tex_id;
73 | mSurface = surface;
74 | mIsRecordable = isRecordable;
75 | mRequestSetEglContext = true;
76 | mSync.notifyAll();
77 | try {
78 | mSync.wait();
79 | } catch (InterruptedException e) {
80 | }
81 | }
82 | }
83 |
84 | public final void draw() {
85 | draw(mTexId, mTexMatrix);
86 | }
87 |
88 | public final void draw(int tex_id) {
89 | draw(tex_id, mTexMatrix);
90 | }
91 |
92 | public final void draw(final float[] tex_matrix) {
93 | draw(mTexId, tex_matrix);
94 | }
95 |
96 | public final void draw(int tex_id, final float[] tex_matrix) {
97 | synchronized (mSync) {
98 | if (mRequestRelease) return;
99 | mTexId = tex_id;
100 | mTexMatrix = tex_matrix;
101 | mRequestDraw++;
102 | mSync.notifyAll();
103 | }
104 | }
105 |
106 | public boolean isValid() {
107 | synchronized (mSync) {
108 | return !(mSurface instanceof Surface) || ((Surface)mSurface).isValid();
109 | }
110 | }
111 |
112 | public final void release() {
113 | if (DEBUG) Log.i(TAG, "internal_release:");
114 | synchronized (mSync) {
115 | if (mRequestRelease) return;
116 | mRequestRelease = true;
117 | mSync.notifyAll();
118 | try {
119 | mSync.wait();
120 | } catch (InterruptedException e) {
121 | // ignore
122 | }
123 | }
124 | }
125 |
126 | //********************************************************************************
127 | //********************************************************************************
128 | private EGLBase mEgl;
129 | private EGLBase.EglSurface mInputSurface;
130 | private GLDrawer2D mDrawer;
131 |
132 | @Override
133 | public final void run() {
134 | if (DEBUG) Log.i(TAG, "RenderHandler thread started:");
135 | synchronized (mSync) {
136 | mRequestSetEglContext = mRequestRelease = false;
137 | mRequestDraw = 0;
138 | mSync.notifyAll();
139 | }
140 | boolean localRequestDraw;
141 | for (;;) {
142 | synchronized (mSync) {
143 | if (mRequestRelease) break;
144 | if (mRequestSetEglContext) {
145 | mRequestSetEglContext = false;
146 | internalPrepare();
147 | }
148 | localRequestDraw = mRequestDraw > 0;
149 | if (localRequestDraw) {
150 | mRequestDraw--;
151 | // mSync.notifyAll();
152 | }
153 | }
154 | if (localRequestDraw) {
155 | if ((mEgl != null) && mTexId >= 0) {
156 | mInputSurface.makeCurrent();
157 | mDrawer.draw(mTexId, mTexMatrix);
158 | mInputSurface.swap();
159 | }
160 | } else {
161 | synchronized(mSync) {
162 | try {
163 | mSync.wait();
164 | } catch (InterruptedException e) {
165 | break;
166 | }
167 | }
168 | }
169 | }
170 | synchronized (mSync) {
171 | mRequestRelease = true;
172 | internalRelease();
173 | mSync.notifyAll();
174 | }
175 | if (DEBUG) Log.i(TAG, "RenderHandler thread finished:");
176 | }
177 |
178 | private final void internalPrepare() {
179 | if (DEBUG) Log.i(TAG, "internalPrepare:");
180 | internalRelease();
181 | mEgl = new EGLBase(mShard_context, false, mIsRecordable);
182 |
183 | mInputSurface = mEgl.createFromSurface(mSurface);
184 |
185 | mInputSurface.makeCurrent();
186 | mDrawer = new GLDrawer2D();
187 | mSurface = null;
188 | mSync.notifyAll();
189 | }
190 |
191 | private final void internalRelease() {
192 | if (DEBUG) Log.i(TAG, "internalRelease:");
193 | if (mInputSurface != null) {
194 | mInputSurface.release();
195 | mInputSurface = null;
196 | }
197 | if (mDrawer != null) {
198 | mDrawer.release();
199 | mDrawer = null;
200 | }
201 | if (mEgl != null) {
202 | mEgl.release();
203 | mEgl = null;
204 | }
205 | }
206 |
207 | }
208 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/media/AbstractTLMediaAudioEncoder.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.media;
2 |
3 | /*
4 | * TimeLapseRecordingSample
5 | * Sample project to capture audio and video periodically from internal mic/camera
6 | * and save as time lapsed MPEG4 file.
7 | *
8 | * Copyright (c) 2015 saki t_saki@serenegiant.com
9 | *
 * File name: AbstractTLMediaAudioEncoder.java
11 | *
12 | * Licensed under the Apache License, Version 2.0 (the "License");
13 | * you may not use this file except in compliance with the License.
14 | * You may obtain a copy of the License at
15 | *
16 | * http://www.apache.org/licenses/LICENSE-2.0
17 | *
18 | * Unless required by applicable law or agreed to in writing, software
19 | * distributed under the License is distributed on an "AS IS" BASIS,
20 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 | * See the License for the specific language governing permissions and
22 | * limitations under the License.
23 | *
24 | * All files in the folder are under this Apache License, Version 2.0.
25 | */
26 |
27 | import android.annotation.SuppressLint;
28 | import android.content.Context;
29 | import android.media.AudioFormat;
30 | import android.media.MediaCodec;
31 | import android.media.MediaCodecInfo;
32 | import android.media.MediaCodecList;
33 | import android.media.MediaFormat;
34 | import android.util.Log;
35 |
36 | import java.io.IOException;
37 |
38 | /**
39 | * Encoder class to encode audio data with AAC encoder and save into intermediate files
40 | */
41 | public abstract class AbstractTLMediaAudioEncoder extends TLMediaEncoder {
42 | private static final boolean DEBUG = false;
43 | private final String TAG = getClass().getSimpleName();
44 |
45 | private static final String MIME_TYPE = "audio/mp4a-latm";
46 | protected static final int DEFAULT_SAMPLE_RATE = 44100; // 44.1[KHz] is only setting guaranteed to be available on all devices.
47 | protected static final int DEFAULT_BIT_RATE = 64000;
48 |
49 | protected final int mSampleRate; // 44100 = 44.1[KHz] is only setting guaranteed to be available on all devices.
50 | protected final int mBitRate; // 64000
51 |
52 | private AudioThread mAudioThread = null;
53 |
54 | /**
55 | * Constructor(this class only support monaural audio source)
56 | * @param context
57 | * @param base_path
58 | * @param listener
59 | * @param sample_rate default value is 44100(44.1kHz, 44.1KHz is only guarantee value on all devices)
60 | * @param bit_rate default value is 64000(64kbps)
61 | */
62 | public AbstractTLMediaAudioEncoder(final Context context, final String base_path,
63 | final MediaEncoderListener listener,
64 | final int sample_rate, final int bit_rate) {
65 |
66 | super(context, base_path, 1, listener);
67 | mSampleRate = sample_rate > 0 ? sample_rate : DEFAULT_SAMPLE_RATE;
68 | mBitRate = bit_rate > 0 ? bit_rate : DEFAULT_BIT_RATE;
69 | }
70 |
71 | @Override
72 | protected MediaFormat internal_prepare() throws IOException {
73 | if (DEBUG) Log.v(TAG, "prepare:");
74 | // prepare MediaCodec for AAC encoding of audio data from inernal mic.
75 | final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
76 | if (audioCodecInfo == null) {
77 | Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
78 | return null;
79 | }
80 | if (DEBUG) Log.i(TAG, "selected codec: " + audioCodecInfo.getName());
81 |
82 | final MediaFormat format = MediaFormat.createAudioFormat(MIME_TYPE, mSampleRate, 1);
83 | format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
84 | format.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
85 | format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
86 | format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
87 | // format.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
88 | // format.setLong(MediaFormat.KEY_DURATION, (long)durationInMs );
89 | if (DEBUG) Log.i(TAG, "prepare finishing:format=" + format);
90 | return format;
91 | }
92 |
93 | @Override
94 | protected MediaCodec internal_configure(MediaCodec codec,
95 | final MediaFormat format) throws IOException {
96 |
97 | if (DEBUG) Log.v(TAG, "internal_configure:");
98 | if (codec == null)
99 | codec = MediaCodec.createEncoderByType(MIME_TYPE);
100 | codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
101 | return codec;
102 | }
103 |
104 | @Override
105 | protected void callOnResume() {
106 | super.callOnResume();
107 | // create and execute audio capturing thread using internal mic
108 | if (mAudioThread == null) {
109 | mAudioThread = new AudioThread();
110 | mAudioThread.start();
111 | }
112 | }
113 |
114 | @Override
115 | protected void callOnPause() {
116 | mAudioThread = null;
117 | }
118 |
119 | /**
120 | * audio sampling loop. this method is executed on private thread
121 | * this method should return if mIsRunning=false or mRequestStop=true or mIsEOS=true.
122 | */
123 | protected abstract void recordingLoop();
124 |
125 | /**
126 | * Thread to capture audio data from internal mic as uncompressed 16bit PCM data
127 | * and write them to the MediaCodec encoder
128 | */
129 | private final class AudioThread extends Thread {
130 | @Override
131 | public final void run() {
132 | try {
133 | recordingLoop();
134 | } catch (Exception e) {
135 | Log.e(TAG, "AudioThread#run", e);
136 | }
137 | if (DEBUG) Log.v(TAG, "AudioThread:finished");
138 | }
139 | }
140 |
141 | /**
142 | * select the first codec that match a specific MIME type
143 | * @param mimeType
144 | * @return
145 | */
146 | @SuppressLint("LongLogTag")
147 | private static final MediaCodecInfo selectAudioCodec(final String mimeType) {
148 | if (DEBUG) Log.v("AbstractTLMediaAudioEncoder", "selectAudioCodec:");
149 |
150 | MediaCodecInfo result = null;
151 | // get the list of available codecs
152 | final int numCodecs = MediaCodecList.getCodecCount();
153 | LOOP: for (int i = 0; i < numCodecs; i++) {
154 | final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
155 | if (!codecInfo.isEncoder()) { // skip decoder
156 | continue;
157 | }
158 | final String[] types = codecInfo.getSupportedTypes();
159 | for (int j = 0; j < types.length; j++) {
160 | if (DEBUG) Log.i("AbstractTLMediaAudioEncoder",
161 | "supportedType:" + codecInfo.getName() + ",MIME=" + types[j]);
162 | if (types[j].equalsIgnoreCase(mimeType)) {
163 | result = codecInfo;
164 | break LOOP;
165 | }
166 | }
167 | }
168 | return result;
169 | }
170 |
171 | }
172 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/glutils/GLDrawer2D.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.glutils;
2 |
3 | /*
4 | * TimeLapseRecordingSample
5 | * Sample project to capture audio and video periodically from internal mic/camera
6 | * and save as time lapsed MPEG4 file.
7 | *
8 | * Copyright (c) 2015 saki t_saki@serenegiant.com
9 | *
10 | * File name: GLDrawer2D.java
11 | *
12 | * Licensed under the Apache License, Version 2.0 (the "License");
13 | * you may not use this file except in compliance with the License.
14 | * You may obtain a copy of the License at
15 | *
16 | * http://www.apache.org/licenses/LICENSE-2.0
17 | *
18 | * Unless required by applicable law or agreed to in writing, software
19 | * distributed under the License is distributed on an "AS IS" BASIS,
20 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 | * See the License for the specific language governing permissions and
22 | * limitations under the License.
23 | *
24 | * All files in the folder are under this Apache License, Version 2.0.
25 | */
26 |
27 | import android.opengl.GLES11Ext;
28 | import android.opengl.GLES20;
29 | import android.opengl.Matrix;
30 | import android.util.Log;
31 |
32 | import java.nio.ByteBuffer;
33 | import java.nio.ByteOrder;
34 | import java.nio.FloatBuffer;
35 |
36 | /**
37 | * Helper class to draw to whole view using specific texture and texture matrix
38 | */
39 | public class GLDrawer2D {
40 | private static final boolean DEBUG = false; // TODO set false on releasing
41 | private static final String TAG = "GLDrawer2D";
42 |
43 | private static final String vss
44 | = "uniform mat4 uMVPMatrix;\n"
45 | + "uniform mat4 uTexMatrix;\n"
46 | + "attribute highp vec4 aPosition;\n"
47 | + "attribute highp vec4 aTextureCoord;\n"
48 | + "varying highp vec2 vTextureCoord;\n"
49 | + "\n"
50 | + "void main() {\n"
51 | + " gl_Position = uMVPMatrix * aPosition;\n"
52 | + " vTextureCoord = (uTexMatrix * aTextureCoord).xy;\n"
53 | + "}\n";
54 | private static final String fss
55 | = "#extension GL_OES_EGL_image_external : require\n"
56 | + "precision mediump float;\n"
57 | + "uniform samplerExternalOES sTexture;\n"
58 | + "varying highp vec2 vTextureCoord;\n"
59 | + "void main() {\n"
60 | + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
61 | + "}";
62 | private static final float[] VERTICES = { 1.0f, 1.0f, -1.0f, 1.0f, 1.0f, -1.0f, -1.0f, -1.0f };
63 | private static final float[] TEXCOORD = { 1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f };
64 |
65 | private final FloatBuffer pVertex;
66 | private final FloatBuffer pTexCoord;
67 | private int hProgram;
68 | int maPositionLoc;
69 | int maTextureCoordLoc;
70 | int muMVPMatrixLoc;
71 | int muTexMatrixLoc;
72 | private final float[] mMvpMatrix = new float[16];
73 |
74 | private static final int FLOAT_SZ = Float.SIZE / 8;
75 | private static final int VERTEX_NUM = 4;
76 | private static final int VERTEX_SZ = VERTEX_NUM * 2;
77 | /**
78 | * Constructor
79 | * this should be called in GL context
80 | */
81 | public GLDrawer2D() {
82 | pVertex = ByteBuffer.allocateDirect(VERTEX_SZ * FLOAT_SZ)
83 | .order(ByteOrder.nativeOrder()).asFloatBuffer();
84 | pVertex.put(VERTICES);
85 | pVertex.flip();
86 | pTexCoord = ByteBuffer.allocateDirect(VERTEX_SZ * FLOAT_SZ)
87 | .order(ByteOrder.nativeOrder()).asFloatBuffer();
88 | pTexCoord.put(TEXCOORD);
89 | pTexCoord.flip();
90 |
91 | hProgram = loadShader(vss, fss);
92 | GLES20.glUseProgram(hProgram);
93 | maPositionLoc = GLES20.glGetAttribLocation(hProgram, "aPosition");
94 | maTextureCoordLoc = GLES20.glGetAttribLocation(hProgram, "aTextureCoord");
95 | muMVPMatrixLoc = GLES20.glGetUniformLocation(hProgram, "uMVPMatrix");
96 | muTexMatrixLoc = GLES20.glGetUniformLocation(hProgram, "uTexMatrix");
97 |
98 | Matrix.setIdentityM(mMvpMatrix, 0);
99 | GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mMvpMatrix, 0);
100 | GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, mMvpMatrix, 0);
101 | GLES20.glVertexAttribPointer(maPositionLoc, 2, GLES20.GL_FLOAT, false, VERTEX_SZ, pVertex);
102 | GLES20.glVertexAttribPointer(maTextureCoordLoc, 2, GLES20.GL_FLOAT, false, VERTEX_SZ, pTexCoord);
103 | GLES20.glEnableVertexAttribArray(maPositionLoc);
104 | GLES20.glEnableVertexAttribArray(maTextureCoordLoc);
105 | }
106 |
107 | /**
108 | * terminating, this should be called in GL context
109 | */
110 | public void release() {
111 | if (hProgram >= 0)
112 | GLES20.glDeleteProgram(hProgram);
113 | hProgram = -1;
114 | }
115 |
116 | /**
117 | * draw specific texture with specific texture matrix
118 | * @param tex_id texture ID
119 | * @param tex_matrix texture matrix、if this is null, the last one use(we don't check size of this array and needs at least 16 of float)
120 | */
121 | public void draw(int tex_id, float[] tex_matrix) {
122 | GLES20.glUseProgram(hProgram);
123 | if (tex_matrix != null)
124 | GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, tex_matrix, 0);
125 | GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mMvpMatrix, 0);
126 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
127 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex_id);
128 | GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, VERTEX_NUM);
129 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
130 | GLES20.glUseProgram(0);
131 | }
132 |
133 | /**
134 | * Set model/view/projection transform matrix
135 | * @param matrix
136 | * @param offset
137 | */
138 | public void setMatrix(final float[] matrix, final int offset) {
139 | if ((matrix != null) && (matrix.length >= offset + 16)) {
140 | System.arraycopy(matrix, offset, mMvpMatrix, 0, 16);
141 | }
142 | }
143 |
144 | /**
145 | * create external texture
146 | * @return texture ID
147 | */
148 | public static int initTex() {
149 | if (DEBUG) Log.v(TAG, "initTex:");
150 | final int[] tex = new int[1];
151 | GLES20.glGenTextures(1, tex, 0);
152 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
153 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
154 | GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
155 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
156 | GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
157 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
158 | GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
159 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
160 | GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
161 | return tex[0];
162 | }
163 |
164 | /**
165 | * delete specific texture
166 | */
167 | public static void deleteTex(int hTex) {
168 | if (DEBUG) Log.v(TAG, "deleteTex:");
169 | final int[] tex = new int[] {hTex};
170 | GLES20.glDeleteTextures(1, tex, 0);
171 | }
172 |
173 | /**
174 | * load, compile and link shader
175 | * @param vss source of vertex shader
176 | * @param fss source of fragment shader
177 | * @return
178 | */
179 | public static int loadShader(String vss, String fss) {
180 | if (DEBUG) Log.v(TAG, "loadShader:");
181 | int vs = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
182 | GLES20.glShaderSource(vs, vss);
183 | GLES20.glCompileShader(vs);
184 | final int[] compiled = new int[1];
185 | GLES20.glGetShaderiv(vs, GLES20.GL_COMPILE_STATUS, compiled, 0);
186 | if (compiled[0] == 0) {
187 | if (DEBUG) Log.e(TAG, "Failed to compile vertex shader:"
188 | + GLES20.glGetShaderInfoLog(vs));
189 | GLES20.glDeleteShader(vs);
190 | vs = 0;
191 | }
192 |
193 | int fs = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
194 | GLES20.glShaderSource(fs, fss);
195 | GLES20.glCompileShader(fs);
196 | GLES20.glGetShaderiv(fs, GLES20.GL_COMPILE_STATUS, compiled, 0);
197 | if (compiled[0] == 0) {
198 | if (DEBUG) Log.w(TAG, "Failed to compile fragment shader:"
199 | + GLES20.glGetShaderInfoLog(fs));
200 | GLES20.glDeleteShader(fs);
201 | fs = 0;
202 | }
203 |
204 | final int program = GLES20.glCreateProgram();
205 | GLES20.glAttachShader(program, vs);
206 | GLES20.glAttachShader(program, fs);
207 | GLES20.glLinkProgram(program);
208 |
209 | return program;
210 | }
211 |
212 | }
213 |
--------------------------------------------------------------------------------
/.idea/inspectionProfiles/Project_Default.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
--------------------------------------------------------------------------------
/app/app.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 | generateDebugSources
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/timelapserecordingsample/CameraFragment.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.timelapserecordingsample;
2 |
3 | /*
4 | * TimeLapseRecordingSample
5 | * Sample project to capture audio and video periodically from internal mic/camera
6 | * and save as time lapsed MPEG4 file.
7 | *
8 | * Copyright (c) 2015 saki t_saki@serenegiant.com
9 | *
10 | * File name: CameraFragment.java
11 | *
12 | * Licensed under the Apache License, Version 2.0 (the "License");
13 | * you may not use this file except in compliance with the License.
14 | * You may obtain a copy of the License at
15 | *
16 | * http://www.apache.org/licenses/LICENSE-2.0
17 | *
18 | * Unless required by applicable law or agreed to in writing, software
19 | * distributed under the License is distributed on an "AS IS" BASIS,
20 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 | * See the License for the specific language governing permissions and
22 | * limitations under the License.
23 | *
24 | * All files in the folder are under this Apache License, Version 2.0.
25 | */
26 |
27 | import android.app.Activity;
28 | import android.app.Fragment;
29 | import android.content.pm.ActivityInfo;
30 | import android.media.MediaScannerConnection;
31 | import android.os.Bundle;
32 | import android.text.TextUtils;
33 | import android.util.Log;
34 | import android.view.LayoutInflater;
35 | import android.view.MotionEvent;
36 | import android.view.View;
37 | import android.view.View.OnClickListener;
38 | import android.view.ViewGroup;
39 | import android.widget.ImageButton;
40 | import android.widget.Toast;
41 |
42 | import com.serenegiant.media.TLMediaAudioEncoder;
43 | import com.serenegiant.media.TLMediaEncoder;
44 | import com.serenegiant.media.TLMediaMovieBuilder;
45 | import com.serenegiant.media.TLMediaVideoEncoder;
46 |
47 | import java.io.IOException;
48 |
49 | public class CameraFragment extends Fragment {
50 | private static final boolean DEBUG = false; // TODO set false on releasing
51 | private static final String TAG = "CameraFragment";
52 |
53 | /**
54 | * for camera preview display
55 | */
56 | private CameraGLView mCameraView;
57 | /**
58 | * button for start/stop recording
59 | */
60 | private ImageButton mRecordButton;
61 | private TLMediaVideoEncoder mVideoEncoder;
62 | private TLMediaAudioEncoder mAudioEncoder;
63 | private TLMediaMovieBuilder mMuxer;
64 | private boolean mIsRecording;
65 | private String mMovieName;
66 |
67 | public CameraFragment() {
68 | // need default constructor
69 | }
70 |
71 | @Override
72 | public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
73 | final View rootView = inflater.inflate(R.layout.fragment_camera, container, false);
74 | mCameraView = rootView.findViewById(R.id.cameraView);
75 | mCameraView.setVideoSize(1280, 720);
76 | mCameraView.setOnTouchListener(mOnTouchListener);
77 | mRecordButton = rootView.findViewById(R.id.record_button);
78 | mRecordButton.setOnClickListener(mOnClickListener);
79 | return rootView;
80 | }
81 |
82 | @Override
83 | public void onResume() {
84 | super.onResume();
85 | if (DEBUG) Log.v(TAG, "onResume:");
86 | mCameraView.onResume();
87 | }
88 |
89 | @Override
90 | public void onPause() {
91 | if (DEBUG) Log.v(TAG, "onPause:");
92 | stopRecording();
93 | mCameraView.onPause();
94 | super.onPause();
95 | }
96 |
97 | /*
98 | *
99 | */
100 | public final void fixedScreenOrientation(final boolean fixed) {
101 | getActivity().setRequestedOrientation(
102 | fixed ? ActivityInfo.SCREEN_ORIENTATION_LOCKED : ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
103 | }
104 |
105 | /**
106 | * method when touch record button
107 | */
108 | private final OnClickListener mOnClickListener = new OnClickListener() {
109 | @Override
110 | public void onClick(View view) {
111 | switch (view.getId()) {
112 | case R.id.record_button:
113 | if (!mIsRecording) {
114 | startRecording();
115 | } else {
116 | stopRecording();
117 | }
118 | break;
119 | }
120 | }
121 | };
122 |
123 | private final View.OnTouchListener mOnTouchListener = new View.OnTouchListener() {
124 | @Override
125 | public boolean onTouch(View v, MotionEvent event) {
126 | if (mIsRecording) {
127 | switch (event.getAction()) {
128 | case MotionEvent.ACTION_DOWN:
129 | resumeRecording();
130 | break;
131 | case MotionEvent.ACTION_MOVE:
132 | break;
133 | case MotionEvent.ACTION_CANCEL:
134 | case MotionEvent.ACTION_UP:
135 | pauseRecording();
136 | break;
137 | }
138 | return true;
139 | } else
140 | return false;
141 | }
142 | };
143 |
144 | /**
145 | * start recording
146 | * This is a sample project and call this on UI thread to avoid being complicated
147 | * but basically this should be called on private thread because preparing
148 | * of encoder may be heavy work on some devices
149 | */
150 | private void startRecording() {
151 | if (mIsRecording) return;
152 | if (DEBUG) Log.v(TAG, "start:");
153 | try {
154 | mRecordButton.setColorFilter(0xffffff00); // turn yellow
155 | mMovieName = TAG; // + System.nanoTime();
156 | if (true) {
157 | // for video capturing
158 | mVideoEncoder = new TLMediaVideoEncoder(getActivity(), mMovieName, mMediaEncoderListener);
159 | try {
160 | mVideoEncoder.setFormat(mCameraView.getVideoWidth(), mCameraView.getVideoHeight());
161 | mVideoEncoder.prepare();
162 | } catch (Exception e) {
163 | Log.e(TAG, "startRecording:", e);
164 | mVideoEncoder.release();
165 | mVideoEncoder = null;
166 | throw e;
167 | }
168 | }
169 | if (true) {
170 | // for audio capturing
171 | mAudioEncoder = new TLMediaAudioEncoder(getActivity(), mMovieName, mMediaEncoderListener);
172 | try {
173 | mAudioEncoder.prepare();
174 | } catch (Exception e) {
175 | Log.e(TAG, "startRecording:", e);
176 | mAudioEncoder.release();
177 | mAudioEncoder = null;
178 | throw e;
179 | }
180 | }
181 | if (mVideoEncoder != null) {
182 | mVideoEncoder.start(true);
183 | }
184 | if (mAudioEncoder != null) {
185 | mAudioEncoder.start(true);
186 | }
187 | mIsRecording = true;
188 | Toast.makeText(getActivity(), R.string.recording_hint, Toast.LENGTH_SHORT).show();
189 | } catch (Exception e) {
190 | mRecordButton.setColorFilter(0);
191 | Log.e(TAG, "startCapture:", e);
192 | }
193 | fixedScreenOrientation(mIsRecording);
194 | }
195 |
196 | /**
197 | * request stop recording
198 | */
199 | private void stopRecording() {
200 | if (!mIsRecording) return;
201 | if (DEBUG) Log.v(TAG, "stop");
202 | mIsRecording = false;
203 | mRecordButton.setColorFilter(0); // return to default color
204 | if (mVideoEncoder != null) {
205 | mVideoEncoder.stop();
206 | mVideoEncoder.release();
207 | }
208 | if (mAudioEncoder != null) {
209 | mAudioEncoder.stop();
210 | mAudioEncoder.release();
211 | }
212 | fixedScreenOrientation(mIsRecording);
213 | try {
214 | mMuxer = new TLMediaMovieBuilder(getActivity(), mMovieName);
215 | mMuxer.build(mTLMediaMovieBuilderCallback);
216 | } catch (Exception e) {
217 | e.printStackTrace();
218 | }
219 | }
220 |
221 | /**
222 | * resume recording
223 | */
224 | private void resumeRecording() {
225 | if (!mIsRecording) return;
226 | mRecordButton.setColorFilter(0xffff0000); // turn red
227 | try {
228 | if (mVideoEncoder != null) {
229 | if (mVideoEncoder.isPaused())
230 | mVideoEncoder.resume();
231 | }
232 | if (mAudioEncoder != null) {
233 | if (mAudioEncoder.isPaused())
234 | mAudioEncoder.resume();
235 | }
236 | } catch (IOException e) {
237 | stopRecording();
238 | }
239 | }
240 |
241 | /**
242 | * pause recording
243 | */
244 | private void pauseRecording() {
245 | if (!mIsRecording) return;
246 | mRecordButton.setColorFilter(0xffffff00); // turn yellow
247 | if ((mVideoEncoder != null) && !mVideoEncoder.isPaused())
248 | try {
249 | mVideoEncoder.pause();
250 | } catch (Exception e) {
251 | Log.e(TAG, "pauseRecording:", e);
252 | mVideoEncoder.release();
253 | mVideoEncoder = null;
254 | }
255 | if ((mAudioEncoder != null) && !mAudioEncoder.isPaused())
256 | try {
257 | mAudioEncoder.pause();
258 | } catch (Exception e) {
259 | Log.e(TAG, "pauseRecording:", e);
260 | mAudioEncoder.release();
261 | mAudioEncoder = null;
262 | }
263 | }
264 |
265 | /**
266 | * callback methods from encoder
267 | */
268 | private final TLMediaEncoder.MediaEncoderListener mMediaEncoderListener
269 | = new TLMediaEncoder.MediaEncoderListener() {
270 |
271 | @Override
272 | public void onPrepared(TLMediaEncoder encoder) {
273 | if (DEBUG) Log.v(TAG, "onPrepared:encoder=" + encoder);
274 | }
275 |
276 | @Override
277 | public void onStopped(TLMediaEncoder encoder) {
278 | if (DEBUG) Log.v(TAG, "onStopped:encoder=" + encoder);
279 | }
280 |
281 | @Override
282 | public void onResume(TLMediaEncoder encoder) {
283 | if (DEBUG) Log.v(TAG, "onResume:encoder=" + encoder);
284 | if (encoder instanceof TLMediaVideoEncoder)
285 | mCameraView.setVideoEncoder((TLMediaVideoEncoder)encoder);
286 | }
287 |
288 | @Override
289 | public void onPause(TLMediaEncoder encoder) {
290 | if (DEBUG) Log.v(TAG, "onPause:encoder=" + encoder);
291 | if (encoder instanceof TLMediaVideoEncoder)
292 | mCameraView.setVideoEncoder(null);
293 | }
294 | };
295 |
296 | /**
297 | * callback methods from TLMediaMovieBuilder
298 | */
299 | private TLMediaMovieBuilder.TLMediaMovieBuilderCallback mTLMediaMovieBuilderCallback
300 | = new TLMediaMovieBuilder.TLMediaMovieBuilderCallback() {
301 |
302 | @Override
303 | public void onFinished(String output_path) {
304 | if (DEBUG) Log.v(TAG, "onFinished:");
305 | mMuxer = null;
306 | if (!TextUtils.isEmpty(output_path)) {
307 | final Activity activity = CameraFragment.this.getActivity();
308 | if ((activity == null) || activity.isFinishing()) return;
309 | // add movie to gallery
310 | MediaScannerConnection.scanFile(activity, new String[] {output_path}, null, null);
311 | }
312 | }
313 |
314 | @Override
315 | public void onError(Exception e) {
316 | if (DEBUG) Log.v(TAG, "onError:" + e.getMessage());
317 | }
318 | };
319 | }
320 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/media/TLMediaMovieBuilder.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.media;
2 |
3 | /*
4 | * TimeLapseRecordingSample
5 | * Sample project to capture audio and video periodically from internal mic/camera
6 | * and save as time lapsed MPEG4 file.
7 | *
8 | * Copyright (c) 2015 saki t_saki@serenegiant.com
9 | *
10 | * File name: TLMediaMovieBuilder.java
11 | *
12 | * Licensed under the Apache License, Version 2.0 (the "License");
13 | * you may not use this file except in compliance with the License.
14 | * You may obtain a copy of the License at
15 | *
16 | * http://www.apache.org/licenses/LICENSE-2.0
17 | *
18 | * Unless required by applicable law or agreed to in writing, software
19 | * distributed under the License is distributed on an "AS IS" BASIS,
20 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 | * See the License for the specific language governing permissions and
22 | * limitations under the License.
23 | *
24 | * All files in the folder are under this Apache License, Version 2.0.
25 | */
26 |
27 | import android.content.Context;
28 | import android.media.MediaCodec;
29 | import android.media.MediaFormat;
30 | import android.media.MediaMuxer;
31 | import android.os.Environment;
32 | import android.text.TextUtils;
33 | import android.util.Log;
34 |
35 | import java.io.DataInputStream;
36 | import java.io.File;
37 | import java.io.IOException;
38 | import java.nio.ByteBuffer;
39 | import java.text.SimpleDateFormat;
40 | import java.util.GregorianCalendar;
41 | import java.util.Locale;
42 |
43 | /**
44 | * Builder class to build actual mp4 file from intermediate files made by TLMediaEncoder and it's inheritor
45 | */
46 | public class TLMediaMovieBuilder {
47 | private static final boolean DEBUG = false;
48 | private static final String TAG = "TLMediaMovieBuilder";
49 |
50 | private static final long MSEC30US = 1000000 / 30;
51 | private static String DIR_NAME = "TimeLapseRecordingSample";
52 |
53 | private final File mBaseDir;
54 | private String mOutputPath;
55 | private MuxerTask mMuxerTask;
56 |
57 | public interface TLMediaMovieBuilderCallback {
58 | /**
59 | * called when finished movie building
60 | * @param output_path output movie file path, may null when canceled or error occurred etc.
61 | */
62 | public void onFinished(String output_path);
63 | /**
64 | * called when error occurred while movie building
65 | * @param e
66 | */
67 | public void onError(Exception e);
68 | }
69 |
70 | /**
71 | * set output directory name
72 | * the actual directory is {DIRECTORY_MOVIES}/dir_name
73 | * @param dir_name
74 | */
75 | public static final void setDirName(final String dir_name) {
76 | if (TextUtils.isEmpty(dir_name))
77 | throw new IllegalArgumentException("dir_name should not be null/empty");
78 | DIR_NAME = dir_name;
79 | }
80 |
81 | /**
82 | * Constructor
83 | * @param movie_name directory name where intermediate files exist
84 | * @throws IOException
85 | */
86 | public TLMediaMovieBuilder(final Context context,
87 | final String movie_name) throws IOException {
88 |
89 | mBaseDir = new File(context.getExternalFilesDir(Environment.DIRECTORY_MOVIES), movie_name);
90 | mOutputPath = getCaptureFile(Environment.DIRECTORY_MOVIES, ".mp4").toString();
91 | }
92 |
93 | /**
94 | * get output movie file path
95 | * @return
96 | */
97 | public String getOutputPath() {
98 | return mOutputPath;
99 | }
100 |
101 | /**
102 | * set output movie file path, should be called before #build
103 | * @param path
104 | */
105 | public void setOutputPath(final String path) {
106 | mOutputPath = path;
107 | }
108 |
109 | /**
110 | * build movie file from intermediate file.
111 | * this method is executed asynchronously.
112 | */
113 | public synchronized void build(final TLMediaMovieBuilderCallback callback) {
114 | if (DEBUG) Log.v(TAG, "build:");
115 | cancel();
116 | mMuxerTask = new MuxerTask(this, callback);
117 | mMuxerTask.start();
118 | }
119 |
120 | public synchronized void cancel() {
121 | if (mMuxerTask != null) {
122 | mMuxerTask.cancel();
123 | }
124 | }
125 |
126 | private final synchronized void finishBuild(MuxerTask muxer_task) {
127 | if (muxer_task.equals(mMuxerTask))
128 | mMuxerTask = null;
129 | }
130 |
131 | //**********************************************************************
132 | //**********************************************************************
133 | /**
134 | * make output file name
135 | * @param type Environment.DIRECTORY_MOVIES / Environment.DIRECTORY_DCIM etc.
136 | * @param ext .mp4(.m4a for audio) or .png
137 | * @return return null when this app has no writing permission to external storage.
138 | */
139 | public static final File getCaptureFile(final String type, final String ext) {
140 | final File dir = new File(Environment.getExternalStoragePublicDirectory(type), DIR_NAME);
141 | Log.d(TAG, "path=" + dir.toString());
142 | dir.mkdirs();
143 | if (dir.canWrite()) {
144 | return new File(dir, getDateTimeString() + ext);
145 | }
146 | return null;
147 | }
148 |
149 | /**
150 | * make String came from current datetime
151 | * @return
152 | */
153 | private static final String getDateTimeString() {
154 | final GregorianCalendar now = new GregorianCalendar();
155 | final SimpleDateFormat dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss", Locale.US);
156 | return dateTimeFormat.format(now.getTime());
157 | }
158 |
159 | /**
160 | * building task executing on private thread
161 | */
162 | private static final class MuxerTask extends Thread {
163 | private final Object mSync = new Object();
164 | private final TLMediaMovieBuilder mBuilder;
165 | private final File mMovieDir;
166 | private final TLMediaMovieBuilderCallback mCallback;
167 | private final String mMuxerFilePath;
168 |
169 | private volatile boolean mIsRunning = true;
170 |
171 | public MuxerTask(final TLMediaMovieBuilder builder, final TLMediaMovieBuilderCallback callback) {
172 | super(TAG);
173 | mBuilder = builder;
174 | mMovieDir = builder.mBaseDir;
175 | mCallback = callback;
176 | mMuxerFilePath = builder.mOutputPath;
177 | }
178 |
179 | public void cancel() {
180 | mIsRunning = false;
181 | }
182 |
183 | @Override
184 | public void run() {
185 | if (DEBUG) Log.v(TAG, "MuxerTask#run");
186 | boolean isMuxerStarted = false;
187 | try {
188 | final MediaMuxer muxer = new MediaMuxer(mMuxerFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
189 | if (muxer != null)
190 | try {
191 | int videoTrack = -1;
192 | int audioTrack = -1;
193 | final DataInputStream videoIn = TLMediaEncoder.openInputStream(mMovieDir, TLMediaEncoder.TYPE_VIDEO, 0);
194 | if (videoIn != null) {
195 | final MediaFormat format = TLMediaEncoder.readFormat(videoIn);
196 | if (format != null) {
197 | videoTrack = muxer.addTrack(format);
198 | if (DEBUG) Log.v(TAG, "found video data:format=" + format + "track=" + videoTrack);
199 | }
200 | }
201 | final DataInputStream audioIn = TLMediaEncoder.openInputStream(mMovieDir, TLMediaEncoder.TYPE_AUDIO, 0);
202 | if (audioIn != null) {
203 | final MediaFormat format = TLMediaEncoder.readFormat(audioIn);
204 | if (format != null) {
205 | audioTrack = muxer.addTrack(format);
206 | if (DEBUG) Log.v(TAG, "found audio data:format=" + format + "track=" + audioTrack);
207 | }
208 | }
209 | if ((videoTrack >= 0) || (audioTrack >= 0)) {
210 | if (DEBUG) Log.v(TAG, "start muxing");
211 | ByteBuffer videoBuf = null;
212 | MediaCodec.BufferInfo videoBufInfo = null;
213 | TLMediaEncoder.TLMediaFrameHeader videoFrameHeader = null;
214 | if (videoTrack >= 0) {
215 | videoBufInfo = new MediaCodec.BufferInfo();
216 | videoFrameHeader = new TLMediaEncoder.TLMediaFrameHeader();
217 | }
218 | ByteBuffer audioBuf = null;
219 | MediaCodec.BufferInfo audioBufInfo = new MediaCodec.BufferInfo();
220 | TLMediaEncoder.TLMediaFrameHeader audioFrameHeader = null;
221 | if (audioTrack >= 0) {
222 | audioBufInfo = new MediaCodec.BufferInfo();
223 | audioFrameHeader = new TLMediaEncoder.TLMediaFrameHeader();
224 | }
225 | final byte[] readBuf = new byte[64 * 1024];
226 | isMuxerStarted = true;
227 | int videoSequence = 0;
228 | int audioSequence = 0;
229 | long videoTimeOffset = -1, videoPresentationTimeUs = -MSEC30US;
230 | long audioTimeOffset = -1, audioPresentationTimeUs = -MSEC30US;
231 | muxer.start();
232 | for (; mIsRunning && ((videoTrack >= 0) || (audioTrack >= 0)); ) {
233 | if (videoTrack >= 0) {
234 | try {
235 | videoBuf = TLMediaEncoder.readStream(videoIn, videoFrameHeader, videoBuf, readBuf);
236 | videoFrameHeader.asBufferInfo(videoBufInfo);
237 | if (videoSequence != videoFrameHeader.sequence) {
238 | videoSequence = videoFrameHeader.sequence;
239 | videoTimeOffset = videoPresentationTimeUs - videoBufInfo.presentationTimeUs + MSEC30US;
240 | }
241 | videoBufInfo.presentationTimeUs += videoTimeOffset;
242 | muxer.writeSampleData(videoTrack, videoBuf, videoBufInfo);
243 | videoPresentationTimeUs = videoBufInfo.presentationTimeUs;
244 | } catch (IllegalArgumentException e) {
245 | if (DEBUG) Log.d(TAG, String.format("MuxerTask(video):size=%d,presentationTimeUs=%d,",
246 | videoBufInfo.size, videoBufInfo.presentationTimeUs) + videoFrameHeader, e);
247 | videoTrack = -1; // end
248 | } catch (IOException e) {
249 | videoTrack = -1; // end
250 | }
251 | }
252 | if (audioTrack >= 0) {
253 | try {
254 | audioBuf = TLMediaEncoder.readStream(audioIn, audioFrameHeader, audioBuf, readBuf);
255 | audioFrameHeader.asBufferInfo(audioBufInfo);
256 | if (audioSequence != audioFrameHeader.sequence) {
257 | audioSequence = audioFrameHeader.sequence;
258 | audioTimeOffset = audioPresentationTimeUs - audioBufInfo.presentationTimeUs + MSEC30US;
259 | }
260 | audioBufInfo.presentationTimeUs += audioTimeOffset;
261 | muxer.writeSampleData(audioTrack, audioBuf, audioBufInfo);
262 | audioPresentationTimeUs = audioBufInfo.presentationTimeUs;
263 | } catch (IllegalArgumentException e) {
264 | if (DEBUG) Log.d(TAG, String.format("MuxerTask(audio):size=%d,presentationTimeUs=%d,",
265 | audioBufInfo.size, audioBufInfo.presentationTimeUs) + audioFrameHeader, e);
266 | audioTrack = -1; // end
267 | } catch (IOException e) {
268 | audioTrack = -1; // end
269 | }
270 | }
271 | }
272 | muxer.stop();
273 | }
274 | if (videoIn != null) {
275 | videoIn.close();
276 | }
277 | if (audioIn != null) {
278 | audioIn.close();
279 | }
280 | } finally {
281 | muxer.release();
282 | }
283 | } catch (Exception e) {
284 | Log.w(TAG, "failed to build movie file:", e);
285 | mIsRunning = false;
286 | synchronized (mSync) {
287 | if (mCallback != null) {
288 | mCallback.onError(e);
289 | }
290 | }
291 | }
292 | // remove intermediate files and its directory
293 | TLMediaEncoder.delete(mMovieDir);
294 | mBuilder.finishBuild(this);
295 | if (DEBUG) Log.v(TAG, "MuxerTask#finished");
296 | synchronized (mSync) {
297 | if (mCallback != null) {
298 | mCallback.onFinished(mIsRunning && isMuxerStarted ? mMuxerFilePath : null);
299 | }
300 | }
301 | }
302 | }
303 | }
304 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/media/TLMediaVideoEncoder.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.media;
2 |
3 | /*
4 | * TimeLapseRecordingSample
5 | * Sample project to capture audio and video periodically from internal mic/camera
6 | * and save as time lapsed MPEG4 file.
7 | *
8 | * Copyright (c) 2015 saki t_saki@serenegiant.com
9 | *
10 | * File name: TLMediaVideoEncoder.java
11 | *
12 | * Licensed under the Apache License, Version 2.0 (the "License");
13 | * you may not use this file except in compliance with the License.
14 | * You may obtain a copy of the License at
15 | *
16 | * http://www.apache.org/licenses/LICENSE-2.0
17 | *
18 | * Unless required by applicable law or agreed to in writing, software
19 | * distributed under the License is distributed on an "AS IS" BASIS,
20 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 | * See the License for the specific language governing permissions and
22 | * limitations under the License.
23 | *
24 | * All files in the folder are under this Apache License, Version 2.0.
25 | */
26 |
27 | import android.content.Context;
28 | import android.media.MediaCodec;
29 | import android.media.MediaCodecInfo;
30 | import android.media.MediaCodecList;
31 | import android.media.MediaFormat;
32 | import android.opengl.EGLContext;
33 | import android.util.Log;
34 | import android.view.Surface;
35 |
36 | import com.serenegiant.glutils.RenderHandler;
37 |
38 | import java.io.IOException;
39 |
40 | /**
41 | * Video Encoder
42 | */
43 | public class TLMediaVideoEncoder extends TLMediaEncoder {
44 | private static final boolean DEBUG = false;
45 | private static final String TAG = TLMediaVideoEncoder.class.getSimpleName();
46 |
47 | private static final String MIME_TYPE = "video/avc";
48 | private static final int DEFAULT_VIDEO_WIDTH = 640;
49 | private static final int DEFAULT_VIDEO_HEIGHT = 480;
50 | private static final int DEFAULT_FRAME_RATE = 25;
51 | private static final float DEFAULT_BPP = 0.25f;
52 | private static final int DEFAULT_IFRAME_INTERVALS = 2;
53 | private static final int MAX_BITRATE = 17825792; // 17Mbps
54 |
55 | private int mWidth = DEFAULT_VIDEO_WIDTH;
56 | private int mHeight = DEFAULT_VIDEO_HEIGHT;
57 | private int mFrameRate = DEFAULT_FRAME_RATE;
58 | private int mBitRate = -1;
59 | private int mIFrameIntervals = DEFAULT_IFRAME_INTERVALS;
60 |
61 | private RenderHandler mRenderHandler;
62 | private Surface mSurface;
63 |
64 | /**
65 | * Constructor
66 | * @param context
67 | * @param base_path
68 | * @param listener
69 | */
70 | public TLMediaVideoEncoder(final Context context,
71 | final String base_path, final MediaEncoderListener listener) {
72 |
73 | super(context, base_path, 0, listener);
74 | if (DEBUG) Log.i(TAG, "TLMediaVideoEncoder: ");
75 | mRenderHandler = RenderHandler.createHandler(TAG);
76 | }
77 |
78 | /**
79 | * get Surface for input
80 | */
81 | public final Surface getInputSurface() {
82 | if (mSurface == null)
83 | throw new IllegalStateException("encoder have not initialized yet");
84 | return mSurface;
85 | }
86 |
87 | public boolean frameAvailableSoon(final float[] tex_matrix) {
88 | boolean result;
89 | if (result = super.frameAvailableSoon())
90 | mRenderHandler.draw(tex_matrix);
91 | return result;
92 | }
93 |
94 | @Override
95 | public boolean frameAvailableSoon() {
96 | boolean result;
97 | if (result = super.frameAvailableSoon())
98 | mRenderHandler.draw(null);
99 | return result;
100 | }
101 |
102 | /**
103 | * setup video encoder. should be called before #prepare
104 | * @param width negative value means using default value(640)
105 | * @param height negative value means using default value(480)
106 | * @param width
107 | * @param height
108 | */
109 | public void setFormat(final int width, final int height) {
110 | setFormat(width, height, mFrameRate, -1, mIFrameIntervals);
111 | }
112 |
113 | /**
114 | * setup video encoder. should be called before #prepare
115 | * @param width negative value means using default value(640)
116 | * @param height negative value means using default value(480)
117 | * @param framerate negative value means using default value(30fps)
118 | * @param bitrate negative value means using default value(calculate from BPP0.25, width,height and framerate)
119 | */
120 | public void setFormat(final int width, final int height, final int framerate, final int bitrate) {
121 | setFormat(width, height, framerate, bitrate, mIFrameIntervals);
122 | }
123 |
124 | /**
125 | * setup video encoder. should be called before #prepare
126 | * @param width negative value means using default value(640)
127 | * @param height negative value means using default value(480)
128 | * @param framerate negative value means using default value(30fps)
129 | * @param bitrate negative value means using default value(calculate from BPP0.25, width,height and framerate)
130 | * @param iframe_intervals negative value means using default value(10)
131 | */
132 | public void setFormat(final int width, final int height,
133 | final int framerate, final int bitrate, final int iframe_intervals) {
134 |
135 | if (DEBUG) Log.v(TAG, String.format("requested setFormat:size(%d,%d),fps=%d,bps=%d,iframe=%d",
136 | width, height, framerate, bitrate, iframe_intervals));
137 | if (mSurface != null)
138 | throw new IllegalStateException("already prepared");
139 | if (width > 0) mWidth = width;
140 | else mWidth = DEFAULT_VIDEO_WIDTH;
141 |
142 | if (height > 0) mHeight = height;
143 | else mHeight = DEFAULT_VIDEO_HEIGHT;
144 |
145 | if (framerate > 0) mFrameRate = framerate;
146 | else mFrameRate = DEFAULT_FRAME_RATE;
147 |
148 | mBitRate = bitrate;
149 |
150 | if (iframe_intervals > 0) mIFrameIntervals = iframe_intervals;
151 | else mIFrameIntervals = DEFAULT_IFRAME_INTERVALS;
152 | if (DEBUG) Log.v(TAG, String.format("setFormat:size(%d,%d),fps=%d,bps=%d,iframe=%d",
153 | mWidth, mHeight, mFrameRate, mBitRate, mIFrameIntervals));
154 | }
155 |
156 | @Override
157 | protected MediaFormat internal_prepare() throws IOException {
158 | if (DEBUG) Log.i(TAG, "internal_prepare: ");
159 |
160 | final MediaCodecInfo videoCodecInfo = selectVideoCodec(MIME_TYPE);
161 | if (videoCodecInfo == null) {
162 | Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
163 | return null;
164 | }
165 | if (DEBUG) Log.i(TAG, "selected codec: " + videoCodecInfo.getName());
166 |
167 | final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
168 | if (DEBUG) Log.i(TAG, "prepare finishing:format=" + format);
169 | return format;
170 | }
171 |
172 | @Override
173 | protected MediaCodec internal_configure(MediaCodec previous_codec,
174 | final MediaFormat format) throws IOException {
175 |
176 | if (DEBUG) Log.v(TAG, "internal_configure:");
177 | format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); // API >= 18
178 | format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate > 0 ? mBitRate : calcBitRate());
179 | format.setInteger(MediaFormat.KEY_FRAME_RATE, mFrameRate);
180 | format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, mIFrameIntervals);
181 | if (DEBUG) Log.i(TAG, "format: " + format);
182 |
183 | if (previous_codec == null)
184 | previous_codec = MediaCodec.createEncoderByType(MIME_TYPE);
185 | previous_codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
186 | mSurface = previous_codec.createInputSurface(); // API >= 18
187 | return previous_codec;
188 | }
189 |
190 | public void setEglContext(EGLContext shared_context, int tex_id) {
191 | mRenderHandler.setEglContext(shared_context, tex_id, mSurface, true);
192 | }
193 |
194 | @Override
195 | protected void internal_release() {
196 | if (DEBUG) Log.i(TAG, "internal_release: ");
197 | if (mSurface != null) {
198 | mSurface.release();
199 | mSurface = null;
200 | }
201 | if (mRenderHandler != null) {
202 | mRenderHandler.release();
203 | mRenderHandler = null;
204 | }
205 | super.internal_release();
206 | }
207 |
208 | /**
209 | * calculate bit rate
210 | * @return
211 | */
212 | private final int calcBitRate() {
213 | int bitrate = (int)(DEFAULT_BPP * mFrameRate * mWidth * mHeight);
214 | if (bitrate > MAX_BITRATE) bitrate = MAX_BITRATE;
215 | Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
216 | return bitrate;
217 | }
218 |
219 | /**
220 | * select first encoder matched to specific MIME
221 | * @param mimeType
222 | * @return return null if not found
223 | */
224 | @SuppressWarnings("deprecation")
225 | protected static final MediaCodecInfo selectVideoCodec(final String mimeType) {
226 | if (DEBUG) Log.v(TAG, "selectVideoCodec:");
227 |
228 | // get the list of available codecs
229 | final int numCodecs = MediaCodecList.getCodecCount();
230 | for (int i = 0; i < numCodecs; i++) {
231 | final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
232 |
233 | if (!codecInfo.isEncoder()) { // skipp decoder
234 | continue;
235 | }
236 | // select first codec that match a specific MIME type and color format
237 | final String[] types = codecInfo.getSupportedTypes();
238 | for (String type : types) {
239 | if (type.equalsIgnoreCase(mimeType)) {
240 | if (DEBUG) Log.i(TAG, "codec:" + codecInfo.getName() + ",MIME=" + type);
241 | int format = selectColorFormat(codecInfo, mimeType);
242 | if (format > 0) {
243 | return codecInfo;
244 | }
245 | }
246 | }
247 | }
248 | return null;
249 | }
250 |
251 | /**
252 | * select color format that the specific codec supports
253 | * @return return 0 if not found
254 | */
255 | protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
256 | if (DEBUG) Log.i(TAG, "selectColorFormat: ");
257 | int result = 0;
258 | final MediaCodecInfo.CodecCapabilities caps;
259 | try {
260 | Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
261 | caps = codecInfo.getCapabilitiesForType(mimeType);
262 | } finally {
263 | Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
264 | }
265 | for (int colorFormat: caps.colorFormats) {
266 | if (isRecognizedVideoFormat(colorFormat)) {
267 | result = colorFormat;
268 | break;
269 | }
270 | }
271 | if (result == 0)
272 | Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
273 | return result;
274 | }
275 |
276 | /**
277 | * color format values that this class supports(only COLOR_FormatSurface)
278 | */
279 | protected static int[] recognizedFormats;
280 | static {
281 | recognizedFormats = new int[] {
282 | // MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
283 | // MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
284 | // MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
285 | MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
286 | };
287 | }
288 |
289 | /**
290 | * return whether specific color format can be used on this class
291 | * @param colorFormat
292 | * @return return true if this class supports specific color format
293 | */
294 | private static final boolean isRecognizedVideoFormat(final int colorFormat) {
295 | if (DEBUG) Log.i(TAG, "isRecognizedVideoFormat:colorFormat=" + colorFormat);
296 | final int n = recognizedFormats != null ? recognizedFormats.length : 0;
297 | for (int i = 0; i < n; i++) {
298 | if (recognizedFormats[i] == colorFormat) {
299 | return true;
300 | }
301 | }
302 | return false;
303 | }
304 |
305 | }
306 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
203 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/glutils/EGLBase.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.glutils;
2 |
3 | /*
4 | * TimeLapseRecordingSample
5 | * Sample project to capture audio and video periodically from internal mic/camera
6 | * and save as time lapsed MPEG4 file.
7 | *
8 | * Copyright (c) 2015 saki t_saki@serenegiant.com
9 | *
10 | * File name: EGLBase.java
11 | *
12 | * Licensed under the Apache License, Version 2.0 (the "License");
13 | * you may not use this file except in compliance with the License.
14 | * You may obtain a copy of the License at
15 | *
16 | * http://www.apache.org/licenses/LICENSE-2.0
17 | *
18 | * Unless required by applicable law or agreed to in writing, software
19 | * distributed under the License is distributed on an "AS IS" BASIS,
20 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 | * See the License for the specific language governing permissions and
22 | * limitations under the License.
23 | *
24 | * All files in the folder are under this Apache License, Version 2.0.
25 | */
26 |
27 | import android.annotation.TargetApi;
28 | import android.graphics.SurfaceTexture;
29 | import android.opengl.EGL14;
30 | import android.opengl.EGLConfig;
31 | import android.opengl.EGLContext;
32 | import android.opengl.EGLDisplay;
33 | import android.opengl.EGLSurface;
34 | import android.os.Build;
35 | import android.util.Log;
36 | import android.view.Surface;
37 | import android.view.SurfaceHolder;
38 | import android.view.SurfaceView;
39 |
40 | @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
41 | public class EGLBase { // API >= 17
42 | private static final boolean DEBUG = false; // TODO set false on releasing
43 | private static final String TAG = "EGLBase";
44 |
45 | private static final int EGL_RECORDABLE_ANDROID = 0x3142;
46 |
47 | private EGLConfig mEglConfig = null;
48 | private EGLContext mEglContext = EGL14.EGL_NO_CONTEXT;
49 | private EGLDisplay mEglDisplay = EGL14.EGL_NO_DISPLAY;
50 | private EGLContext mDefaultContext = EGL14.EGL_NO_CONTEXT;
51 |
52 | public static class EglSurface {
53 | private final EGLBase mEgl;
54 | private EGLSurface mEglSurface = EGL14.EGL_NO_SURFACE;
55 | private final int mWidth, mHeight;
56 |
57 | EglSurface(EGLBase egl, Object surface) {
58 | if (DEBUG) Log.v(TAG, "EglSurface:");
59 | if (!(surface instanceof SurfaceView)
60 | && !(surface instanceof Surface)
61 | && !(surface instanceof SurfaceHolder)
62 | && !(surface instanceof SurfaceTexture))
63 | throw new IllegalArgumentException("unsupported surface");
64 | mEgl = egl;
65 | mEglSurface = mEgl.createWindowSurface(surface);
66 | mWidth = mEgl.querySurface(mEglSurface, EGL14.EGL_WIDTH);
67 | mHeight = mEgl.querySurface(mEglSurface, EGL14.EGL_HEIGHT);
68 | if (DEBUG) Log.v(TAG, String.format("EglSurface:size(%d,%d)", mWidth, mHeight));
69 | }
70 |
71 | EglSurface(EGLBase egl, int width, int height) {
72 | if (DEBUG) Log.v(TAG, "EglSurface:");
73 | mEgl = egl;
74 | mEglSurface = mEgl.createOffscreenSurface(width, height);
75 | mWidth = width;
76 | mHeight = height;
77 | }
78 |
79 | public void makeCurrent() {
80 | mEgl.makeCurrent(mEglSurface);
81 | }
82 |
83 | public void swap() {
84 | mEgl.swap(mEglSurface);
85 | }
86 |
87 | public EGLContext getContext() {
88 | return mEgl.getContext();
89 | }
90 |
91 | public void release() {
92 | if (DEBUG) Log.v(TAG, "EglSurface:internal_release:");
93 | mEgl.makeDefault();
94 | mEgl.destroyWindowSurface(mEglSurface);
95 | mEglSurface = EGL14.EGL_NO_SURFACE;
96 | }
97 | }
98 |
99 | public EGLBase(EGLContext shared_context, boolean with_depth_buffer, boolean isRecordable) {
100 | if (DEBUG) Log.v(TAG, "EGLBase:");
101 | init(shared_context, with_depth_buffer, isRecordable);
102 | }
103 |
104 | public void release() {
105 | if (DEBUG) Log.v(TAG, "internal_release:");
106 | if (mEglDisplay != EGL14.EGL_NO_DISPLAY) {
107 | destroyContext();
108 | EGL14.eglTerminate(mEglDisplay);
109 | EGL14.eglReleaseThread();
110 | }
111 | mEglDisplay = EGL14.EGL_NO_DISPLAY;
112 | mEglContext = EGL14.EGL_NO_CONTEXT;
113 | }
114 |
115 | public EglSurface createFromSurface(Object surface) {
116 | if (DEBUG) Log.v(TAG, "createFromSurface:");
117 | final EglSurface eglSurface = new EglSurface(this, surface);
118 | eglSurface.makeCurrent();
119 | return eglSurface;
120 | }
121 |
122 | public EglSurface createOffscreen(int width, int height) {
123 | if (DEBUG) Log.v(TAG, "createOffscreen:");
124 | final EglSurface eglSurface = new EglSurface(this, width, height);
125 | eglSurface.makeCurrent();
126 | return eglSurface;
127 | }
128 |
129 | public EGLContext getContext() {
130 | return mEglContext;
131 | }
132 |
133 | public int querySurface(final EGLSurface eglSurface, final int what) {
134 | final int[] value = new int[1];
135 | EGL14.eglQuerySurface(mEglDisplay, eglSurface, what, value, 0);
136 | return value[0];
137 | }
138 |
139 | private void init(EGLContext shared_context, boolean with_depth_buffer, boolean isRecordable) {
140 | if (DEBUG) Log.v(TAG, "init:");
141 | if (mEglDisplay != EGL14.EGL_NO_DISPLAY) {
142 | throw new RuntimeException("EGL already set up");
143 | }
144 |
145 | mEglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
146 | if (mEglDisplay == EGL14.EGL_NO_DISPLAY) {
147 | throw new RuntimeException("eglGetDisplay failed");
148 | }
149 |
150 | final int[] version = new int[2];
151 | if (!EGL14.eglInitialize(mEglDisplay, version, 0, version, 1)) {
152 | mEglDisplay = null;
153 | throw new RuntimeException("eglInitialize failed");
154 | }
155 |
156 | shared_context = shared_context != null ? shared_context : EGL14.EGL_NO_CONTEXT;
157 | if (mEglContext == EGL14.EGL_NO_CONTEXT) {
158 | mEglConfig = getConfig(with_depth_buffer, isRecordable);
159 | if (mEglConfig == null) {
160 | throw new RuntimeException("chooseConfig failed");
161 | }
162 | // create EGL rendering context
163 | mEglContext = createContext(shared_context);
164 | }
165 | // confirm whether the EGL rendering context is successfully created
166 | final int[] values = new int[1];
167 | EGL14.eglQueryContext(mEglDisplay, mEglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0);
168 | if (DEBUG) Log.d(TAG, "EGLContext created, client version " + values[0]);
169 | makeDefault(); // makeCurrent(EGL14.EGL_NO_SURFACE);
170 | }
171 |
172 | /**
173 | * change context to draw this window surface
174 | * @return
175 | */
176 | private boolean makeCurrent(EGLSurface surface) {
177 | // if (DEBUG) Log.v(TAG, "makeCurrent:");
178 | if (mEglDisplay == null) {
179 | if (DEBUG) Log.d(TAG, "makeCurrent:eglDisplay not initialized");
180 | }
181 | if (surface == null || surface == EGL14.EGL_NO_SURFACE) {
182 | int error = EGL14.eglGetError();
183 | if (error == EGL14.EGL_BAD_NATIVE_WINDOW) {
184 | Log.e(TAG, "makeCurrent:returned EGL_BAD_NATIVE_WINDOW.");
185 | }
186 | return false;
187 | }
188 | // attach EGL renderring context to specific EGL window surface
189 | if (!EGL14.eglMakeCurrent(mEglDisplay, surface, surface, mEglContext)) {
190 | Log.w(TAG, "eglMakeCurrent:" + EGL14.eglGetError());
191 | return false;
192 | }
193 | return true;
194 | }
195 |
196 | private void makeDefault() {
197 | if (DEBUG) Log.v(TAG, "makeDefault:");
198 | if (!EGL14.eglMakeCurrent(mEglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
199 | Log.w("TAG", "makeDefault" + EGL14.eglGetError());
200 | }
201 | }
202 |
203 | private int swap(EGLSurface surface) {
204 | // if (DEBUG) Log.v(TAG, "swap:");
205 | if (!EGL14.eglSwapBuffers(mEglDisplay, surface)) {
206 | final int err = EGL14.eglGetError();
207 | if (DEBUG) Log.w(TAG, "swap:err=" + err);
208 | return err;
209 | }
210 | return EGL14.EGL_SUCCESS;
211 | }
212 |
213 | private EGLContext createContext(EGLContext shared_context) {
214 | // if (DEBUG) Log.v(TAG, "createContext:");
215 |
216 | final int[] attrib_list = {
217 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
218 | EGL14.EGL_NONE
219 | };
220 | final EGLContext context = EGL14.eglCreateContext(mEglDisplay, mEglConfig, shared_context, attrib_list, 0);
221 | checkEglError("eglCreateContext");
222 | return context;
223 | }
224 |
225 | private void destroyContext() {
226 | if (DEBUG) Log.v(TAG, "destroyContext:");
227 |
228 | if (!EGL14.eglDestroyContext(mEglDisplay, mEglContext)) {
229 | Log.e("destroyContext", "display:" + mEglDisplay + " context: " + mEglContext);
230 | Log.e(TAG, "eglDestroyContex:" + EGL14.eglGetError());
231 | }
232 | mEglContext = EGL14.EGL_NO_CONTEXT;
233 | if (mDefaultContext != EGL14.EGL_NO_CONTEXT) {
234 | if (!EGL14.eglDestroyContext(mEglDisplay, mDefaultContext)) {
235 | Log.e("destroyContext", "display:" + mEglDisplay + " context: " + mDefaultContext);
236 | Log.e(TAG, "eglDestroyContex:" + EGL14.eglGetError());
237 | }
238 | mDefaultContext = EGL14.EGL_NO_CONTEXT;
239 | }
240 | }
241 |
242 | private EGLSurface createWindowSurface(Object nativeWindow) {
243 | if (DEBUG) Log.v(TAG, "createWindowSurface:nativeWindow=" + nativeWindow);
244 |
245 | final int[] surfaceAttribs = {
246 | EGL14.EGL_NONE
247 | };
248 | EGLSurface result = null;
249 | try {
250 | result = EGL14.eglCreateWindowSurface(mEglDisplay, mEglConfig, nativeWindow, surfaceAttribs, 0);
251 | } catch (IllegalArgumentException e) {
252 | Log.e(TAG, "eglCreateWindowSurface", e);
253 | }
254 | return result;
255 | }
256 |
257 | /**
258 | * Creates an EGL surface associated with an offscreen buffer.
259 | */
260 | private EGLSurface createOffscreenSurface(int width, int height) {
261 | if (DEBUG) Log.v(TAG, "createOffscreenSurface:");
262 | int[] surfaceAttribs = {
263 | EGL14.EGL_WIDTH, width,
264 | EGL14.EGL_HEIGHT, height,
265 | EGL14.EGL_NONE
266 | };
267 | EGLSurface result = null;
268 | try {
269 | result = EGL14.eglCreatePbufferSurface(mEglDisplay, mEglConfig, surfaceAttribs, 0);
270 | checkEglError("eglCreatePbufferSurface");
271 | if (result == null) {
272 | throw new RuntimeException("surface was null");
273 | }
274 | } catch (IllegalArgumentException e) {
275 | Log.e(TAG, "createOffscreenSurface", e);
276 | } catch (RuntimeException e) {
277 | Log.e(TAG, "createOffscreenSurface", e);
278 | }
279 | return result;
280 | }
281 |
282 | private void destroyWindowSurface(EGLSurface surface) {
283 | if (DEBUG) Log.v(TAG, "destroySurface:");
284 |
285 | if (surface != EGL14.EGL_NO_SURFACE) {
286 | EGL14.eglMakeCurrent(mEglDisplay,
287 | EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
288 | EGL14.eglDestroySurface(mEglDisplay, surface);
289 | }
290 | surface = EGL14.EGL_NO_SURFACE;
291 | if (DEBUG) Log.v(TAG, "destroySurface:finished");
292 | }
293 |
294 | private void checkEglError(String msg) {
295 | int error;
296 | if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
297 | throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
298 | }
299 | }
300 |
301 | @SuppressWarnings("unused")
302 | private EGLConfig getConfig(boolean with_depth_buffer, boolean isRecordable) {
303 | final int[] attribList = {
304 | EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
305 | EGL14.EGL_RED_SIZE, 8,
306 | EGL14.EGL_GREEN_SIZE, 8,
307 | EGL14.EGL_BLUE_SIZE, 8,
308 | EGL14.EGL_ALPHA_SIZE, 8,
309 | EGL14.EGL_NONE, EGL14.EGL_NONE, //EGL14.EGL_STENCIL_SIZE, 8,
310 | EGL14.EGL_NONE, EGL14.EGL_NONE, //EGL_RECORDABLE_ANDROID, 1, // this flag need to recording of MediaCodec
311 | EGL14.EGL_NONE, EGL14.EGL_NONE, // with_depth_buffer ? EGL14.EGL_DEPTH_SIZE : EGL14.EGL_NONE,
312 | // with_depth_buffer ? 16 : 0,
313 | EGL14.EGL_NONE
314 | };
315 | int offset = 10;
316 | if (false) { // ステンシルバッファ(常時未使用)
317 | attribList[offset++] = EGL14.EGL_STENCIL_SIZE;
318 | attribList[offset++] = 8;
319 | }
320 | if (with_depth_buffer) { // デプスバッファ
321 | attribList[offset++] = EGL14.EGL_DEPTH_SIZE;
322 | attribList[offset++] = 16;
323 | }
324 | if (isRecordable && (Build.VERSION.SDK_INT >= 18)) {// MediaCodecの入力用Surfaceの場合
325 | attribList[offset++] = EGL_RECORDABLE_ANDROID;
326 | attribList[offset++] = 1;
327 | }
328 | for (int i = attribList.length - 1; i >= offset; i--) {
329 | attribList[i] = EGL14.EGL_NONE;
330 | }
331 | final EGLConfig[] configs = new EGLConfig[1];
332 | final int[] numConfigs = new int[1];
333 | if (!EGL14.eglChooseConfig(mEglDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0)) {
334 | // XXX it will be better to fallback to RGB565
335 | Log.w(TAG, "unable to find RGBA8888 / " + " EGLConfig");
336 | return null;
337 | }
338 | return configs[0];
339 | }
340 | }
341 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/timelapserecordingsample/CameraGLView.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.timelapserecordingsample;
2 |
3 | /*
4 | * TimeLapseRecordingSample
5 | * Sample project to capture audio and video periodically from internal mic/camera
6 | * and save as time lapsed MPEG4 file.
7 | *
8 | * Copyright (c) 2015 saki t_saki@serenegiant.com
9 | *
10 | * File name: CameraGLView.java
11 | *
12 | * Licensed under the Apache License, Version 2.0 (the "License");
13 | * you may not use this file except in compliance with the License.
14 | * You may obtain a copy of the License at
15 | *
16 | * http://www.apache.org/licenses/LICENSE-2.0
17 | *
18 | * Unless required by applicable law or agreed to in writing, software
19 | * distributed under the License is distributed on an "AS IS" BASIS,
20 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 | * See the License for the specific language governing permissions and
22 | * limitations under the License.
23 | *
24 | * All files in the folder are under this Apache License, Version 2.0.
25 | */
26 |
27 | import android.content.Context;
28 | import android.graphics.SurfaceTexture;
29 | import android.hardware.Camera;
30 | import android.opengl.EGL14;
31 | import android.opengl.GLES20;
32 | import android.opengl.GLSurfaceView;
33 | import android.opengl.Matrix;
34 | import android.os.Handler;
35 | import android.os.Looper;
36 | import android.os.Message;
37 | import android.util.AttributeSet;
38 | import android.util.Log;
39 | import android.view.Display;
40 | import android.view.Surface;
41 | import android.view.SurfaceHolder;
42 | import android.view.WindowManager;
43 |
44 | import com.serenegiant.glutils.GLDrawer2D;
45 | import com.serenegiant.media.TLMediaVideoEncoder;
46 |
47 | import java.io.IOException;
48 | import java.util.List;
49 |
50 | import javax.microedition.khronos.egl.EGLConfig;
51 | import javax.microedition.khronos.opengles.GL10;
52 |
53 | /**
54 | * Sub class of GLSurfaceView to display camera preview and write video frame to capturing surface
55 | */
56 | public final class CameraGLView extends GLSurfaceView {
57 |
58 | private static final boolean DEBUG = false; // TODO set false on releasing
59 | private static final String TAG = "CameraGLView";
60 |
61 | private static final int CAMERA_ID = 0;
62 |
63 | private static final int SCALE_STRETCH_FIT = 0;
64 | private static final int SCALE_KEEP_ASPECT_VIEWPORT = 1;
65 | private static final int SCALE_KEEP_ASPECT = 2;
66 | private static final int SCALE_CROP_CENTER = 3;
67 |
68 | private final CameraSurfaceRenderer mRenderer;
69 | private boolean mHasSurface;
70 | private final CameraHandler mCameraHandler;
71 | private int mVideoWidth, mVideoHeight;
72 | private int mRotation;
73 | private int mScaleMode = SCALE_CROP_CENTER;
74 |
75 | public CameraGLView(Context context) {
76 | this(context, null, 0);
77 | }
78 |
79 | public CameraGLView(Context context, AttributeSet attrs) {
80 | this(context, attrs, 0);
81 | }
82 |
83 | public CameraGLView(Context context, AttributeSet attrs, int defStyle) {
84 | super(context, attrs);
85 | if (DEBUG) Log.v(TAG, "CameraGLView:");
86 | mRenderer = new CameraSurfaceRenderer();
87 | setEGLContextClientVersion(2); // GLES 2.0, API >= 8
88 | setRenderer(mRenderer);
89 | final CameraThread thread = new CameraThread();
90 | thread.start();
91 | mCameraHandler = thread.getHandler();
92 | }
93 |
94 | @Override
95 | public void onResume() {
96 | if (DEBUG) Log.v(TAG, "onResume:");
97 | super.onResume();
98 | if (mHasSurface) {
99 | if (DEBUG) Log.v(TAG, "surface already exist");
100 | mCameraHandler.startPreview(getWidth(), getHeight());
101 | }
102 | }
103 |
104 | @Override
105 | public void onPause() {
106 | if (DEBUG) Log.v(TAG, "onPause:");
107 | // just request stop previewing
108 | mCameraHandler.stopPreview();
109 | super.onPause();
110 | }
111 |
112 | @Override
113 | public void surfaceDestroyed(SurfaceHolder holder) {
114 | if (DEBUG) Log.v(TAG, "surfaceDestroyed:");
115 | // wait for finish previewing here
116 | // otherwise camera try to display on un-exist Surface and some error will occur
117 | mCameraHandler.release(true);
118 | mHasSurface = false;
119 | mRenderer.onSurfaceDestroyed();
120 | super.surfaceDestroyed(holder);
121 | }
122 |
123 | public void setScaleMode(final int mode) {
124 | if (mScaleMode != mode) {
125 | mScaleMode = mode;
126 | queueEvent(new Runnable() {
127 | @Override
128 | public void run() {
129 | mRenderer.updateViewport();
130 | }
131 | });
132 | }
133 | }
134 |
135 | public int getScaleMode() {
136 | return mScaleMode;
137 | }
138 |
139 | @SuppressWarnings("SuspiciousNameCombination")
140 | public void setVideoSize(final int width, final int height) {
141 | if ((mRotation % 180) == 0) {
142 | mVideoWidth = width;
143 | mVideoHeight = height;
144 | } else {
145 | mVideoWidth = height;
146 | mVideoHeight = width;
147 | }
148 | queueEvent(new Runnable() {
149 | @Override
150 | public void run() {
151 | mRenderer.updateViewport();
152 | }
153 | });
154 | }
155 |
156 | public int getVideoWidth() {
157 | return mVideoWidth;
158 | }
159 |
160 | public int getVideoHeight() {
161 | return mVideoHeight;
162 | }
163 |
164 | public SurfaceTexture getSurfaceTexture() {
165 | if (DEBUG) Log.v(TAG, "getSurfaceTexture:");
166 | return mRenderer != null ? mRenderer.mSTexture : null;
167 | }
168 |
169 | public void setVideoEncoder(final TLMediaVideoEncoder encoder) {
170 | if (DEBUG) Log.v(TAG, "setVideoEncoder:tex_id=" + mRenderer.hTex);
171 | queueEvent(new Runnable() {
172 | @Override
173 | public void run() {
174 | synchronized (mRenderer) {
175 | try {
176 | if (encoder != null) {
177 | encoder.setEglContext(EGL14.eglGetCurrentContext(), mRenderer.hTex);
178 | }
179 | mRenderer.mVideoEncoder = encoder;
180 | } catch (RuntimeException e) {
181 | mRenderer.mVideoEncoder = null;
182 | }
183 | }
184 | }
185 | });
186 | }
187 |
188 | //********************************************************************************
189 | //********************************************************************************
190 | /**
191 | * GLSurfaceViewのRenderer
192 | */
193 | private final class CameraSurfaceRenderer
194 | implements GLSurfaceView.Renderer,
195 | SurfaceTexture.OnFrameAvailableListener { // API >= 11
196 |
197 | private SurfaceTexture mSTexture; // API >= 11
198 | private int hTex;
199 | private GLDrawer2D mDrawer;
200 | private final float[] mStMatrix = new float[16];
201 | private final float[] mMvpMatrix = new float[16];
202 | private TLMediaVideoEncoder mVideoEncoder;
203 |
204 | public CameraSurfaceRenderer() {
205 | if (DEBUG) Log.v(TAG, "CameraSurfaceRenderer:");
206 | Matrix.setIdentityM(mMvpMatrix, 0);
207 | }
208 |
209 | @Override
210 | public void onSurfaceCreated(GL10 unused, EGLConfig config) {
211 | if (DEBUG) Log.v(TAG, "onSurfaceCreated:");
212 | // This renderer required OES_EGL_image_external extension
213 | final String extensions = GLES20.glGetString(GLES20.GL_EXTENSIONS); // API >= 8
214 | // if (DEBUG) Log.i(TAG, "onSurfaceCreated:Gl extensions: " + extensions);
215 | if (!extensions.contains("OES_EGL_image_external"))
216 | throw new RuntimeException("This system does not support OES_EGL_image_external.");
217 | // create texture ID
218 | hTex = GLDrawer2D.initTex();
219 | // create SurfaceTexture using the texture ID.
220 | mSTexture = new SurfaceTexture(hTex);
221 | mSTexture.setOnFrameAvailableListener(this);
222 | // XXX clear screen with yellow color
223 | // so that let easy to see the actual view rectangle and camera images for testing.
224 | GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
225 | mHasSurface = true;
226 | // create object for preview display
227 | mDrawer = new GLDrawer2D();
228 | mDrawer.setMatrix(mMvpMatrix, 0);
229 | }
230 |
231 | @Override
232 | public void onSurfaceChanged(GL10 unused, int width, int height) {
233 | if (DEBUG) Log.v(TAG, "onSurfaceChanged:");
234 | // if at least with or height is zero, initialization of this view is still progress.
235 | if ((width == 0) || (height == 0)) return;
236 | updateViewport();
237 | mCameraHandler.startPreview(width, height);
238 | }
239 |
240 | public void onSurfaceDestroyed() {
241 | if (DEBUG) Log.v(TAG, "onSurfaceDestroyed:");
242 | mDrawer = null;
243 | if (mSTexture != null) {
244 | mSTexture.release();
245 | mSTexture = null;
246 | }
247 | }
248 |
249 | private final void updateViewport() {
250 | final int view_width = getWidth();
251 | final int view_height = getHeight();
252 | if (view_width == 0 || view_height == 0) return;
253 | GLES20.glViewport(0, 0, view_width, view_height);
254 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
255 | final float video_width = mVideoWidth;
256 | final float video_height = mVideoHeight;
257 | if (video_width == 0 || video_height == 0) return;
258 | Matrix.setIdentityM(mMvpMatrix, 0);
259 | final float view_aspect = view_width / (float)view_height;
260 | switch (mScaleMode) {
261 | case SCALE_STRETCH_FIT:
262 | break;
263 | case SCALE_KEEP_ASPECT_VIEWPORT: {
264 | final float req = video_width / video_height;
265 | int x, y;
266 | int width, height;
267 | if (view_aspect > req) {
268 | // if view is wider than camera image, calc width of drawing area based on view height
269 | y = 0;
270 | height = view_height;
271 | width = (int) (req * view_height);
272 | x = (view_width - width) / 2;
273 | } else {
274 | // if view is higher than camera image, calc height of drawing area based on view width
275 | x = 0;
276 | width = view_width;
277 | height = (int) (view_width / req);
278 | y = (view_height - height) / 2;
279 | }
280 | // set viewport to draw keeping aspect ration of camera image
281 | GLES20.glViewport(x, y, width, height);
282 | break;
283 | }
284 | case SCALE_KEEP_ASPECT:
285 | case SCALE_CROP_CENTER: {
286 | final float scale_x = view_width / video_width;
287 | final float scale_y = view_height / video_height;
288 | final float scale = (mScaleMode == SCALE_CROP_CENTER
289 | ? Math.max(scale_x, scale_y) : Math.min(scale_x, scale_y));
290 | final float width = scale * video_width;
291 | final float height = scale * video_height;
292 | Matrix.scaleM(mMvpMatrix, 0, width / view_width, height / view_height, 1.0f);
293 | break;
294 | }
295 | }
296 | if (mDrawer != null)
297 | mDrawer.setMatrix(mMvpMatrix, 0);
298 | }
299 |
300 | private volatile boolean requestUpdateTex = false;
301 | private boolean flip = true;
302 | /**
303 | * drawing to GLSurface
304 | * we set renderMode to GLSurfaceView.RENDERMODE_WHEN_DIRTY,
305 | * this method is only called when #requestRender is called(= when texture is required to update)
306 | * if you don't set RENDERMODE_WHEN_DIRTY, this method is called at maximum 60fps
307 | */
308 | @Override
309 | public void onDrawFrame(GL10 unused) {
310 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
311 |
312 | if (requestUpdateTex) {
313 | requestUpdateTex = false;
314 | // update texture(came from camera)
315 | mSTexture.updateTexImage();
316 | // get texture matrix
317 | mSTexture.getTransformMatrix(mStMatrix);
318 | }
319 | // draw to preview screen
320 | mDrawer.draw(hTex, mStMatrix);
321 | flip = !flip;
322 | if (flip) { // ~30fps
323 | synchronized (this) {
324 | if (mVideoEncoder != null) {
325 | // notify to capturing thread that the camera frame is available.
326 | mVideoEncoder.frameAvailableSoon(mStMatrix);
327 | }
328 | }
329 | }
330 | }
331 |
332 | @Override
333 | public void onFrameAvailable(SurfaceTexture st) {
334 | requestUpdateTex = true;
335 | }
336 | }
337 |
338 | /**
339 | * Handler class for asynchronous camera operation
340 | */
341 | private static final class CameraHandler extends Handler {
342 | private static final int MSG_PREVIEW_START = 1;
343 | private static final int MSG_PREVIEW_STOP = 2;
344 | private static final int MSG_RELEASE = 9;
345 | private CameraThread mThread;
346 |
347 | public CameraHandler(CameraThread thread) {
348 | mThread = thread;
349 | }
350 |
351 | public void startPreview(int width, int height) {
352 | sendMessage(obtainMessage(MSG_PREVIEW_START, width, height));
353 | }
354 |
355 | /**
356 | * request to stop camera preview
357 | */
358 | public void stopPreview() {
359 | synchronized (this) {
360 | if (mThread != null && mThread.mIsRunning) {
361 | sendEmptyMessage(MSG_PREVIEW_STOP);
362 | }
363 | }
364 | }
365 |
366 | /**
367 | * request to release camera thread and handler
368 | * @param needWait need to wait
369 | */
370 | public void release(boolean needWait) {
371 | synchronized (this) {
372 | if (mThread != null && mThread.mIsRunning) {
373 | sendEmptyMessage(MSG_RELEASE);
374 | if (needWait) {
375 | try {
376 | if (DEBUG) Log.d(TAG, "wait for terminating of camera thread");
377 | wait();
378 | } catch (InterruptedException e) {
379 | }
380 | }
381 | }
382 | }
383 | }
384 |
385 | /**
386 | * message handler for camera thread
387 | */
388 | @Override
389 | public void handleMessage(Message msg) {
390 | switch (msg.what) {
391 | case MSG_PREVIEW_START:
392 | mThread.startPreview(msg.arg1, msg.arg2);
393 | break;
394 | case MSG_PREVIEW_STOP:
395 | mThread.stopPreview();
396 | break;
397 | case MSG_RELEASE:
398 | mThread.stopPreview();
399 | Looper.myLooper().quit();
400 | synchronized (this) {
401 | notifyAll();
402 | mThread = null;
403 | }
404 | break;
405 | default:
406 | throw new RuntimeException("unknown message:what=" + msg.what);
407 | }
408 | }
409 | }
410 |
411 | /**
412 | * Thread for asynchronous operation of camera preview
413 | */
414 | @SuppressWarnings("deprecation")
415 | private final class CameraThread extends Thread {
416 | private final Object mReadyFence = new Object();
417 | private CameraHandler mHandler;
418 | private volatile boolean mIsRunning = false;
419 | private Camera mCamera;
420 | private boolean mIsFrontFace;
421 |
422 | public CameraThread() {
423 | super("Camera thread");
424 | }
425 |
426 | public CameraHandler getHandler() {
427 | synchronized (mReadyFence) {
428 | try {
429 | mReadyFence.wait();
430 | } catch (InterruptedException e) {
431 | }
432 | }
433 | return mHandler;
434 | }
435 |
436 | /**
437 | * message loop
438 | * prepare Looper and create Handler for this thread
439 | */
440 | @Override
441 | public void run() {
442 | if (DEBUG) Log.d(TAG, "Camera thread start");
443 | Looper.prepare();
444 | synchronized (mReadyFence) {
445 | mHandler = new CameraHandler(this);
446 | mIsRunning = true;
447 | mReadyFence.notify();
448 | }
449 | Looper.loop();
450 | if (DEBUG) Log.d(TAG, "Camera thread finish");
451 | synchronized (mReadyFence) {
452 | mHandler = null;
453 | mIsRunning = false;
454 | }
455 | }
456 |
457 | /**
458 | * start camera preview
459 | * @param width
460 | * @param height
461 | */
462 | private final void startPreview(int width, int height) {
463 | if (DEBUG) Log.v(TAG, "startPreview:");
464 | if (mCamera == null) {
465 | // This is a sample project so just use 0 as camera ID.
466 | // it is better to selecting camera is available
467 | try {
468 | mCamera = Camera.open(CAMERA_ID);
469 | final Camera.Parameters params = mCamera.getParameters();
470 | final List focusModes = params.getSupportedFocusModes();
471 | if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
472 | params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
473 | } else if(focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
474 | params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
475 | } else {
476 | if (DEBUG) Log.i(TAG, "Camera does not support autofocus");
477 | }
478 | // let's try fastest frame rate. You will get near 60fps, but your device become hot.
479 | final List supportedFpsRange = params.getSupportedPreviewFpsRange();
480 | // final int n = supportedFpsRange != null ? supportedFpsRange.size() : 0;
481 | // int[] range;
482 | // for (int i = 0; i < n; i++) {
483 | // range = supportedFpsRange.get(i);
484 | // Log.i(TAG, String.format("supportedFpsRange(%d)=(%d,%d)", i, range[0], range[1]));
485 | // }
486 | final int[] max_fps = supportedFpsRange.get(supportedFpsRange.size() - 1);
487 | if (DEBUG) Log.i(TAG, String.format("fps:%d-%d", max_fps[0], max_fps[1]));
488 | params.setPreviewFpsRange(max_fps[0], max_fps[1]);
489 | params.setRecordingHint(true);
490 | // request preview size
491 | // this is a sample project and just use fixed value
492 | // if you want to use other size, you also need to change the recording size.
493 | params.setPreviewSize(1280, 720);
494 | // final Size sz = params.getPreferredPreviewSizeForVideo();
495 | // if (sz != null)
496 | // params.setPreviewSize(sz.width, sz.height);
497 | // rotate camera preview according to the device orientation
498 | setRotation(params);
499 | mCamera.setParameters(params);
500 | // get the actual preview size
501 | final Camera.Size previewSize = mCamera.getParameters().getPreviewSize();
502 | Log.i(TAG, String.format("previewSize(%d, %d)", previewSize.width, previewSize.height));
503 | // adjust view size with keeping the aspect ration of camera preview.
504 | // here is not a UI thread and we should request parent view to execute.
505 | CameraGLView.this.post(new Runnable() {
506 | @Override
507 | public void run() {
508 | setVideoSize(previewSize.width, previewSize.height);
509 | }
510 | });
511 | final SurfaceTexture st = getSurfaceTexture();
512 | st.setDefaultBufferSize(previewSize.width, previewSize.height);
513 | mCamera.setPreviewTexture(st);
514 | } catch (IOException e) {
515 | Log.e(TAG, "startPreview:", e);
516 | if (mCamera != null) {
517 | mCamera.release();
518 | mCamera = null;
519 | }
520 | } catch (RuntimeException e) {
521 | Log.e(TAG, "startPreview:", e);
522 | if (mCamera != null) {
523 | mCamera.release();
524 | mCamera = null;
525 | }
526 | }
527 | if (mCamera != null) {
528 | // start camera preview display
529 | mCamera.startPreview();
530 | }
531 | } // if (mCamera == null)
532 | }
533 |
534 | /**
535 | * stop camera preview
536 | */
537 | private void stopPreview() {
538 | if (DEBUG) Log.v(TAG, "stopPreview:");
539 | if (mCamera != null) {
540 | mCamera.stopPreview();
541 | mCamera.release();
542 | mCamera = null;
543 | }
544 | }
545 |
546 | /**
547 | * rotate preview screen according to the device orientation
548 | * @param params
549 | */
550 | private final void setRotation(Camera.Parameters params) {
551 | if (DEBUG) Log.v(TAG, "setRotation:");
552 |
553 | final Display display = ((WindowManager)getContext()
554 | .getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
555 | int rotation = display.getRotation();
556 | int degrees = 0;
557 | switch (rotation) {
558 | case Surface.ROTATION_0: degrees = 0; break;
559 | case Surface.ROTATION_90: degrees = 90; break;
560 | case Surface.ROTATION_180: degrees = 180; break;
561 | case Surface.ROTATION_270: degrees = 270; break;
562 | }
563 | // get whether the camera is front camera or back camera
564 | final Camera.CameraInfo info =
565 | new android.hardware.Camera.CameraInfo();
566 | android.hardware.Camera.getCameraInfo(CAMERA_ID, info);
567 | mIsFrontFace = (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT);
568 | if (mIsFrontFace) { // front camera
569 | degrees = (info.orientation + degrees) % 360;
570 | degrees = (360 - degrees) % 360; // reverse
571 | } else { // back camera
572 | degrees = (info.orientation - degrees + 360) % 360;
573 | }
574 | // apply rotation setting
575 | mCamera.setDisplayOrientation(degrees);
576 | mRotation = degrees;
577 | // XXX This method fails to call and camera stops working on some devices.
578 | // params.setRotation(degrees);
579 | }
580 |
581 | }
582 | }
583 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/media/TLMediaEncoder.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.media;
2 |
3 | /*
4 | * TimeLapseRecordingSample
5 | * Sample project to capture audio and video periodically from internal mic/camera
6 | * and save as time lapsed MPEG4 file.
7 | *
8 | * Copyright (c) 2015 saki t_saki@serenegiant.com
9 | *
10 | * File name: TLMediaEncoder.java
11 | *
12 | * Licensed under the Apache License, Version 2.0 (the "License");
13 | * you may not use this file except in compliance with the License.
14 | * You may obtain a copy of the License at
15 | *
16 | * http://www.apache.org/licenses/LICENSE-2.0
17 | *
18 | * Unless required by applicable law or agreed to in writing, software
19 | * distributed under the License is distributed on an "AS IS" BASIS,
20 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
21 | * See the License for the specific language governing permissions and
22 | * limitations under the License.
23 | *
24 | * All files in the folder are under this Apache License, Version 2.0.
25 | */
26 |
27 | import android.content.Context;
28 | import android.media.MediaCodec;
29 | import android.media.MediaFormat;
30 | import android.os.Environment;
31 | import android.text.TextUtils;
32 | import android.util.Log;
33 |
34 | import org.json.JSONException;
35 | import org.json.JSONObject;
36 |
37 | import java.io.BufferedInputStream;
38 | import java.io.BufferedOutputStream;
39 | import java.io.DataInputStream;
40 | import java.io.DataOutputStream;
41 | import java.io.File;
42 | import java.io.FileInputStream;
43 | import java.io.FileNotFoundException;
44 | import java.io.FileOutputStream;
45 | import java.io.IOException;
46 | import java.nio.BufferOverflowException;
47 | import java.nio.ByteBuffer;
48 | import java.util.Locale;
49 | import java.util.concurrent.LinkedBlockingDeque;
50 |
51 | /**
52 | * abstract class to audio/video frames into intermediate file
53 | * using MediaCodec encoder so that pause / resume feature is available.
54 | */
55 | public abstract class TLMediaEncoder {
56 | private static final boolean DEBUG = false;
57 | private static final String TAG_STATIC = "TLMediaEncoder";
58 | private final String TAG = getClass().getSimpleName();
59 |
60 | protected static final int TIMEOUT_USEC = 10000; // 10[msec]
61 |
62 | private static final int STATE_RELEASE = 0;
63 | private static final int STATE_INITIALIZED = 1;
64 | private static final int STATE_PREPARING = 2;
65 | private static final int STATE_PREPARED = 3;
66 | private static final int STATE_PAUSING = 4;
67 | private static final int STATE_PAUSED = 5;
68 | private static final int STATE_RESUMING = 6;
69 | private static final int STATE_RUNNING = 7;
70 |
71 | private static final int REQUEST_NON = 0;
72 | private static final int REQUEST_PREPARE = 1;
73 | private static final int REQUEST_RESUME = 2;
74 | private static final int REQUEST_STOP = 3;
75 | private static final int REQUEST_PAUSE = 4;
76 | private static final int REQUEST_DRAIN = 5;
77 |
78 | static final int TYPE_VIDEO = 0;
79 | static final int TYPE_AUDIO = 1;
80 | /**
81 | * callback listener
82 | */
83 | public interface MediaEncoderListener {
84 | /**
85 | * called when encoder finished preparing
86 | * @param encoder
87 | */
88 | public void onPrepared(TLMediaEncoder encoder);
89 | /**
90 | * called when encoder stopped
91 | * @param encoder
92 | */
93 | public void onStopped(TLMediaEncoder encoder);
94 | /**
95 | * called when resuming
96 | * @param encoder
97 | */
98 | public void onResume(TLMediaEncoder encoder);
99 | /**
100 | * called when pausing
101 | * @param encoder
102 | */
103 | public void onPause(TLMediaEncoder encoder);
104 | }
105 |
106 | private final Object mSync = new Object();
107 | private final LinkedBlockingDeque mRequestQueue = new LinkedBlockingDeque();
108 | protected volatile boolean mIsRunning;
109 | private boolean mIsEOS;
110 | private MediaCodec mMediaCodec; // API >= 16(Android4.1.2)
111 | private MediaFormat mConfigFormat;
112 | private ByteBuffer[] encoderOutputBuffers;
113 | private ByteBuffer[] encoderInputBuffers;
114 | private MediaCodec.BufferInfo mBufferInfo; // API >= 16(Android4.1.2)
115 | private final MediaEncoderListener mListener;
116 |
117 | private final File mBaseDir;
118 | private final int mType;
119 | private Exception mCurrentException;
120 | private int mState = STATE_RELEASE;
121 | private DataOutputStream mCurrentOutputStream;
122 | private int mSequence;
123 | private int mNumFrames = -1;
124 | private int mFrameCounts;
125 |
126 | /**
127 | * constructor
128 | * @param movie_name this values is used as a directory name for intermediate files
129 | * @param listener
130 | */
131 | public TLMediaEncoder(final Context context, final String movie_name, final int type, final MediaEncoderListener listener) {
132 | if (DEBUG) Log.v(TAG, "TLMediaEncoder");
133 | if (TextUtils.isEmpty(movie_name)) throw new IllegalArgumentException("movie_name should not be null");
134 | mBaseDir = new File(context.getExternalFilesDir(Environment.DIRECTORY_MOVIES), movie_name);
135 | mBaseDir.mkdirs();
136 | mType = type;
137 | mListener = listener;
138 | mBufferInfo = new MediaCodec.BufferInfo();
139 | new Thread(mEncoderTask, getClass().getSimpleName()).start();
140 | synchronized (mSync) {
141 | try {
142 | mSync.wait();
143 | } catch (InterruptedException e) {
144 | // ignore
145 | }
146 | }
147 | }
148 |
149 | /*
150 | * prepare encoder. This method will be called once.
151 | * @throws IOException
152 | */
153 | public final void prepare() throws Exception {
154 | if (DEBUG) Log.v(TAG, "prepare");
155 | synchronized (mSync) {
156 | if (!mIsRunning || (mState != STATE_INITIALIZED))
157 | throw new IllegalStateException("not ready/already released:" + mState);
158 | }
159 | setRequestAndWait(REQUEST_PREPARE);
160 | }
161 |
162 | /**
163 | * start encoder
164 | */
165 | public final void start() throws IOException {
166 | start(false);
167 | }
168 |
169 | /**
170 | * start encoder with specific sequence
171 | * @param pauseAfterStarted
172 | * @throws IOException
173 | */
174 | public void start(boolean pauseAfterStarted) throws IOException {
175 | if (DEBUG) Log.v(TAG, "start");
176 | synchronized (mSync) {
177 | if (!mIsRunning || ((mState != STATE_PREPARING) && (mState != STATE_PREPARED)))
178 | throw new IllegalStateException("not prepare/already released:" + mState);
179 | if (pauseAfterStarted) {
180 | setRequest(REQUEST_PAUSE);
181 | } else {
182 | resume(-1);
183 | }
184 | }
185 | }
186 |
187 | /**
188 | * request stop encoder
189 | * current implementation is same as release and don't re-use again.
190 | */
191 | public void stop() {
192 | if (DEBUG) Log.v(TAG, "stop");
193 | if (mState > STATE_INITIALIZED) {
194 | removeRequest(REQUEST_DRAIN);
195 | try {
196 | setRequestAndWait(REQUEST_STOP);
197 | } catch (Exception e) {
198 | Log.w(TAG, "stop:", e);
199 | }
200 | }
201 | }
202 |
203 | /**
204 | * request resume encoder
205 | * @throws IOException
206 | */
207 | public void resume() throws IOException {
208 | resume(-1);
209 | }
210 |
211 | /**
212 | * request resume encoder. after obtaining more than specific frames, automatically become pause state.
213 | * @param num_frames if num_frames is negative value, automatic pausing is disabled.
214 | * @throws IOException
215 | */
216 | public void resume(final int num_frames) throws IOException {
217 | if (DEBUG) Log.v(TAG, "resume");
218 | synchronized (mSync) {
219 | if (!mIsRunning
220 | || ((mState != STATE_PREPARING) && (mState != STATE_PREPARED)
221 | && (mState != STATE_PAUSING) && (mState != STATE_PAUSED)))
222 | throw new IllegalStateException("not ready to resume:" + mState);
223 | mNumFrames = num_frames;
224 | }
225 | setRequest(REQUEST_RESUME);
226 | }
227 |
228 | /**
229 | * request pause encoder
230 | */
231 | public void pause() throws Exception {
232 | if (DEBUG) Log.v(TAG, "pause");
233 | removeRequest(REQUEST_DRAIN);
234 | setRequestFirst(REQUEST_PAUSE);
235 | }
236 |
237 | /**
238 | * get whether this encoder is pause state
239 | * @return
240 | */
241 | public boolean isPaused() {
242 | synchronized (mSync) {
243 | return (mState == STATE_PAUSING) || (mState == STATE_PAUSED);
244 | }
245 | }
246 |
247 | /**
248 | * calling this method notify encoder that the input data is already available or will be available soon
249 | * @return return tur if this encoder can accept input data
250 | */
251 | public boolean frameAvailableSoon() {
252 | // if (DEBUG) Log.v(TAG, "frameAvailableSoon");
253 | synchronized (mSync) {
254 | if (mState != STATE_RUNNING) {
255 | return false;
256 | }
257 | }
258 | removeRequest(REQUEST_DRAIN);
259 | setRequest(REQUEST_DRAIN);
260 | return true;
261 | }
262 |
	/**
	 * Request the encoder thread to stop and release all resources.
	 * The stop request is placed at the head of the queue; drain requests are dropped.
	 */
	public void release() {
		removeRequest(REQUEST_DRAIN);
		setRequestFirst(REQUEST_STOP);
	}
267 |
268 | //********************************************************************************
269 | //********************************************************************************
270 | /**
271 | * prepare MediaFormat instance for this encoder.
272 | * If there are previous intermediate files exist in current movie directory,
273 | * this method may not be called.
274 | * @return
275 | * @throws IOException
276 | */
277 | protected abstract MediaFormat internal_prepare() throws IOException;
278 |
279 | /**
280 | * execute MediaCodec#configure.
281 | * this method will be called every resuming
282 | * @param previous_codec
283 | * @param format
284 | * @return
285 | * @throws IOException
286 | */
287 | protected abstract MediaCodec internal_configure(MediaCodec previous_codec, MediaFormat format) throws IOException;
288 |
289 |
290 | protected void callOnPrepared() {
291 | if (mListener != null) {
292 | try {
293 | mListener.onPrepared(TLMediaEncoder.this);
294 | } catch (Exception e) {
295 | Log.e(TAG, "callOnPrepared:", e);
296 | }
297 | }
298 | }
299 |
300 | protected void callOnResume() {
301 | if (mListener != null) {
302 | try {
303 | mListener.onResume(this);
304 | } catch (Exception e) {
305 | Log.e(TAG, "callOnResume:", e);
306 | }
307 | }
308 | }
309 |
310 | protected void callOnPause() {
311 | if (mListener != null) {
312 | try {
313 | mListener.onPause(this);
314 | } catch (Exception e) {
315 | Log.e(TAG, "callOnPause:", e);
316 | }
317 | }
318 | }
319 |
320 | protected void callOnStopped() {
321 | if (mListener != null) {
322 | try {
323 | mListener.onStopped(this);
324 | } catch (Exception e) {
325 | Log.e(TAG, "callOnStopped:", e);
326 | }
327 | }
328 | }
329 | //********************************************************************************
330 | //********************************************************************************
	/**
	 * Atomically update the state machine state and the last exception,
	 * then wake every thread waiting on mSync (e.g. setRequestAndWait callers).
	 * @param state one of the STATE_ constants
	 * @param e exception to report to waiters, or null when the transition succeeded
	 */
	private final void setState(final int state, final Exception e) {
		synchronized (mSync) {
			mState = state;
			mCurrentException = e;
			mSync.notifyAll();
		}
	}
338 |
	/**
	 * Append a request code to the tail of the request queue (non-blocking).
	 * @param request one of the REQUEST_ constants
	 */
	private final void setRequest(final int request) {
		mRequestQueue.offer(request);
	}
342 |
	/**
	 * Put a request code at the head of the queue so it is handled before
	 * anything already queued.
	 * @param request one of the REQUEST_ constants
	 */
	private final void setRequestFirst(final int request) {
		mRequestQueue.offerFirst(request);
	}
346 |
347 | private final void removeRequest(final int request) {
348 | for (; mRequestQueue.remove(request) ;);
349 | }
350 |
351 | private final void setRequestAndWait(final int request) throws Exception {
352 | synchronized (mSync) {
353 | mRequestQueue.offer(request);
354 | try {
355 | mSync.wait();
356 | if (mCurrentException != null)
357 | throw mCurrentException;
358 | } catch (InterruptedException e) {
359 | }
360 | }
361 | }
362 |
363 | /**
364 | * wait request
365 | * @return
366 | */
367 | private final int waitRequest() {
368 | // if (DEBUG) Log.v(TAG, "waitRequest:");
369 | Integer request = null;
370 | try {
371 | request = mRequestQueue.take();
372 | } catch (InterruptedException e) {
373 | }
374 | return request != null ? request : REQUEST_NON;
375 | }
376 |
	/**
	 * Main loop of the private encoder thread: a request-driven state machine.
	 * Transitions: INITIALIZED -> PREPARING -> PREPARED -> RESUMING -> RUNNING
	 * <-> PAUSING/PAUSED, ending in RELEASE. Each handleXxx method consumes or
	 * forwards the pending request code.
	 */
	private final Runnable mEncoderTask = new Runnable() {
		@Override
		public void run() {
			int request = REQUEST_NON;
			if (DEBUG) Log.v(TAG, "#run");
			mIsRunning = true;
			// signal the constructor that the thread reached INITIALIZED
			setState(STATE_INITIALIZED, null);
			for (; mIsRunning; ) {
				if (request == REQUEST_NON) { // if there is no handling request
					request = waitRequest(); // wait for next request
				}
				if (request == REQUEST_STOP) {
					// flush/stop the codec before leaving the loop
					handlePauseRecording();
					mIsRunning = false;
					break;
				}
				if (mState == STATE_RUNNING) {
					request = handleRunning(request);
				} else {
					if (request == REQUEST_DRAIN) {
						// drain requests are meaningless outside STATE_RUNNING
						request = REQUEST_NON; // just clear request
						removeRequest(REQUEST_DRAIN);
						continue;
					}
					switch (mState) {
					case STATE_RELEASE:
						setState(STATE_RELEASE, new IllegalStateException("state=" + mState + ",request=" + request));
						mIsRunning = false;
						continue;
					case STATE_INITIALIZED:
						request = handleInitialized(request);
						break;
					case STATE_PREPARING:
						request = handlePreparing(request);
						break;
					case STATE_PREPARED:
						request = handlePrepared(request);
						break;
					case STATE_PAUSING:
						request = handlePausing(request);
						break;
					case STATE_PAUSED:
						request = handlePaused(request);
						break;
					case STATE_RESUMING:
						request = handleResuming(request);
						break;
					default:
					} // end of switch (mState)
				}
			} // end of for mIsRunning
			if (DEBUG) Log.v(TAG, "#run:finished");
			setState(STATE_RELEASE, null);
			// internal_release all related objects
			internal_release();
		}
	};
434 |
	/**
	 * Request dispatcher for STATE_RUNNING: drains on REQUEST_DRAIN, moves to
	 * STATE_PAUSING on REQUEST_PAUSE, ignores redundant REQUEST_RESUME.
	 * @param request pending request code
	 * @return the request code still left to handle (REQUEST_NON when consumed)
	 */
	private final int handleRunning(int request) {
		if (DEBUG) Log.v(TAG, "STATE_RUNNING");
		switch (request) {
		case REQUEST_RESUME:
			request = REQUEST_NON; // just clear request
			break;
		case REQUEST_PAUSE:
			// keep the request so the PAUSING handler sees it cleared later
			setState(STATE_PAUSING, null);
			break;
		case REQUEST_DRAIN:
			request = REQUEST_NON;
			drain();
			break;
		default:
			// illegal request for this state: report via the recorded exception
			setState(STATE_INITIALIZED, new IllegalStateException("state=" + mState + ",request=" + request));
			request = REQUEST_NON;
		}
		return request;
	}
454 |
455 | private final int handleInitialized(int request) {
456 | if (DEBUG) Log.v(TAG, "STATE_INITIALIZED");
457 | switch (request) {
458 | case REQUEST_PREPARE:
459 | setState(STATE_PREPARING, null);
460 | break;
461 | default:
462 | setState(STATE_INITIALIZED, new IllegalStateException("state=" + mState + ",request=" + request));
463 | request = REQUEST_NON;
464 | }
465 | return request;
466 | }
467 |
	/**
	 * STATE_PREPARING handler: restore any previous sequence/format from the
	 * intermediate files, otherwise build a fresh MediaFormat via internal_prepare,
	 * then notify the listener. Moves to STATE_PREPARED on success, back to
	 * STATE_INITIALIZED (with a recorded exception) on failure.
	 * @param request pending request code (always consumed here)
	 * @return REQUEST_NON
	 */
	private final int handlePreparing(int request) {
		if (DEBUG) Log.v(TAG, "STATE_PREPARING");
		request = REQUEST_NON;
		try {
			// may load mConfigFormat/mSequence from existing intermediate files
			checkLastSequence();
			if (mConfigFormat == null)
				mConfigFormat = internal_prepare();
			if (mConfigFormat != null) {
				setState(STATE_PREPARED, null);
			} else {
				setState(STATE_INITIALIZED, new IllegalArgumentException());
			}
			// note: listener is notified even when preparation failed
			callOnPrepared();
		} catch (IOException e) {
			setState(STATE_INITIALIZED, e);
		}
		return request;
	}
486 |
	/**
	 * Request dispatcher for STATE_PREPARED: RESUME moves to RESUMING,
	 * PAUSE to PAUSING, redundant PREPARE is ignored.
	 * @param request pending request code
	 * @return the request code still left to handle (REQUEST_NON when consumed)
	 */
	private final int handlePrepared(int request) {
		if (DEBUG) Log.v(TAG, "STATE_PREPARED");
		switch (request) {
		case REQUEST_PREPARE:
			request = REQUEST_NON; // just clear request
			break;
		case REQUEST_RESUME:
			setState(STATE_RESUMING, null);
			break;
		case REQUEST_PAUSE:
			setState(STATE_PAUSING, null);
			break;
		default:
			// illegal request for this state: report via the recorded exception
			setState(STATE_INITIALIZED, new IllegalStateException("state=" + mState + ",request=" + request));
			request = REQUEST_NON;
		}
		return request;
	}
505 |
506 | private final int handlePausing(int request) {
507 | if (DEBUG) Log.v(TAG, "STATE_PAUSING");
508 | request = REQUEST_NON;
509 | handlePauseRecording();
510 | setState(STATE_PAUSED, null);
511 | callOnPause();
512 | return request;
513 | }
514 |
	/**
	 * Request dispatcher for STATE_PAUSED: RESUME moves to RESUMING,
	 * redundant PAUSE is ignored, anything else is illegal.
	 * @param request pending request code
	 * @return the request code still left to handle (REQUEST_NON when consumed)
	 */
	private final int handlePaused(int request) {
		if (DEBUG) Log.v(TAG, "STATE_PAUSED");
		switch (request) {
		case REQUEST_RESUME:
			setState(STATE_RESUMING, null);
			break;
		case REQUEST_PAUSE:
			request = REQUEST_NON; // just clear request
			break;
		default:
			// illegal request for this state: report via the recorded exception
			setState(STATE_INITIALIZED, new IllegalStateException("state=" + mState + ",request=" + request));
			request = REQUEST_NON;
		}
		return request;
	}
530 |
	/**
	 * STATE_RESUMING handler: (re)configure and start the MediaCodec, open the
	 * intermediate output stream, cache the codec buffers and enter STATE_RUNNING,
	 * notifying the listener. Falls back to STATE_INITIALIZED on IOException.
	 * @param request pending request code (always consumed here)
	 * @return REQUEST_NON
	 */
	private final int handleResuming(int request) {
		if (DEBUG) Log.v(TAG, "STATE_RESUMING");
		request = REQUEST_NON;
		try {
			mIsEOS = false;
			// subclass decides whether to reuse or recreate the codec
			mMediaCodec = internal_configure(mMediaCodec, mConfigFormat);
			mCurrentOutputStream = openOutputStream(); // changeOutputStream();
			mMediaCodec.start();
			encoderOutputBuffers = mMediaCodec.getOutputBuffers();
			encoderInputBuffers = mMediaCodec.getInputBuffers();
			// -1 so the first drained frame makes the count start at 0
			mFrameCounts = -1;
			setState(STATE_RUNNING, null);
			callOnResume();
		} catch (IOException e) {
			setState(STATE_INITIALIZED, e);
		}
		return request;
	}
549 | //********************************************************************************
550 | //********************************************************************************
551 | /**
552 | * handle pausing request
553 | * this method is called from message handler of EncoderHandler
554 | */
555 | private final void handlePauseRecording() {
556 | if (DEBUG) Log.v(TAG, "handlePauseRecording");
557 | // process all available output data
558 | drain();
559 | // send EOS to MediaCodec encoder(request to stop)
560 | signalEndOfInputStream();
561 | // process output data again for EOS signal
562 | drain();
563 | if (mCurrentOutputStream != null)
564 | try {
565 | mCurrentOutputStream.flush();
566 | mCurrentOutputStream.close();
567 | } catch (IOException e) {
568 | Log.e(TAG, "handlePauseRecording:", e);
569 | }
570 | mCurrentOutputStream = null;
571 | encoderOutputBuffers = encoderInputBuffers = null;
572 | mRequestQueue.clear();
573 | if (mMediaCodec != null) {
574 | try {
575 | mMediaCodec.stop();
576 | mMediaCodec.release();
577 | mMediaCodec = null;
578 | } catch (Exception e) {
579 | Log.e(TAG, "failed releasing MediaCodec", e);
580 | }
581 | }
582 | }
583 |
584 | /**
585 | * Release all related objects
586 | */
587 | protected void internal_release() {
588 | if (DEBUG) Log.d(TAG, "internal_release:");
589 | callOnStopped();
590 | mIsRunning = false;
591 | if (mMediaCodec != null) {
592 | try {
593 | mMediaCodec.stop();
594 | mMediaCodec.release();
595 | mMediaCodec = null;
596 | } catch (Exception e) {
597 | Log.e(TAG, "failed releasing MediaCodec", e);
598 | }
599 | }
600 | mBufferInfo = null;
601 | }
602 |
	/**
	 * Request the codec to finish by queueing an empty input buffer flagged
	 * BUFFER_FLAG_END_OF_STREAM (via encode with length 0).
	 */
	protected void signalEndOfInputStream() {
		if (DEBUG) Log.d(TAG, "sending EOS to encoder");
		// signalEndOfInputStream is only available for video encoding with surface
		// and equivalent sending a empty buffer with BUFFER_FLAG_END_OF_STREAM flag.
//		mMediaCodec.signalEndOfInputStream();	// API >= 18
		encode(null, 0, getPTSUs());
	}
610 |
611 | protected boolean isRecording() {
612 | return mIsRunning && (mState == STATE_RUNNING) && (!mIsEOS);
613 | }
614 |
615 | /**
616 | * Method to set byte array to the MediaCodec encoder
617 | * if you use Surface to input data to encoder, you should not call this method
618 | * @param buffer
619 | * @param length length of byte array, zero means EOS.
620 | * @param presentationTimeUs
621 | */
622 | // protected void encode(final byte[] buffer, final int length, final long presentationTimeUs) {
623 | protected void encode(final ByteBuffer buffer, int length, long presentationTimeUs) {
624 | if (!mIsRunning || !isRecording()) return;
625 | while (mIsRunning) {
626 | final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
627 | if (inputBufferIndex >= 0) {
628 | final ByteBuffer inputBuffer = encoderInputBuffers[inputBufferIndex];
629 | inputBuffer.clear();
630 | if (buffer != null) {
631 | inputBuffer.put(buffer);
632 | }
633 | if (length <= 0) {
634 | // send EOS
635 | mIsEOS = true;
636 | if (DEBUG) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
637 | mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0,
638 | presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
639 | } else {
640 | mMediaCodec.queueInputBuffer(inputBufferIndex, 0, length,
641 | presentationTimeUs, 0);
642 | }
643 | break;
644 | } else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
645 | // wait for MediaCodec encoder is ready to encode
646 | // nothing to do here because MediaCodec#dequeueInputBuffer(TIMEOUT_USEC)
647 | // will wait for maximum TIMEOUT_USEC(10msec) on each call
648 | }
649 | }
650 | }
651 |
652 | /**
653 | * working buffer
654 | */
655 | private byte[] writeBuffer = new byte[1024];
656 | /**
657 | * drain encoded data and write them to intermediate file
658 | */
659 | protected void drain() {
660 | if (mMediaCodec == null) return;
661 | int encoderStatus;
662 | while (mIsRunning && (mState == STATE_RUNNING)) {
663 | // get encoded data with maximum timeout duration of TIMEOUT_USEC(=10[msec])
664 | try {
665 | encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
666 | } catch (IllegalStateException e) {
667 | break;
668 | }
669 | if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
670 | // wait 5 counts(=TIMEOUT_USEC x 5 = 50msec) until data/EOS come
671 | if (!mIsEOS) {
672 | break; // out of while
673 | }
674 | } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
675 | if (DEBUG) Log.v(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
676 | // this should not come when encoding
677 | encoderOutputBuffers = mMediaCodec.getOutputBuffers();
678 | } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
679 | if (DEBUG) Log.v(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
680 | // this status indicate the output format of codec is changed
681 | // this should come only once before actual encoded data
682 | // but this status never come on Android4.3 or less
683 | // and in that case, you should treat when MediaCodec.BUFFER_FLAG_CODEC_CONFIG come.
684 | // get output format from codec and pass them to muxer
685 | // getOutputFormat should be called after INFO_OUTPUT_FORMAT_CHANGED otherwise crash.
686 | if (mSequence == 0) { // sequence 0 is for saving MediaFormat
687 | final MediaFormat format = mMediaCodec.getOutputFormat(); // API >= 16
688 | try {
689 | writeFormat(mCurrentOutputStream, mConfigFormat, format);
690 | // changeOutputStream();
691 | } catch (IOException e) {
692 | Log.e(TAG, "drain:failed to write MediaFormat ", e);
693 | }
694 | }
695 | } else if (encoderStatus < 0) {
696 | // unexpected status
697 | if (DEBUG) Log.w(TAG, "drain:unexpected result from encoder#dequeueOutputBuffer: " + encoderStatus);
698 | } else {
699 | final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
700 | if (encodedData == null) {
701 | // this never should come...may be a MediaCodec internal error
702 | throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
703 | }
704 | if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
705 | // You should set output format to muxer here when you target Android4.3 or less
706 | // but MediaCodec#getOutputFormat can not call here(because INFO_OUTPUT_FORMAT_CHANGED don't come yet)
707 | // therefor we should expand and prepare output format from buffer data.
708 | // This sample is for API>=18(>=Android 4.3), just ignore this flag here
709 | if (DEBUG) Log.d(TAG, "drain:BUFFER_FLAG_CODEC_CONFIG");
710 | mBufferInfo.size = 0;
711 | }
712 |
713 | if (mBufferInfo.size != 0) {
714 | mFrameCounts++;
715 | if (mCurrentOutputStream == null) {
716 | throw new RuntimeException("drain:temporary file not ready");
717 | }
718 | // write encoded data to muxer(need to adjust presentationTimeUs.
719 | mBufferInfo.presentationTimeUs = getPTSUs();
720 | try {
721 | writeStream(mCurrentOutputStream, mSequence, mFrameCounts, mBufferInfo, encodedData, writeBuffer);
722 | } catch (IOException e) {
723 | throw new RuntimeException("drain:failed to writeStream:" + e.getMessage());
724 | }
725 | prevOutputPTSUs = mBufferInfo.presentationTimeUs;
726 | }
727 | // return buffer to encoder
728 | mMediaCodec.releaseOutputBuffer(encoderStatus, false);
729 | if ((mNumFrames > 0) && (mFrameCounts >= mNumFrames)) {
730 | setState(STATE_PAUSING, null); // request pause
731 | }
732 | if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
733 | // when EOS come.
734 | mIsRunning = false;
735 | break; // out of while
736 | }
737 | }
738 | }
739 | }
740 |
741 | /**
742 | * time when previous encoding[micro second(s)]
743 | */
744 | private long prevOutputPTSUs = 0;
745 | /**
746 | * time when start encoding[micro seconds]
747 | */
748 | private long firstTimeUs = -1;
749 | /**
750 | * get next encoding presentationTimeUs
751 | * @return
752 | */
753 | protected long getPTSUs() {
754 | if (firstTimeUs < 0) firstTimeUs = System.nanoTime() / 1000L;
755 | long result = System.nanoTime() / 1000L - firstTimeUs;
756 | if (result < prevOutputPTSUs) {
757 | final long delta = prevOutputPTSUs - result + 8333; // add approx 1/120 sec as a bias
758 | result += delta;
759 | firstTimeUs += delta;
760 | }
761 | return result;
762 | }
763 |
	/**
	 * Scan the existing intermediate file (if any) to recover the saved
	 * MediaFormat and the highest sequence number, so recording can resume
	 * across encoder instances. When nothing valid is found the intermediate
	 * directory is recreated and mSequence stays -1.
	 */
	private void checkLastSequence() {
		if (DEBUG) Log.v(TAG, "checkLastSequence:");
		int sequence = -1;
		MediaFormat configFormat = null;
		try {
			final DataInputStream in = openInputStream(mBaseDir, mType, 0);
			if (in != null)
			try {
				// read MediaFormat data for MediaCodec and for MediaMuxer
				readHeader(in);
				configFormat = asMediaFormat(in.readUTF());	// for MediaCodec
				in.readUTF();	// for MediaMuxer
				// search last sequence
				// this is not a effective implementation for large intermediate file.
				// ex. it may be better to split into multiple files for each sequence
				// or split into two files; file for control block and file for raw bit stream.
				final TLMediaFrameHeader header = new TLMediaFrameHeader();
				// loop ends when readHeader hits EOF and throws — caught by the
				// outer catch below, which is why it is deliberately ignored there
				for (; mIsRunning ;) {
					readHeader(in, header);
					in.skipBytes(header.size);
					sequence = Math.max(sequence, header.sequence);
				}
			} finally {
				in.close();
			}
		} catch (Exception e) {
			// ignore: EOF terminates the scan loop; other errors mean "no valid file"
		}
		mSequence = sequence;
		mConfigFormat = configFormat;
		if (sequence < 0) {
			// if intermediate files do not exist or invalid, remove them and re-create intermediate directory
			delete(mBaseDir);
			mBaseDir.mkdirs();
		}
		if (DEBUG) Log.v(TAG, "checkLastSequence:finished. sequence=" + sequence);
	}
801 |
	/**
	 * Per-frame header record stored in the intermediate file:
	 * identifies the sequence/frame and mirrors MediaCodec.BufferInfo fields.
	 */
	/*package*/static class TLMediaFrameHeader {
		public int sequence;			// recording sequence this frame belongs to
		public int frameNumber;			// frame index within the sequence
		public long presentationTimeUs;	// presentation timestamp [microseconds]
		public int size;				// payload size in bytes
		public int flags;				// MediaCodec.BufferInfo flags

		/**
		 * Convert this header into a fresh MediaCodec.BufferInfo (offset 0).
		 */
		public MediaCodec.BufferInfo asBufferInfo() {
			final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
			info.set(0, size, presentationTimeUs, flags);
			return info;
		}

		/**
		 * Fill the supplied BufferInfo from this header (offset 0) and return it.
		 */
		public MediaCodec.BufferInfo asBufferInfo(final MediaCodec.BufferInfo info) {
			info.set(0, size, presentationTimeUs, flags);
			return info;
		}

		@Override
		public String toString() {
			return String.format(Locale.US,
				"TLMediaFrameHeader(sequence=%d,frameNumber=%d,presentationTimeUs=%d,size=%d,flags=%d)",
				sequence, frameNumber, presentationTimeUs, size, flags);
		}
	}
827 |
828 | private static String getSequenceFilePath(final File base_dir, final int type, final long sequence) {
829 | // final File file = new File(base_dir, String.format("%s-%d.raw", (type == 1 ? "audio" : "video"), sequence));
830 | final File file = new File(base_dir, String.format("%s-0.raw", (type == 1 ? "audio" : "video")));
831 | return file.getAbsolutePath();
832 | }
833 |
834 | /**
835 | * open intermediate file for next sequence
836 | * @return
837 | * @throws IOException
838 | */
839 | private final DataOutputStream openOutputStream() throws IOException {
840 | if (mCurrentOutputStream != null)
841 | try {
842 | mCurrentOutputStream.flush();
843 | mCurrentOutputStream.close();
844 | } catch (IOException e) {
845 | Log.e(TAG, "openOutputStream: failed to flush temporary file", e);
846 | throw e;
847 | }
848 | mSequence++;
849 | final String path = getSequenceFilePath(mBaseDir, mType, mSequence);
850 | return new DataOutputStream(new BufferedOutputStream(new FileOutputStream(path, mSequence > 0)));
851 | }
852 |
853 | /*package*/static final DataInputStream openInputStream(final File base_dir, final int type, final int sequence) throws IOException {
854 | final String path = getSequenceFilePath(base_dir, type, sequence);
855 | DataInputStream in = null;
856 | try {
857 | in = new DataInputStream(new BufferedInputStream((new FileInputStream(path))));
858 | } catch (FileNotFoundException e) {
859 | // if (DEBUG) Log.e(TAG, "openInputStream:" + path, e);
860 | }
861 | return in;
862 | }
863 |
864 | /**
865 | * convert ByteBuffer into String
866 | * @param buffer
867 | * @return
868 | */
869 | private static final String asString(final ByteBuffer buffer) {
870 | final byte[] temp = new byte[16];
871 | final StringBuilder sb = new StringBuilder();
872 | int n = (buffer != null ? buffer.limit() : 0);
873 | if (n > 0) {
874 | buffer.rewind();
875 | int sz = (n > 16 ? 16 : n);
876 | n -= sz;
877 | for (; sz > 0; sz = (n > 16 ? 16 : n), n -= sz) {
878 | buffer.get(temp, 0, sz);
879 | for (int i = 0; i < sz; i++) {
880 | sb.append(temp[i]).append(',');
881 | }
882 | }
883 | }
884 | return sb.toString();
885 | }
886 |
887 | /**
888 | * convert transcribe String into ByteBuffer
889 | * @param str
890 | * @return
891 | */
892 | private static final ByteBuffer asByteBuffer(final String str) {
893 | final String[] hex = str.split(",");
894 | final int m = hex.length;
895 | final byte[] temp = new byte[m];
896 | int n = 0;
897 | for (int i = 0; i < m; i++) {
898 | if (!TextUtils.isEmpty(hex[i]))
899 | temp[n++] = (byte)Integer.parseInt(hex[i]);
900 | }
901 | if (n > 0)
902 | return ByteBuffer.wrap(temp, 0, n);
903 | else
904 | return null;
905 | }
906 |
907 | private static final String asString(final MediaFormat format) {
908 | final JSONObject map = new JSONObject();
909 | try {
910 | if (format.containsKey(MediaFormat.KEY_MIME))
911 | map.put(MediaFormat.KEY_MIME, format.getString(MediaFormat.KEY_MIME));
912 | if (format.containsKey(MediaFormat.KEY_WIDTH))
913 | map.put(MediaFormat.KEY_WIDTH, format.getInteger(MediaFormat.KEY_WIDTH));
914 | if (format.containsKey(MediaFormat.KEY_HEIGHT))
915 | map.put(MediaFormat.KEY_HEIGHT, format.getInteger(MediaFormat.KEY_HEIGHT));
916 | if (format.containsKey(MediaFormat.KEY_BIT_RATE))
917 | map.put(MediaFormat.KEY_BIT_RATE, format.getInteger(MediaFormat.KEY_BIT_RATE));
918 | if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT))
919 | map.put(MediaFormat.KEY_COLOR_FORMAT, format.getInteger(MediaFormat.KEY_COLOR_FORMAT));
920 | if (format.containsKey(MediaFormat.KEY_FRAME_RATE))
921 | map.put(MediaFormat.KEY_FRAME_RATE, format.getInteger(MediaFormat.KEY_FRAME_RATE));
922 | if (format.containsKey(MediaFormat.KEY_I_FRAME_INTERVAL))
923 | map.put(MediaFormat.KEY_I_FRAME_INTERVAL, format.getInteger(MediaFormat.KEY_I_FRAME_INTERVAL));
924 | if (format.containsKey(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER))
925 | map.put(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER, format.getLong(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER));
926 | if (format.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE))
927 | map.put(MediaFormat.KEY_MAX_INPUT_SIZE, format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE));
928 | if (format.containsKey(MediaFormat.KEY_DURATION))
929 | map.put(MediaFormat.KEY_DURATION, format.getInteger(MediaFormat.KEY_DURATION));
930 | if (format.containsKey(MediaFormat.KEY_CHANNEL_COUNT))
931 | map.put(MediaFormat.KEY_CHANNEL_COUNT, format.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
932 | if (format.containsKey(MediaFormat.KEY_SAMPLE_RATE))
933 | map.put(MediaFormat.KEY_SAMPLE_RATE, format.getInteger(MediaFormat.KEY_SAMPLE_RATE));
934 | if (format.containsKey(MediaFormat.KEY_CHANNEL_MASK))
935 | map.put(MediaFormat.KEY_CHANNEL_MASK, format.getInteger(MediaFormat.KEY_CHANNEL_MASK));
936 | if (format.containsKey(MediaFormat.KEY_AAC_PROFILE))
937 | map.put(MediaFormat.KEY_AAC_PROFILE, format.getInteger(MediaFormat.KEY_AAC_PROFILE));
938 | if (format.containsKey(MediaFormat.KEY_AAC_SBR_MODE))
939 | map.put(MediaFormat.KEY_AAC_SBR_MODE, format.getInteger(MediaFormat.KEY_AAC_SBR_MODE));
940 | if (format.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE))
941 | map.put(MediaFormat.KEY_MAX_INPUT_SIZE, format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE));
942 | if (format.containsKey(MediaFormat.KEY_IS_ADTS))
943 | map.put(MediaFormat.KEY_IS_ADTS, format.getInteger(MediaFormat.KEY_IS_ADTS));
944 | if (format.containsKey("what"))
945 | map.put("what", format.getInteger("what"));
946 | if (format.containsKey("csd-0"))
947 | map.put("csd-0", asString(format.getByteBuffer("csd-0")));
948 | if (format.containsKey("csd-1"))
949 | map.put("csd-1", asString(format.getByteBuffer("csd-1")));
950 | } catch (JSONException e) {
951 | Log.e(TAG_STATIC, "writeFormat:", e);
952 | }
953 |
954 | return map.toString();
955 | }
956 |
957 | private static final MediaFormat asMediaFormat(final String format_str) {
958 | MediaFormat format = new MediaFormat();
959 | try {
960 | final JSONObject map = new JSONObject(format_str);
961 | if (map.has(MediaFormat.KEY_MIME))
962 | format.setString(MediaFormat.KEY_MIME, (String)map.get(MediaFormat.KEY_MIME));
963 | if (map.has(MediaFormat.KEY_WIDTH))
964 | format.setInteger(MediaFormat.KEY_WIDTH, (Integer)map.get(MediaFormat.KEY_WIDTH));
965 | if (map.has(MediaFormat.KEY_HEIGHT))
966 | format.setInteger(MediaFormat.KEY_HEIGHT, (Integer)map.get(MediaFormat.KEY_HEIGHT));
967 | if (map.has(MediaFormat.KEY_BIT_RATE))
968 | format.setInteger(MediaFormat.KEY_BIT_RATE, (Integer)map.get(MediaFormat.KEY_BIT_RATE));
969 | if (map.has(MediaFormat.KEY_COLOR_FORMAT))
970 | format.setInteger(MediaFormat.KEY_COLOR_FORMAT, (Integer)map.get(MediaFormat.KEY_COLOR_FORMAT));
971 | if (map.has(MediaFormat.KEY_FRAME_RATE))
972 | format.setInteger(MediaFormat.KEY_FRAME_RATE, (Integer)map.get(MediaFormat.KEY_FRAME_RATE));
973 | if (map.has(MediaFormat.KEY_I_FRAME_INTERVAL))
974 | format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, (Integer)map.get(MediaFormat.KEY_I_FRAME_INTERVAL));
975 | if (map.has(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER))
976 | format.setLong(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER, (Long)map.get(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER));
977 | if (map.has(MediaFormat.KEY_MAX_INPUT_SIZE))
978 | format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, (Integer)map.get(MediaFormat.KEY_MAX_INPUT_SIZE));
979 | if (map.has(MediaFormat.KEY_DURATION))
980 | format.setInteger(MediaFormat.KEY_DURATION, (Integer)map.get(MediaFormat.KEY_DURATION));
981 | if (map.has(MediaFormat.KEY_CHANNEL_COUNT))
982 | format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, (Integer) map.get(MediaFormat.KEY_CHANNEL_COUNT));
983 | if (map.has(MediaFormat.KEY_SAMPLE_RATE))
984 | format.setInteger(MediaFormat.KEY_SAMPLE_RATE, (Integer) map.get(MediaFormat.KEY_SAMPLE_RATE));
985 | if (map.has(MediaFormat.KEY_CHANNEL_MASK))
986 | format.setInteger(MediaFormat.KEY_CHANNEL_MASK, (Integer) map.get(MediaFormat.KEY_CHANNEL_MASK));
987 | if (map.has(MediaFormat.KEY_AAC_PROFILE))
988 | format.setInteger(MediaFormat.KEY_AAC_PROFILE, (Integer) map.get(MediaFormat.KEY_AAC_PROFILE));
989 | if (map.has(MediaFormat.KEY_AAC_SBR_MODE))
990 | format.setInteger(MediaFormat.KEY_AAC_SBR_MODE, (Integer) map.get(MediaFormat.KEY_AAC_SBR_MODE));
991 | if (map.has(MediaFormat.KEY_MAX_INPUT_SIZE))
992 | format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, (Integer) map.get(MediaFormat.KEY_MAX_INPUT_SIZE));
993 | if (map.has(MediaFormat.KEY_IS_ADTS))
994 | format.setInteger(MediaFormat.KEY_IS_ADTS, (Integer) map.get(MediaFormat.KEY_IS_ADTS));
995 | if (map.has("what"))
996 | format.setInteger("what", (Integer)map.get("what"));
997 | if (map.has("csd-0"))
998 | format.setByteBuffer("csd-0", asByteBuffer((String)map.get("csd-0")));
999 | if (map.has("csd-1"))
1000 | format.setByteBuffer("csd-1", asByteBuffer((String)map.get("csd-1")));
1001 | } catch (JSONException e) {
1002 | Log.e(TAG_STATIC, "writeFormat:" + format_str, e);
1003 | format = null;
1004 | }
1005 | return format;
1006 | }
1007 |
1008 | private static final byte[] RESERVED = new byte[40];
1009 | /**
1010 | * write frame header
1011 | * @param presentation_time_us
1012 | * @param size
1013 | * @throws IOException
1014 | */
1015 | /*package*/static void writeHeader(final DataOutputStream out,
1016 | final int sequence, final int frame_number,
1017 | final long presentation_time_us, final int size, final int flag) throws IOException {
1018 |
1019 | out.writeInt(sequence);
1020 | out.writeInt(frame_number);
1021 | out.writeLong(presentation_time_us);
1022 | out.writeInt(size);
1023 | out.writeInt(flag);
1024 | //
1025 | out.write(RESERVED, 0, 40);
1026 | }
1027 |
/**
 * Read one 64 byte frame header (counterpart of writeHeader) from the
 * intermediate file into the given header object.
 * @param in
 * @param header destination; modified in place and also returned
 * @return the header instance passed in
 * @throws IOException
 */
/*package*/static TLMediaFrameHeader readHeader(final DataInputStream in, final TLMediaFrameHeader header) throws IOException {
	header.size = 0;	// reset first so a read failing part-way never leaves a stale size in the caller's header
	header.sequence = in.readInt();
	header.frameNumber = in.readInt(); // frame number
	header.presentationTimeUs = in.readLong();
	header.size = in.readInt();
	header.flags = in.readInt();
	in.skipBytes(40); // skip reserved padding (40 bytes = long x 5)
	return header;
}
1038 |
1039 | /*package*/static TLMediaFrameHeader readHeader(final DataInputStream in) throws IOException {
1040 | final TLMediaFrameHeader header = new TLMediaFrameHeader();
1041 | return readHeader(in, header);
1042 | }
1043 |
1044 | /**
1045 | * read frame header and only returns size of frame
1046 | * @param in
1047 | * @return
1048 | * @throws IOException
1049 | */
1050 | /*package*/static int readFrameSize(final DataInputStream in) throws IOException {
1051 | final TLMediaFrameHeader header = readHeader(in);
1052 | return header.size;
1053 | }
1054 |
1055 | /**
1056 | * write MediaFormat data into intermediate file
1057 | * @param out
1058 | * @param output_format
1059 | */
1060 | private static final void writeFormat(final DataOutputStream out, final MediaFormat codec_format, final MediaFormat output_format) throws IOException {
1061 | if (DEBUG) Log.v(TAG_STATIC, "writeFormat:format=" + output_format);
1062 | final String codec_format_str = asString(codec_format);
1063 | final String output_format_str = asString(output_format);
1064 | final int size = (TextUtils.isEmpty(codec_format_str) ? 0 : codec_format_str.length())
1065 | + (TextUtils.isEmpty(output_format_str) ? 0 : output_format_str.length());
1066 | try {
1067 | writeHeader(out, 0, 0, -1, size, 0);
1068 | out.writeUTF(codec_format_str);
1069 | out.writeUTF(output_format_str);
1070 | } catch (IOException e) {
1071 | Log.e(TAG_STATIC, "writeFormat:", e);
1072 | throw e;
1073 | }
1074 | }
1075 |
1076 | /*package*/static MediaFormat readFormat(final DataInputStream in) {
1077 | MediaFormat format = null;
1078 | try {
1079 | readHeader(in);
1080 | in.readUTF(); // skip MediaFormat data for configure
1081 | format = asMediaFormat(in.readUTF());
1082 | } catch (IOException e) {
1083 | Log.e(TAG_STATIC, "readFormat:", e);
1084 | }
1085 | if (DEBUG) Log.v(TAG_STATIC, "readFormat:format=" + format);
1086 | return format;
1087 | }
1088 |
1089 | /**
1090 | * write raw bit stream into specific intermediate file
1091 | * @param out
1092 | * @param sequence
1093 | * @param frame_number
1094 | * @param info
1095 | * @param buffer
1096 | * @param writeBuffer
1097 | * @throws IOException
1098 | */
1099 | private static final void writeStream(final DataOutputStream out,
1100 | final int sequence, final int frame_number,
1101 | final MediaCodec.BufferInfo info,
1102 | final ByteBuffer buffer, byte[] writeBuffer) throws IOException {
1103 |
1104 | if (writeBuffer.length < info.size) {
1105 | writeBuffer = new byte[info.size];
1106 | }
1107 | buffer.position(info.offset);
1108 | buffer.get(writeBuffer, 0, info.size);
1109 | try {
1110 | writeHeader(out, sequence, frame_number, info.presentationTimeUs, info.size, info.flags);
1111 | out.write(writeBuffer, 0, info.size);
1112 | } catch (IOException e) {
1113 | if (DEBUG) Log.e(TAG_STATIC, "writeStream:", e);
1114 | throw e;
1115 | }
1116 | }
1117 |
1118 | /**
1119 | * read raw bit stream from specific intermediate file
1120 | * @param in
1121 | * @param header
1122 | * @param buffer
1123 | * @param readBuffer
1124 | * @throws IOException
1125 | * @throws BufferOverflowException
1126 | */
1127 | /*package*/static ByteBuffer readStream(final DataInputStream in,
1128 | final TLMediaFrameHeader header,
1129 | ByteBuffer buffer, final byte[] readBuffer) throws IOException {
1130 |
1131 | readHeader(in, header);
1132 | if ((buffer == null) || header.size > buffer.capacity()) {
1133 | buffer = ByteBuffer.allocateDirect(header.size);
1134 | }
1135 | buffer.clear();
1136 | final int max_bytes = Math.min(readBuffer.length, header.size);
1137 | int read_bytes;
1138 | for (int i = header.size; i > 0; i -= read_bytes) {
1139 | read_bytes = in.read(readBuffer, 0, Math.min(i, max_bytes));
1140 | if (read_bytes <= 0) break;
1141 | buffer.put(readBuffer, 0, read_bytes);
1142 | }
1143 | buffer.flip();
1144 | return buffer;
1145 | }
1146 |
1147 | /**
1148 | * delete specific file/directory recursively
1149 | * @param path
1150 | */
1151 | /*package*/static final void delete(final File path) {
1152 | if (path.isDirectory()) {
1153 | File[] files = path.listFiles();
1154 | final int n = files != null ? files.length : 0;
1155 | for (int i = 0; i < n; i++)
1156 | delete(files[i]);
1157 | }
1158 | path.delete();
1159 | }
1160 |
1161 | }
1162 |
--------------------------------------------------------------------------------