├── .idea
│   ├── .name
│   ├── vcs.xml
│   ├── gradle.xml
│   ├── codeStyles
│   │   └── Project.xml
│   └── inspectionProfiles
│       └── Project_Default.xml
├── settings.gradle
├── app
│   ├── lint.xml
│   ├── src
│   │   └── main
│   │       ├── res
│   │       │   ├── drawable-hdpi
│   │       │   │   └── ic_launcher.png
│   │       │   ├── values
│   │       │   │   ├── strings.xml
│   │       │   │   ├── dimens.xml
│   │       │   │   └── styles.xml
│   │       │   ├── values-w820dp
│   │       │   │   └── dimens.xml
│   │       │   ├── menu
│   │       │   │   └── main.xml
│   │       │   └── layout
│   │       │       ├── activity_main.xml
│   │       │       └── fragment_main.xml
│   │       ├── AndroidManifest.xml
│   │       └── java
│   │           └── com
│   │               └── serenegiant
│   │                   ├── audiovideosample
│   │                   │   ├── CameraFragment.java
│   │                   │   ├── MainActivity.java
│   │                   │   └── CameraGLView.java
│   │                   ├── encoder
│   │                   │   ├── MediaMuxerWrapper.java
│   │                   │   ├── MediaVideoEncoder.java
│   │                   │   ├── MediaAudioEncoder.java
│   │                   │   └── MediaEncoder.java
│   │                   └── glutilsOld
│   │                       ├── RenderHandler.java
│   │                       ├── GLDrawer2D.java
│   │                       └── EGLBase.java
│   ├── proguard-rules.txt
│   └── build.gradle
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── README.md
├── gradle.properties
├── .gitignore
├── gradlew.bat
├── gradlew
└── LICENSE
/.idea/.name:
--------------------------------------------------------------------------------
1 | AudioVideoRecordingSample
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app'
2 |
--------------------------------------------------------------------------------
/app/lint.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saki4510t/AudioVideoRecordingSample/HEAD/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/app/src/main/res/drawable-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saki4510t/AudioVideoRecordingSample/HEAD/app/src/main/res/drawable-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | AudioVideoRecordingSample
5 | Settings
6 |
7 |
8 |
--------------------------------------------------------------------------------
/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | 16dp
5 | 16dp
6 |
7 |
8 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Sat May 02 11:16:59 JST 2020
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values-w820dp/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
8 | 64dp
9 |
10 |
11 |
--------------------------------------------------------------------------------
/app/src/main/res/menu/main.xml:
--------------------------------------------------------------------------------
1 |
13 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
7 |
14 |
15 |
16 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/app/proguard-rules.txt:
--------------------------------------------------------------------------------
1 | # To enable ProGuard in your project, edit project.properties
2 | # to define the proguard.config property as described in that file.
3 | #
4 | # Add project specific ProGuard rules here.
5 | # By default, the flags in this file are appended to flags specified
6 | # in ${sdk.dir}/tools/proguard/proguard-android.txt
7 | # You can edit the include path and order by changing the ProGuard
8 | # include property in project.properties.
9 | #
10 | # For more details, see
11 | # http://developer.android.com/guide/developing/tools/proguard.html
12 |
13 | # Add any project specific keep options here:
14 |
15 | # If your project uses WebView with JS, uncomment the following
16 | # and specify the fully qualified class name to the JavaScript interface
17 | # class:
18 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
19 | # public *;
20 | #}
21 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | AudioVideoRecordingSample
2 | =========================
3 |
4 | Simultaneous audio and video recording sample using MediaCodec/MediaMuxer
5 |
6 | Copyright (c) 2014-2016 saki t_saki@serenegiant.com
7 |
8 | Licensed under the Apache License, Version 2.0 (the "License");
9 | you may not use this file except in compliance with the License.
10 | You may obtain a copy of the License at
11 |
12 | http://www.apache.org/licenses/LICENSE-2.0
13 |
14 | Unless required by applicable law or agreed to in writing, software
15 | distributed under the License is distributed on an "AS IS" BASIS,
16 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | See the License for the specific language governing permissions and
18 | limitations under the License.
19 |
20 | All files in the folder are under this Apache License, Version 2.0.
21 |
22 | =========
23 |
--------------------------------------------------------------------------------
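
For orientation before the individual files: the recording flow below is condensed from CameraFragment.java and MediaMuxerWrapper.java in this dump. It is a minimal sketch, not project code; the class name RecordingSketch and the bare listener parameter are illustrative, and the camera/GL wiring handled by CameraGLView is omitted.

import java.io.IOException;

import com.serenegiant.encoder.MediaAudioEncoder;
import com.serenegiant.encoder.MediaEncoder;
import com.serenegiant.encoder.MediaMuxerWrapper;
import com.serenegiant.encoder.MediaVideoEncoder;

public final class RecordingSketch {
    private MediaMuxerWrapper mMuxer;

    // Condensed from CameraFragment#startRecording(): one muxer owns both encoders,
    // and each encoder registers itself with the muxer in its constructor.
    void start(final MediaEncoder.MediaEncoderListener listener) throws IOException {
        mMuxer = new MediaMuxerWrapper(".mp4");              // ".m4a" also works for audio-only recording
        new MediaVideoEncoder(mMuxer, listener, 1280, 720);  // video track, Surface input
        new MediaAudioEncoder(mMuxer, listener);             // audio track, internal mic
        mMuxer.prepare();                                    // configures both MediaCodec instances
        mMuxer.startRecording();
    }

    // Condensed from CameraFragment#stopRecording().
    void stop() {
        if (mMuxer != null) {
            mMuxer.stopRecording();   // the muxer finishes once both encoders reach EOS
            mMuxer = null;
        }
    }
}
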
/.idea/gradle.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
21 |
22 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx1536m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | # AndroidX package structure to make it clearer which packages are bundled with the
15 | # Android operating system, and which are packaged with your app's APK
16 | # https://developer.android.com/topic/libraries/support-library/androidx-rn
17 | android.useAndroidX=true
18 | # Automatically convert third-party libraries to use AndroidX
19 | android.enableJetifier=true
20 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # built application files
2 | *.apk
3 | *.ap_
4 |
5 | # files for the dex VM
6 | *.dex
7 |
8 | # Java class files
9 | *.class
10 |
11 | # Mobile Tools for Java (J2ME)
12 | .mtj.tmp/
13 |
14 |
15 | # generated files
16 | bin/
17 | obj
18 | obj/local
19 | gen/
20 | bin/dexedLibs
21 | bin/res
22 | bin/*.xml
23 | bin/classes
24 | bin/res
25 | bin/jarlist.cache
26 | *.cache
27 |
28 | # Local configuration file (sdk path, etc)
29 | local.properties
30 |
31 | # Eclipse project files
32 | .classpath
33 | .project
34 |
35 | # Proguard folder generated by Eclipse
36 | proguard/
37 |
38 | # Intellij project files
39 | *.iml
40 | *.ipr
41 | *.iws
42 |
43 | # Gradle
44 | .gradle/
45 | .gradle
46 | build/
47 | build
48 |
49 | # gedit
50 | *~
51 |
52 | .idea/*.xml
53 | !.idea/codeStyleSettings.xml
54 | !.idea/copyright/*.xml
55 | !.idea/fileColors.xml
56 | !.idea/encodings.xml
57 | !.idea/gradle.xml
58 | !.idea/runConfigurations/*.xml
59 |
60 | !.idea/inspectionProfiles/*.xml
61 | .idea/inspectionProfiles/profiles_settings.xml
62 |
63 | !.idea/scopes/*.xml
64 | .idea/scopes/scope_settings.xml
65 |
66 | !.idea/templateLanguages.xml
67 | !.idea/vcs.xml
68 | profiles_settings.xml
69 | .idea/libraries
70 | .idea/caches
71 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
25 |
32 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | android {
4 | compileSdkVersion versionCompiler
5 | buildToolsVersion versionBuildTool
6 |
7 | compileOptions {
8 | sourceCompatibility javaSourceCompatibility
9 | targetCompatibility javaTargetCompatibility
10 | }
11 |
12 | defaultConfig {
13 | applicationId "com.serenegiant.audiovideosample"
14 | minSdkVersion 18
15 | targetSdkVersion versionTarget
16 | versionCode versionCodeNum
17 | versionName versionNameString
18 |
19 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
20 | consumerProguardFiles 'consumer-rules.pro'
21 | }
22 |
23 | buildTypes {
24 | release {
25 | minifyEnabled false
26 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt'
27 | }
28 | }
29 |
30 | }
31 |
32 | dependencies {
33 | implementation fileTree(dir: 'libs', include: ['*.jar'])
34 |
35 | testImplementation 'junit:junit:4.12'
36 | androidTestImplementation 'androidx.test:runner:1.2.0'
37 | androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
38 |
39 | implementation "androidx.appcompat:appcompat:${androidXVersion}"
40 | implementation "androidx.legacy:legacy-support-v4:${androidXLegacyV4}"
41 |
42 | implementation "com.serenegiant:common:${commonLibVersion}"
43 | }
44 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
26 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
40 |
41 |
46 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/fragment_main.xml:
--------------------------------------------------------------------------------
1 |
25 |
32 |
33 |
38 |
39 |
49 |
50 |
58 |
59 |
67 |
68 |
--------------------------------------------------------------------------------
/.idea/codeStyles/Project.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 | xmlns:android
11 |
12 | ^$
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 | xmlns:.*
22 |
23 | ^$
24 |
25 |
26 | BY_NAME
27 |
28 |
29 |
30 |
31 |
32 |
33 | .*:id
34 |
35 | http://schemas.android.com/apk/res/android
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 | .*:name
45 |
46 | http://schemas.android.com/apk/res/android
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 | name
56 |
57 | ^$
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 | style
67 |
68 | ^$
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 | .*
78 |
79 | ^$
80 |
81 |
82 | BY_NAME
83 |
84 |
85 |
86 |
87 |
88 |
89 | .*
90 |
91 | http://schemas.android.com/apk/res/android
92 |
93 |
94 | ANDROID_ATTRIBUTE_ORDER
95 |
96 |
97 |
98 |
99 |
100 |
101 | .*
102 |
103 | .*
104 |
105 |
106 | BY_NAME
107 |
108 |
109 |
110 |
111 |
112 |
113 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # Attempt to set APP_HOME
46 | # Resolve links: $0 may be a link
47 | PRG="$0"
48 | # Need this for relative symlinks.
49 | while [ -h "$PRG" ] ; do
50 | ls=`ls -ld "$PRG"`
51 | link=`expr "$ls" : '.*-> \(.*\)$'`
52 | if expr "$link" : '/.*' > /dev/null; then
53 | PRG="$link"
54 | else
55 | PRG=`dirname "$PRG"`"/$link"
56 | fi
57 | done
58 | SAVED="`pwd`"
59 | cd "`dirname \"$PRG\"`/" >/dev/null
60 | APP_HOME="`pwd -P`"
61 | cd "$SAVED" >/dev/null
62 |
63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
64 |
65 | # Determine the Java command to use to start the JVM.
66 | if [ -n "$JAVA_HOME" ] ; then
67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
68 | # IBM's JDK on AIX uses strange locations for the executables
69 | JAVACMD="$JAVA_HOME/jre/sh/java"
70 | else
71 | JAVACMD="$JAVA_HOME/bin/java"
72 | fi
73 | if [ ! -x "$JAVACMD" ] ; then
74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
75 |
76 | Please set the JAVA_HOME variable in your environment to match the
77 | location of your Java installation."
78 | fi
79 | else
80 | JAVACMD="java"
81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
82 |
83 | Please set the JAVA_HOME variable in your environment to match the
84 | location of your Java installation."
85 | fi
86 |
87 | # Increase the maximum file descriptors if we can.
88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
89 | MAX_FD_LIMIT=`ulimit -H -n`
90 | if [ $? -eq 0 ] ; then
91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
92 | MAX_FD="$MAX_FD_LIMIT"
93 | fi
94 | ulimit -n $MAX_FD
95 | if [ $? -ne 0 ] ; then
96 | warn "Could not set maximum file descriptor limit: $MAX_FD"
97 | fi
98 | else
99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
100 | fi
101 | fi
102 |
103 | # For Darwin, add options to specify how the application appears in the dock
104 | if $darwin; then
105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
106 | fi
107 |
108 | # For Cygwin, switch paths to Windows format before running java
109 | if $cygwin ; then
110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
112 | JAVACMD=`cygpath --unix "$JAVACMD"`
113 |
114 | # We build the pattern for arguments to be converted via cygpath
115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
116 | SEP=""
117 | for dir in $ROOTDIRSRAW ; do
118 | ROOTDIRS="$ROOTDIRS$SEP$dir"
119 | SEP="|"
120 | done
121 | OURCYGPATTERN="(^($ROOTDIRS))"
122 | # Add a user-defined pattern to the cygpath arguments
123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
125 | fi
126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
127 | i=0
128 | for arg in "$@" ; do
129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
131 |
132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
134 | else
135 | eval `echo args$i`="\"$arg\""
136 | fi
137 | i=$((i+1))
138 | done
139 | case $i in
140 | (0) set -- ;;
141 | (1) set -- "$args0" ;;
142 | (2) set -- "$args0" "$args1" ;;
143 | (3) set -- "$args0" "$args1" "$args2" ;;
144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
150 | esac
151 | fi
152 |
153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
154 | function splitJvmOpts() {
155 | JVM_OPTS=("$@")
156 | }
157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
159 |
160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
161 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/audiovideosample/CameraFragment.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.audiovideosample;
2 | /*
3 | * AudioVideoRecordingSample
4 | * Sample project to capture audio and video from the internal mic/camera and save them as an MPEG4 file.
5 | *
6 | * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
7 | *
8 | * File name: CameraFragment.java
9 | *
10 | * Licensed under the Apache License, Version 2.0 (the "License");
11 | * you may not use this file except in compliance with the License.
12 | * You may obtain a copy of the License at
13 | *
14 | * http://www.apache.org/licenses/LICENSE-2.0
15 | *
16 | * Unless required by applicable law or agreed to in writing, software
17 | * distributed under the License is distributed on an "AS IS" BASIS,
18 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 | * See the License for the specific language governing permissions and
20 | * limitations under the License.
21 | *
22 | * All files in the folder are under this Apache License, Version 2.0.
23 | */
24 |
25 | import android.os.Bundle;
26 | import androidx.fragment.app.Fragment;
27 | import android.util.Log;
28 | import android.view.LayoutInflater;
29 | import android.view.View;
30 | import android.view.View.OnClickListener;
31 | import android.view.ViewGroup;
32 | import android.widget.ImageButton;
33 | import android.widget.TextView;
34 |
35 | import com.serenegiant.encoder.MediaAudioEncoder;
36 | import com.serenegiant.encoder.MediaEncoder;
37 | import com.serenegiant.encoder.MediaMuxerWrapper;
38 | import com.serenegiant.encoder.MediaVideoEncoder;
39 |
40 | import java.io.IOException;
41 |
42 | public class CameraFragment extends Fragment {
43 | private static final boolean DEBUG = false; // TODO set false on release
44 | private static final String TAG = "CameraFragment";
45 |
46 | /**
47 | * for camera preview display
48 | */
49 | private CameraGLView mCameraView;
50 | /**
51 | * for scale mode display
52 | */
53 | private TextView mScaleModeView;
54 | /**
55 | * button for start/stop recording
56 | */
57 | private ImageButton mRecordButton;
58 | /**
59 | * muxer for audio/video recording
60 | */
61 | private MediaMuxerWrapper mMuxer;
62 |
63 | public CameraFragment() {
64 | super();
65 | // need default constructor
66 | }
67 |
68 | @Override
69 | public View onCreateView(final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
70 | final View rootView = inflater.inflate(R.layout.fragment_main, container, false);
71 | mCameraView = (CameraGLView)rootView.findViewById(R.id.cameraView);
72 | mCameraView.setVideoSize(1280, 720);
73 | mCameraView.setOnClickListener(mOnClickListener);
74 | mScaleModeView = rootView.findViewById(R.id.scalemode_textview);
75 | updateScaleModeText();
76 | mRecordButton = rootView.findViewById(R.id.record_button);
77 | mRecordButton.setOnClickListener(mOnClickListener);
78 | return rootView;
79 | }
80 |
81 | @Override
82 | public void onResume() {
83 | super.onResume();
84 | if (DEBUG) Log.v(TAG, "onResume:");
85 | mCameraView.onResume();
86 | }
87 |
88 | @Override
89 | public void onPause() {
90 | if (DEBUG) Log.v(TAG, "onPause:");
91 | stopRecording();
92 | mCameraView.onPause();
93 | super.onPause();
94 | }
95 |
96 | /**
97 | * click handler for the camera view and the record button
98 | */
99 | private final OnClickListener mOnClickListener = new OnClickListener() {
100 | @Override
101 | public void onClick(final View view) {
102 | switch (view.getId()) {
103 | case R.id.cameraView:
104 | final int scale_mode = (mCameraView.getScaleMode() + 1) % 4;
105 | mCameraView.setScaleMode(scale_mode);
106 | updateScaleModeText();
107 | break;
108 | case R.id.record_button:
109 | if (mMuxer == null)
110 | startRecording();
111 | else
112 | stopRecording();
113 | break;
114 | }
115 | }
116 | };
117 |
118 | private void updateScaleModeText() {
119 | final int scale_mode = mCameraView.getScaleMode();
120 | mScaleModeView.setText(
121 | scale_mode == 0 ? "scale to fit"
122 | : (scale_mode == 1 ? "keep aspect(viewport)"
123 | : (scale_mode == 2 ? "keep aspect(matrix)"
124 | : (scale_mode == 3 ? "keep aspect(crop center)" : ""))));
125 | }
126 |
127 | /**
128 | * start recording
129 | * This is a sample project, so this is called on the UI thread
130 | * to keep things simple, but preparing the encoders is heavy work
131 | * and should normally be done on a private thread.
132 | */
133 | private void startRecording() {
134 | if (DEBUG) Log.v(TAG, "startRecording:");
135 | try {
136 | mRecordButton.setColorFilter(0xffff0000); // turn red
137 | mMuxer = new MediaMuxerWrapper(".mp4"); // if you record audio only, ".m4a" is also OK.
138 | if (true) {
139 | // for video capturing
140 | new MediaVideoEncoder(mMuxer, mMediaEncoderListener, mCameraView.getVideoWidth(), mCameraView.getVideoHeight());
141 | }
142 | if (true) {
143 | // for audio capturing
144 | new MediaAudioEncoder(mMuxer, mMediaEncoderListener);
145 | }
146 | mMuxer.prepare();
147 | mMuxer.startRecording();
148 | } catch (final IOException e) {
149 | mRecordButton.setColorFilter(0);
150 | Log.e(TAG, "startCapture:", e);
151 | }
152 | }
153 |
154 | /**
155 | * request stop recording
156 | */
157 | private void stopRecording() {
158 | if (DEBUG) Log.v(TAG, "stopRecording:mMuxer=" + mMuxer);
159 | mRecordButton.setColorFilter(0); // return to default color
160 | if (mMuxer != null) {
161 | mMuxer.stopRecording();
162 | mMuxer = null;
163 | // you should not wait here
164 | }
165 | }
166 |
167 | /**
168 | * callback methods from encoder
169 | */
170 | private final MediaEncoder.MediaEncoderListener mMediaEncoderListener = new MediaEncoder.MediaEncoderListener() {
171 | @Override
172 | public void onPrepared(final MediaEncoder encoder) {
173 | if (DEBUG) Log.v(TAG, "onPrepared:encoder=" + encoder);
174 | if (encoder instanceof MediaVideoEncoder)
175 | mCameraView.setVideoEncoder((MediaVideoEncoder)encoder);
176 | }
177 |
178 | @Override
179 | public void onStopped(final MediaEncoder encoder) {
180 | if (DEBUG) Log.v(TAG, "onStopped:encoder=" + encoder);
181 | if (encoder instanceof MediaVideoEncoder)
182 | mCameraView.setVideoEncoder(null);
183 | }
184 | };
185 | }
186 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/encoder/MediaMuxerWrapper.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.encoder;
2 | /*
3 | * AudioVideoRecordingSample
4 | * Sample project to capture audio and video from the internal mic/camera and save them as an MPEG4 file.
5 | *
6 | * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
7 | *
8 | * File name: MediaMuxerWrapper.java
9 | *
10 | * Licensed under the Apache License, Version 2.0 (the "License");
11 | * you may not use this file except in compliance with the License.
12 | * You may obtain a copy of the License at
13 | *
14 | * http://www.apache.org/licenses/LICENSE-2.0
15 | *
16 | * Unless required by applicable law or agreed to in writing, software
17 | * distributed under the License is distributed on an "AS IS" BASIS,
18 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 | * See the License for the specific language governing permissions and
20 | * limitations under the License.
21 | *
22 | * All files in the folder are under this Apache License, Version 2.0.
23 | */
24 |
25 | import java.io.File;
26 | import java.io.IOException;
27 | import java.nio.ByteBuffer;
28 | import java.text.SimpleDateFormat;
29 | import java.util.GregorianCalendar;
30 | import java.util.Locale;
31 |
32 | import android.media.MediaCodec;
33 | import android.media.MediaFormat;
34 | import android.media.MediaMuxer;
35 | import android.os.Environment;
36 | import android.text.TextUtils;
37 | import android.util.Log;
38 |
39 | public class MediaMuxerWrapper {
40 | private static final boolean DEBUG = false; // TODO set false on release
41 | private static final String TAG = "MediaMuxerWrapper";
42 |
43 | private static final String DIR_NAME = "AVRecSample";
44 | private static final SimpleDateFormat mDateTimeFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss", Locale.US);
45 |
46 | private String mOutputPath;
47 | private final MediaMuxer mMediaMuxer; // API >= 18
48 | private int mEncoderCount, mStatredCount;
49 | private boolean mIsStarted;
50 | private MediaEncoder mVideoEncoder, mAudioEncoder;
51 |
52 | /**
53 | * Constructor
54 | * @param ext extension of output file
55 | * @throws IOException
56 | */
57 | public MediaMuxerWrapper(String ext) throws IOException {
58 | if (TextUtils.isEmpty(ext)) ext = ".mp4";
59 | try {
60 | mOutputPath = getCaptureFile(Environment.DIRECTORY_MOVIES, ext).toString();
61 | } catch (final NullPointerException e) {
62 | throw new RuntimeException("This app has no permission to write to external storage");
63 | }
64 | mMediaMuxer = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
65 | mEncoderCount = mStatredCount = 0;
66 | mIsStarted = false;
67 | }
68 |
69 | public String getOutputPath() {
70 | return mOutputPath;
71 | }
72 |
73 | public void prepare() throws IOException {
74 | if (mVideoEncoder != null)
75 | mVideoEncoder.prepare();
76 | if (mAudioEncoder != null)
77 | mAudioEncoder.prepare();
78 | }
79 |
80 | public void startRecording() {
81 | if (mVideoEncoder != null)
82 | mVideoEncoder.startRecording();
83 | if (mAudioEncoder != null)
84 | mAudioEncoder.startRecording();
85 | }
86 |
87 | public void stopRecording() {
88 | if (mVideoEncoder != null)
89 | mVideoEncoder.stopRecording();
90 | mVideoEncoder = null;
91 | if (mAudioEncoder != null)
92 | mAudioEncoder.stopRecording();
93 | mAudioEncoder = null;
94 | }
95 |
96 | public synchronized boolean isStarted() {
97 | return mIsStarted;
98 | }
99 |
100 | //**********************************************************************
101 | //**********************************************************************
102 | /**
103 | * assign an encoder to this class; this is called from the encoder itself.
104 | * @param encoder instance of MediaVideoEncoder or MediaAudioEncoder
105 | */
106 | /*package*/ void addEncoder(final MediaEncoder encoder) {
107 | if (encoder instanceof MediaVideoEncoder) {
108 | if (mVideoEncoder != null)
109 | throw new IllegalArgumentException("Video encoder already added.");
110 | mVideoEncoder = encoder;
111 | } else if (encoder instanceof MediaAudioEncoder) {
112 | if (mAudioEncoder != null)
113 | throw new IllegalArgumentException("Video encoder already added.");
114 | mAudioEncoder = encoder;
115 | } else
116 | throw new IllegalArgumentException("unsupported encoder");
117 | mEncoderCount = (mVideoEncoder != null ? 1 : 0) + (mAudioEncoder != null ? 1 : 0);
118 | }
119 |
120 | /**
121 | * request start recording from encoder
122 | * @return true when muxer is ready to write
123 | */
124 | /*package*/ synchronized boolean start() {
125 | if (DEBUG) Log.v(TAG, "start:");
126 | mStatredCount++;
127 | if ((mEncoderCount > 0) && (mStatredCount == mEncoderCount)) {
128 | mMediaMuxer.start();
129 | mIsStarted = true;
130 | notifyAll();
131 | if (DEBUG) Log.v(TAG, "MediaMuxer started:");
132 | }
133 | return mIsStarted;
134 | }
135 |
136 | /**
137 | * request to stop recording; called from an encoder when it has received EOS
138 | */
139 | /*package*/ synchronized void stop() {
140 | if (DEBUG) Log.v(TAG, "stop:mStatredCount=" + mStatredCount);
141 | mStatredCount--;
142 | if ((mEncoderCount > 0) && (mStatredCount <= 0)) {
143 | mMediaMuxer.stop();
144 | mMediaMuxer.release();
145 | mIsStarted = false;
146 | if (DEBUG) Log.v(TAG, "MediaMuxer stopped:");
147 | }
148 | }
149 |
150 | /**
151 | * add a track for an encoder to this muxer
152 | * @param format output format from the encoder
153 | * @return track index; a negative value indicates an error
154 | */
155 | /*package*/ synchronized int addTrack(final MediaFormat format) {
156 | if (mIsStarted)
157 | throw new IllegalStateException("muxer already started");
158 | final int trackIx = mMediaMuxer.addTrack(format);
159 | if (DEBUG) Log.i(TAG, "addTrack:trackNum=" + mEncoderCount + ",trackIx=" + trackIx + ",format=" + format);
160 | return trackIx;
161 | }
162 |
163 | /**
164 | * write encoded data to muxer
165 | * @param trackIndex
166 | * @param byteBuf
167 | * @param bufferInfo
168 | */
169 | /*package*/ synchronized void writeSampleData(final int trackIndex, final ByteBuffer byteBuf, final MediaCodec.BufferInfo bufferInfo) {
170 | if (mStatredCount > 0)
171 | mMediaMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
172 | }
173 |
174 | //**********************************************************************
175 | //**********************************************************************
176 | /**
177 | * generate output file
178 | * @param type Environment.DIRECTORY_MOVIES / Environment.DIRECTORY_DCIM etc.
179 | * @param ext .mp4(.m4a for audio) or .png
180 | * @return null when this app has no permission to write to external storage.
181 | */
182 | public static final File getCaptureFile(final String type, final String ext) {
183 | final File dir = new File(Environment.getExternalStoragePublicDirectory(type), DIR_NAME);
184 | Log.d(TAG, "path=" + dir.toString());
185 | dir.mkdirs();
186 | if (dir.canWrite()) {
187 | return new File(dir, getDateTimeString() + ext);
188 | }
189 | return null;
190 | }
191 |
192 | /**
193 | * get current date and time as String
194 | * @return
195 | */
196 | private static final String getDateTimeString() {
197 | final GregorianCalendar now = new GregorianCalendar();
198 | return mDateTimeFormat.format(now.getTime());
199 | }
200 |
201 | }
202 |
--------------------------------------------------------------------------------
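
The start()/stop() pair above is a simple count-based handshake: the underlying MediaMuxer is started only after every registered encoder has called start(), and it is stopped and released once every encoder has called stop() after reaching EOS. A rough sketch of the calling order expected from an encoder follows (MediaEncoder.java, which actually drives this, is not included in this section; muxer, outputFormat, encodedData and bufferInfo are placeholders for the encoder's own state):

// inside an encoder's drain loop (sketch, not project code)
final int trackIndex = muxer.addTrack(outputFormat);  // register this encoder's track before the muxer starts
final boolean ready = muxer.start();                  // true only once every encoder has called start()
// (the real MediaEncoder presumably waits until muxer.isStarted() is true before writing)
muxer.writeSampleData(trackIndex, encodedData, bufferInfo);  // forward each encoded buffer
// ... after draining the end-of-stream buffer ...
muxer.stop();                                         // the last encoder to call this stops and releases the MediaMuxer
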
/.idea/inspectionProfiles/Project_Default.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/glutilsOld/RenderHandler.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.glutilsOld;
2 | /*
3 | * AudioVideoRecordingSample
4 | * Sample project to capture audio and video from the internal mic/camera and save them as an MPEG4 file.
5 | *
6 | * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
7 | *
8 | * File name: RenderHandler.java
9 | *
10 | * Licensed under the Apache License, Version 2.0 (the "License");
11 | * you may not use this file except in compliance with the License.
12 | * You may obtain a copy of the License at
13 | *
14 | * http://www.apache.org/licenses/LICENSE-2.0
15 | *
16 | * Unless required by applicable law or agreed to in writing, software
17 | * distributed under the License is distributed on an "AS IS" BASIS,
18 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 | * See the License for the specific language governing permissions and
20 | * limitations under the License.
21 | *
22 | * All files in the folder are under this Apache License, Version 2.0.
23 | */
24 |
25 | import android.graphics.SurfaceTexture;
26 | import android.opengl.EGLContext;
27 | import android.opengl.GLES20;
28 | import android.opengl.Matrix;
29 | import android.text.TextUtils;
30 | import android.util.Log;
31 | import android.view.Surface;
32 | import android.view.SurfaceHolder;
33 |
34 | /**
35 | * Helper class to draw a texture over the whole view on a private thread
36 | */
37 | public final class RenderHandler implements Runnable {
38 | private static final boolean DEBUG = false; // TODO set false on release
39 | private static final String TAG = "RenderHandler";
40 |
41 | private final Object mSync = new Object();
42 | private EGLContext mShard_context;
43 | private boolean mIsRecordable;
44 | private Object mSurface;
45 | private int mTexId = -1;
46 | private float[] mMatrix = new float[32];
47 |
48 | private boolean mRequestSetEglContext;
49 | private boolean mRequestRelease;
50 | private int mRequestDraw;
51 |
52 | public static final RenderHandler createHandler(final String name) {
53 | if (DEBUG) Log.v(TAG, "createHandler:");
54 | final RenderHandler handler = new RenderHandler();
55 | synchronized (handler.mSync) {
56 | new Thread(handler, !TextUtils.isEmpty(name) ? name : TAG).start();
57 | try {
58 | handler.mSync.wait();
59 | } catch (final InterruptedException e) {
60 | }
61 | }
62 | return handler;
63 | }
64 |
65 | public final void setEglContext(final EGLContext shared_context, final int tex_id, final Object surface, final boolean isRecordable) {
66 | if (DEBUG) Log.i(TAG, "setEglContext:");
67 | if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture) && !(surface instanceof SurfaceHolder))
68 | throw new RuntimeException("unsupported window type:" + surface);
69 | synchronized (mSync) {
70 | if (mRequestRelease) return;
71 | mShard_context = shared_context;
72 | mTexId = tex_id;
73 | mSurface = surface;
74 | mIsRecordable = isRecordable;
75 | mRequestSetEglContext = true;
76 | Matrix.setIdentityM(mMatrix, 0);
77 | Matrix.setIdentityM(mMatrix, 16);
78 | mSync.notifyAll();
79 | try {
80 | mSync.wait();
81 | } catch (final InterruptedException e) {
82 | }
83 | }
84 | }
85 |
86 | public final void draw() {
87 | draw(mTexId, mMatrix, null);
88 | }
89 |
90 | public final void draw(final int tex_id) {
91 | draw(tex_id, mMatrix, null);
92 | }
93 |
94 | public final void draw(final float[] tex_matrix) {
95 | draw(mTexId, tex_matrix, null);
96 | }
97 |
98 | public final void draw(final float[] tex_matrix, final float[] mvp_matrix) {
99 | draw(mTexId, tex_matrix, mvp_matrix);
100 | }
101 |
102 | public final void draw(final int tex_id, final float[] tex_matrix) {
103 | draw(tex_id, tex_matrix, null);
104 | }
105 |
106 | public final void draw(final int tex_id, final float[] tex_matrix, final float[] mvp_matrix) {
107 | synchronized (mSync) {
108 | if (mRequestRelease) return;
109 | mTexId = tex_id;
110 | if ((tex_matrix != null) && (tex_matrix.length >= 16)) {
111 | System.arraycopy(tex_matrix, 0, mMatrix, 0, 16);
112 | } else {
113 | Matrix.setIdentityM(mMatrix, 0);
114 | }
115 | if ((mvp_matrix != null) && (mvp_matrix.length >= 16)) {
116 | System.arraycopy(mvp_matrix, 0, mMatrix, 16, 16);
117 | } else {
118 | Matrix.setIdentityM(mMatrix, 16);
119 | }
120 | mRequestDraw++;
121 | mSync.notifyAll();
122 | /* try {
123 | mSync.wait();
124 | } catch (final InterruptedException e) {
125 | } */
126 | }
127 | }
128 |
129 | public boolean isValid() {
130 | synchronized (mSync) {
131 | return !(mSurface instanceof Surface) || ((Surface)mSurface).isValid();
132 | }
133 | }
134 |
135 | public final void release() {
136 | if (DEBUG) Log.i(TAG, "release:");
137 | synchronized (mSync) {
138 | if (mRequestRelease) return;
139 | mRequestRelease = true;
140 | mSync.notifyAll();
141 | try {
142 | mSync.wait();
143 | } catch (final InterruptedException e) {
144 | }
145 | }
146 | }
147 |
148 | //********************************************************************************
149 | //********************************************************************************
150 | private EGLBase mEgl;
151 | private EGLBase.EglSurface mInputSurface;
152 | private GLDrawer2D mDrawer;
153 |
154 | @Override
155 | public final void run() {
156 | if (DEBUG) Log.i(TAG, "RenderHandler thread started:");
157 | synchronized (mSync) {
158 | mRequestSetEglContext = mRequestRelease = false;
159 | mRequestDraw = 0;
160 | mSync.notifyAll();
161 | }
162 | boolean localRequestDraw;
163 | for (;;) {
164 | synchronized (mSync) {
165 | if (mRequestRelease) break;
166 | if (mRequestSetEglContext) {
167 | mRequestSetEglContext = false;
168 | internalPrepare();
169 | }
170 | localRequestDraw = mRequestDraw > 0;
171 | if (localRequestDraw) {
172 | mRequestDraw--;
173 | // mSync.notifyAll();
174 | }
175 | }
176 | if (localRequestDraw) {
177 | if ((mEgl != null) && mTexId >= 0) {
178 | mInputSurface.makeCurrent();
179 | // clear screen with yellow color so that you can see rendering rectangle
180 | GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
181 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
182 | mDrawer.setMatrix(mMatrix, 16);
183 | mDrawer.draw(mTexId, mMatrix);
184 | mInputSurface.swap();
185 | }
186 | } else {
187 | synchronized(mSync) {
188 | try {
189 | mSync.wait();
190 | } catch (final InterruptedException e) {
191 | break;
192 | }
193 | }
194 | }
195 | }
196 | synchronized (mSync) {
197 | mRequestRelease = true;
198 | internalRelease();
199 | mSync.notifyAll();
200 | }
201 | if (DEBUG) Log.i(TAG, "RenderHandler thread finished:");
202 | }
203 |
204 | private final void internalPrepare() {
205 | if (DEBUG) Log.i(TAG, "internalPrepare:");
206 | internalRelease();
207 | mEgl = new EGLBase(mShard_context, false, mIsRecordable);
208 |
209 | mInputSurface = mEgl.createFromSurface(mSurface);
210 |
211 | mInputSurface.makeCurrent();
212 | mDrawer = new GLDrawer2D();
213 | mSurface = null;
214 | mSync.notifyAll();
215 | }
216 |
217 | private final void internalRelease() {
218 | if (DEBUG) Log.i(TAG, "internalRelease:");
219 | if (mInputSurface != null) {
220 | mInputSurface.release();
221 | mInputSurface = null;
222 | }
223 | if (mDrawer != null) {
224 | mDrawer.release();
225 | mDrawer = null;
226 | }
227 | if (mEgl != null) {
228 | mEgl.release();
229 | mEgl = null;
230 | }
231 | }
232 |
233 | }
234 |
--------------------------------------------------------------------------------
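
Typical use of RenderHandler, condensed from MediaVideoEncoder later in this dump (a sketch; sharedContext, texId and encoderInputSurface stand for the shared EGLContext, the OES texture id and the MediaCodec input Surface that the caller already owns):

RenderHandler handler = RenderHandler.createHandler("MediaVideoEncoder");  // spawns the private render thread
handler.setEglContext(sharedContext, texId, encoderInputSurface, true);    // recordable EGL surface sharing the GL context
handler.draw(texMatrix);   // call once per camera frame; texMatrix would typically come from SurfaceTexture#getTransformMatrix()
// ...
handler.release();         // asks the render thread to release its EGL resources and waits for it
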
/app/src/main/java/com/serenegiant/glutilsOld/GLDrawer2D.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.glutilsOld;
2 | /*
3 | * AudioVideoRecordingSample
4 | * Sample project to capture audio and video from the internal mic/camera and save them as an MPEG4 file.
5 | *
6 | * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
7 | *
8 | * File name: GLDrawer2D.java
9 | *
10 | * Licensed under the Apache License, Version 2.0 (the "License");
11 | * you may not use this file except in compliance with the License.
12 | * You may obtain a copy of the License at
13 | *
14 | * http://www.apache.org/licenses/LICENSE-2.0
15 | *
16 | * Unless required by applicable law or agreed to in writing, software
17 | * distributed under the License is distributed on an "AS IS" BASIS,
18 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 | * See the License for the specific language governing permissions and
20 | * limitations under the License.
21 | *
22 | * All files in the folder are under this Apache License, Version 2.0.
23 | */
24 |
25 | import java.nio.ByteBuffer;
26 | import java.nio.ByteOrder;
27 | import java.nio.FloatBuffer;
28 |
29 | import android.opengl.GLES11Ext;
30 | import android.opengl.GLES20;
31 | import android.opengl.Matrix;
32 | import android.util.Log;
33 |
34 | /**
35 | * Helper class to draw over the whole view using a specific texture and texture matrix
36 | */
37 | public class GLDrawer2D {
38 | private static final boolean DEBUG = false; // TODO set false on release
39 | private static final String TAG = "GLDrawer2D";
40 |
41 | private static final String vss
42 | = "uniform mat4 uMVPMatrix;\n"
43 | + "uniform mat4 uTexMatrix;\n"
44 | + "attribute highp vec4 aPosition;\n"
45 | + "attribute highp vec4 aTextureCoord;\n"
46 | + "varying highp vec2 vTextureCoord;\n"
47 | + "\n"
48 | + "void main() {\n"
49 | + " gl_Position = uMVPMatrix * aPosition;\n"
50 | + " vTextureCoord = (uTexMatrix * aTextureCoord).xy;\n"
51 | + "}\n";
52 | private static final String fss
53 | = "#extension GL_OES_EGL_image_external : require\n"
54 | + "precision mediump float;\n"
55 | + "uniform samplerExternalOES sTexture;\n"
56 | + "varying highp vec2 vTextureCoord;\n"
57 | + "void main() {\n"
58 | + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
59 | + "}";
60 | private static final float[] VERTICES = { 1.0f, 1.0f, -1.0f, 1.0f, 1.0f, -1.0f, -1.0f, -1.0f };
61 | private static final float[] TEXCOORD = { 1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f };
62 |
63 | private final FloatBuffer pVertex;
64 | private final FloatBuffer pTexCoord;
65 | private int hProgram;
66 | int maPositionLoc;
67 | int maTextureCoordLoc;
68 | int muMVPMatrixLoc;
69 | int muTexMatrixLoc;
70 | private final float[] mMvpMatrix = new float[16];
71 |
72 | private static final int FLOAT_SZ = Float.SIZE / 8;
73 | private static final int VERTEX_NUM = 4;
74 | private static final int VERTEX_SZ = VERTEX_NUM * 2;
75 | /**
76 | * Constructor
77 | * this should be called in GL context
78 | */
79 | public GLDrawer2D() {
80 | pVertex = ByteBuffer.allocateDirect(VERTEX_SZ * FLOAT_SZ)
81 | .order(ByteOrder.nativeOrder()).asFloatBuffer();
82 | pVertex.put(VERTICES);
83 | pVertex.flip();
84 | pTexCoord = ByteBuffer.allocateDirect(VERTEX_SZ * FLOAT_SZ)
85 | .order(ByteOrder.nativeOrder()).asFloatBuffer();
86 | pTexCoord.put(TEXCOORD);
87 | pTexCoord.flip();
88 |
89 | hProgram = loadShader(vss, fss);
90 | GLES20.glUseProgram(hProgram);
91 | maPositionLoc = GLES20.glGetAttribLocation(hProgram, "aPosition");
92 | maTextureCoordLoc = GLES20.glGetAttribLocation(hProgram, "aTextureCoord");
93 | muMVPMatrixLoc = GLES20.glGetUniformLocation(hProgram, "uMVPMatrix");
94 | muTexMatrixLoc = GLES20.glGetUniformLocation(hProgram, "uTexMatrix");
95 |
96 | Matrix.setIdentityM(mMvpMatrix, 0);
97 | GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mMvpMatrix, 0);
98 | GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, mMvpMatrix, 0);
99 | GLES20.glVertexAttribPointer(maPositionLoc, 2, GLES20.GL_FLOAT, false, VERTEX_SZ, pVertex);
100 | GLES20.glVertexAttribPointer(maTextureCoordLoc, 2, GLES20.GL_FLOAT, false, VERTEX_SZ, pTexCoord);
101 | GLES20.glEnableVertexAttribArray(maPositionLoc);
102 | GLES20.glEnableVertexAttribArray(maTextureCoordLoc);
103 | }
104 |
105 | /**
106 | * terminating; this should be called in a GL context
107 | */
108 | public void release() {
109 | if (hProgram >= 0)
110 | GLES20.glDeleteProgram(hProgram);
111 | hProgram = -1;
112 | }
113 |
114 | /**
115 | * draw a specific texture with a specific texture matrix
116 | * @param tex_id texture ID
117 | * @param tex_matrix texture matrix; if null, the previously set matrix is reused (the array size is not checked and must hold at least 16 floats)
118 | */
119 | public void draw(final int tex_id, final float[] tex_matrix) {
120 | GLES20.glUseProgram(hProgram);
121 | if (tex_matrix != null)
122 | GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, tex_matrix, 0);
123 | GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mMvpMatrix, 0);
124 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
125 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex_id);
126 | GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, VERTEX_NUM);
127 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
128 | GLES20.glUseProgram(0);
129 | }
130 |
131 | /**
132 | * Set model/view/projection transform matrix
133 | * @param matrix
134 | * @param offset
135 | */
136 | public void setMatrix(final float[] matrix, final int offset) {
137 | if ((matrix != null) && (matrix.length >= offset + 16)) {
138 | System.arraycopy(matrix, offset, mMvpMatrix, 0, 16);
139 | } else {
140 | Matrix.setIdentityM(mMvpMatrix, 0);
141 | }
142 | }
143 | /**
144 | * create external texture
145 | * @return texture ID
146 | */
147 | public static int initTex() {
148 | if (DEBUG) Log.v(TAG, "initTex:");
149 | final int[] tex = new int[1];
150 | GLES20.glGenTextures(1, tex, 0);
151 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
152 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
153 | GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
154 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
155 | GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
156 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
157 | GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
158 | GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
159 | GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
160 | return tex[0];
161 | }
162 |
163 | /**
164 | * delete specific texture
165 | */
166 | public static void deleteTex(final int hTex) {
167 | if (DEBUG) Log.v(TAG, "deleteTex:");
168 | final int[] tex = new int[] {hTex};
169 | GLES20.glDeleteTextures(1, tex, 0);
170 | }
171 |
172 | /**
173 | * load, compile and link shader
174 | * @param vss source of vertex shader
175 | * @param fss source of fragment shader
176 | * @return
177 | */
178 | public static int loadShader(final String vss, final String fss) {
179 | if (DEBUG) Log.v(TAG, "loadShader:");
180 | int vs = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
181 | GLES20.glShaderSource(vs, vss);
182 | GLES20.glCompileShader(vs);
183 | final int[] compiled = new int[1];
184 | GLES20.glGetShaderiv(vs, GLES20.GL_COMPILE_STATUS, compiled, 0);
185 | if (compiled[0] == 0) {
186 | if (DEBUG) Log.e(TAG, "Failed to compile vertex shader:"
187 | + GLES20.glGetShaderInfoLog(vs));
188 | GLES20.glDeleteShader(vs);
189 | vs = 0;
190 | }
191 |
192 | int fs = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
193 | GLES20.glShaderSource(fs, fss);
194 | GLES20.glCompileShader(fs);
195 | GLES20.glGetShaderiv(fs, GLES20.GL_COMPILE_STATUS, compiled, 0);
196 | if (compiled[0] == 0) {
197 | if (DEBUG) Log.w(TAG, "Failed to compile fragment shader:"
198 | + GLES20.glGetShaderInfoLog(fs));
199 | GLES20.glDeleteShader(fs);
200 | fs = 0;
201 | }
202 |
203 | final int program = GLES20.glCreateProgram();
204 | GLES20.glAttachShader(program, vs);
205 | GLES20.glAttachShader(program, fs);
206 | GLES20.glLinkProgram(program);
207 |
208 | return program;
209 | }
210 |
211 | }
212 |
--------------------------------------------------------------------------------
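
A minimal usage sketch of GLDrawer2D on a thread that currently owns a GL context (variable names are illustrative; in this project RenderHandler above does this work):

int texId = GLDrawer2D.initTex();         // external OES texture, e.g. the camera preview target
GLDrawer2D drawer = new GLDrawer2D();     // compiles and links the shaders above
float[] texMatrix = new float[16];
android.opengl.Matrix.setIdentityM(texMatrix, 0);  // normally SurfaceTexture#getTransformMatrix()
drawer.draw(texId, texMatrix);            // fills the current viewport with the texture
// ...
drawer.release();
GLDrawer2D.deleteTex(texId);
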
/app/src/main/java/com/serenegiant/encoder/MediaVideoEncoder.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.encoder;
2 | /*
3 | * AudioVideoRecordingSample
4 | * Sample project to capture audio and video from the internal mic/camera and save them as an MPEG4 file.
5 | *
6 | * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
7 | *
8 | * File name: MediaVideoEncoder.java
9 | *
10 | * Licensed under the Apache License, Version 2.0 (the "License");
11 | * you may not use this file except in compliance with the License.
12 | * You may obtain a copy of the License at
13 | *
14 | * http://www.apache.org/licenses/LICENSE-2.0
15 | *
16 | * Unless required by applicable law or agreed to in writing, software
17 | * distributed under the License is distributed on an "AS IS" BASIS,
18 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 | * See the License for the specific language governing permissions and
20 | * limitations under the License.
21 | *
22 | * All files in the folder are under this Apache License, Version 2.0.
23 | */
24 |
25 | import java.io.IOException;
26 |
27 | import android.media.MediaCodec;
28 | import android.media.MediaCodecInfo;
29 | import android.media.MediaCodecList;
30 | import android.media.MediaFormat;
31 | import android.opengl.EGLContext;
32 | import android.util.Log;
33 | import android.view.Surface;
34 |
35 | import com.serenegiant.glutilsOld.RenderHandler;
36 |
37 | public class MediaVideoEncoder extends MediaEncoder {
38 | private static final boolean DEBUG = false; // TODO set false on release
39 | private static final String TAG = "MediaVideoEncoder";
40 |
41 | private static final String MIME_TYPE = "video/avc";
42 | // parameters for recording
43 | private static final int FRAME_RATE = 25;
44 | private static final float BPP = 0.25f;
45 |
46 | private final int mWidth;
47 | private final int mHeight;
48 | private RenderHandler mRenderHandler;
49 | private Surface mSurface;
50 |
51 | public MediaVideoEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener, final int width, final int height) {
52 | super(muxer, listener);
53 | if (DEBUG) Log.i(TAG, "MediaVideoEncoder: ");
54 | mWidth = width;
55 | mHeight = height;
56 | mRenderHandler = RenderHandler.createHandler(TAG);
57 | }
58 |
59 | public boolean frameAvailableSoon(final float[] tex_matrix) {
60 | boolean result;
61 | if (result = super.frameAvailableSoon())
62 | mRenderHandler.draw(tex_matrix);
63 | return result;
64 | }
65 |
66 | public boolean frameAvailableSoon(final float[] tex_matrix, final float[] mvp_matrix) {
67 | boolean result;
68 | if (result = super.frameAvailableSoon())
69 | mRenderHandler.draw(tex_matrix, mvp_matrix);
70 | return result;
71 | }
72 |
73 | @Override
74 | public boolean frameAvailableSoon() {
75 | boolean result;
76 | if (result = super.frameAvailableSoon())
77 | mRenderHandler.draw(null);
78 | return result;
79 | }
80 |
81 | @Override
82 | protected void prepare() throws IOException {
83 | if (DEBUG) Log.i(TAG, "prepare: ");
84 | mTrackIndex = -1;
85 | mMuxerStarted = mIsEOS = false;
86 |
87 | final MediaCodecInfo videoCodecInfo = selectVideoCodec(MIME_TYPE);
88 | if (videoCodecInfo == null) {
89 | Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
90 | return;
91 | }
92 | if (DEBUG) Log.i(TAG, "selected codec: " + videoCodecInfo.getName());
93 |
94 | final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
95 | format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); // API >= 18
96 | format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
97 | format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
98 | format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
99 | if (DEBUG) Log.i(TAG, "format: " + format);
100 |
101 | mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
102 | mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
103 | // get Surface for encoder input
104 | // this method can only be called between #configure and #start
105 | mSurface = mMediaCodec.createInputSurface(); // API >= 18
106 | mMediaCodec.start();
107 | if (DEBUG) Log.i(TAG, "prepare finishing");
108 | if (mListener != null) {
109 | try {
110 | mListener.onPrepared(this);
111 | } catch (final Exception e) {
112 | Log.e(TAG, "prepare:", e);
113 | }
114 | }
115 | }
116 |
117 | public void setEglContext(final EGLContext shared_context, final int tex_id) {
118 | mRenderHandler.setEglContext(shared_context, tex_id, mSurface, true);
119 | }
120 |
121 | @Override
122 | protected void release() {
123 | if (DEBUG) Log.i(TAG, "release:");
124 | if (mSurface != null) {
125 | mSurface.release();
126 | mSurface = null;
127 | }
128 | if (mRenderHandler != null) {
129 | mRenderHandler.release();
130 | mRenderHandler = null;
131 | }
132 | super.release();
133 | }
134 |
135 | private int calcBitRate() {
136 | final int bitrate = (int)(BPP * FRAME_RATE * mWidth * mHeight);
137 | Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
138 | return bitrate;
139 | }
140 |
141 | /**
142 | * select the first codec that matches a specific MIME type
143 | * @param mimeType
144 | * @return null if no codec matched
145 | */
146 | protected static final MediaCodecInfo selectVideoCodec(final String mimeType) {
147 | if (DEBUG) Log.v(TAG, "selectVideoCodec:");
148 |
149 | // get the list of available codecs
150 | final int numCodecs = MediaCodecList.getCodecCount();
151 | for (int i = 0; i < numCodecs; i++) {
152 | final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
153 |
154 | if (!codecInfo.isEncoder()) { // skip decoders
155 | continue;
156 | }
157 | 			// select the first codec that matches the specified MIME type and color format
158 | final String[] types = codecInfo.getSupportedTypes();
159 | for (int j = 0; j < types.length; j++) {
160 | if (types[j].equalsIgnoreCase(mimeType)) {
161 | if (DEBUG) Log.i(TAG, "codec:" + codecInfo.getName() + ",MIME=" + types[j]);
162 | final int format = selectColorFormat(codecInfo, mimeType);
163 | if (format > 0) {
164 | return codecInfo;
165 | }
166 | }
167 | }
168 | }
169 | return null;
170 | }
171 |
172 | /**
173 | 	 * select a color format that is available on the specified codec and that we can use.
174 | * @return 0 if no colorFormat is matched
175 | */
176 | protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
177 | if (DEBUG) Log.i(TAG, "selectColorFormat: ");
178 | int result = 0;
179 | final MediaCodecInfo.CodecCapabilities caps;
180 | try {
181 | Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
182 | caps = codecInfo.getCapabilitiesForType(mimeType);
183 | } finally {
184 | Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
185 | }
186 | int colorFormat;
187 | for (int i = 0; i < caps.colorFormats.length; i++) {
188 | colorFormat = caps.colorFormats[i];
189 |             if (isRecognizedVideoFormat(colorFormat)) {
190 | if (result == 0)
191 | result = colorFormat;
192 | break;
193 | }
194 | }
195 | if (result == 0)
196 | Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
197 | return result;
198 | }
199 |
200 | /**
201 | * color formats that we can use in this class
202 | */
203 | protected static int[] recognizedFormats;
204 | static {
205 | recognizedFormats = new int[] {
206 | // MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
207 | // MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
208 | // MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
209 | MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
210 | };
211 | }
212 |
213 |     private static final boolean isRecognizedVideoFormat(final int colorFormat) {
214 |     	if (DEBUG) Log.i(TAG, "isRecognizedVideoFormat:colorFormat=" + colorFormat);
215 | final int n = recognizedFormats != null ? recognizedFormats.length : 0;
216 | for (int i = 0; i < n; i++) {
217 | if (recognizedFormats[i] == colorFormat) {
218 | return true;
219 | }
220 | }
221 | return false;
222 | }
223 |
224 | @Override
225 | protected void signalEndOfInputStream() {
226 | if (DEBUG) Log.d(TAG, "sending EOS to encoder");
227 | mMediaCodec.signalEndOfInputStream(); // API >= 18
228 | mIsEOS = true;
229 | }
230 |
231 | }
232 |
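As a quick sanity check, the static helpers above can be called directly from another class in the same com.serenegiant.encoder package. A minimal sketch, assuming the H.264 MIME type "video/avc" (the class's actual MIME_TYPE constant is defined earlier in this file) and the same android.media/android.util imports as this file:

    // Hypothetical probe (not part of the sample app): check whether an encoder
    // and a usable color format exist for the assumed MIME type.
    final String mime = "video/avc"; // assumption; see MIME_TYPE above
    final MediaCodecInfo codecInfo = MediaVideoEncoder.selectVideoCodec(mime);
    if (codecInfo != null) {
        final int colorFormat = MediaVideoEncoder.selectColorFormat(codecInfo, mime);
        Log.i("CodecProbe", "encoder=" + codecInfo.getName() + ", colorFormat=" + colorFormat);
    } else {
        Log.w("CodecProbe", "no encoder found for " + mime);
    }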
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/encoder/MediaAudioEncoder.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.encoder;
2 | /*
3 | * AudioVideoRecordingSample
4 |  * Sample project to capture audio and video from internal mic/camera and save as MPEG4 file.
5 | *
6 | * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
7 | *
8 | * File name: MediaAudioEncoder.java
9 | *
10 | * Licensed under the Apache License, Version 2.0 (the "License");
11 | * you may not use this file except in compliance with the License.
12 | * You may obtain a copy of the License at
13 | *
14 | * http://www.apache.org/licenses/LICENSE-2.0
15 | *
16 | * Unless required by applicable law or agreed to in writing, software
17 | * distributed under the License is distributed on an "AS IS" BASIS,
18 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 | * See the License for the specific language governing permissions and
20 | * limitations under the License.
21 | *
22 | * All files in the folder are under this Apache License, Version 2.0.
23 | */
24 |
25 | import java.io.IOException;
26 | import java.nio.ByteBuffer;
27 |
28 | import android.media.AudioFormat;
29 | import android.media.AudioRecord;
30 | import android.media.MediaCodec;
31 | import android.media.MediaCodecInfo;
32 | import android.media.MediaCodecList;
33 | import android.media.MediaFormat;
34 | import android.media.MediaRecorder;
35 | import android.util.Log;
36 |
37 | public class MediaAudioEncoder extends MediaEncoder {
38 | private static final boolean DEBUG = false; // TODO set false on release
39 | private static final String TAG = "MediaAudioEncoder";
40 |
41 | private static final String MIME_TYPE = "audio/mp4a-latm";
42 | 	private static final int SAMPLE_RATE = 44100;	// 44.1[kHz] is the only sample rate guaranteed to be available on all devices.
43 | private static final int BIT_RATE = 64000;
44 | 	public static final int SAMPLES_PER_FRAME = 1024;	// AAC frame size in samples; also used as the size in bytes of each AudioRecord read
45 | 	public static final int FRAMES_PER_BUFFER = 25; 	// number of such frames per AudioRecord buffer
46 |
47 | private AudioThread mAudioThread = null;
48 |
49 | public MediaAudioEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
50 | super(muxer, listener);
51 | }
52 |
53 | @Override
54 | protected void prepare() throws IOException {
55 | if (DEBUG) Log.v(TAG, "prepare:");
56 | mTrackIndex = -1;
57 | mMuxerStarted = mIsEOS = false;
58 |         // prepare MediaCodec for AAC encoding of audio data from the internal mic.
59 | final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
60 | if (audioCodecInfo == null) {
61 | Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
62 | return;
63 | }
64 | if (DEBUG) Log.i(TAG, "selected codec: " + audioCodecInfo.getName());
65 |
66 | final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
67 | audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
68 | audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
69 | audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
70 | audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
71 | // audioFormat.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
72 | // audioFormat.setLong(MediaFormat.KEY_DURATION, (long)durationInMs );
73 | if (DEBUG) Log.i(TAG, "format: " + audioFormat);
74 | mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
75 | mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
76 | mMediaCodec.start();
77 | if (DEBUG) Log.i(TAG, "prepare finishing");
78 | if (mListener != null) {
79 | try {
80 | mListener.onPrepared(this);
81 | } catch (final Exception e) {
82 | Log.e(TAG, "prepare:", e);
83 | }
84 | }
85 | }
86 |
87 | @Override
88 | protected void startRecording() {
89 | super.startRecording();
90 | // create and execute audio capturing thread using internal mic
91 | if (mAudioThread == null) {
92 | mAudioThread = new AudioThread();
93 | mAudioThread.start();
94 | }
95 | }
96 |
97 | @Override
98 | protected void release() {
99 | mAudioThread = null;
100 | super.release();
101 | }
102 |
103 | private static final int[] AUDIO_SOURCES = new int[] {
104 | MediaRecorder.AudioSource.MIC,
105 | MediaRecorder.AudioSource.DEFAULT,
106 | MediaRecorder.AudioSource.CAMCORDER,
107 | MediaRecorder.AudioSource.VOICE_COMMUNICATION,
108 | MediaRecorder.AudioSource.VOICE_RECOGNITION,
109 | };
110 |
111 | /**
112 | 	 * Thread to capture audio data from the internal mic as uncompressed 16-bit PCM data
113 | 	 * and write it to the MediaCodec encoder
114 | */
115 | private class AudioThread extends Thread {
116 | @Override
117 | public void run() {
118 | android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
119 | int cnt = 0;
120 | try {
121 | final int min_buffer_size = AudioRecord.getMinBufferSize(
122 | SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
123 | AudioFormat.ENCODING_PCM_16BIT);
124 | int buffer_size = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
125 | if (buffer_size < min_buffer_size)
126 | buffer_size = ((min_buffer_size / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;
127 |
128 | AudioRecord audioRecord = null;
129 | for (final int source : AUDIO_SOURCES) {
130 | try {
131 | audioRecord = new AudioRecord(
132 | source, SAMPLE_RATE,
133 | AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffer_size);
134 | if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED)
135 | audioRecord = null;
136 | } catch (final Exception e) {
137 | audioRecord = null;
138 | }
139 | if (audioRecord != null) break;
140 | }
141 | if (audioRecord != null) {
142 | try {
143 | if (mIsCapturing) {
144 | if (DEBUG) Log.v(TAG, "AudioThread:start audio recording");
145 | final ByteBuffer buf = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);
146 | int readBytes;
147 | audioRecord.startRecording();
148 | try {
149 | for (; mIsCapturing && !mRequestStop && !mIsEOS ;) {
150 | // read audio data from internal mic
151 | buf.clear();
152 | readBytes = audioRecord.read(buf, SAMPLES_PER_FRAME);
153 | if (readBytes > 0) {
154 | // set audio data to encoder
155 | buf.position(readBytes);
156 | buf.flip();
157 | encode(buf, readBytes, getPTSUs());
158 | frameAvailableSoon();
159 | cnt++;
160 | }
161 | }
162 | frameAvailableSoon();
163 | } finally {
164 | audioRecord.stop();
165 | }
166 | }
167 | } finally {
168 | audioRecord.release();
169 | }
170 | } else {
171 | Log.e(TAG, "failed to initialize AudioRecord");
172 | }
173 | } catch (final Exception e) {
174 | Log.e(TAG, "AudioThread#run", e);
175 | }
176 | if (cnt == 0) {
177 | final ByteBuffer buf = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);
178 | for (int i = 0; mIsCapturing && (i < 5); i++) {
179 | buf.position(SAMPLES_PER_FRAME);
180 | buf.flip();
181 | try {
182 | encode(buf, SAMPLES_PER_FRAME, getPTSUs());
183 | frameAvailableSoon();
184 | } catch (final Exception e) {
185 | break;
186 | }
187 | synchronized(this) {
188 | try {
189 | wait(50);
190 | } catch (final InterruptedException e) {
191 | }
192 | }
193 | }
194 | }
195 | if (DEBUG) Log.v(TAG, "AudioThread:finished");
196 | }
197 | }
198 |
199 | /**
200 |     * select the first codec that matches a specific MIME type
201 |     * @param mimeType
202 |     * @return the first matching audio encoder, or null if none was found
203 | */
204 | private static final MediaCodecInfo selectAudioCodec(final String mimeType) {
205 | if (DEBUG) Log.v(TAG, "selectAudioCodec:");
206 |
207 | MediaCodecInfo result = null;
208 | // get the list of available codecs
209 | final int numCodecs = MediaCodecList.getCodecCount();
210 | LOOP: for (int i = 0; i < numCodecs; i++) {
211 | final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
212 |             if (!codecInfo.isEncoder()) {	// skip decoders
213 | continue;
214 | }
215 | final String[] types = codecInfo.getSupportedTypes();
216 | for (int j = 0; j < types.length; j++) {
217 | if (DEBUG) Log.i(TAG, "supportedType:" + codecInfo.getName() + ",MIME=" + types[j]);
218 | if (types[j].equalsIgnoreCase(mimeType)) {
219 | if (result == null) {
220 | result = codecInfo;
221 | break LOOP;
222 | }
223 | }
224 | }
225 | }
226 | return result;
227 | }
228 |
229 | }
230 |
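The AudioRecord buffer size chosen in AudioThread#run() follows a simple rule: start from SAMPLES_PER_FRAME * FRAMES_PER_BUFFER and, only when that falls below the device minimum, round up to a multiple of SAMPLES_PER_FRAME. A minimal standalone sketch of that sizing arithmetic (the 44100 literal mirrors the private SAMPLE_RATE constant above; the variable names are illustrative only):

    // Same sizing rule as AudioThread#run(), extracted for clarity.
    final int minBufferSize = AudioRecord.getMinBufferSize(
            44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    int bufferSize = MediaAudioEncoder.SAMPLES_PER_FRAME * MediaAudioEncoder.FRAMES_PER_BUFFER; // 1024 * 25
    if (bufferSize < minBufferSize) {
        // round up to the next multiple of SAMPLES_PER_FRAME, doubled as the sample does
        bufferSize = ((minBufferSize / MediaAudioEncoder.SAMPLES_PER_FRAME) + 1)
                * MediaAudioEncoder.SAMPLES_PER_FRAME * 2;
    }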
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2014 saki t_saki@serenegiant.com
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/audiovideosample/MainActivity.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.audiovideosample;
2 | /*
3 | * AudioVideoRecordingSample
4 |  * Sample project to capture audio and video from internal mic/camera and save as MPEG4 file.
5 | *
6 | * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
7 | *
8 | * File name: MainActivity.java
9 | *
10 | * Licensed under the Apache License, Version 2.0 (the "License");
11 | * you may not use this file except in compliance with the License.
12 | * You may obtain a copy of the License at
13 | *
14 | * http://www.apache.org/licenses/LICENSE-2.0
15 | *
16 | * Unless required by applicable law or agreed to in writing, software
17 | * distributed under the License is distributed on an "AS IS" BASIS,
18 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 | * See the License for the specific language governing permissions and
20 | * limitations under the License.
21 | *
22 | * All files in the folder are under this Apache License, Version 2.0.
23 | */
24 |
25 | import android.Manifest;
26 | import android.annotation.SuppressLint;
27 | import android.content.pm.PackageManager;
28 | import android.os.Bundle;
29 |
30 | import androidx.annotation.NonNull;
31 | import androidx.appcompat.app.AppCompatActivity;
32 |
33 | import android.util.Log;
34 | import android.view.Menu;
35 | import android.view.MenuItem;
36 | import android.widget.Toast;
37 |
38 | import com.serenegiant.dialog.MessageDialogFragmentV4;
39 | import com.serenegiant.system.BuildCheck;
40 | import com.serenegiant.system.PermissionCheck;
41 |
42 | public class MainActivity extends AppCompatActivity
43 | implements MessageDialogFragmentV4.MessageDialogListener {
44 |
45 | 	private static final boolean DEBUG = false;	// XXX set to false for production builds
46 | private static final String TAG = MainActivity.class.getSimpleName();
47 |
48 | @Override
49 | protected void onCreate(final Bundle savedInstanceState) {
50 | super.onCreate(savedInstanceState);
51 | setContentView(R.layout.activity_main);
52 |
53 | if (savedInstanceState == null) {
54 | getSupportFragmentManager().beginTransaction()
55 | .add(R.id.container, new CameraFragment()).commit();
56 | }
57 | }
58 |
59 | @Override
60 | protected final void onStart() {
61 | super.onStart();
62 | if (DEBUG) Log.v(TAG, "onStart:");
63 | if (BuildCheck.isAndroid7()) {
64 | internalOnResume();
65 | }
66 | }
67 |
68 | @Override
69 | protected final void onResume() {
70 | super.onResume();
71 | if (DEBUG) Log.v(TAG, "onResume:");
72 | if (!BuildCheck.isAndroid7()) {
73 | internalOnResume();
74 | }
75 | }
76 |
77 | @Override
78 | protected final void onPause() {
79 | if (DEBUG) Log.v(TAG, "onPause:");
80 | if (!BuildCheck.isAndroid7()) {
81 | internalOnPause();
82 | }
83 | super.onPause();
84 | }
85 |
86 | @Override
87 | protected final void onStop() {
88 | if (DEBUG) Log.v(TAG, "onStop:");
89 | if (BuildCheck.isAndroid7()) {
90 | internalOnPause();
91 | }
92 | super.onStop();
93 | }
94 |
95 | protected void internalOnResume() {
96 | if (DEBUG) Log.v(TAG, "internalOnResume:");
97 | checkPermission();
98 | }
99 |
100 | protected void internalOnPause() {
101 | if (DEBUG) Log.v(TAG, "internalOnPause:");
102 | }
103 |
104 | @Override
105 | public boolean onCreateOptionsMenu(final Menu menu) {
106 |
107 | // Inflate the menu; this adds items to the action bar if it is present.
108 | getMenuInflater().inflate(R.menu.main, menu);
109 | return true;
110 | }
111 |
112 | @Override
113 | public boolean onOptionsItemSelected(final MenuItem item) {
114 | // Handle action bar item clicks here. The action bar will
115 | // automatically handle clicks on the Home/Up button, so long
116 | // as you specify a parent activity in AndroidManifest.xml.
117 | final int id = item.getItemId();
118 | if (id == R.id.action_settings) {
119 | return true;
120 | }
121 | return super.onOptionsItemSelected(item);
122 | }
123 |
124 | @Override
125 | public void onRequestPermissionsResult(final int requestCode,
126 | @NonNull final String[] permissions, @NonNull final int[] grantResults) {
127 |
128 | 		super.onRequestPermissionsResult(requestCode, permissions, grantResults);	// the super implementation does nothing here, but call it just in case
129 | final int n = Math.min(permissions.length, grantResults.length);
130 | for (int i = 0; i < n; i++) {
131 | checkPermissionResult(requestCode, permissions[i],
132 | grantResults[i] == PackageManager.PERMISSION_GRANTED);
133 | }
134 | checkPermission();
135 | }
136 |
137 | /**
138 | * callback listener from MessageDialogFragmentV4
139 | *
140 | * @param dialog
141 | * @param requestCode
142 | * @param permissions
143 | * @param result
144 | */
145 | @SuppressLint("NewApi")
146 | @Override
147 | public void onMessageDialogResult(
148 | @NonNull final MessageDialogFragmentV4 dialog, final int requestCode,
149 | @NonNull final String[] permissions, final boolean result) {
150 |
151 | switch (requestCode) {
152 | case REQUEST_PERMISSION_WRITE_EXTERNAL_STORAGE:
153 | case REQUEST_PERMISSION_AUDIO_RECORDING:
154 | case REQUEST_PERMISSION_CAMERA:
155 | case REQUEST_PERMISSION_NETWORK:
156 | case REQUEST_PERMISSION_HARDWARE_ID:
157 | case REQUEST_PERMISSION_LOCATION:
158 | if (result) {
159 | 				// when the user tapped OK on the message dialog, actually request the permission(s)
160 | if (BuildCheck.isMarshmallow()) {
161 | requestPermissions(permissions, requestCode);
162 | return;
163 | }
164 | }
165 | 			// when the dialog was cancelled, or on devices below Android 6, check by ourselves and call #checkPermissionResult
166 | for (final String permission : permissions) {
167 | checkPermissionResult(requestCode, permission,
168 | PermissionCheck.hasPermission(this, permission));
169 | }
170 | break;
171 | }
172 | }
173 |
174 | //--------------------------------------------------------------------------------
175 | private boolean checkPermission() {
176 | return checkPermissionCamera()
177 | && checkPermissionAudio()
178 | && checkPermissionWriteExternalStorage();
179 | }
180 |
181 | private static final int ID_PERMISSION_REASON_AUDIO = R.string.permission_audio_recording_reason;
182 | private static final int ID_PERMISSION_REQUEST_AUDIO = R.string.permission_audio_recording_request;
183 | private static final int ID_PERMISSION_REASON_NETWORK = R.string.permission_network_reason;
184 | private static final int ID_PERMISSION_REQUEST_NETWORK = R.string.permission_network_request;
185 | private static final int ID_PERMISSION_REASON_EXT_STORAGE = R.string.permission_ext_storage_reason;
186 | private static final int ID_PERMISSION_REQUEST_EXT_STORAGE = R.string.permission_ext_storage_request;
187 | private static final int ID_PERMISSION_REASON_CAMERA = R.string.permission_camera_reason;
188 | private static final int ID_PERMISSION_REQUEST_CAMERA = R.string.permission_camera_request;
189 | private static final int ID_PERMISSION_REQUEST_HARDWARE_ID = R.string.permission_hardware_id_request;
190 | private static final int ID_PERMISSION_REASON_LOCATION = R.string.permission_location_reason;
191 | private static final int ID_PERMISSION_REQUEST_LOCATION = R.string.permission_location_request;
192 |
193 | /** request code for WRITE_EXTERNAL_STORAGE permission */
194 | private static final int REQUEST_PERMISSION_WRITE_EXTERNAL_STORAGE = 0x1234;
195 | /** request code for RECORD_AUDIO permission */
196 | private static final int REQUEST_PERMISSION_AUDIO_RECORDING = 0x2345;
197 | /** request code for CAMERA permission */
198 | private static final int REQUEST_PERMISSION_CAMERA = 0x3456;
199 | /** request code for INTERNET permission */
200 | private static final int REQUEST_PERMISSION_NETWORK = 0x4567;
201 | /** request code for READ_PHONE_STATE permission */
202 | private static final int REQUEST_PERMISSION_HARDWARE_ID = 0x5678;
203 | /** request code for ACCESS_FINE_LOCATION permission */
204 | private static final int REQUEST_PERMISSION_LOCATION = 0x6789;
205 |
206 | /**
207 | 	 * Actual permission handling:
208 | 	 * just shows a message when a permission is missing
209 | * @param requestCode
210 | * @param permission
211 | * @param result
212 | */
213 | protected void checkPermissionResult(final int requestCode,
214 | final String permission, final boolean result) {
215 |
216 | 		// show a message when the permission is not granted
217 | if (!result && (permission != null)) {
218 | final StringBuilder sb = new StringBuilder();
219 | if (Manifest.permission.RECORD_AUDIO.equals(permission)) {
220 | sb.append(getString(R.string.permission_audio));
221 | }
222 | if (Manifest.permission.WRITE_EXTERNAL_STORAGE.equals(permission)) {
223 | if (sb.length() != 0) {
224 | sb.append("\n");
225 | }
226 | sb.append(getString(R.string.permission_ext_storage));
227 | }
228 | if (Manifest.permission.CAMERA.equals(permission)) {
229 | if (sb.length() != 0) {
230 | sb.append("\n");
231 | }
232 | sb.append(getString(R.string.permission_camera));
233 | }
234 | if (Manifest.permission.INTERNET.equals(permission)) {
235 | if (sb.length() != 0) {
236 | sb.append("\n");
237 | }
238 | sb.append(getString(R.string.permission_network));
239 | }
240 | if (Manifest.permission.ACCESS_FINE_LOCATION.equals(permission)) {
241 | if (sb.length() != 0) {
242 | sb.append("\n");
243 | }
244 | sb.append(getString(R.string.permission_location));
245 | }
246 | Toast.makeText(this, sb.toString(), Toast.LENGTH_LONG).show();
247 | }
248 | }
249 |
250 | /**
251 | 	 * check the permission to access external storage
252 | 	 * and, if missing, show a dialog explaining why the permission is requested
253 | 	 *
254 | 	 * @return true if we already have permission to access external storage
255 | */
256 | protected boolean checkPermissionWriteExternalStorage() {
257 | if (!PermissionCheck.hasWriteExternalStorage(this)) {
258 | MessageDialogFragmentV4.showDialog(this, REQUEST_PERMISSION_WRITE_EXTERNAL_STORAGE,
259 | R.string.permission_title, ID_PERMISSION_REQUEST_EXT_STORAGE,
260 | new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE});
261 | return false;
262 | }
263 | return true;
264 | }
265 |
266 | /**
267 | 	 * check the permission to record audio
268 | 	 * and, if missing, show a dialog explaining why the permission is requested
269 | 	 *
270 | 	 * @return true if we already have permission to record audio
271 | */
272 | protected boolean checkPermissionAudio() {
273 | if (!PermissionCheck.hasAudio(this)) {
274 | MessageDialogFragmentV4.showDialog(this, REQUEST_PERMISSION_AUDIO_RECORDING,
275 | R.string.permission_title, ID_PERMISSION_REQUEST_AUDIO,
276 | new String[]{Manifest.permission.RECORD_AUDIO});
277 | return false;
278 | }
279 | return true;
280 | }
281 |
282 | /**
283 | 	 * check the permission to access the internal camera
284 | 	 * and, if missing, show a dialog explaining why the permission is requested
285 | 	 *
286 | 	 * @return true if we already have permission to access the internal camera
287 | */
288 | protected boolean checkPermissionCamera() {
289 | if (!PermissionCheck.hasCamera(this)) {
290 | MessageDialogFragmentV4.showDialog(this, REQUEST_PERMISSION_CAMERA,
291 | R.string.permission_title, ID_PERMISSION_REQUEST_CAMERA,
292 | new String[]{Manifest.permission.CAMERA});
293 | return false;
294 | }
295 | return true;
296 | }
297 |
298 | /**
299 | 	 * check the permission to access the network
300 | 	 * and, if missing, show a dialog explaining why the permission is requested
301 | 	 *
302 | 	 * @return true if we already have permission to access the network
303 | */
304 | protected boolean checkPermissionNetwork() {
305 | if (!PermissionCheck.hasNetwork(this)) {
306 | MessageDialogFragmentV4.showDialog(this, REQUEST_PERMISSION_NETWORK,
307 | R.string.permission_title, ID_PERMISSION_REQUEST_NETWORK,
308 | new String[]{Manifest.permission.INTERNET});
309 | return false;
310 | }
311 | return true;
312 | }
313 |
314 | /**
315 | 	 * check the permission to access location (GPS)
316 | 	 * and, if missing, show a dialog explaining why the permission is requested
317 | 	 * @return true if we already have permission to access location
318 | */
319 | protected boolean checkPermissionLocation(){
320 | if (!PermissionCheck.hasAccessLocation(this)) {
321 | MessageDialogFragmentV4.showDialog(this, REQUEST_PERMISSION_LOCATION,
322 | R.string.permission_title, ID_PERMISSION_REQUEST_LOCATION,
323 | new String[]{Manifest.permission.ACCESS_FINE_LOCATION,Manifest.permission.ACCESS_COARSE_LOCATION});
324 | return false;
325 | }
326 | return true;
327 | }
328 | /**
329 | 	 * check the READ_PHONE_STATE permission
330 | 	 * and, if missing, show a dialog explaining why the permission is requested.
331 | 	 * This permission is necessary to get a hardware ID of the device, such as the IMEI.
332 | 	 *
333 | 	 * @return true if we already have the READ_PHONE_STATE permission
334 | */
335 | protected boolean checkPermissionHardwareId() {
336 | if (!PermissionCheck.hasPermission(this,
337 | Manifest.permission.READ_PHONE_STATE)) {
338 |
339 | MessageDialogFragmentV4.showDialog(this, REQUEST_PERMISSION_HARDWARE_ID,
340 | R.string.permission_title, ID_PERMISSION_REQUEST_HARDWARE_ID,
341 | new String[]{Manifest.permission.READ_PHONE_STATE});
342 | return false;
343 | }
344 | return true;
345 | }
346 |
347 | }
348 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/glutilsOld/EGLBase.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.glutilsOld;
2 | /*
3 | * AudioVideoRecordingSample
4 |  * Sample project to capture audio and video from internal mic/camera and save as MPEG4 file.
5 | *
6 | * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
7 | *
8 | * File name: EGLBase.java
9 | *
10 | * Licensed under the Apache License, Version 2.0 (the "License");
11 | * you may not use this file except in compliance with the License.
12 | * You may obtain a copy of the License at
13 | *
14 | * http://www.apache.org/licenses/LICENSE-2.0
15 | *
16 | * Unless required by applicable law or agreed to in writing, software
17 | * distributed under the License is distributed on an "AS IS" BASIS,
18 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 | * See the License for the specific language governing permissions and
20 | * limitations under the License.
21 | *
22 | * All files in the folder are under this Apache License, Version 2.0.
23 | */
24 |
25 | import android.annotation.TargetApi;
26 | import android.graphics.SurfaceTexture;
27 | import android.opengl.EGL14;
28 | import android.opengl.EGLConfig;
29 | import android.opengl.EGLContext;
30 | import android.opengl.EGLDisplay;
31 | import android.opengl.EGLSurface;
32 | import android.os.Build;
33 | import android.util.Log;
34 | import android.view.Surface;
35 | import android.view.SurfaceHolder;
36 | import android.view.SurfaceView;
37 |
38 | @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
39 | public class EGLBase { // API >= 17
40 | private static final boolean DEBUG = false; // TODO set false on release
41 | private static final String TAG = "EGLBase";
42 |
43 | private static final int EGL_RECORDABLE_ANDROID = 0x3142;
44 |
45 | private EGLConfig mEglConfig = null;
46 | private EGLContext mEglContext = EGL14.EGL_NO_CONTEXT;
47 | private EGLDisplay mEglDisplay = EGL14.EGL_NO_DISPLAY;
48 | private EGLContext mDefaultContext = EGL14.EGL_NO_CONTEXT;
49 |
50 | public static class EglSurface {
51 | private final EGLBase mEgl;
52 | private EGLSurface mEglSurface = EGL14.EGL_NO_SURFACE;
53 | private final int mWidth, mHeight;
54 |
55 | EglSurface(final EGLBase egl, final Object surface) {
56 | if (DEBUG) Log.v(TAG, "EglSurface:");
57 | if (!(surface instanceof SurfaceView)
58 | && !(surface instanceof Surface)
59 | && !(surface instanceof SurfaceHolder)
60 | && !(surface instanceof SurfaceTexture))
61 | throw new IllegalArgumentException("unsupported surface");
62 | mEgl = egl;
63 | mEglSurface = mEgl.createWindowSurface(surface);
64 | mWidth = mEgl.querySurface(mEglSurface, EGL14.EGL_WIDTH);
65 | mHeight = mEgl.querySurface(mEglSurface, EGL14.EGL_HEIGHT);
66 | if (DEBUG) Log.v(TAG, String.format("EglSurface:size(%d,%d)", mWidth, mHeight));
67 | }
68 |
69 | EglSurface(final EGLBase egl, final int width, final int height) {
70 | if (DEBUG) Log.v(TAG, "EglSurface:");
71 | mEgl = egl;
72 | mEglSurface = mEgl.createOffscreenSurface(width, height);
73 | mWidth = width;
74 | mHeight = height;
75 | }
76 |
77 | public void makeCurrent() {
78 | mEgl.makeCurrent(mEglSurface);
79 | }
80 |
81 | public void swap() {
82 | mEgl.swap(mEglSurface);
83 | }
84 |
85 | public EGLContext getContext() {
86 | return mEgl.getContext();
87 | }
88 |
89 | public void release() {
90 | if (DEBUG) Log.v(TAG, "EglSurface:release:");
91 | mEgl.makeDefault();
92 | mEgl.destroyWindowSurface(mEglSurface);
93 | mEglSurface = EGL14.EGL_NO_SURFACE;
94 | }
95 |
96 | public int getWidth() {
97 | return mWidth;
98 | }
99 |
100 | public int getHeight() {
101 | return mHeight;
102 | }
103 | }
104 |
105 | public EGLBase(final EGLContext shared_context, final boolean with_depth_buffer, final boolean isRecordable) {
106 | if (DEBUG) Log.v(TAG, "EGLBase:");
107 | init(shared_context, with_depth_buffer, isRecordable);
108 | }
109 |
110 | public void release() {
111 | if (DEBUG) Log.v(TAG, "release:");
112 | if (mEglDisplay != EGL14.EGL_NO_DISPLAY) {
113 | destroyContext();
114 | EGL14.eglTerminate(mEglDisplay);
115 | EGL14.eglReleaseThread();
116 | }
117 | mEglDisplay = EGL14.EGL_NO_DISPLAY;
118 | mEglContext = EGL14.EGL_NO_CONTEXT;
119 | }
120 |
121 | public EglSurface createFromSurface(final Object surface) {
122 | if (DEBUG) Log.v(TAG, "createFromSurface:");
123 | final EglSurface eglSurface = new EglSurface(this, surface);
124 | eglSurface.makeCurrent();
125 | return eglSurface;
126 | }
127 |
128 | public EglSurface createOffscreen(final int width, final int height) {
129 | if (DEBUG) Log.v(TAG, "createOffscreen:");
130 | final EglSurface eglSurface = new EglSurface(this, width, height);
131 | eglSurface.makeCurrent();
132 | return eglSurface;
133 | }
134 |
135 | public EGLContext getContext() {
136 | return mEglContext;
137 | }
138 |
139 | public int querySurface(final EGLSurface eglSurface, final int what) {
140 | final int[] value = new int[1];
141 | EGL14.eglQuerySurface(mEglDisplay, eglSurface, what, value, 0);
142 | return value[0];
143 | }
144 |
145 | private void init(EGLContext shared_context, final boolean with_depth_buffer, final boolean isRecordable) {
146 | if (DEBUG) Log.v(TAG, "init:");
147 | if (mEglDisplay != EGL14.EGL_NO_DISPLAY) {
148 | throw new RuntimeException("EGL already set up");
149 | }
150 |
151 | mEglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
152 | if (mEglDisplay == EGL14.EGL_NO_DISPLAY) {
153 | throw new RuntimeException("eglGetDisplay failed");
154 | }
155 |
156 | final int[] version = new int[2];
157 | if (!EGL14.eglInitialize(mEglDisplay, version, 0, version, 1)) {
158 | mEglDisplay = null;
159 | throw new RuntimeException("eglInitialize failed");
160 | }
161 |
162 | shared_context = shared_context != null ? shared_context : EGL14.EGL_NO_CONTEXT;
163 | if (mEglContext == EGL14.EGL_NO_CONTEXT) {
164 | mEglConfig = getConfig(with_depth_buffer, isRecordable);
165 | if (mEglConfig == null) {
166 | throw new RuntimeException("chooseConfig failed");
167 | }
168 | // create EGL rendering context
169 | mEglContext = createContext(shared_context);
170 | }
171 | // confirm whether the EGL rendering context is successfully created
172 | final int[] values = new int[1];
173 | EGL14.eglQueryContext(mEglDisplay, mEglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0);
174 | if (DEBUG) Log.d(TAG, "EGLContext created, client version " + values[0]);
175 | makeDefault(); // makeCurrent(EGL14.EGL_NO_SURFACE);
176 | }
177 |
178 | /**
179 | 	 * make the passed surface current so that we can draw into it
180 | 	 * @return true on success
181 | */
182 | private boolean makeCurrent(final EGLSurface surface) {
183 | // if (DEBUG) Log.v(TAG, "makeCurrent:");
184 | if (mEglDisplay == null) {
185 | if (DEBUG) Log.d(TAG, "makeCurrent:eglDisplay not initialized");
186 | }
187 | if (surface == null || surface == EGL14.EGL_NO_SURFACE) {
188 | final int error = EGL14.eglGetError();
189 | if (error == EGL14.EGL_BAD_NATIVE_WINDOW) {
190 | Log.e(TAG, "makeCurrent:returned EGL_BAD_NATIVE_WINDOW.");
191 | }
192 | return false;
193 | }
194 | 		// attach the EGL rendering context to the specified EGL window surface
195 | if (!EGL14.eglMakeCurrent(mEglDisplay, surface, surface, mEglContext)) {
196 | Log.w(TAG, "eglMakeCurrent:" + EGL14.eglGetError());
197 | return false;
198 | }
199 | return true;
200 | }
201 |
202 | private void makeDefault() {
203 | if (DEBUG) Log.v(TAG, "makeDefault:");
204 | if (!EGL14.eglMakeCurrent(mEglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
205 |         	Log.w(TAG, "makeDefault:" + EGL14.eglGetError());
206 | }
207 | }
208 |
209 | private int swap(final EGLSurface surface) {
210 | // if (DEBUG) Log.v(TAG, "swap:");
211 | if (!EGL14.eglSwapBuffers(mEglDisplay, surface)) {
212 | final int err = EGL14.eglGetError();
213 | if (DEBUG) Log.w(TAG, "swap:err=" + err);
214 | return err;
215 | }
216 | return EGL14.EGL_SUCCESS;
217 | }
218 |
219 | private EGLContext createContext(final EGLContext shared_context) {
220 | // if (DEBUG) Log.v(TAG, "createContext:");
221 |
222 | final int[] attrib_list = {
223 | EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
224 | EGL14.EGL_NONE
225 | };
226 | final EGLContext context = EGL14.eglCreateContext(mEglDisplay, mEglConfig, shared_context, attrib_list, 0);
227 | checkEglError("eglCreateContext");
228 | return context;
229 | }
230 |
231 | private void destroyContext() {
232 | if (DEBUG) Log.v(TAG, "destroyContext:");
233 |
234 | if (!EGL14.eglDestroyContext(mEglDisplay, mEglContext)) {
235 | Log.e("destroyContext", "display:" + mEglDisplay + " context: " + mEglContext);
236 |             Log.e(TAG, "eglDestroyContext:" + EGL14.eglGetError());
237 | }
238 | mEglContext = EGL14.EGL_NO_CONTEXT;
239 | if (mDefaultContext != EGL14.EGL_NO_CONTEXT) {
240 | if (!EGL14.eglDestroyContext(mEglDisplay, mDefaultContext)) {
241 | Log.e("destroyContext", "display:" + mEglDisplay + " context: " + mDefaultContext);
242 |                 Log.e(TAG, "eglDestroyContext:" + EGL14.eglGetError());
243 | }
244 | mDefaultContext = EGL14.EGL_NO_CONTEXT;
245 | }
246 | }
247 |
248 | private EGLSurface createWindowSurface(final Object nativeWindow) {
249 | if (DEBUG) Log.v(TAG, "createWindowSurface:nativeWindow=" + nativeWindow);
250 |
251 | final int[] surfaceAttribs = {
252 | EGL14.EGL_NONE
253 | };
254 | EGLSurface result = null;
255 | try {
256 | result = EGL14.eglCreateWindowSurface(mEglDisplay, mEglConfig, nativeWindow, surfaceAttribs, 0);
257 | } catch (final IllegalArgumentException e) {
258 | Log.e(TAG, "eglCreateWindowSurface", e);
259 | }
260 | return result;
261 | }
262 |
263 | /**
264 | * Creates an EGL surface associated with an offscreen buffer.
265 | */
266 | private EGLSurface createOffscreenSurface(final int width, final int height) {
267 | if (DEBUG) Log.v(TAG, "createOffscreenSurface:");
268 | final int[] surfaceAttribs = {
269 | EGL14.EGL_WIDTH, width,
270 | EGL14.EGL_HEIGHT, height,
271 | EGL14.EGL_NONE
272 | };
273 | EGLSurface result = null;
274 | try {
275 | result = EGL14.eglCreatePbufferSurface(mEglDisplay, mEglConfig, surfaceAttribs, 0);
276 | checkEglError("eglCreatePbufferSurface");
277 | if (result == null) {
278 | throw new RuntimeException("surface was null");
279 | }
280 | } catch (final IllegalArgumentException e) {
281 | Log.e(TAG, "createOffscreenSurface", e);
282 | } catch (final RuntimeException e) {
283 | Log.e(TAG, "createOffscreenSurface", e);
284 | }
285 | return result;
286 | }
287 |
288 | private void destroyWindowSurface(EGLSurface surface) {
289 | if (DEBUG) Log.v(TAG, "destroySurface:");
290 |
291 | if (surface != EGL14.EGL_NO_SURFACE) {
292 | EGL14.eglMakeCurrent(mEglDisplay,
293 | EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
294 | EGL14.eglDestroySurface(mEglDisplay, surface);
295 | }
296 | surface = EGL14.EGL_NO_SURFACE;
297 | if (DEBUG) Log.v(TAG, "destroySurface:finished");
298 | }
299 |
300 | private void checkEglError(final String msg) {
301 | int error;
302 | if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
303 | throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
304 | }
305 | }
306 |
307 | @SuppressWarnings("unused")
308 | private EGLConfig getConfig(final boolean with_depth_buffer, final boolean isRecordable) {
309 | final int[] attribList = {
310 | EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
311 | EGL14.EGL_RED_SIZE, 8,
312 | EGL14.EGL_GREEN_SIZE, 8,
313 | EGL14.EGL_BLUE_SIZE, 8,
314 | EGL14.EGL_ALPHA_SIZE, 8,
315 | EGL14.EGL_NONE, EGL14.EGL_NONE, //EGL14.EGL_STENCIL_SIZE, 8,
316 |         	EGL14.EGL_NONE, EGL14.EGL_NONE,	//EGL_RECORDABLE_ANDROID, 1,	// this flag is required when recording via a MediaCodec input Surface
317 | EGL14.EGL_NONE, EGL14.EGL_NONE, // with_depth_buffer ? EGL14.EGL_DEPTH_SIZE : EGL14.EGL_NONE,
318 | // with_depth_buffer ? 16 : 0,
319 | EGL14.EGL_NONE
320 | };
321 | int offset = 10;
322 |         if (false) {				// stencil buffer (always unused)
323 | attribList[offset++] = EGL14.EGL_STENCIL_SIZE;
324 | attribList[offset++] = 8;
325 | }
326 |         if (with_depth_buffer) {	// depth buffer
327 | attribList[offset++] = EGL14.EGL_DEPTH_SIZE;
328 | attribList[offset++] = 16;
329 | }
330 |         if (isRecordable && (Build.VERSION.SDK_INT >= 18)) {	// when this surface is used as a MediaCodec input Surface
331 | attribList[offset++] = EGL_RECORDABLE_ANDROID;
332 | attribList[offset++] = 1;
333 | }
334 | for (int i = attribList.length - 1; i >= offset; i--) {
335 | attribList[i] = EGL14.EGL_NONE;
336 | }
337 | final EGLConfig[] configs = new EGLConfig[1];
338 | final int[] numConfigs = new int[1];
339 | if (!EGL14.eglChooseConfig(mEglDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0)) {
340 |         	// XXX it would be better to fall back to RGB565
341 |         	Log.w(TAG, "unable to find RGBA8888 EGLConfig");
342 | return null;
343 | }
344 | return configs[0];
345 | }
346 | }
347 |
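A minimal usage sketch for this class, assuming it runs on a dedicated rendering thread with no shared context; only methods defined above are used, and the 640x480 surface size is arbitrary:

    // Create an EGL context, render into an offscreen pbuffer surface, then tear down.
    final EGLBase egl = new EGLBase(null, false, false);            // no shared context, no depth buffer, not recordable
    final EGLBase.EglSurface offscreen = egl.createOffscreen(640, 480);
    offscreen.makeCurrent();
    // ... issue GLES20 draw calls here ...
    offscreen.release();    // detaches and destroys the EGL surface
    egl.release();          // destroys the context and releases the display connection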
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/encoder/MediaEncoder.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.encoder;
2 | /*
3 | * AudioVideoRecordingSample
4 |  * Sample project to capture audio and video from internal mic/camera and save as MPEG4 file.
5 | *
6 | * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
7 | *
8 | * File name: MediaEncoder.java
9 | *
10 | * Licensed under the Apache License, Version 2.0 (the "License");
11 | * you may not use this file except in compliance with the License.
12 | * You may obtain a copy of the License at
13 | *
14 | * http://www.apache.org/licenses/LICENSE-2.0
15 | *
16 | * Unless required by applicable law or agreed to in writing, software
17 | * distributed under the License is distributed on an "AS IS" BASIS,
18 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 | * See the License for the specific language governing permissions and
20 | * limitations under the License.
21 | *
22 | * All files in the folder are under this Apache License, Version 2.0.
23 | */
24 |
25 | import java.io.IOException;
26 | import java.lang.ref.WeakReference;
27 | import java.nio.ByteBuffer;
28 |
29 | import android.media.MediaCodec;
30 | import android.media.MediaFormat;
31 | import android.util.Log;
32 |
33 | public abstract class MediaEncoder implements Runnable {
34 | private static final boolean DEBUG = false; // TODO set false on release
35 | private static final String TAG = "MediaEncoder";
36 |
37 | protected static final int TIMEOUT_USEC = 10000; // 10[msec]
38 | protected static final int MSG_FRAME_AVAILABLE = 1;
39 | protected static final int MSG_STOP_RECORDING = 9;
40 |
41 | public interface MediaEncoderListener {
42 | public void onPrepared(MediaEncoder encoder);
43 | public void onStopped(MediaEncoder encoder);
44 | }
45 |
46 | protected final Object mSync = new Object();
47 | /**
48 |      * Flag that indicates this encoder is currently capturing.
49 | */
50 | protected volatile boolean mIsCapturing;
51 | /**
52 |      * Counter indicating how many frames are (or will soon be) available to drain.
53 | */
54 | private int mRequestDrain;
55 | /**
56 | * Flag to request stop capturing
57 | */
58 | protected volatile boolean mRequestStop;
59 | /**
60 |      * Flag that indicates the encoder received EOS (End Of Stream)
61 | */
62 | protected boolean mIsEOS;
63 | /**
64 |      * Flag that indicates the muxer is running
65 | */
66 | protected boolean mMuxerStarted;
67 | /**
68 | * Track Number
69 | */
70 | protected int mTrackIndex;
71 | /**
72 | * MediaCodec instance for encoding
73 | */
74 | protected MediaCodec mMediaCodec; // API >= 16(Android4.1.2)
75 | /**
76 |      * Weak reference to the MediaMuxerWrapper instance
77 |      */
78 |     protected final WeakReference<MediaMuxerWrapper> mWeakMuxer;
79 | /**
80 | * BufferInfo instance for dequeuing
81 | */
82 | private MediaCodec.BufferInfo mBufferInfo; // API >= 16(Android4.1.2)
83 |
84 | protected final MediaEncoderListener mListener;
85 |
86 | public MediaEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
87 | if (listener == null) throw new NullPointerException("MediaEncoderListener is null");
88 | if (muxer == null) throw new NullPointerException("MediaMuxerWrapper is null");
89 |         mWeakMuxer = new WeakReference<MediaMuxerWrapper>(muxer);
90 | muxer.addEncoder(this);
91 | mListener = listener;
92 | synchronized (mSync) {
93 |             // create BufferInfo here to reduce GC pressure
94 | mBufferInfo = new MediaCodec.BufferInfo();
95 |             // start the encoder thread and wait until it is running
96 | new Thread(this, getClass().getSimpleName()).start();
97 | try {
98 | mSync.wait();
99 | } catch (final InterruptedException e) {
100 | }
101 | }
102 | }
103 |
104 | public String getOutputPath() {
105 | final MediaMuxerWrapper muxer = mWeakMuxer.get();
106 | return muxer != null ? muxer.getOutputPath() : null;
107 | }
108 |
109 | /**
110 |      * the method to indicate that frame data is already available or will be available soon
111 |      * @return true if the encoder is ready to encode.
112 | */
113 | public boolean frameAvailableSoon() {
114 | // if (DEBUG) Log.v(TAG, "frameAvailableSoon");
115 | synchronized (mSync) {
116 | if (!mIsCapturing || mRequestStop) {
117 | return false;
118 | }
119 | mRequestDrain++;
120 | mSync.notifyAll();
121 | }
122 | return true;
123 | }
124 |
125 | /**
126 | * encoding loop on private thread
127 | */
128 | @Override
129 | public void run() {
130 | // android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
131 | synchronized (mSync) {
132 | mRequestStop = false;
133 | mRequestDrain = 0;
134 | mSync.notify();
135 | }
136 | final boolean isRunning = true;
137 | boolean localRequestStop;
138 | boolean localRequestDrain;
139 | while (isRunning) {
140 | synchronized (mSync) {
141 | localRequestStop = mRequestStop;
142 | localRequestDrain = (mRequestDrain > 0);
143 | if (localRequestDrain)
144 | mRequestDrain--;
145 | }
146 | if (localRequestStop) {
147 | drain();
148 | // request stop recording
149 | signalEndOfInputStream();
150 | // process output data again for EOS signale
151 | drain();
152 | // release all related objects
153 | release();
154 | break;
155 | }
156 | if (localRequestDrain) {
157 | drain();
158 | } else {
159 | synchronized (mSync) {
160 | try {
161 | mSync.wait();
162 | } catch (final InterruptedException e) {
163 | break;
164 | }
165 | }
166 | }
167 | } // end of while
168 | if (DEBUG) Log.d(TAG, "Encoder thread exiting");
169 | synchronized (mSync) {
170 | mRequestStop = true;
171 | mIsCapturing = false;
172 | }
173 | }
174 |
175 | /*
176 |      * preparing method for each subclass;
177 |      * this method must be implemented by subclasses, so it is declared abstract
178 | * @throws IOException
179 | */
180 | /*package*/ abstract void prepare() throws IOException;
181 |
182 | /*package*/ void startRecording() {
183 | if (DEBUG) Log.v(TAG, "startRecording");
184 | synchronized (mSync) {
185 | mIsCapturing = true;
186 | mRequestStop = false;
187 | mSync.notifyAll();
188 | }
189 | }
190 |
191 | /**
192 | * the method to request stop encoding
193 | */
194 | /*package*/ void stopRecording() {
195 | if (DEBUG) Log.v(TAG, "stopRecording");
196 | synchronized (mSync) {
197 | if (!mIsCapturing || mRequestStop) {
198 | return;
199 | }
200 | mRequestStop = true; // for rejecting newer frame
201 | mSync.notifyAll();
202 |             // We cannot know when the encoding and writing will finish,
203 |             // so we return immediately after the request to avoid blocking the caller thread
204 | }
205 | }
206 |
207 | //********************************************************************************
208 | //********************************************************************************
209 | /**
210 |      * Release all related objects
211 | */
212 | protected void release() {
213 | if (DEBUG) Log.d(TAG, "release:");
214 | try {
215 | mListener.onStopped(this);
216 | } catch (final Exception e) {
217 | Log.e(TAG, "failed onStopped", e);
218 | }
219 | mIsCapturing = false;
220 | if (mMediaCodec != null) {
221 | try {
222 | mMediaCodec.stop();
223 | mMediaCodec.release();
224 | mMediaCodec = null;
225 | } catch (final Exception e) {
226 | Log.e(TAG, "failed releasing MediaCodec", e);
227 | }
228 | }
229 | if (mMuxerStarted) {
230 | final MediaMuxerWrapper muxer = mWeakMuxer != null ? mWeakMuxer.get() : null;
231 | if (muxer != null) {
232 | try {
233 | muxer.stop();
234 | } catch (final Exception e) {
235 | Log.e(TAG, "failed stopping muxer", e);
236 | }
237 | }
238 | }
239 | mBufferInfo = null;
240 | }
241 |
242 | protected void signalEndOfInputStream() {
243 | if (DEBUG) Log.d(TAG, "sending EOS to encoder");
244 |         // signalEndOfInputStream is only available for video encoding with a surface,
245 |         // and is equivalent to sending an empty buffer with the BUFFER_FLAG_END_OF_STREAM flag.
246 | // mMediaCodec.signalEndOfInputStream(); // API >= 18
247 | encode(null, 0, getPTSUs());
248 | }
249 |
250 | /**
251 |      * Method to feed the data in a ByteBuffer to the MediaCodec encoder
252 | * @param buffer
253 | * @param length length of byte array, zero means EOS.
254 | * @param presentationTimeUs
255 | */
256 | protected void encode(final ByteBuffer buffer, final int length, final long presentationTimeUs) {
257 | if (!mIsCapturing) return;
258 | final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
259 | while (mIsCapturing) {
260 | final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
261 | if (inputBufferIndex >= 0) {
262 | final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
263 | inputBuffer.clear();
264 | if (buffer != null) {
265 | inputBuffer.put(buffer);
266 | }
267 | // if (DEBUG) Log.v(TAG, "encode:queueInputBuffer");
268 | if (length <= 0) {
269 | // send EOS
270 | mIsEOS = true;
271 | if (DEBUG) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
272 | mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0,
273 | presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
274 | break;
275 | } else {
276 | mMediaCodec.queueInputBuffer(inputBufferIndex, 0, length,
277 | presentationTimeUs, 0);
278 | }
279 | break;
280 | } else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
281 | // wait until the MediaCodec encoder is ready to accept input
282 | // nothing to do here because MediaCodec#dequeueInputBuffer(TIMEOUT_USEC)
283 | // already waits for up to TIMEOUT_USEC (10 msec) on each call
284 | }
285 | }
286 | }
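// --- Editor's note (illustrative sketch, not part of the original source) ---
// Roughly how the audio encoder in this project feeds PCM into encode(); the
// AudioRecord instance, the direct ByteBuffer and the SAMPLES_PER_FRAME constant
// are assumed to be set up as in MediaAudioEncoder.java:
//
//   buf.clear();
//   final int readBytes = audioRecord.read(buf, SAMPLES_PER_FRAME);
//   if (readBytes > 0) {
//       buf.position(readBytes);
//       buf.flip();
//       encode(buf, readBytes, getPTSUs());  // queue the PCM data into MediaCodec
//       frameAvailableSoon();                // ask the drain loop to run
//   }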
287 |
288 | /**
289 | * drain encoded data and write them to muxer
290 | */
291 | protected void drain() {
292 | if (mMediaCodec == null) return;
293 | ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers();
294 | int encoderStatus, count = 0;
295 | final MediaMuxerWrapper muxer = mWeakMuxer.get();
296 | if (muxer == null) {
297 | // throw new NullPointerException("muxer is unexpectedly null");
298 | Log.w(TAG, "muxer is unexpectedly null");
299 | return;
300 | }
301 | LOOP: while (mIsCapturing) {
302 | // get encoded data with maximum timeout duration of TIMEOUT_USEC(=10[msec])
303 | encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
304 | if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
305 | // wait up to 5 counts (TIMEOUT_USEC x 5 = 50 msec) until data or EOS comes
306 | if (!mIsEOS) {
307 | if (++count > 5)
308 | break LOOP; // out of while
309 | }
310 | } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
311 | if (DEBUG) Log.v(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
312 | // this should not happen while encoding
313 | encoderOutputBuffers = mMediaCodec.getOutputBuffers();
314 | } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
315 | if (DEBUG) Log.v(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
316 | // this status indicates that the output format of the codec has changed
317 | // it should come only once, before the actual encoded data,
318 | // but it never comes on devices below API 18;
319 | // in that case, handle the buffer flagged with MediaCodec.BUFFER_FLAG_CODEC_CONFIG instead.
320 | if (mMuxerStarted) { // a second format change is an error
321 | throw new RuntimeException("format changed twice");
322 | }
323 | // get the output format from the codec and pass it to the muxer
324 | // getOutputFormat must be called after INFO_OUTPUT_FORMAT_CHANGED, otherwise it crashes.
325 | final MediaFormat format = mMediaCodec.getOutputFormat(); // API >= 16
326 | mTrackIndex = muxer.addTrack(format);
327 | mMuxerStarted = true;
328 | if (!muxer.start()) {
329 | // we should wait until muxer is ready
330 | synchronized (muxer) {
331 | while (!muxer.isStarted())
332 | try {
333 | muxer.wait(100);
334 | } catch (final InterruptedException e) {
335 | break LOOP;
336 | }
337 | }
338 | }
339 | } else if (encoderStatus < 0) {
340 | // unexpected status
341 | if (DEBUG) Log.w(TAG, "drain:unexpected result from encoder#dequeueOutputBuffer: " + encoderStatus);
342 | } else {
343 | final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
344 | if (encodedData == null) {
345 | // this should never happen... it may be a MediaCodec internal error
346 | throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
347 | }
348 | if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
349 | // You would set the output format on the muxer here if you targeted devices below API 18,
350 | // but MediaCodec#getOutputFormat cannot be called here (INFO_OUTPUT_FORMAT_CHANGED has not come yet),
351 | // so you would have to build the output format from the buffer data yourself.
352 | // This sample is for API >= 18 (Android 4.3 and later), so just ignore this flag here
353 | if (DEBUG) Log.d(TAG, "drain:BUFFER_FLAG_CODEC_CONFIG");
354 | mBufferInfo.size = 0;
355 | }
356 |
357 | if (mBufferInfo.size != 0) {
358 | // encoded data is ready, clear waiting counter
359 | count = 0;
360 | if (!mMuxerStarted) {
361 | // muxer is not ready... this is a programming failure.
362 | throw new RuntimeException("drain:muxer hasn't started");
363 | }
364 | // write encoded data to the muxer (presentationTimeUs needs to be adjusted)
365 | mBufferInfo.presentationTimeUs = getPTSUs();
366 | muxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
367 | prevOutputPTSUs = mBufferInfo.presentationTimeUs;
368 | }
369 | // return buffer to encoder
370 | mMediaCodec.releaseOutputBuffer(encoderStatus, false);
371 | if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
372 | // when EOS comes
373 | mIsCapturing = false;
374 | break; // out of while
375 | }
376 | }
377 | }
378 | }
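// --- Editor's note (summary, not part of the original source) ---
// dequeueOutputBuffer() statuses handled in drain() above:
//   INFO_TRY_AGAIN_LATER        -> no output yet; give up after 5 tries (~50 msec) unless EOS was already sent
//   INFO_OUTPUT_BUFFERS_CHANGED -> refresh the cached output buffer array (buffer-array API used before API 21)
//   INFO_OUTPUT_FORMAT_CHANGED  -> add the track to MediaMuxerWrapper and start the muxer (must happen exactly once)
//   index >= 0                  -> write the sample to the muxer, then release the buffer back to the codec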
379 |
380 | /**
381 | * previous presentationTimeUs for writing
382 | */
383 | private long prevOutputPTSUs = 0;
384 | /**
385 | * get next encoding presentationTimeUs
386 | * @return
387 | */
388 | protected long getPTSUs() {
389 | long result = System.nanoTime() / 1000L;
390 | // presentationTimeUs should be monotonic
391 | // otherwise the muxer fails to write
392 | if (result < prevOutputPTSUs)
393 | result = (prevOutputPTSUs - result) + result;
394 | return result;
395 | }
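// --- Editor's note (worked example, not part of the original source) ---
// Example of the clamp above: if prevOutputPTSUs is 1_000_000 and System.nanoTime()/1000
// yields 999_950, then (prevOutputPTSUs - result) + result simplifies to prevOutputPTSUs,
// so the returned timestamp stays at 1_000_000 and never moves backwards; MediaMuxer
// rejects samples whose presentationTimeUs decreases.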
396 |
397 | }
398 |
--------------------------------------------------------------------------------
/app/src/main/java/com/serenegiant/audiovideosample/CameraGLView.java:
--------------------------------------------------------------------------------
1 | package com.serenegiant.audiovideosample;
2 | /*
3 | * AudioVideoRecordingSample
4 |  * Sample project to capture audio and video from the internal mic/camera and save them as an MPEG4 file.
5 | *
6 | * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
7 | *
8 | * File name: CameraGLView.java
9 | *
10 | * Licensed under the Apache License, Version 2.0 (the "License");
11 | * you may not use this file except in compliance with the License.
12 | * You may obtain a copy of the License at
13 | *
14 | * http://www.apache.org/licenses/LICENSE-2.0
15 | *
16 | * Unless required by applicable law or agreed to in writing, software
17 | * distributed under the License is distributed on an "AS IS" BASIS,
18 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
19 | * See the License for the specific language governing permissions and
20 | * limitations under the License.
21 | *
22 | * All files in the folder are under this Apache License, Version 2.0.
23 | */
24 |
25 | import java.io.IOException;
26 | import java.lang.ref.WeakReference;
27 | import java.util.Collections;
28 | import java.util.Comparator;
29 | import java.util.List;
30 |
31 | import javax.microedition.khronos.egl.EGLConfig;
32 | import javax.microedition.khronos.opengles.GL10;
33 |
34 | import android.content.Context;
35 | import android.graphics.SurfaceTexture;
36 | import android.hardware.Camera;
37 | import android.opengl.EGL14;
38 | import android.opengl.GLES20;
39 | import android.opengl.GLSurfaceView;
40 | import android.opengl.Matrix;
41 | import android.os.Handler;
42 | import android.os.Looper;
43 | import android.os.Message;
44 | import android.util.AttributeSet;
45 | import android.util.Log;
46 | import android.view.Display;
47 | import android.view.Surface;
48 | import android.view.SurfaceHolder;
49 | import android.view.WindowManager;
50 |
51 | import com.serenegiant.encoder.MediaVideoEncoder;
52 | import com.serenegiant.glutilsOld.GLDrawer2D;
53 |
54 | /**
55 |  * Subclass of GLSurfaceView that displays the camera preview and writes video frames to the capturing surface
56 | */
57 | public final class CameraGLView extends GLSurfaceView {
58 |
59 | private static final boolean DEBUG = false; // TODO set false on release
60 | private static final String TAG = "CameraGLView";
61 |
62 | private static final int CAMERA_ID = 0;
63 |
64 | private static final int SCALE_STRETCH_FIT = 0;
65 | private static final int SCALE_KEEP_ASPECT_VIEWPORT = 1;
66 | private static final int SCALE_KEEP_ASPECT = 2;
67 | private static final int SCALE_CROP_CENTER = 3;
68 |
69 | private final CameraSurfaceRenderer mRenderer;
70 | private boolean mHasSurface;
71 | private CameraHandler mCameraHandler = null;
72 | private int mVideoWidth, mVideoHeight;
73 | private int mRotation;
74 | private int mScaleMode = SCALE_STRETCH_FIT;
75 |
76 | public CameraGLView(final Context context) {
77 | this(context, null, 0);
78 | }
79 |
80 | public CameraGLView(final Context context, final AttributeSet attrs) {
81 | this(context, attrs, 0);
82 | }
83 |
84 | public CameraGLView(final Context context, final AttributeSet attrs, final int defStyle) {
85 | super(context, attrs);
86 | if (DEBUG) Log.v(TAG, "CameraGLView:");
87 | mRenderer = new CameraSurfaceRenderer(this);
88 | setEGLContextClientVersion(2); // GLES 2.0, API >= 8
89 | setRenderer(mRenderer);
90 | /* // the frequency of refreshing of camera preview is at most 15 fps
91 | // and RENDERMODE_WHEN_DIRTY is better to reduce power consumption
92 | setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); */
93 | }
94 |
95 | @Override
96 | public void onResume() {
97 | if (DEBUG) Log.v(TAG, "onResume:");
98 | super.onResume();
99 | if (mHasSurface) {
100 | if (mCameraHandler == null) {
101 | if (DEBUG) Log.v(TAG, "surface already exist");
102 | startPreview(getWidth(), getHeight());
103 | }
104 | }
105 | }
106 |
107 | @Override
108 | public void onPause() {
109 | if (DEBUG) Log.v(TAG, "onPause:");
110 | if (mCameraHandler != null) {
111 | // just request to stop previewing
112 | mCameraHandler.stopPreview(false);
113 | }
114 | super.onPause();
115 | }
116 |
117 | public void setScaleMode(final int mode) {
118 | if (mScaleMode != mode) {
119 | mScaleMode = mode;
120 | queueEvent(new Runnable() {
121 | @Override
122 | public void run() {
123 | mRenderer.updateViewport();
124 | }
125 | });
126 | }
127 | }
128 |
129 | public int getScaleMode() {
130 | return mScaleMode;
131 | }
132 |
133 | public void setVideoSize(final int width, final int height) {
134 | if ((mRotation % 180) == 0) {
135 | mVideoWidth = width;
136 | mVideoHeight = height;
137 | } else {
138 | mVideoWidth = height;
139 | mVideoHeight = width;
140 | }
141 | queueEvent(new Runnable() {
142 | @Override
143 | public void run() {
144 | mRenderer.updateViewport();
145 | }
146 | });
147 | }
148 |
149 | public int getVideoWidth() {
150 | return mVideoWidth;
151 | }
152 |
153 | public int getVideoHeight() {
154 | return mVideoHeight;
155 | }
156 |
157 | public SurfaceTexture getSurfaceTexture() {
158 | if (DEBUG) Log.v(TAG, "getSurfaceTexture:");
159 | return mRenderer != null ? mRenderer.mSTexture : null;
160 | }
161 |
162 | @Override
163 | public void surfaceDestroyed(final SurfaceHolder holder) {
164 | if (DEBUG) Log.v(TAG, "surfaceDestroyed:");
165 | if (mCameraHandler != null) {
166 | // wait here until previewing has finished,
167 | // otherwise the camera tries to display on a Surface that no longer exists and errors occur
168 | mCameraHandler.stopPreview(true);
169 | }
170 | mCameraHandler = null;
171 | mHasSurface = false;
172 | mRenderer.onSurfaceDestroyed();
173 | super.surfaceDestroyed(holder);
174 | }
175 |
176 | public void setVideoEncoder(final MediaVideoEncoder encoder) {
177 | if (DEBUG) Log.v(TAG, "setVideoEncoder:tex_id=" + mRenderer.hTex + ",encoder=" + encoder);
178 | queueEvent(new Runnable() {
179 | @Override
180 | public void run() {
181 | synchronized (mRenderer) {
182 | if (encoder != null) {
183 | encoder.setEglContext(EGL14.eglGetCurrentContext(), mRenderer.hTex);
184 | }
185 | mRenderer.mVideoEncoder = encoder;
186 | }
187 | }
188 | });
189 | }
190 |
191 | //********************************************************************************
192 | //********************************************************************************
193 | private synchronized void startPreview(final int width, final int height) {
194 | if (mCameraHandler == null) {
195 | final CameraThread thread = new CameraThread(this);
196 | thread.start();
197 | mCameraHandler = thread.getHandler();
198 | }
199 | mCameraHandler.startPreview(1280, 720/*width, height*/);
200 | }
201 |
202 | /**
203 |  * Renderer for the GLSurfaceView
204 | */
205 | private static final class CameraSurfaceRenderer
206 | implements GLSurfaceView.Renderer,
207 | SurfaceTexture.OnFrameAvailableListener { // API >= 11
208 |
209 | private final WeakReference<CameraGLView> mWeakParent;
210 | private SurfaceTexture mSTexture; // API >= 11
211 | private int hTex;
212 | private GLDrawer2D mDrawer;
213 | private final float[] mStMatrix = new float[16];
214 | private final float[] mMvpMatrix = new float[16];
215 | private MediaVideoEncoder mVideoEncoder;
216 |
217 | public CameraSurfaceRenderer(final CameraGLView parent) {
218 | if (DEBUG) Log.v(TAG, "CameraSurfaceRenderer:");
219 | mWeakParent = new WeakReference<CameraGLView>(parent);
220 | Matrix.setIdentityM(mMvpMatrix, 0);
221 | }
222 |
223 | @Override
224 | public void onSurfaceCreated(final GL10 unused, final EGLConfig config) {
225 | if (DEBUG) Log.v(TAG, "onSurfaceCreated:");
226 | // This renderer requires the OES_EGL_image_external extension
227 | final String extensions = GLES20.glGetString(GLES20.GL_EXTENSIONS); // API >= 8
228 | // if (DEBUG) Log.i(TAG, "onSurfaceCreated:Gl extensions: " + extensions);
229 | if (!extensions.contains("OES_EGL_image_external"))
230 | throw new RuntimeException("This system does not support OES_EGL_image_external.");
231 | // create texture ID
232 | hTex = GLDrawer2D.initTex();
233 | // create SurfaceTexture with texture ID.
234 | mSTexture = new SurfaceTexture(hTex);
235 | mSTexture.setOnFrameAvailableListener(this);
236 | // clear the screen with yellow so that you can see the rendering rectangle
237 | GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
238 | final CameraGLView parent = mWeakParent.get();
239 | if (parent != null) {
240 | parent.mHasSurface = true;
241 | }
242 | // create object for preview display
243 | mDrawer = new GLDrawer2D();
244 | mDrawer.setMatrix(mMvpMatrix, 0);
245 | }
246 |
247 | @Override
248 | public void onSurfaceChanged(final GL10 unused, final int width, final int height) {
249 | if (DEBUG) Log.v(TAG, String.format("onSurfaceChanged:(%d,%d)", width, height));
250 | // if either width or height is zero, initialization of this view is still in progress
251 | if ((width == 0) || (height == 0)) return;
252 | updateViewport();
253 | final CameraGLView parent = mWeakParent.get();
254 | if (parent != null) {
255 | parent.startPreview(width, height);
256 | }
257 | }
258 |
259 | /**
260 |  * called when the GL surface context is about to be destroyed
261 | */
262 | public void onSurfaceDestroyed() {
263 | if (DEBUG) Log.v(TAG, "onSurfaceDestroyed:");
264 | if (mDrawer != null) {
265 | mDrawer.release();
266 | mDrawer = null;
267 | }
268 | if (mSTexture != null) {
269 | mSTexture.release();
270 | mSTexture = null;
271 | }
272 | GLDrawer2D.deleteTex(hTex);
273 | }
274 |
275 | private final void updateViewport() {
276 | final CameraGLView parent = mWeakParent.get();
277 | if (parent != null) {
278 | final int view_width = parent.getWidth();
279 | final int view_height = parent.getHeight();
280 | GLES20.glViewport(0, 0, view_width, view_height);
281 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
282 | final double video_width = parent.mVideoWidth;
283 | final double video_height = parent.mVideoHeight;
284 | if (video_width == 0 || video_height == 0) return;
285 | Matrix.setIdentityM(mMvpMatrix, 0);
286 | final double view_aspect = view_width / (double)view_height;
287 | Log.i(TAG, String.format("view(%d,%d)%f,video(%1.0f,%1.0f)", view_width, view_height, view_aspect, video_width, video_height));
288 | switch (parent.mScaleMode) {
289 | case SCALE_STRETCH_FIT:
290 | break;
291 | case SCALE_KEEP_ASPECT_VIEWPORT:
292 | {
293 | final double req = video_width / video_height;
294 | int x, y;
295 | int width, height;
296 | if (view_aspect > req) {
297 | // if view is wider than camera image, calc width of drawing area based on view height
298 | y = 0;
299 | height = view_height;
300 | width = (int)(req * view_height);
301 | x = (view_width - width) / 2;
302 | } else {
303 | // if view is higher than camera image, calc height of drawing area based on view width
304 | x = 0;
305 | width = view_width;
306 | height = (int)(view_width / req);
307 | y = (view_height - height) / 2;
308 | }
309 | // set viewport to draw while keeping the aspect ratio of the camera image
310 | if (DEBUG) Log.v(TAG, String.format("xy(%d,%d),size(%d,%d)", x, y, width, height));
311 | GLES20.glViewport(x, y, width, height);
312 | break;
313 | }
314 | case SCALE_KEEP_ASPECT:
315 | case SCALE_CROP_CENTER:
316 | {
317 | final double scale_x = view_width / video_width;
318 | final double scale_y = view_height / video_height;
319 | final double scale = (parent.mScaleMode == SCALE_CROP_CENTER
320 | ? Math.max(scale_x, scale_y) : Math.min(scale_x, scale_y));
321 | final double width = scale * video_width;
322 | final double height = scale * video_height;
323 | Log.v(TAG, String.format("size(%1.0f,%1.0f),scale(%f,%f),mat(%f,%f)",
324 | width, height, scale_x, scale_y, width / view_width, height / view_height));
325 | Matrix.scaleM(mMvpMatrix, 0, (float)(width / view_width), (float)(height / view_height), 1.0f);
326 | break;
327 | }
328 | }
329 | if (mDrawer != null)
330 | mDrawer.setMatrix(mMvpMatrix, 0);
331 | }
332 | }
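// --- Editor's note (worked example, not part of the original source) ---
// SCALE_KEEP_ASPECT_VIEWPORT with a 1080x1920 view and a 720x1280 (rotated) video:
// req = 720/1280 = 0.5625 and view_aspect = 1080/1920 = 0.5625, so the viewport fills
// the view exactly. With a wider 1440x1920 view (aspect 0.75 > req) the drawing area
// becomes width = req * view_height = 1080, centered at x = (1440 - 1080) / 2 = 180.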
333 |
334 | private volatile boolean requesrUpdateTex = false;
335 | private boolean flip = true;
336 | /**
337 | * drawing to GLSurface
338 | * we set renderMode to GLSurfaceView.RENDERMODE_WHEN_DIRTY,
339 | * this method is only called when #requestRender is called(= when texture is required to update)
340 | * if you don't set RENDERMODE_WHEN_DIRTY, this method is called at maximum 60fps
341 | */
342 | @Override
343 | public void onDrawFrame(final GL10 unused) {
344 | GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
345 |
346 | if (requesrUpdateTex) {
347 | requesrUpdateTex = false;
348 | // update texture(came from camera)
349 | mSTexture.updateTexImage();
350 | // get texture matrix
351 | mSTexture.getTransformMatrix(mStMatrix);
352 | }
353 | // draw to preview screen
354 | mDrawer.draw(hTex, mStMatrix);
355 | flip = !flip;
356 | if (flip) { // ~30fps
357 | synchronized (this) {
358 | if (mVideoEncoder != null) {
359 | // notify the capturing thread that a camera frame is available.
360 | // mVideoEncoder.frameAvailableSoon(mStMatrix);
361 | mVideoEncoder.frameAvailableSoon(mStMatrix, mMvpMatrix);
362 | }
363 | }
364 | }
365 | }
366 |
367 | @Override
368 | public void onFrameAvailable(final SurfaceTexture st) {
369 | requesrUpdateTex = true;
370 | // final CameraGLView parent = mWeakParent.get();
371 | // if (parent != null)
372 | // parent.requestRender();
373 | }
374 | }
375 |
376 | /**
377 | * Handler class for asynchronous camera operation
378 | */
379 | private static final class CameraHandler extends Handler {
380 | private static final int MSG_PREVIEW_START = 1;
381 | private static final int MSG_PREVIEW_STOP = 2;
382 | private CameraThread mThread;
383 |
384 | public CameraHandler(final CameraThread thread) {
385 | mThread = thread;
386 | }
387 |
388 | public void startPreview(final int width, final int height) {
389 | sendMessage(obtainMessage(MSG_PREVIEW_START, width, height));
390 | }
391 |
392 | /**
393 | * request to stop camera preview
394 |  * @param needWait whether to wait until the camera preview has stopped
395 | */
396 | public void stopPreview(final boolean needWait) {
397 | synchronized (this) {
398 | sendEmptyMessage(MSG_PREVIEW_STOP);
399 | if (needWait && mThread.mIsRunning) {
400 | try {
401 | if (DEBUG) Log.d(TAG, "wait for terminating of camera thread");
402 | wait();
403 | } catch (final InterruptedException e) {
404 | }
405 | }
406 | }
407 | }
408 |
409 | /**
410 | * message handler for camera thread
411 | */
412 | @Override
413 | public void handleMessage(final Message msg) {
414 | switch (msg.what) {
415 | case MSG_PREVIEW_START:
416 | mThread.startPreview(msg.arg1, msg.arg2);
417 | break;
418 | case MSG_PREVIEW_STOP:
419 | mThread.stopPreview();
420 | synchronized (this) {
421 | notifyAll();
422 | }
423 | Looper.myLooper().quit();
424 | mThread = null;
425 | break;
426 | default:
427 | throw new RuntimeException("unknown message:what=" + msg.what);
428 | }
429 | }
430 | }
431 |
432 | /**
433 | * Thread for asynchronous operation of camera preview
434 | */
435 | private static final class CameraThread extends Thread {
436 | private final Object mReadyFence = new Object();
437 | private final WeakReference<CameraGLView> mWeakParent;
438 | private CameraHandler mHandler;
439 | private volatile boolean mIsRunning = false;
440 | private Camera mCamera;
441 | private boolean mIsFrontFace;
442 |
443 | public CameraThread(final CameraGLView parent) {
444 | super("Camera thread");
445 | mWeakParent = new WeakReference<CameraGLView>(parent);
446 | }
447 |
448 | public CameraHandler getHandler() {
449 | synchronized (mReadyFence) {
450 | try {
451 | while (mHandler == null) mReadyFence.wait(); // loop guards against spurious wakeups and an early notify
452 | } catch (final InterruptedException e) {
453 | }
454 | }
455 | return mHandler;
456 | }
457 |
458 | /**
459 | * message loop
460 | * prepare Looper and create Handler for this thread
461 | */
462 | @Override
463 | public void run() {
464 | if (DEBUG) Log.d(TAG, "Camera thread start");
465 | Looper.prepare();
466 | synchronized (mReadyFence) {
467 | mHandler = new CameraHandler(this);
468 | mIsRunning = true;
469 | mReadyFence.notify();
470 | }
471 | Looper.loop();
472 | if (DEBUG) Log.d(TAG, "Camera thread finish");
473 | synchronized (mReadyFence) {
474 | mHandler = null;
475 | mIsRunning = false;
476 | }
477 | }
478 |
479 | /**
480 | * start camera preview
481 | * @param width
482 | * @param height
483 | */
484 | private final void startPreview(final int width, final int height) {
485 | if (DEBUG) Log.v(TAG, "startPreview:");
486 | final CameraGLView parent = mWeakParent.get();
487 | if ((parent != null) && (mCamera == null)) {
488 | // This is a sample project, so it just uses 0 as the camera ID.
489 | // In a real app it is better to enumerate the available cameras and select one.
490 | try {
491 | mCamera = Camera.open(CAMERA_ID);
492 | final Camera.Parameters params = mCamera.getParameters();
493 | final List<String> focusModes = params.getSupportedFocusModes();
494 | if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
495 | params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
496 | } else if(focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
497 | params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
498 | } else {
499 | if (DEBUG) Log.i(TAG, "Camera does not support autofocus");
500 | }
501 | // let's try the fastest frame rate. You will get near 60 fps, but your device becomes hot.
502 | final List<int[]> supportedFpsRange = params.getSupportedPreviewFpsRange();
503 | // final int n = supportedFpsRange != null ? supportedFpsRange.size() : 0;
504 | // int[] range;
505 | // for (int i = 0; i < n; i++) {
506 | // range = supportedFpsRange.get(i);
507 | // Log.i(TAG, String.format("supportedFpsRange(%d)=(%d,%d)", i, range[0], range[1]));
508 | // }
509 | final int[] max_fps = supportedFpsRange.get(supportedFpsRange.size() - 1);
510 | Log.i(TAG, String.format("fps:%d-%d", max_fps[0], max_fps[1]));
511 | params.setPreviewFpsRange(max_fps[0], max_fps[1]);
512 | params.setRecordingHint(true);
513 | // request closest supported preview size
514 | final Camera.Size closestSize = getClosestSupportedSize(
515 | params.getSupportedPreviewSizes(), width, height);
516 | params.setPreviewSize(closestSize.width, closestSize.height);
517 | // request the closest picture size to work around an aspect ratio issue on the Nexus 7
518 | final Camera.Size pictureSize = getClosestSupportedSize(
519 | params.getSupportedPictureSizes(), width, height);
520 | params.setPictureSize(pictureSize.width, pictureSize.height);
521 | // rotate camera preview according to the device orientation
522 | setRotation(params);
523 | mCamera.setParameters(params);
524 | // get the actual preview size
525 | final Camera.Size previewSize = mCamera.getParameters().getPreviewSize();
526 | Log.i(TAG, String.format("previewSize(%d, %d)", previewSize.width, previewSize.height));
527 | // adjust the view size while keeping the aspect ratio of the camera preview.
528 | // this is not the UI thread, so we post the work to the parent view.
529 | parent.post(new Runnable() {
530 | @Override
531 | public void run() {
532 | parent.setVideoSize(previewSize.width, previewSize.height);
533 | }
534 | });
535 | final SurfaceTexture st = parent.getSurfaceTexture();
536 | st.setDefaultBufferSize(previewSize.width, previewSize.height);
537 | mCamera.setPreviewTexture(st);
538 | } catch (final IOException e) {
539 | Log.e(TAG, "startPreview:", e);
540 | if (mCamera != null) {
541 | mCamera.release();
542 | mCamera = null;
543 | }
544 | } catch (final RuntimeException e) {
545 | Log.e(TAG, "startPreview:", e);
546 | if (mCamera != null) {
547 | mCamera.release();
548 | mCamera = null;
549 | }
550 | }
551 | if (mCamera != null) {
552 | // start camera preview display
553 | mCamera.startPreview();
554 | }
555 | }
556 | }
557 |
558 | private static Camera.Size getClosestSupportedSize(final List<Camera.Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
559 | return Collections.min(supportedSizes, new Comparator<Camera.Size>() {
560 |
561 | private int diff(final Camera.Size size) {
562 | return Math.abs(requestedWidth - size.width) + Math.abs(requestedHeight - size.height);
563 | }
564 |
565 | @Override
566 | public int compare(final Camera.Size lhs, final Camera.Size rhs) {
567 | return diff(lhs) - diff(rhs);
568 | }
569 | });
570 |
571 | }
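// --- Editor's note (worked example, not part of the original source) ---
// The comparator above ranks sizes by the L1 distance |dw| + |dh| to the request:
// asking for 1280x720 from {1920x1080, 1280x720, 640x480} picks 1280x720 (distance 0),
// while 640x480 scores 640 + 240 = 880 and 1920x1080 scores 640 + 360 = 1000.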
572 |
573 | /**
574 | * stop camera preview
575 | */
576 | private void stopPreview() {
577 | if (DEBUG) Log.v(TAG, "stopPreview:");
578 | if (mCamera != null) {
579 | mCamera.stopPreview();
580 | mCamera.release();
581 | mCamera = null;
582 | }
583 | final CameraGLView parent = mWeakParent.get();
584 | if (parent == null) return;
585 | parent.mCameraHandler = null;
586 | }
587 |
588 | /**
589 | * rotate preview screen according to the device orientation
590 | * @param params
591 | */
592 | private final void setRotation(final Camera.Parameters params) {
593 | if (DEBUG) Log.v(TAG, "setRotation:");
594 | final CameraGLView parent = mWeakParent.get();
595 | if (parent == null) return;
596 |
597 | final Display display = ((WindowManager)parent.getContext()
598 | .getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
599 | final int rotation = display.getRotation();
600 | int degrees = 0;
601 | switch (rotation) {
602 | case Surface.ROTATION_0: degrees = 0; break;
603 | case Surface.ROTATION_90: degrees = 90; break;
604 | case Surface.ROTATION_180: degrees = 180; break;
605 | case Surface.ROTATION_270: degrees = 270; break;
606 | }
607 | // get whether the camera is front camera or back camera
608 | final Camera.CameraInfo info =
609 | new android.hardware.Camera.CameraInfo();
610 | android.hardware.Camera.getCameraInfo(CAMERA_ID, info);
611 | mIsFrontFace = (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT);
612 | if (mIsFrontFace) { // front camera
613 | degrees = (info.orientation + degrees) % 360;
614 | degrees = (360 - degrees) % 360; // reverse
615 | } else { // back camera
616 | degrees = (info.orientation - degrees + 360) % 360;
617 | }
618 | // apply rotation setting
619 | mCamera.setDisplayOrientation(degrees);
620 | parent.mRotation = degrees;
621 | // XXX Calling this method fails and the camera stops working on some devices.
622 | // params.setRotation(degrees);
623 | }
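// --- Editor's note (worked example, not part of the original source) ---
// Orientation math above: a typical back camera with info.orientation == 90 on a device
// rotated to ROTATION_90 (degrees == 90) gives (90 - 90 + 360) % 360 == 0, so no extra
// rotation; a front camera with info.orientation == 270 in portrait (degrees == 0) gives
// (270 + 0) % 360 == 270, and the mirroring step yields (360 - 270) % 360 == 90.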
624 |
625 | }
626 | }
627 |
--------------------------------------------------------------------------------