├── .gitignore
├── LICENSE
├── README.md
├── art
│   └── output.gif
├── build.gradle
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── imagetovideo
│   ├── .gitignore
│   ├── build.gradle
│   ├── proguard-rules.pro
│   └── src
│       └── main
│           ├── AndroidManifest.xml
│           ├── java
│           │   └── com
│           │       └── daasuu
│           │           └── imagetovideo
│           │               ├── EncodeListener.kt
│           │               ├── GLDraw.kt
│           │               ├── GLHelper.kt
│           │               ├── GLImageOverlay.kt
│           │               ├── GLOverlayDraw.kt
│           │               ├── GLThread.kt
│           │               ├── ImageToVideoConverter.kt
│           │               ├── MediaMuxerCaptureWrapper.kt
│           │               └── VideoEncoder.kt
│           └── res
│               └── values
│                   └── strings.xml
├── sample
│   ├── .gitignore
│   ├── build.gradle
│   ├── proguard-rules.pro
│   └── src
│       ├── androidTest
│       │   └── java
│       │       └── com
│       │           └── daasuu
│       │               └── imagetovideoandroid
│       │                   └── ExampleInstrumentedTest.java
│       ├── main
│       │   ├── AndroidManifest.xml
│       │   ├── java
│       │   │   └── com
│       │   │       └── daasuu
│       │   │           └── imagetovideoandroid
│       │   │               ├── ImageListAdapter.kt
│       │   │               ├── ImageLoader.kt
│       │   │               ├── ImageloadListener.kt
│       │   │               └── MainActivity.kt
│       │   └── res
│       │       ├── drawable-v24
│       │       │   └── ic_launcher_foreground.xml
│       │       ├── drawable
│       │       │   └── ic_launcher_background.xml
│       │       ├── layout
│       │       │   ├── activity_image_to_video.xml
│       │       │   └── row_image_list.xml
│       │       ├── mipmap-anydpi-v26
│       │       │   ├── ic_launcher.xml
│       │       │   └── ic_launcher_round.xml
│       │       ├── mipmap-hdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-mdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xhdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xxhdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xxxhdpi
│       │       │   ├── ic_launcher.png
│       │       │   └── ic_launcher_round.png
│       │       └── values
│       │           ├── colors.xml
│       │           ├── strings.xml
│       │           └── styles.xml
│       └── test
│           └── java
│               └── com
│                   └── daasuu
│                       └── imagetovideoandroid
│                           └── ExampleUnitTest.java
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | # Built application files
2 | *.apk
3 | *.ap_
4 |
5 | # Files for the ART/Dalvik VM
6 | *.dex
7 |
8 | # Java class files
9 | *.class
10 |
11 | # Generated files
12 | bin/
13 | gen/
14 | out/
15 |
16 | # Gradle files
17 | .gradle/
18 | build/
19 |
20 | # Local configuration file (sdk path, etc)
21 | local.properties
22 |
23 | # Proguard folder generated by Eclipse
24 | proguard/
25 |
26 | # Log Files
27 | *.log
28 |
29 | # Android Studio Navigation editor temp files
30 | .navigation/
31 |
32 | # Android Studio captures folder
33 | captures/
34 |
35 | # Intellij
36 | *.iml
37 | .idea/workspace.xml
38 | .idea/tasks.xml
39 | .idea/gradle.xml
40 | .idea/dictionaries
41 | .idea/libraries
42 |
43 | # Keystore files
44 | *.jks
45 |
46 | # External native build folder generated in Android Studio 2.2 and later
47 | .externalNativeBuild
48 |
49 | # Google Services (e.g. APIs or Firebase)
50 | google-services.json
51 |
52 | # Freeline
53 | freeline.py
54 | freeline/
55 | freeline_project_description.json
56 |
57 |
58 | .DS_Store
59 | Thumbs.db
60 |
61 | # built application files
62 | *.apk
63 | *.ap_
64 |
65 | # files for the dex VM
66 | *.dex
67 |
68 | # Java class files
69 | *.class
70 |
71 | # gradle files
72 | .gradle
73 |
74 | # IntelliJ
75 | .idea
76 | *.iml
77 |
78 | # generated files
79 | bin/
80 | gen/
81 | obj/
82 | apk/
83 | target/
84 | build/
85 | app/libs/**/*.java
86 |
87 | # Local configuration file (sdk path, etc)
88 | local.properties
89 | *crashlytics-build.properties
90 | *com_crashlytics_export_strings.xml
91 |
92 | # Proguard folder generated by Eclipse
93 | proguard/
94 |
95 | mapping.txt
96 |
97 | app/prd/release/output.json
98 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Masayuki Suda
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ImageToVideoAndroid
2 | [](http://developer.android.com/index.html)
3 |
4 | [](https://android-arsenal.com/api?level=21)
5 |
6 | Easy Image to Video Converter
7 |
8 |
9 |
10 | ## Gradle
11 | Step 1. Add the JitPack repository to your build file
12 | ```groovy
13 | allprojects {
14 | repositories {
15 | ...
16 | maven { url 'https://jitpack.io' }
17 | }
18 | }
19 | ```
20 | Step 2. Add the dependency
21 | ```groovy
22 | dependencies {
23 | implementation 'com.github.MasayukiSuda:ImageToVideoAndroid:v0.1.0'
24 | }
25 | ```
26 | ## Usage
27 | ```Kotlin
28 | imageToVideo = ImageToVideoConverter(
29 | outputPath = outputVideoPath,
30 | inputImagePath = inputImagePath,
31 | size = Size(720, 720),
32 | duration = TimeUnit.SECONDS.toMicros(4),
33 | listener = object : EncodeListener {
34 | override fun onProgress(progress: Float) {
35 | Log.d("progress", "progress = $progress")
36 | runOnUiThread {
37 | progressBar.progress = (progress * 100).toInt()
38 | }
39 | }
40 |
41 | override fun onCompleted() {
42 | runOnUiThread {
43 | progressBar.progress = 100
44 | }
45 | }
46 |
47 | override fun onFailed(exception: Exception) {
48 |
49 | }
50 | }
51 | )
52 | imageToVideo?.start()
53 | ```
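Encoding stops automatically once the requested `duration` has been written. To cancel earlier (for example when the user leaves the screen), a minimal sketch, assuming the `imageToVideo` instance created above:
```Kotlin
override fun onPause() {
    super.onPause()
    // Stops the muxer and the GL drawing thread.
    imageToVideo?.stop()
    imageToVideo = null
}
```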
54 |
55 |
56 | ## Sample Dependencies
57 | * [glide](https://github.com/bumptech/glide)
58 |
59 | ## License
60 |
61 | [MIT License](https://github.com/MasayukiSuda/ImageToVideoAndroid/blob/master/LICENSE)
62 |
--------------------------------------------------------------------------------
/art/output.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MasayukiSuda/ImageToVideoAndroid/ef4051e493e663cbc46a4348d7e9221402f19dbd/art/output.gif
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 |
3 | buildscript {
4 | ext.kotlin_version = '1.2.71'
5 |
6 | repositories {
7 | google()
8 | jcenter()
9 | }
10 | dependencies {
11 | classpath 'com.android.tools.build:gradle:3.2.1'
12 | classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
13 |
14 | // NOTE: Do not place your application dependencies here; they belong
15 | // in the individual module build.gradle files
16 | }
17 | }
18 |
19 | allprojects {
20 | repositories {
21 | google()
22 | jcenter()
23 | }
24 | }
25 |
26 | task clean(type: Delete) {
27 | delete rootProject.buildDir
28 | }
29 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx1536m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | VERSION_NAME=0.1.0
15 | VERSION_CODE=1
16 | COMPILE_SDK_VERSION=28
17 | COMPILE_MIN_SDK_VERSION=21
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MasayukiSuda/ImageToVideoAndroid/ef4051e493e663cbc46a4348d7e9221402f19dbd/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Sat Oct 06 15:30:34 JST 2018
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-4.6-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Attempt to set APP_HOME
10 | # Resolve links: $0 may be a link
11 | PRG="$0"
12 | # Need this for relative symlinks.
13 | while [ -h "$PRG" ] ; do
14 | ls=`ls -ld "$PRG"`
15 | link=`expr "$ls" : '.*-> \(.*\)$'`
16 | if expr "$link" : '/.*' > /dev/null; then
17 | PRG="$link"
18 | else
19 | PRG=`dirname "$PRG"`"/$link"
20 | fi
21 | done
22 | SAVED="`pwd`"
23 | cd "`dirname \"$PRG\"`/" >/dev/null
24 | APP_HOME="`pwd -P`"
25 | cd "$SAVED" >/dev/null
26 |
27 | APP_NAME="Gradle"
28 | APP_BASE_NAME=`basename "$0"`
29 |
30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
31 | DEFAULT_JVM_OPTS=""
32 |
33 | # Use the maximum available, or set MAX_FD != -1 to use that value.
34 | MAX_FD="maximum"
35 |
36 | warn () {
37 | echo "$*"
38 | }
39 |
40 | die () {
41 | echo
42 | echo "$*"
43 | echo
44 | exit 1
45 | }
46 |
47 | # OS specific support (must be 'true' or 'false').
48 | cygwin=false
49 | msys=false
50 | darwin=false
51 | nonstop=false
52 | case "`uname`" in
53 | CYGWIN* )
54 | cygwin=true
55 | ;;
56 | Darwin* )
57 | darwin=true
58 | ;;
59 | MINGW* )
60 | msys=true
61 | ;;
62 | NONSTOP* )
63 | nonstop=true
64 | ;;
65 | esac
66 |
67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
68 |
69 | # Determine the Java command to use to start the JVM.
70 | if [ -n "$JAVA_HOME" ] ; then
71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
72 | # IBM's JDK on AIX uses strange locations for the executables
73 | JAVACMD="$JAVA_HOME/jre/sh/java"
74 | else
75 | JAVACMD="$JAVA_HOME/bin/java"
76 | fi
77 | if [ ! -x "$JAVACMD" ] ; then
78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
79 |
80 | Please set the JAVA_HOME variable in your environment to match the
81 | location of your Java installation."
82 | fi
83 | else
84 | JAVACMD="java"
85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
86 |
87 | Please set the JAVA_HOME variable in your environment to match the
88 | location of your Java installation."
89 | fi
90 |
91 | # Increase the maximum file descriptors if we can.
92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
93 | MAX_FD_LIMIT=`ulimit -H -n`
94 | if [ $? -eq 0 ] ; then
95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
96 | MAX_FD="$MAX_FD_LIMIT"
97 | fi
98 | ulimit -n $MAX_FD
99 | if [ $? -ne 0 ] ; then
100 | warn "Could not set maximum file descriptor limit: $MAX_FD"
101 | fi
102 | else
103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
104 | fi
105 | fi
106 |
107 | # For Darwin, add options to specify how the application appears in the dock
108 | if $darwin; then
109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
110 | fi
111 |
112 | # For Cygwin, switch paths to Windows format before running java
113 | if $cygwin ; then
114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
116 | JAVACMD=`cygpath --unix "$JAVACMD"`
117 |
118 | # We build the pattern for arguments to be converted via cygpath
119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 | SEP=""
121 | for dir in $ROOTDIRSRAW ; do
122 | ROOTDIRS="$ROOTDIRS$SEP$dir"
123 | SEP="|"
124 | done
125 | OURCYGPATTERN="(^($ROOTDIRS))"
126 | # Add a user-defined pattern to the cygpath arguments
127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 | fi
130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 | i=0
132 | for arg in "$@" ; do
133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 |
136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 | else
139 | eval `echo args$i`="\"$arg\""
140 | fi
141 | i=$((i+1))
142 | done
143 | case $i in
144 | (0) set -- ;;
145 | (1) set -- "$args0" ;;
146 | (2) set -- "$args0" "$args1" ;;
147 | (3) set -- "$args0" "$args1" "$args2" ;;
148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 | esac
155 | fi
156 |
157 | # Escape application args
158 | save () {
159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
160 | echo " "
161 | }
162 | APP_ARGS=$(save "$@")
163 |
164 | # Collect all arguments for the java command, following the shell quoting and substitution rules
165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
166 |
167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
169 | cd "$(dirname "$0")"
170 | fi
171 |
172 | exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS=
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/imagetovideo/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/imagetovideo/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.library'
2 | apply plugin: 'kotlin-android'
3 |
4 | android {
5 | compileSdkVersion COMPILE_SDK_VERSION as int
6 |
7 | defaultConfig {
8 | minSdkVersion COMPILE_MIN_SDK_VERSION as int
9 | targetSdkVersion COMPILE_SDK_VERSION as int
10 | versionCode VERSION_CODE as int
11 | versionName VERSION_NAME
12 | }
13 |
14 | buildTypes {
15 | release {
16 | minifyEnabled false
17 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
18 | }
19 | }
20 |
21 | }
22 |
23 | dependencies {
24 | implementation fileTree(dir: 'libs', include: ['*.jar'])
25 |
26 | implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
27 | }
28 |
--------------------------------------------------------------------------------
/imagetovideo/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/imagetovideo/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 | <manifest xmlns:android="http://schemas.android.com/apk/res/android"
2 |     package="com.daasuu.imagetovideo" />
3 |
--------------------------------------------------------------------------------
/imagetovideo/src/main/java/com/daasuu/imagetovideo/EncodeListener.kt:
--------------------------------------------------------------------------------
1 | package com.daasuu.imagetovideo
2 |
3 | interface EncodeListener {
4 | /**
5 | * Called to notify progress.
6 | *
7 | * @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown.
8 | */
9 | fun onProgress(progress: Float)
10 |
11 | /**
12 | * Called when transcode completed.
13 | */
14 | fun onCompleted()
15 |
16 | fun onFailed(exception: Exception)
17 |
18 | }
--------------------------------------------------------------------------------
/imagetovideo/src/main/java/com/daasuu/imagetovideo/GLDraw.kt:
--------------------------------------------------------------------------------
1 | package com.daasuu.imagetovideo
2 |
3 | import android.graphics.SurfaceTexture
4 | import android.opengl.GLES11Ext
5 | import android.opengl.GLES20
6 | import java.nio.ByteBuffer
7 | import java.nio.ByteOrder
8 | import java.nio.FloatBuffer
9 | import java.util.*
10 |
11 | internal open class GLDraw(
12 | private val vertexShaderSource: String,
13 | private val fragmentShaderSource: String
14 | ) {
15 |
16 | companion object {
17 | private const val FLOAT_SIZE_BYTES = 4
18 | private const val TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES
19 | private const val TRIANGLE_VERTICES_DATA_POS_OFFSET = 0
20 | private const val TRIANGLE_VERTICES_DATA_UV_OFFSET = 3
21 | }
22 |
23 | private val triangleVerticesData = floatArrayOf(
24 | // X, Y, Z, U, V
25 | -1.0f, -1.0f, 0f, 0f, 0f, 1.0f, -1.0f, 0f, 1f, 0f, -1.0f, 1.0f, 0f, 0f, 1f, 1.0f, 1.0f, 0f, 1f, 1f
26 | )
27 |
28 | private var triangleVertices: FloatBuffer
29 |
30 | private var program: Int = 0
31 | var textureID = -12345
32 | protected var clearColor = floatArrayOf(0f, 0f, 0f, 1f)
33 |
34 |   private val handleMap = HashMap<String, Int>()
35 |
36 | init {
37 | triangleVertices = ByteBuffer.allocateDirect(
38 | triangleVerticesData.size * FLOAT_SIZE_BYTES
39 | ).order(ByteOrder.nativeOrder()).asFloatBuffer()
40 | triangleVertices.put(triangleVerticesData).position(0)
41 | }
42 |
43 | fun draw(surfaceTexture: SurfaceTexture, STMatrix: FloatArray, MVPMatrix: FloatArray) {
44 | GLHelper.checkGlError("onDrawFrame start")
45 |
46 |
47 | GLES20.glClearColor(clearColor[0], clearColor[1], clearColor[2], clearColor[3])
48 | GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT or GLES20.GL_COLOR_BUFFER_BIT)
49 | GLES20.glUseProgram(program)
50 | GLHelper.checkGlError("glUseProgram")
51 |
52 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
53 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID)
54 | triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET)
55 | GLES20.glVertexAttribPointer(
56 | getHandle("aPosition"), 3, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices
57 | )
58 | GLES20.glEnableVertexAttribArray(getHandle("aPosition"))
59 |
60 | triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET)
61 | GLES20.glVertexAttribPointer(
62 | getHandle("aTextureCoord"), 2, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices
63 | )
64 | GLHelper.checkGlError("glVertexAttribPointer aTextureHandle")
65 |
66 | GLES20.glEnableVertexAttribArray(getHandle("aTextureCoord"))
67 | GLHelper.checkGlError("glEnableVertexAttribArray aTextureHandle")
68 |
69 | surfaceTexture.getTransformMatrix(STMatrix)
70 |
71 | GLES20.glUniformMatrix4fv(getHandle("uMVPMatrix"), 1, false, MVPMatrix, 0)
72 | GLES20.glUniformMatrix4fv(getHandle("uSTMatrix"), 1, false, STMatrix, 0)
73 |
74 |
75 | onDraw()
76 |
77 |
78 | GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
79 | GLHelper.checkGlError("glDrawArrays")
80 |
81 | GLES20.glFinish()
82 | }
83 |
84 | open fun onDraw() {}
85 |
86 | open fun setUpSurface() {
87 | val vertexShader = GLHelper.loadShader(vertexShaderSource, GLES20.GL_VERTEX_SHADER)
88 | val fragmentShader = GLHelper.loadShader(fragmentShaderSource, GLES20.GL_FRAGMENT_SHADER)
89 | program = GLHelper.createProgram(vertexShader, fragmentShader)
90 | if (program == 0) {
91 | throw RuntimeException("failed creating program")
92 | }
93 |
94 | getHandle("aPosition")
95 | getHandle("aTextureCoord")
96 | getHandle("uMVPMatrix")
97 | getHandle("uSTMatrix")
98 |
99 | val textures = IntArray(1)
100 | GLES20.glGenTextures(1, textures, 0)
101 | textureID = textures[0]
102 | GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID)
103 | GLHelper.checkGlError("glBindTexture textureID")
104 | GLES20.glTexParameterf(
105 | GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR.toFloat()
106 | )
107 | GLES20.glTexParameterf(
108 | GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR.toFloat()
109 | )
110 | GLES20.glTexParameteri(
111 | GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE
112 | )
113 | GLES20.glTexParameteri(
114 | GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE
115 | )
116 | GLHelper.checkGlError("glTexParameter")
117 | }
118 |
119 | protected fun getHandle(name: String): Int {
120 | val value = handleMap[name]
121 | if (value != null) {
122 | return value
123 | }
124 |
125 | var location = GLES20.glGetAttribLocation(program, name)
126 | if (location == -1) {
127 | location = GLES20.glGetUniformLocation(program, name)
128 | }
129 | if (location == -1) {
130 | throw IllegalStateException("Could not get attrib or uniform location for $name")
131 | }
132 | handleMap[name] = location
133 | return location
134 | }
135 |
136 | open fun release() {}
137 |
138 | fun setClearColor(
139 | red: Float, green: Float, blue: Float, alpha: Float
140 | ) {
141 | this.clearColor = floatArrayOf(red, green, blue, alpha)
142 | }
143 |
144 | }
--------------------------------------------------------------------------------
/imagetovideo/src/main/java/com/daasuu/imagetovideo/GLHelper.kt:
--------------------------------------------------------------------------------
1 | package com.daasuu.imagetovideo
2 |
3 | import android.opengl.GLES20
4 | import android.opengl.GLES20.GL_TRUE
5 | import android.util.Log
6 |
7 | internal object GLHelper {
8 |
9 | fun createProgram(vertexShader: Int, pixelShader: Int): Int {
10 | val program = GLES20.glCreateProgram()
11 | if (program == 0) {
12 | throw RuntimeException("Could not create program")
13 | }
14 |
15 | GLES20.glAttachShader(program, vertexShader)
16 | GLES20.glAttachShader(program, pixelShader)
17 |
18 | GLES20.glLinkProgram(program)
19 | val linkStatus = IntArray(1)
20 | GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0)
21 | if (linkStatus[0] != GL_TRUE) {
22 | GLES20.glDeleteProgram(program)
23 | throw RuntimeException("Could not link program")
24 | }
25 | return program
26 | }
27 |
28 | fun loadShader(strSource: String, iType: Int): Int {
29 | val compiled = IntArray(1)
30 | val iShader = GLES20.glCreateShader(iType)
31 | GLES20.glShaderSource(iShader, strSource)
32 | GLES20.glCompileShader(iShader)
33 | GLES20.glGetShaderiv(iShader, GLES20.GL_COMPILE_STATUS, compiled, 0)
34 | if (compiled[0] == 0) {
35 | Log.d("Load Shader Failed", "Compilation\n" + GLES20.glGetShaderInfoLog(iShader))
36 | return 0
37 | }
38 | return iShader
39 | }
40 |
41 | fun checkGlError(op: String) {
42 | var error = GLES20.glGetError()
43 | while (error != GLES20.GL_NO_ERROR) {
44 | Log.w("OpenGL", "$op: glError $error")
45 | error = GLES20.glGetError()
46 | }
47 | }
48 |
49 | const val DEFAULT_VERTEX_SHADER =
50 | "uniform mat4 uMVPMatrix;\n" +
51 | "uniform mat4 uSTMatrix;\n" +
52 | "attribute vec4 aPosition;\n" +
53 | "attribute vec4 aTextureCoord;\n" +
54 | "varying vec2 vTextureCoord;\n" +
55 | "void main() {\n" +
56 | " gl_Position = uMVPMatrix * aPosition;\n" +
57 | " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
58 | "}\n"
59 |
60 | const val DEFAULT_FRAGMENT_SHADER =
61 | "#extension GL_OES_EGL_image_external : require\n" +
62 | "precision mediump float;\n" + // highp here doesn't seem to matter
63 | "varying vec2 vTextureCoord;\n" +
64 | "uniform samplerExternalOES sTexture;\n" +
65 | "void main() {\n" +
66 | " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
67 | "}\n"
68 |
69 | const val OVERLAY_FRAGMENT_SHADER =
70 | "#extension GL_OES_EGL_image_external : require\n" +
71 | "precision mediump float;\n" +
72 | "varying vec2 vTextureCoord;\n" +
73 | "uniform samplerExternalOES sTexture;\n" +
74 | "uniform lowp sampler2D oTexture;\n" +
75 | "void main() {\n" +
76 | " lowp vec4 c2 = texture2D(sTexture, vTextureCoord);\n" +
77 | " lowp vec4 c1 = texture2D(oTexture, vTextureCoord);\n" +
78 | " lowp vec4 outputColor;\n" +
79 | " outputColor.r = c1.r + c2.r * c2.a * (1.0 - c1.a);\n" +
80 | " outputColor.g = c1.g + c2.g * c2.a * (1.0 - c1.a);\n" +
81 | " outputColor.b = c1.b + c2.b * c2.a * (1.0 - c1.a);\n" +
82 | " outputColor.a = c1.a + c2.a * (1.0 - c1.a);\n" +
83 | " gl_FragColor = outputColor;\n" +
84 | "}\n"
85 |
86 | }
--------------------------------------------------------------------------------
/imagetovideo/src/main/java/com/daasuu/imagetovideo/GLImageOverlay.kt:
--------------------------------------------------------------------------------
1 | package com.daasuu.imagetovideo
2 |
3 | import android.graphics.*
4 | import android.media.ExifInterface
5 | import android.util.Size
6 | import java.io.File
7 | import java.io.IOException
8 |
9 | internal class GLImageOverlay(
10 | private val path: String,
11 | size: Size,
12 | bitmapCreateComplete: () -> Unit
13 | ) : GLOverlayDraw() {
14 |
15 | private val width: Int
16 | private val height: Int
17 | private var drawBitmap: Bitmap? = null
18 | private var originalBitmap: Bitmap? = null
19 |
20 | private val matrix: Matrix
21 | private val paintFlagsDrawFilter: PaintFlagsDrawFilter
22 | private val bitmapPaint: Paint
23 |
24 | init {
25 | inputResolution = size
26 | val options = BitmapFactory.Options()
27 | options.inJustDecodeBounds = true
28 | BitmapFactory.decodeFile(path, options)
29 | options.inScaled = false
30 | options.inSampleSize = calculateInSampleSize(options, inputResolution.width, inputResolution.height)
31 | options.inPreferredConfig = Bitmap.Config.ARGB_8888
32 | options.inJustDecodeBounds = false
33 | options.inDither = false
34 |
35 | width = options.outWidth
36 | height = options.outHeight
37 |
38 | options.inJustDecodeBounds = false
39 | matrix = getRotatedMatrix(File(path), Matrix())
40 | paintFlagsDrawFilter = PaintFlagsDrawFilter(0, 2)
41 |
42 | bitmapPaint = Paint()
43 | bitmapPaint.isFilterBitmap = true
44 |
45 | Thread {
46 | originalBitmap = BitmapFactory.decodeFile(path)
47 | drawBitmap = Bitmap.createBitmap(originalBitmap, 0, 0, width, height, matrix, true)
48 | bitmapCreateComplete()
49 | }.start()
50 |
51 | }
52 |
53 | override fun drawCanvas(canvas: Canvas) {
54 | val scale = getScale(canvas)
55 |
56 | canvas.save()
57 | canvas.scale(scale, scale, (canvas.width / 2).toFloat(), (canvas.height / 2).toFloat())
58 | canvas.drawFilter = paintFlagsDrawFilter
59 | if (drawBitmap?.isRecycled == true) {
60 | originalBitmap = BitmapFactory.decodeFile(path)
61 | drawBitmap = Bitmap.createBitmap(originalBitmap, 0, 0, width, height, matrix, true)
62 | }
63 | drawBitmap?.let {
64 | canvas.drawBitmap(it, ((canvas.width - it.width) / 2).toFloat(), ((canvas.height - it.height) / 2).toFloat(), bitmapPaint)
65 | }
66 | canvas.restore()
67 | }
68 |
69 | override fun release() {
70 | if (drawBitmap?.isRecycled == false) {
71 | drawBitmap?.recycle()
72 | }
73 | if (originalBitmap?.isRecycled == false) {
74 | originalBitmap?.recycle()
75 | }
76 |
77 | }
78 |
79 | private fun getScale(canvas: Canvas): Float {
80 | drawBitmap?.let {
81 | if (canvas.width == canvas.height) {
82 |
83 | if (it.width <= it.height) {
84 | return canvas.width.toFloat() / it.width
85 | } else {
86 | return canvas.height.toFloat() / it.height
87 | }
88 |
89 | } else if (canvas.width > canvas.height) {
90 | return canvas.width.toFloat() / it.width
91 | } else {
92 | return canvas.height.toFloat() / it.height
93 | }
94 |
95 | }
96 |
97 | return 1f
98 | }
99 |
100 | private fun calculateInSampleSize(options: BitmapFactory.Options, reqWidth: Int, reqHeight: Int): Int {
101 | // Raw height and width of image
102 | val height = options.outHeight
103 | val width = options.outWidth
104 | var inSampleSize = 1
105 |
106 | if (height > reqHeight || width > reqWidth) {
107 | if (width > height) {
108 | inSampleSize = Math.round(height.toFloat() / reqHeight.toFloat())
109 | } else {
110 | inSampleSize = Math.round(width.toFloat() / reqWidth.toFloat())
111 | }
112 | }
113 |
114 | if (inSampleSize <= 0) {
115 | inSampleSize = 1
116 | }
117 |
118 | return inSampleSize
119 | }
120 |
121 | private fun getRotatedMatrix(file: File, matrix: Matrix): Matrix {
122 | var exifInterface: ExifInterface? = null
123 |
124 | try {
125 | exifInterface = ExifInterface(file.path)
126 | } catch (e: IOException) {
127 | e.printStackTrace()
128 | return matrix
129 | }
130 |
131 | val exifOrientation = exifInterface.getAttributeInt(
132 | ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_UNDEFINED
133 | )
134 |
135 | when (exifOrientation) {
136 | ExifInterface.ORIENTATION_FLIP_HORIZONTAL -> matrix.setScale(-1f, 1f)
137 | ExifInterface.ORIENTATION_ROTATE_180 -> matrix.setRotate(180f)
138 | ExifInterface.ORIENTATION_FLIP_VERTICAL -> {
139 | matrix.setRotate(180f)
140 | matrix.postScale(-1f, 1f)
141 | }
142 | ExifInterface.ORIENTATION_TRANSPOSE -> {
143 | matrix.setRotate(90f)
144 | matrix.postScale(-1f, 1f)
145 | }
146 | ExifInterface.ORIENTATION_ROTATE_90 -> matrix.setRotate(90f)
147 | ExifInterface.ORIENTATION_TRANSVERSE -> {
148 | matrix.setRotate(-90f)
149 | matrix.postScale(-1f, 1f)
150 | }
151 | ExifInterface.ORIENTATION_ROTATE_270 -> matrix.setRotate(-90f)
152 |     } // other orientations (normal/undefined): do nothing
153 | return matrix
154 | }
155 |
156 | }
--------------------------------------------------------------------------------
/imagetovideo/src/main/java/com/daasuu/imagetovideo/GLOverlayDraw.kt:
--------------------------------------------------------------------------------
1 | package com.daasuu.imagetovideo
2 |
3 | import android.graphics.Bitmap
4 | import android.graphics.Canvas
5 | import android.graphics.Color
6 | import android.opengl.GLES20
7 | import android.opengl.GLUtils
8 | import android.util.Size
9 |
10 | internal open class GLOverlayDraw : GLDraw(GLHelper.DEFAULT_VERTEX_SHADER, GLHelper.OVERLAY_FRAGMENT_SHADER) {
11 | private val textures = IntArray(1)
12 |
13 | private var bitmap: Bitmap? = null
14 |
15 | var inputResolution = Size(720, 720)
16 |
17 | private fun createBitmap() {
18 | if (bitmap == null || bitmap!!.width != inputResolution.width || bitmap!!.height != inputResolution.height) {
19 | // BitmapUtil.releaseBitmap(bitmap);
20 | bitmap = Bitmap.createBitmap(inputResolution.width, inputResolution.height, Bitmap.Config.ARGB_8888)
21 | }
22 | }
23 |
24 | override fun setUpSurface() {
25 | super.setUpSurface()// 1
26 | GLES20.glGenTextures(1, textures, 0)
27 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0])
28 |
29 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
30 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
31 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
32 | GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
33 |
34 | getHandle("oTexture")
35 | createBitmap()
36 | }
37 |
38 | override fun onDraw() {
39 | createBitmap()
40 |
41 | bitmap?.let {
42 | it.eraseColor(Color.argb(0, 0, 0, 0))
43 | val bitmapCanvas = Canvas(it)
44 | drawCanvas(bitmapCanvas)
45 | }
46 |
47 | val offsetDepthMapTextureUniform = getHandle("oTexture")// 3
48 |
49 | GLES20.glActiveTexture(GLES20.GL_TEXTURE3)
50 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0])
51 |
52 | if (bitmap != null && !bitmap!!.isRecycled) {
53 | GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, bitmap, 0)
54 | }
55 |
56 | GLES20.glUniform1i(offsetDepthMapTextureUniform, 3)
57 | }
58 |
59 | open fun drawCanvas(canvas: Canvas) {}
60 |
61 | }
--------------------------------------------------------------------------------
/imagetovideo/src/main/java/com/daasuu/imagetovideo/GLThread.kt:
--------------------------------------------------------------------------------
1 | package com.daasuu.imagetovideo
2 |
3 | import android.graphics.SurfaceTexture
4 | import android.opengl.*
5 | import android.util.Log
6 | import android.util.Size
7 | import android.view.Surface
8 |
9 | internal class GLThread(
10 | private val surface: Surface,
11 | private val glImageOverlay: GLImageOverlay,
12 | private val size: Size,
13 | private val onDrawListener: () -> Unit
14 | ) : Thread() {
15 |
16 | companion object {
17 | private const val TAG = "GLThread"
18 | private const val EGL_RECORDABLE_ANDROID = 0x3142
19 | }
20 |
21 | private var threadFinish: Boolean = false
22 |
23 | private var eglDisplay: EGLDisplay? = EGL14.EGL_NO_DISPLAY
24 | private var eglContext: EGLContext? = EGL14.EGL_NO_CONTEXT
25 | private var eglSurface: EGLSurface? = EGL14.EGL_NO_SURFACE
26 |
27 | private val MVPMatrix = FloatArray(16)
28 | private val STMatrix = FloatArray(16)
29 |
30 | private lateinit var surfaceTexture: SurfaceTexture
31 |
32 | override fun run() {
33 | super.run()
34 | if (!initGL()) {
35 | Log.e(TAG, "Failed OpenGL initialize")
36 | threadFinish = true
37 | }
38 |
39 | glImageOverlay.setUpSurface()
40 | GLES20.glViewport(0, 0, size.width, size.height)
41 | surfaceTexture = SurfaceTexture(glImageOverlay.textureID)
42 |
43 | Matrix.setIdentityM(STMatrix, 0)
44 |
45 | while (!threadFinish) {
46 | drawImage()
47 | onDrawListener()
48 | }
49 | release()
50 | }
51 |
52 | private fun initGL(): Boolean {
53 | eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY)
54 | if (eglDisplay === EGL14.EGL_NO_DISPLAY) {
55 | throw RuntimeException("unable to get EGL14 display")
56 | }
57 | val version = IntArray(2)
58 | if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
59 | eglDisplay = null
60 | throw RuntimeException("unable to initialize EGL14")
61 | }
62 | // Configure EGL for recordable and OpenGL ES 2.0. We want enough RGB bits
63 | // to minimize artifacts from possible YUV conversion.
64 | val attribList = intArrayOf(EGL14.EGL_RED_SIZE, 8, EGL14.EGL_GREEN_SIZE, 8, EGL14.EGL_BLUE_SIZE, 8, EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT, EGL_RECORDABLE_ANDROID, 1, EGL14.EGL_NONE)
65 |     val configs = arrayOfNulls<EGLConfig>(1)
66 | val numConfigs = IntArray(1)
67 | if (!EGL14.eglChooseConfig(
68 | eglDisplay, attribList, 0, configs, 0, configs.size, numConfigs, 0
69 | )) {
70 | throw RuntimeException("unable to find RGB888+recordable ES2 EGL config")
71 | }
72 | // Configure context for OpenGL ES 2.0.
73 | val attribList2 = intArrayOf(EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE)
74 | eglContext = EGL14.eglCreateContext(
75 | eglDisplay, configs[0], EGL14.EGL_NO_CONTEXT, attribList2, 0
76 | )
77 | checkEglError("eglCreateContext")
78 | if (eglContext == null) {
79 | throw RuntimeException("null context")
80 | }
81 | // Create a window surface, and attach it to the Surface we received.
82 | val surfaceAttribs = intArrayOf(EGL14.EGL_NONE)
83 | eglSurface = EGL14.eglCreateWindowSurface(
84 | eglDisplay, configs[0], surface, surfaceAttribs, 0
85 | )
86 | checkEglError("eglCreateWindowSurface")
87 | if (eglSurface == null) {
88 | throw RuntimeException("surface was null")
89 | }
90 |
91 | if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
92 | throw RuntimeException("eglMakeCurrent failed")
93 | }
94 |
95 | return true
96 |
97 | }
98 |
99 | private fun drawImage() {
100 | Matrix.setIdentityM(MVPMatrix, 0)
101 | Matrix.scaleM(MVPMatrix, 0, 1f, -1f, 1f)
102 | glImageOverlay.draw(surfaceTexture, STMatrix, MVPMatrix)
103 |
104 | EGL14.eglSwapBuffers(eglDisplay, eglSurface)
105 | }
106 |
107 | fun requestExitAndWait() {
108 | synchronized(this) {
109 | threadFinish = true
110 | }
111 | try {
112 | join()
113 | } catch (e: InterruptedException) {
114 | Log.e(TAG, e.message, e)
115 | Thread.currentThread().interrupt()
116 | }
117 | }
118 |
119 | private fun release() {
120 | if (eglDisplay !== EGL14.EGL_NO_DISPLAY) {
121 | EGL14.eglDestroySurface(eglDisplay, eglSurface)
122 | EGL14.eglDestroyContext(eglDisplay, eglContext)
123 | EGL14.eglReleaseThread()
124 | EGL14.eglTerminate(eglDisplay)
125 | }
126 | surface.release()
127 | eglDisplay = EGL14.EGL_NO_DISPLAY
128 | eglContext = EGL14.EGL_NO_CONTEXT
129 | eglSurface = EGL14.EGL_NO_SURFACE
130 | glImageOverlay.release()
131 |
132 | }
133 |
134 | /**
135 | * Checks for EGL errors.
136 | */
137 | private fun checkEglError(msg: String) {
138 | if (EGL14.eglGetError() != EGL14.EGL_SUCCESS) {
139 | throw RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(EGL14.eglGetError()))
140 | }
141 | }
142 |
143 | }
--------------------------------------------------------------------------------
/imagetovideo/src/main/java/com/daasuu/imagetovideo/ImageToVideoConverter.kt:
--------------------------------------------------------------------------------
1 | package com.daasuu.imagetovideo
2 |
3 | import android.util.Size
4 | import java.util.concurrent.TimeUnit
5 |
6 | class ImageToVideoConverter(
7 | outputPath: String,
8 | inputImagePath: String,
9 | private val listener: EncodeListener? = null,
10 | private val size: Size = Size(720, 720),
11 | duration: Long = TimeUnit.SECONDS.toMicros(4)
12 | ) {
13 |
14 |
15 | private var imageCreateFinish = false
16 | private var startCall = false
17 |
18 |
19 | private var glThread: GLThread? = null
20 | private var muxer: MediaMuxerCaptureWrapper? = null
21 | private val drawer = GLImageOverlay(inputImagePath, size) {
22 | imageCreateFinish = true
23 | startAction()
24 | }
25 |
26 | init {
27 | muxer = MediaMuxerCaptureWrapper(outputPath, duration, listener) {
28 | stop()
29 | }
30 | }
31 |
32 | fun start() {
33 | startCall = true
34 | startAction()
35 | }
36 |
37 | private fun startAction() {
38 | if (startCall && imageCreateFinish) {
39 | try {
40 | val encoder = VideoEncoder(size, muxer) { listener?.onCompleted() }
41 | muxer?.prepare()
42 | glThread = GLThread(encoder.surface, drawer, size) {
43 | encoder.frameAvailableSoon()
44 | }
45 | glThread?.start()
46 | muxer?.startRecording()
47 | } catch (e: Exception) {
48 | listener?.onFailed(e)
49 | }
50 | }
51 | }
52 |
53 | fun stop() {
54 | muxer?.stopRecording()
55 | muxer = null
56 |
57 | glThread?.requestExitAndWait()
58 | glThread = null
59 |
60 | }
61 |
62 | }
--------------------------------------------------------------------------------
/imagetovideo/src/main/java/com/daasuu/imagetovideo/MediaMuxerCaptureWrapper.kt:
--------------------------------------------------------------------------------
1 | package com.daasuu.imagetovideo
2 |
3 | import android.media.MediaCodec
4 | import android.media.MediaFormat
5 | import android.media.MediaMuxer
6 | import android.util.Log
7 | import java.io.IOException
8 | import java.nio.ByteBuffer
9 |
10 | internal class MediaMuxerCaptureWrapper @Throws(IOException::class)
11 | constructor(filePath: String,
12 | private val duration: Long,
13 | private val listener: EncodeListener?,
14 | private val overDurationListener: () -> Unit
15 | ) {
16 |
17 | companion object {
18 | private const val TAG = "MediaMuxerWrapper"
19 | }
20 |
21 | val lock = java.lang.Object()
22 | private val mediaMuxer: MediaMuxer = MediaMuxer(filePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
23 | private var encoderCount: Int = 0
24 | private var startedCount: Int = 0
25 | private var startTimeUs: Long = 0
26 | @get:Synchronized
27 | var isStarted: Boolean = false
28 | private set
29 | private var videoEncoder: VideoEncoder? = null
30 | private var preventAudioPresentationTimeUs: Long = -1
31 |
32 | init {
33 | startedCount = 0
34 | encoderCount = startedCount
35 | isStarted = false
36 |
37 | }
38 |
39 | @Throws(IOException::class)
40 | fun prepare() {
41 | videoEncoder?.prepare()
42 | }
43 |
44 | fun startRecording() {
45 | videoEncoder?.startRecording()
46 | }
47 |
48 | fun stopRecording() {
49 | videoEncoder?.stopRecording()
50 | videoEncoder = null
51 | }
52 |
53 | //**********************************************************************
54 | //**********************************************************************
55 |
56 | /**
57 |    * assign encoder to this class. this is called from the encoder.
58 |    *
59 |    * @param encoder instance of VideoEncoder
60 | */
61 | internal fun addEncoder(encoder: VideoEncoder) {
62 | encoderCount = 1
63 | videoEncoder = encoder
64 | }
65 |
66 | /**
67 | * request start recording from encoder
68 | *
69 | * @return true when muxer is ready to write
70 | */
71 | @Synchronized
72 | internal fun start(): Boolean {
73 | Log.v(TAG, "start:")
74 | startedCount++
75 | if (encoderCount > 0 && startedCount == encoderCount) {
76 | mediaMuxer.start()
77 | isStarted = true
78 | synchronized(lock) {
79 | lock.notifyAll()
80 | }
81 | Log.v(TAG, "MediaMuxer started:")
82 | }
83 | return isStarted
84 | }
85 |
86 | /**
87 |    * request stop recording from the encoder when the encoder received EOS
88 | */
89 | /*package*/
90 | @Synchronized
91 | internal fun stop() {
92 | Log.v(TAG, "stopEncode:startedCount=$startedCount")
93 | startedCount--
94 | if (encoderCount > 0 && startedCount <= 0) {
95 | mediaMuxer.stop()
96 | mediaMuxer.release()
97 | isStarted = false
98 | Log.v(TAG, "MediaMuxer stopped:")
99 | }
100 | }
101 |
102 | /**
103 | * assign encoder to muxer
104 | *
105 | * @param format
106 |    * @return a negative value indicates an error
107 | */
108 | @Synchronized
109 | internal fun addTrack(format: MediaFormat): Int {
110 | if (isStarted) {
111 | throw IllegalStateException("muxer already started")
112 | }
113 |
114 | val trackIx = mediaMuxer.addTrack(format)
115 | Log.i(TAG, "addTrack:trackNum=$encoderCount,trackIx=$trackIx,format=$format")
116 |
117 | return trackIx
118 | }
119 |
120 | /**
121 | * write encoded data to muxer
122 | *
123 | * @param trackIndex
124 | * @param byteBuf
125 | * @param bufferInfo
126 | */
127 | /*package*/
128 | @Synchronized
129 | internal fun writeSampleData(trackIndex: Int, byteBuf: ByteBuffer, bufferInfo: MediaCodec.BufferInfo) {
130 | //bufferInfo.presentationTimeUs
131 | if (startedCount <= 0) return
132 |
133 | if (startTimeUs == 0L) {
134 | startTimeUs = bufferInfo.presentationTimeUs
135 | }
136 |
137 | if (preventAudioPresentationTimeUs < bufferInfo.presentationTimeUs) {
138 | mediaMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo)
139 | preventAudioPresentationTimeUs = bufferInfo.presentationTimeUs
140 |
141 | val progress = preventAudioPresentationTimeUs - startTimeUs
142 | listener?.onProgress(progress / duration.toFloat())
143 | if (duration <= progress) {
144 | overDurationListener()
145 | }
146 | }
147 |
148 | }
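  // Illustrative arithmetic (assuming the default duration of TimeUnit.SECONDS.toMicros(4) = 4_000_000 us):
  // a sample whose presentationTimeUs is 2_000_000 us past startTimeUs reports onProgress(0.5),
  // and once the elapsed time reaches 4_000_000 us, overDurationListener() fires and recording is stopped.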
149 |
150 |
151 | }
152 |
153 |
--------------------------------------------------------------------------------
/imagetovideo/src/main/java/com/daasuu/imagetovideo/VideoEncoder.kt:
--------------------------------------------------------------------------------
1 | package com.daasuu.imagetovideo
2 |
3 | import android.media.MediaCodec
4 | import android.media.MediaCodecInfo
5 | import android.media.MediaCodecList
6 | import android.media.MediaFormat
7 | import android.util.Log
8 | import android.util.Size
9 | import android.view.Surface
10 | import java.io.IOException
11 | import java.lang.ref.WeakReference
12 |
13 | internal class VideoEncoder(
14 | private val size: Size,
15 | muxer: MediaMuxerCaptureWrapper?,
16 | private val onCompleteListener: () -> Unit
17 |
18 | ) : Runnable {
19 |
20 | companion object {
21 | private const val MIME_TYPE = "video/avc"
22 | private const val FRAME_RATE = 30
23 | private const val BPP = 0.25f
24 | private const val TAG = "VideoEncoder"
25 | private const val TIMEOUT_USEC = 10000 // 10[msec]
26 | }
27 |
28 | lateinit var surface: Surface
29 |
30 | private val sync = java.lang.Object()
31 | /**
32 |    * Flag that indicates this encoder is capturing now.
33 | */
34 | private var isCapturing: Boolean = false
35 | /**
36 |    * Flag that indicates the frame data will be available soon.
37 | */
38 | private var requestDrain: Int = 0
39 | /**
40 |    * Flag to request stopping capture
41 | */
42 | private var requestStop: Boolean = false
43 | /**
44 |    * Flag that indicates the encoder received EOS (End Of Stream)
45 | */
46 | private var isEOS: Boolean = false
47 | /**
48 |    * Flag that indicates the muxer is running
49 | */
50 | private var muxerStarted: Boolean = false
51 | /**
52 | * Track Number
53 | */
54 | private var trackIndex: Int = 0
55 | /**
56 | * MediaCodec instance for encoding
57 | */
58 | private var mediaCodec: MediaCodec? = null
59 | /**
60 |    * Weak reference of the MediaMuxerCaptureWrapper instance
61 | */
62 |   private val weakMuxer: WeakReference<MediaMuxerCaptureWrapper>?
63 | /**
64 | * BufferInfo instance for dequeuing
65 | */
66 | private var bufferInfo: MediaCodec.BufferInfo? = null
67 |
68 | /**
69 | * previous presentationTimeUs for writing
70 | */
71 | private var prevOutputPTSUs: Long = 0
72 |
73 | /**
74 | * get next encoding presentationTimeUs
75 | *
76 | * @return
77 | */
78 | // presentationTimeUs should be monotonic
79 |   // otherwise the muxer fails to write
80 | val ptsUs: Long
81 | get() {
82 | var result = System.nanoTime() / 1000L
83 | if (result < prevOutputPTSUs)
84 |         result = prevOutputPTSUs // never let the PTS go backwards
85 | return result
86 | }
87 |
88 | init {
89 | if (muxer == null) throw NullPointerException("MediaMuxerCaptureWrapper is null")
90 | weakMuxer = WeakReference(muxer)
91 | muxer.addEncoder(this)
92 | synchronized(sync) {
93 |       // create BufferInfo here for efficiency (to reduce GC)
94 | bufferInfo = MediaCodec.BufferInfo()
95 | // wait for starting thread
96 | Thread(this, javaClass.simpleName).start()
97 | try {
98 | sync.wait()
99 | } catch (e: InterruptedException) {
100 | }
101 |
102 | }
103 | }
104 |
105 | /**
106 | * the method to indicate frame data is soon available or already available
107 | *
108 |    * @return true if the encoder is ready to encode.
109 | */
110 | fun frameAvailableSoon(): Boolean {
111 | synchronized(sync) {
112 | if (!isCapturing || requestStop) {
113 | return false
114 | }
115 | requestDrain++
116 | sync.notifyAll()
117 | }
118 | return true
119 | }
120 |
121 | /**
122 | * encoding loop on private thread
123 | */
124 | override fun run() {
125 | // android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
126 | synchronized(sync) {
127 | requestStop = false
128 | requestDrain = 0
129 | sync.notify()
130 | }
131 | val isRunning = true
132 | var localRequestStop = false
133 | var localRequestDrain = false
134 | while (isRunning) {
135 | synchronized(sync) {
136 | localRequestStop = requestStop
137 | localRequestDrain = requestDrain > 0
138 | if (localRequestDrain)
139 | requestDrain--
140 | }
141 | if (localRequestStop) {
142 | drain()
143 |         // request stop recording
144 |         signalEndOfInputStream()
145 |         // process output data again for the EOS signal
146 | drain()
147 | // release all related objects
148 | release()
149 | break
150 | }
151 | if (localRequestDrain) {
152 | drain()
153 | } else {
154 | var error = false
155 | synchronized(sync) {
156 | try {
157 | sync.wait()
158 | } catch (e: InterruptedException) {
159 | error = true
160 | }
161 |
162 | }
163 | if (error) {
164 | break
165 | }
166 | }
167 | } // end of while
168 | Log.d(TAG, "Encoder thread exiting")
169 | synchronized(sync) {
170 | requestStop = true
171 | isCapturing = false
172 | }
173 | onCompleteListener()
174 | }
175 |
176 | /*
177 |    * prepare the MediaCodec encoder: configure the codec and create the input surface
178 |    * that the GL thread draws into
179 | * @throws IOException
180 | */
181 | @Throws(IOException::class)
182 | fun prepare() {
183 | trackIndex = -1
184 | isEOS = false
185 | muxerStarted = isEOS
186 |
187 | val videoCodecInfo = selectVideoCodec(MIME_TYPE)
188 |
189 | if (videoCodecInfo == null) {
190 | Log.e(TAG, "Unable to find an appropriate codec for ${MIME_TYPE}")
191 | return
192 | }
193 | Log.i(TAG, "selected codec: " + videoCodecInfo.name)
194 |
195 | val format = MediaFormat.createVideoFormat(MIME_TYPE, size.width, size.height)
196 | format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface)
197 | format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate(size.width, size.height))
198 | format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE)
199 | format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3)
200 | Log.i(TAG, "format: $format")
201 |
202 | mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE)
203 | mediaCodec?.let {
204 | it.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
205 | // get Surface for encoder input
206 | // this method only can call between #configure and #start
207 | surface = it.createInputSurface()
208 | it.start()
209 | Log.i(TAG, "prepare finishing")
210 |
211 | }
212 | }
213 |
214 | fun startRecording() {
215 | Log.v(TAG, "startRecording")
216 | synchronized(sync) {
217 | isCapturing = true
218 | requestStop = false
219 | sync.notifyAll()
220 | }
221 | }
222 |
223 | /**
224 | * the method to request stopEncode encoding
225 | */
226 | fun stopRecording() {
227 | Log.v(TAG, "stopRecording")
228 | synchronized(sync) {
229 | if (!isCapturing || requestStop) {
230 | return
231 | }
232 | requestStop = true // for rejecting newer frame
233 | sync.notifyAll()
234 |       // We cannot know when the encoding and writing finish,
235 |       // so we return immediately after the request to avoid delaying the caller thread
236 | }
237 | }
238 |
239 | //********************************************************************************
240 | //********************************************************************************
241 |
242 | /**
243 |    * Release all related objects
244 | */
245 | protected fun release() {
246 | Log.d(TAG, "release:")
247 |
248 | isCapturing = false
249 | mediaCodec?.let {
250 | try {
251 | it.stop()
252 | it.release()
253 | mediaCodec = null
254 | } catch (e: Exception) {
255 | Log.e(TAG, "failed releasing MediaCodec", e)
256 | }
257 | }
258 | if (muxerStarted) {
259 | weakMuxer?.get()
260 | ?.let {
261 | try {
262 | it.stop()
263 | } catch (e: Exception) {
264 | Log.e(TAG, "failed stopping muxer", e)
265 | }
266 | }
267 | }
268 | bufferInfo = null
269 | }
270 |
271 | private fun signalEndOfInputStream() {
272 | Log.d(TAG, "sending EOS to encoder")
273 | mediaCodec?.signalEndOfInputStream() // API >= 18
274 | isEOS = true
275 | }
276 |
277 | /**
278 | * drain encoded data and write them to muxer
279 | */
280 | private fun drain() {
281 | mediaCodec?.let {
282 | var encoderStatus: Int
283 | var count = 0
284 | val muxer = weakMuxer?.get()
285 | val buffer = bufferInfo
286 | if (muxer == null || buffer == null) {
287 | Log.w(TAG, "muxer or bufferInfo is unexpectedly null")
288 | return
289 | }
290 | LOOP@ while (isCapturing) {
291 | // get encoded data with maximum timeout duration of TIMEOUT_USEC(=10[msec])
292 | encoderStatus = it.dequeueOutputBuffer(buffer, TIMEOUT_USEC.toLong())
293 | if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
294 |           // wait 5 counts (= TIMEOUT_USEC x 5 = 50 msec) until data/EOS comes
295 | if (!isEOS) {
296 | if (++count > 5)
297 | break@LOOP // out of while
298 | }
299 | } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
300 | Log.v(TAG, "INFO_OUTPUT_FORMAT_CHANGED")
301 |           // this status indicates the output format of the codec has changed
302 |           // this should come only once before actual encoded data
303 |           // but this status never comes on Android 4.3 or less
304 |           // and in that case, you should handle it when MediaCodec.BUFFER_FLAG_CODEC_CONFIG comes.
305 |           if (muxerStarted) { // a second format change is an error
306 | throw RuntimeException("format changed twice")
307 | }
308 |           // get the output format from the codec and pass it to the muxer
309 |           // getOutputFormat should be called after INFO_OUTPUT_FORMAT_CHANGED, otherwise it crashes.
310 | val format = it.outputFormat // API >= 16
311 | trackIndex = muxer.addTrack(format)
312 | muxerStarted = true
313 | if (!muxer.start()) {
314 | // we should wait until muxer is ready
315 | synchronized(muxer.lock) {
316 | while (!muxer.isStarted)
317 | try {
318 | muxer.lock.wait(100)
319 | } catch (e: InterruptedException) {
320 | break
321 | }
322 |
323 | }
324 | }
325 | } else if (encoderStatus < 0) {
326 | // unexpected status
327 | Log.w(TAG, "drain:unexpected result from encoder#dequeueOutputBuffer: $encoderStatus")
328 | } else {
329 | val encodedData = it.getOutputBuffer(encoderStatus)
330 | ?: // this should never happen; possibly a MediaCodec internal error
331 | throw RuntimeException("encoderOutputBuffer $encoderStatus was null")
332 | if (buffer.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG != 0) {
333 | // You should set the output format on the muxer here when targeting Android 4.3 or lower,
334 | // but MediaCodec#getOutputFormat cannot be called here (INFO_OUTPUT_FORMAT_CHANGED hasn't come yet),
335 | // so the output format would have to be built from the buffer data instead.
336 | // This implementation targets API >= 18 (Android 4.3 and above), so the flag is simply ignored here.
337 | Log.d(TAG, "drain:BUFFER_FLAG_CODEC_CONFIG")
338 | buffer.size = 0
339 | }
340 |
341 | if (buffer.size != 0) {
342 | // encoded data is ready, clear waiting counter
343 | count = 0
344 | if (!muxerStarted) {
345 | // muxer is not ready; this would be a programming failure.
346 | throw RuntimeException("drain:muxer hasn't started")
347 | }
348 | // write encoded data to the muxer (presentationTimeUs needs to be adjusted)
349 | buffer.presentationTimeUs = ptsUs
350 | muxer.writeSampleData(trackIndex, encodedData, buffer)
351 | prevOutputPTSUs = buffer.presentationTimeUs
352 | }
353 | // return buffer to encoder
354 | it.releaseOutputBuffer(encoderStatus, false)
355 | if (buffer.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) {
356 | // EOS reached
357 | isCapturing = false
358 | break // out of while
359 | }
360 | }
361 | }
362 | }
363 | }
364 |
365 |
366 | private fun calcBitRate(width: Int, height: Int): Int {
367 | val bitrate = (BPP * FRAME_RATE.toFloat() * width.toFloat() * height.toFloat()).toInt()
368 | Log.i(TAG, "bitrate=$bitrate")
369 | return bitrate
370 | }
371 |
372 |
373 | /**
374 | * select the first codec that matches the given MIME type
375 | *
376 | * @param mimeType
377 | * @return null if no codec matched
378 | */
379 | private fun selectVideoCodec(mimeType: String): MediaCodecInfo? {
380 | Log.v(TAG, "selectVideoCodec:")
381 |
382 | // get the list of available codecs
383 | val list = MediaCodecList(MediaCodecList.ALL_CODECS)
384 | val codecInfos = list.codecInfos
385 |
386 | val numCodecs = codecInfos.size
387 | for (i in 0 until numCodecs) {
388 | val codecInfo = codecInfos[i]
389 |
390 | if (!codecInfo.isEncoder) { // skip decoders
391 | continue
392 | }
393 | // select the first codec that matches the MIME type and color format
394 | val types = codecInfo.supportedTypes
395 | for (j in types.indices) {
396 | if (types[j].equals(mimeType, ignoreCase = true)) {
397 | Log.i(TAG, "codec:" + codecInfo.name + ",MIME=" + types[j])
398 | val format = selectColorFormat(codecInfo, mimeType)
399 | if (format > 0) {
400 | return codecInfo
401 | }
402 | }
403 | }
404 | }
405 | return null
406 | }
407 |
408 | /**
409 | * select a color format that the given codec supports and that we can use.
410 | *
411 | * @return 0 if no colorFormat is matched
412 | */
413 | private fun selectColorFormat(codecInfo: MediaCodecInfo, mimeType: String): Int {
414 | Log.i(TAG, "selectColorFormat: ")
415 | var result = 0
416 | val caps: MediaCodecInfo.CodecCapabilities
417 | try {
418 | Thread.currentThread()
419 | .priority = Thread.MAX_PRIORITY
420 | caps = codecInfo.getCapabilitiesForType(mimeType)
421 | } finally {
422 | Thread.currentThread()
423 | .priority = Thread.NORM_PRIORITY
424 | }
425 | var colorFormat: Int
426 | for (i in caps.colorFormats.indices) {
427 | colorFormat = caps.colorFormats[i]
428 | if (isRecognizedVideoFormat(colorFormat)) {
429 | if (result == 0)
430 | result = colorFormat
431 | break
432 | }
433 | }
434 | if (result == 0)
435 | Log.e(TAG, "couldn't find a good color format for " + codecInfo.name + " / " + mimeType)
436 | return result
437 | }
438 |
439 | private fun isRecognizedVideoFormat(colorFormat: Int): Boolean {
440 | Log.i(TAG, "isRecognizedVideoFormat:colorFormat=$colorFormat")
441 | return colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
442 | }
443 | }
444 |
445 |
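For orientation, here is a minimal, self-contained sketch of the same Surface-input MediaCodec pattern that VideoEncoder implements above: configure an AVC encoder, render frames into its input Surface, then drain encoded buffers into a MediaMuxer. It is illustrative only and not part of the library; the function name encodeToFile, the fixed bit rate, and the synchronous drain-after-EOS flow are simplifying assumptions, whereas the real encoder drains on its own thread while frames arrive.

import android.media.MediaCodec
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.media.MediaMuxer
import android.view.Surface

// Illustrative sketch only; encodeToFile and its parameters are not part of this library.
fun encodeToFile(outputPath: String, width: Int, height: Int, drawFrames: (Surface) -> Unit) {
    val mime = MediaFormat.MIMETYPE_VIDEO_AVC
    val format = MediaFormat.createVideoFormat(mime, width, height).apply {
        setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface)
        setInteger(MediaFormat.KEY_BIT_RATE, 4_000_000) // assumed fixed bit rate for the sketch
        setInteger(MediaFormat.KEY_FRAME_RATE, 30)
        setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3)
    }
    val codec = MediaCodec.createEncoderByType(mime)
    codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
    val inputSurface = codec.createInputSurface() // valid only between configure() and start()
    codec.start()

    // A real implementation drains on a separate thread while frames are produced,
    // as VideoEncoder does; this sketch renders everything first, then drains once.
    drawFrames(inputSurface)
    codec.signalEndOfInputStream()

    val muxer = MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
    val info = MediaCodec.BufferInfo()
    var trackIndex = -1
    var muxerStarted = false
    while (true) {
        val status = codec.dequeueOutputBuffer(info, 10_000L)
        if (status == MediaCodec.INFO_TRY_AGAIN_LATER) {
            continue // EOS is already signalled, so output will eventually arrive
        } else if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            trackIndex = muxer.addTrack(codec.outputFormat) // comes once, before any encoded data
            muxer.start()
            muxerStarted = true
        } else if (status >= 0) {
            val encoded = codec.getOutputBuffer(status) ?: continue
            if (info.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG != 0) info.size = 0
            if (info.size > 0 && muxerStarted) muxer.writeSampleData(trackIndex, encoded, info)
            codec.releaseOutputBuffer(status, false)
            if (info.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) break
        }
    }
    codec.stop()
    codec.release()
    muxer.stop()
    muxer.release()
}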
--------------------------------------------------------------------------------
/imagetovideo/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <string name="app_name">ImageToVideo</string>
3 | </resources>
4 |
--------------------------------------------------------------------------------
/sample/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/sample/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 | apply plugin: 'kotlin-android'
3 |
4 | android {
5 | compileSdkVersion COMPILE_SDK_VERSION as int
6 | defaultConfig {
7 | applicationId "com.daasuu.imagetovideoandroid"
8 | minSdkVersion COMPILE_MIN_SDK_VERSION as int
9 | targetSdkVersion COMPILE_SDK_VERSION as int
10 | versionCode VERSION_CODE as int
11 | versionName VERSION_NAME
12 |
13 |
14 |
15 | testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
16 | }
17 | buildTypes {
18 | release {
19 | minifyEnabled false
20 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
21 | }
22 | }
23 | }
24 |
25 | dependencies {
26 | implementation fileTree(dir: 'libs', include: ['*.jar'])
27 | implementation 'com.android.support:appcompat-v7:28.0.0-rc02'
28 | implementation 'com.android.support.constraint:constraint-layout:1.1.3'
29 | testImplementation 'junit:junit:4.12'
30 | androidTestImplementation 'com.android.support.test:runner:1.0.2'
31 | androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
32 | implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
33 | implementation project(':imagetovideo')
34 |
35 | implementation 'com.github.bumptech.glide:glide:4.7.1'
36 |
37 | }
38 | repositories {
39 | mavenCentral()
40 | }
41 |
42 | apply plugin: 'kotlin-android-extensions'
43 |
--------------------------------------------------------------------------------
/sample/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/sample/src/androidTest/java/com/daasuu/imagetovideoandroid/ExampleInstrumentedTest.java:
--------------------------------------------------------------------------------
1 | package com.daasuu.imagetovideoandroid;
2 |
3 | import android.content.Context;
4 | import android.support.test.InstrumentationRegistry;
5 | import android.support.test.runner.AndroidJUnit4;
6 |
7 | import org.junit.Test;
8 | import org.junit.runner.RunWith;
9 |
10 | import static org.junit.Assert.*;
11 |
12 | /**
13 | * Instrumented test, which will execute on an Android device.
14 | *
15 | * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
16 | */
17 | @RunWith(AndroidJUnit4.class)
18 | public class ExampleInstrumentedTest {
19 | @Test
20 | public void useAppContext() {
21 | // Context of the app under test.
22 | Context appContext = InstrumentationRegistry.getTargetContext();
23 |
24 | assertEquals("com.daasuu.imagetovideoandroid", appContext.getPackageName());
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/sample/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
8 |
9 |
12 |
13 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/sample/src/main/java/com/daasuu/imagetovideoandroid/ImageListAdapter.kt:
--------------------------------------------------------------------------------
1 | package com.daasuu.imagetovideoandroid
2 |
3 | import android.content.Context
4 | import android.view.LayoutInflater
5 | import android.view.View
6 | import android.view.ViewGroup
7 | import android.widget.ArrayAdapter
8 | import android.widget.ImageView
9 | import android.widget.TextView
10 | import com.bumptech.glide.Glide
11 |
12 | internal class ImageListAdapter(context: Context, resource: Int, objects: List<String>) : ArrayAdapter<String>(context, resource, objects) {
13 |
14 | private val layoutInflater = context.getSystemService(Context.LAYOUT_INFLATER_SERVICE) as LayoutInflater
15 |
16 | override fun getView(position: Int, view: View?, parent: ViewGroup): View {
17 | var convertView = view
18 | val path = getItem(position)
19 |
20 | if (null == convertView) {
21 | convertView = layoutInflater.inflate(R.layout.row_image_list, null)
22 | }
23 |
24 | convertView?.let {
25 | val imageView = it.findViewById<ImageView>(R.id.image)
26 | val textView = it.findViewById<TextView>(R.id.txt_image_name)
27 |
28 | textView.text = path
29 |
30 | Glide.with(context.applicationContext).load(path).into(imageView)
31 | }
32 |
33 |
34 | return convertView!!
35 | }
36 |
37 | }
38 |
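A brief usage sketch for the adapter above, as it might be wired up from an Activity in the sample module; bindImageList and R.id.image_list are assumed names, not identifiers from the sample, while R.layout.row_image_list is the real row layout.

import android.app.Activity
import android.util.Log
import android.widget.ListView

// Usage sketch; bindImageList and R.id.image_list are assumed names, not part of the sample.
fun Activity.bindImageList(imagePaths: List<String>) {
    val listView = findViewById<ListView>(R.id.image_list)
    listView.adapter = ImageListAdapter(this, R.layout.row_image_list, imagePaths)
    listView.setOnItemClickListener { parent, _, position, _ ->
        // each row is bound to one absolute image path
        val selectedPath = parent.getItemAtPosition(position) as String
        Log.d("ImageList", "tapped $selectedPath")
    }
}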
--------------------------------------------------------------------------------
/sample/src/main/java/com/daasuu/imagetovideoandroid/ImageLoader.kt:
--------------------------------------------------------------------------------
1 | package com.daasuu.imagetovideoandroid
2 |
3 | import android.content.Context
4 | import android.os.Handler
5 | import android.os.Looper
6 | import android.provider.MediaStore
7 | import java.io.File
8 | import java.util.*
9 | import java.util.concurrent.ExecutorService
10 | import java.util.concurrent.Executors
11 |
12 | class ImageLoader(private val context: Context) {
13 | private var executorService: ExecutorService? = null
14 |
15 | fun loadDeviceVideos(listener: ImageLoadListener) {
16 | getExecutorService().execute(ImageLoader.ImageLoadRunnable(listener, context))
17 | }
18 |
19 | fun abortLoadVideos() {
20 | if (executorService != null) {
21 | executorService!!.shutdown()
22 | executorService = null
23 | }
24 | }
25 |
26 | private fun getExecutorService(): ExecutorService {
27 | if (executorService == null) {
28 | executorService = Executors.newSingleThreadExecutor()
29 | }
30 | return executorService!!
31 | }
32 |
33 | private class ImageLoadRunnable(private val listener: ImageLoadListener, private val context: Context) : Runnable {
34 | private val handler = Handler(Looper.getMainLooper())
35 |
36 | private val projection = arrayOf(MediaStore.Images.Media.DATA, MediaStore.Images.Media.DATE_MODIFIED)
37 |
38 | override fun run() {
39 | val cursor = context.contentResolver.query(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, projection, null, null, MediaStore.Images.Media.DATE_ADDED)
40 |
41 | if (cursor == null) {
42 | handler.post { listener.onFailed(NullPointerException()) }
43 | return
44 | }
45 |
46 | val temp = ArrayList<String>(cursor.count)
47 |
48 | if (cursor.moveToLast()) {
49 | do {
50 | val path = cursor.getString(cursor.getColumnIndex(projection[0])) ?: continue
51 | if (!path.endsWith("png") && !path.endsWith("PNG") && !path.endsWith("jpg") && !path.endsWith("JPEG") && !path.endsWith("JPG") && !path.endsWith("jpeg") && !path.endsWith("GIF") && !path.endsWith("gif") && !path.endsWith("webp") && !path.endsWith("WEBP")) {
52 | continue
53 | }
54 |
55 | var file: File? = File(path)
56 | if (file!!.exists()) {
57 | try {
58 | temp.add(path)
59 | } catch (e: Exception) {
60 | continue
61 | }
62 |
63 | }
64 | file = null
65 |
66 | } while (cursor.moveToPrevious())
67 | }
68 | cursor.close()
69 |
70 | handler.post { listener.onVideoLoaded(temp) }
71 | }
72 | }
73 |
74 | companion object {
75 |
76 | private val TAG = "ImageLoader"
77 | }
78 |
79 | }
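A brief usage sketch for ImageLoader based only on the API above; loadImages is an assumed helper name. The callback is posted to the main thread with the newest images first, and abortLoadVideos() shuts the background executor down.

import android.app.Activity
import android.util.Log

// Usage sketch; loadImages is an assumed helper name, not part of the sample.
fun loadImages(activity: Activity): ImageLoader {
    val loader = ImageLoader(activity.applicationContext)
    loader.loadDeviceVideos(object : ImageLoadListener {
        override fun onVideoLoaded(imagePath: List<String>) {
            // absolute paths of device images (png/jpg/gif/webp), newest first
            Log.d("ImageLoaderSample", "loaded ${imagePath.size} images")
        }

        override fun onFailed(e: Exception) {
            Log.e("ImageLoaderSample", "failed to load images", e)
        }
    })
    return loader // call abortLoadVideos() from onDestroy() to stop the executor
}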
--------------------------------------------------------------------------------
/sample/src/main/java/com/daasuu/imagetovideoandroid/ImageloadListener.kt:
--------------------------------------------------------------------------------
1 | package com.daasuu.imagetovideoandroid
2 |
3 | interface ImageLoadListener {
4 |
5 | fun onVideoLoaded(imagePath: List<String>)
6 |
7 | fun onFailed(e: Exception)
8 | }
9 |
--------------------------------------------------------------------------------
/sample/src/main/java/com/daasuu/imagetovideoandroid/MainActivity.kt:
--------------------------------------------------------------------------------
1 | package com.daasuu.imagetovideoandroid
2 |
3 | import android.Manifest
4 | import android.annotation.SuppressLint
5 | import android.content.ContentValues
6 | import android.content.Context
7 | import android.content.Intent
8 | import android.content.pm.PackageManager
9 | import android.net.Uri
10 | import android.os.Bundle
11 | import android.os.Environment
12 | import android.provider.MediaStore
13 | import android.support.v7.app.AppCompatActivity
14 | import android.util.Log
15 | import android.util.Size
16 | import android.widget.Button
17 | import android.widget.ListView
18 | import android.widget.ProgressBar
19 | import android.widget.Toast
20 | import com.daasuu.imagetovideo.EncodeListener
21 | import com.daasuu.imagetovideo.ImageToVideoConverter
22 | import java.io.File
23 | import java.text.SimpleDateFormat
24 | import java.util.*
25 | import java.util.concurrent.TimeUnit
26 |
27 | class MainActivity : AppCompatActivity() {
28 |
29 | companion object {
30 | private const val PERMISSION_REQUEST_CODE = 88888
31 | }
32 |
33 | private var imageLoader: ImageLoader? = null
34 | private var imagePath: String? = null
35 | private var videoPath: String? = null
36 | private var imageToVideo: ImageToVideoConverter? = null
37 |
38 | override fun onCreate(savedInstanceState: Bundle?) {
39 | super.onCreate(savedInstanceState)
40 | setContentView(R.layout.activity_image_to_video)
41 | val progressBar = findViewById<ProgressBar>(R.id.progress_bar)
42 | progressBar.max = 100
43 |
44 | findViewById