├── .gitignore
├── .idea
├── compiler.xml
├── copyright
│ └── profiles_settings.xml
├── encodings.xml
├── gradle.xml
├── misc.xml
├── modules.xml
├── runConfigurations.xml
└── vcs.xml
├── README.md
├── app
├── .gitignore
├── CMakeLists.txt
├── ProgramInfo
├── build.gradle
├── proguard-rules.pro
└── src
│ ├── androidTest
│ └── java
│ │ └── com
│ │ └── example
│ │ └── cj
│ │ └── videoeditor
│ │ └── ExampleInstrumentedTest.java
│ ├── main
│ ├── AndroidManifest.xml
│ ├── assets
│ │ ├── filter
│ │ │ ├── brannan_blowout.png
│ │ │ ├── brannan_contrast.png
│ │ │ ├── brannan_luma.png
│ │ │ ├── brannan_process.png
│ │ │ ├── brannan_screen.png
│ │ │ ├── edgeburn.png
│ │ │ ├── freud_rand.png
│ │ │ ├── hefemap.png
│ │ │ ├── hefemetal.png
│ │ │ ├── hefesoftlight.png
│ │ │ ├── hudsonbackground.png
│ │ │ ├── hudsonmap.png
│ │ │ ├── inkwellmap.png
│ │ │ ├── n1977blowout.png
│ │ │ ├── n1977map.png
│ │ │ ├── nashvillemap.png
│ │ │ └── overlaymap.png
│ │ └── shader
│ │ │ ├── base_fragment.sh
│ │ │ ├── base_record_fragment.sh
│ │ │ ├── base_record_vertex.sh
│ │ │ ├── base_vertex.sh
│ │ │ ├── oes_base_fragment.sh
│ │ │ └── oes_base_vertex.sh
│ ├── cpp
│ │ └── native-lib.cpp
│ ├── java
│ │ └── com
│ │ │ └── example
│ │ │ └── cj
│ │ │ └── videoeditor
│ │ │ ├── Constants.java
│ │ │ ├── MyApplication.java
│ │ │ ├── MyClassLoader.java
│ │ │ ├── activity
│ │ │ ├── AudioEditorActivity.java
│ │ │ ├── AudioMixActivity.java
│ │ │ ├── AudioPreviewActivity.java
│ │ │ ├── AudioSelectActivity.java
│ │ │ ├── BaseActivity.java
│ │ │ ├── MainActivity.java
│ │ │ ├── MediaSelectVideoActivity.java
│ │ │ ├── PreviewActivity.java
│ │ │ ├── RecordedActivity.java
│ │ │ ├── VideoConnectActivity.java
│ │ │ └── VideoSelectActivity.java
│ │ │ ├── adapter
│ │ │ ├── AudioAdapter.java
│ │ │ ├── VideoAdapter.java
│ │ │ └── VideoSelectAdapter.java
│ │ │ ├── bean
│ │ │ ├── AudioSettingInfo.java
│ │ │ ├── CutBean.java
│ │ │ ├── MediaDecode.java
│ │ │ └── Song.java
│ │ │ ├── camera
│ │ │ ├── CameraController.java
│ │ │ ├── ICamera.java
│ │ │ └── SensorController.java
│ │ │ ├── drawer
│ │ │ ├── CameraDrawer.java
│ │ │ ├── TextureRender.java
│ │ │ └── VideoDrawer.java
│ │ │ ├── filter
│ │ │ ├── AFilter.java
│ │ │ ├── CameraFilter.java
│ │ │ ├── GroupFilter.java
│ │ │ ├── NoFilter.java
│ │ │ ├── OesFilter.java
│ │ │ ├── ProcessFilter.java
│ │ │ ├── RotationOESFilter.java
│ │ │ └── WaterMarkFilter.java
│ │ │ ├── gpufilter
│ │ │ ├── SlideGpuFilterGroup.java
│ │ │ ├── basefilter
│ │ │ │ ├── GPUImageFilter.java
│ │ │ │ └── MagicCameraInputFilter.java
│ │ │ ├── filter
│ │ │ │ ├── MagicAntiqueFilter.java
│ │ │ │ ├── MagicBeautyFilter.java
│ │ │ │ ├── MagicBrannanFilter.java
│ │ │ │ ├── MagicCoolFilter.java
│ │ │ │ ├── MagicFreudFilter.java
│ │ │ │ ├── MagicHefeFilter.java
│ │ │ │ ├── MagicHudsonFilter.java
│ │ │ │ ├── MagicInkwellFilter.java
│ │ │ │ ├── MagicN1977Filter.java
│ │ │ │ └── MagicNashvilleFilter.java
│ │ │ ├── helper
│ │ │ │ ├── MagicFilterFactory.java
│ │ │ │ └── MagicFilterType.java
│ │ │ └── utils
│ │ │ │ ├── OpenGlUtils.java
│ │ │ │ ├── Rotation.java
│ │ │ │ └── TextureRotationUtil.java
│ │ │ ├── jni
│ │ │ └── AudioJniUtils.java
│ │ │ ├── media
│ │ │ ├── MediaCodecInfo.java
│ │ │ ├── MediaPlayerWrapper.java
│ │ │ └── VideoInfo.java
│ │ │ ├── mediacodec
│ │ │ ├── AudioCodec.java
│ │ │ ├── AudioDecodeRunnable.java
│ │ │ ├── AudioEncodeRunnable.java
│ │ │ ├── AudioRunnable.java
│ │ │ ├── InputSurface.java
│ │ │ ├── MediaMuxerRunnable.java
│ │ │ ├── OutputSurface.java
│ │ │ ├── OutputSurfaceTwo.java
│ │ │ ├── TextureRender.java
│ │ │ ├── VideoClipper.java
│ │ │ └── VideoRunnable.java
│ │ │ ├── record
│ │ │ ├── gles
│ │ │ │ ├── EglCore.java
│ │ │ │ └── EglSurfaceBase.java
│ │ │ └── video
│ │ │ │ ├── TextureMovieEncoder.java
│ │ │ │ ├── VideoEncoderCore.java
│ │ │ │ └── WindowSurface.java
│ │ │ ├── utils
│ │ │ ├── DateUtils.java
│ │ │ ├── DensityUtils.java
│ │ │ ├── EasyGlUtils.java
│ │ │ ├── MatrixUtils.java
│ │ │ ├── OpenGlUtils.java
│ │ │ └── TimeFormatUtils.java
│ │ │ └── widget
│ │ │ ├── CameraView.java
│ │ │ ├── CircularProgressView.java
│ │ │ ├── FocusImageView.java
│ │ │ ├── LoadingDialog.java
│ │ │ ├── TitleView.java
│ │ │ └── VideoPreviewView.java
│ └── res
│ │ ├── anim
│ │ └── focusview_show.xml
│ │ ├── drawable
│ │ ├── app_back_selector.xml
│ │ ├── bg_circle_yellow.xml
│ │ ├── bt_capture.xml
│ │ ├── btn_camera_beauty.xml
│ │ ├── btn_camera_filter.xml
│ │ ├── selector_beauty.xml
│ │ ├── selector_btn_image_choose.xml
│ │ ├── selector_rewardcamera.xml
│ │ ├── selector_title_bar_btn.xml
│ │ ├── setting_text_color_selector.xml
│ │ ├── t_progressbar.xml
│ │ └── toast_shape.xml
│ │ ├── layout
│ │ ├── activity_audio.xml
│ │ ├── activity_audio_mix.xml
│ │ ├── activity_audio_preview.xml
│ │ ├── activity_connect.xml
│ │ ├── activity_main.xml
│ │ ├── activity_media_select_video.xml
│ │ ├── activity_music_select.xml
│ │ ├── activity_recorde.xml
│ │ ├── activity_video_connect.xml
│ │ ├── activity_video_preview.xml
│ │ ├── activity_video_select.xml
│ │ ├── item_media_audio.xml
│ │ ├── item_media_video.xml
│ │ ├── item_video_select.xml
│ │ ├── t_loading.xml
│ │ └── view_titlebar.xml
│ │ ├── mipmap-hdpi
│ │ └── ic_launcher.png
│ │ ├── mipmap-mdpi
│ │ └── ic_launcher.png
│ │ ├── mipmap-xhdpi
│ │ └── ic_launcher.png
│ │ ├── mipmap-xxhdpi
│ │ ├── btn_back_n.png
│ │ ├── btn_back_p.png
│ │ ├── btn_back_white_n.png
│ │ ├── editor_beauty_normal.png
│ │ ├── editor_beauty_pressed.png
│ │ ├── editor_img_confirm.png
│ │ ├── editor_img_def_video.png
│ │ ├── editor_record_back.png
│ │ ├── editor_record_close.png
│ │ ├── focus_focus_failed.9.png
│ │ ├── focus_focused.9.png
│ │ ├── focus_focusing.9.png
│ │ ├── ic_camera_beauty.png
│ │ ├── ic_camera_beauty_pressed.png
│ │ ├── ic_camera_filter.png
│ │ ├── ic_launcher.png
│ │ ├── icon_choice_nor.png
│ │ ├── icon_choice_selected.png
│ │ ├── img_aac.png
│ │ ├── img_mp3.png
│ │ ├── img_wma.png
│ │ ├── record_camera_switch_normal.png
│ │ ├── record_camera_switch_press.png
│ │ ├── t_loading_2.png
│ │ └── watermark.png
│ │ ├── mipmap-xxxhdpi
│ │ └── ic_launcher.png
│ │ ├── raw
│ │ ├── amaro.glsl
│ │ ├── antique.glsl
│ │ ├── beauty.glsl
│ │ ├── blackcat.glsl
│ │ ├── brannan.glsl
│ │ ├── brooklyn.glsl
│ │ ├── calm.glsl
│ │ ├── cool.glsl
│ │ ├── crayon.glsl
│ │ ├── default_fragment.glsl
│ │ ├── default_vertex.glsl
│ │ ├── earlybird.glsl
│ │ ├── emerald.glsl
│ │ ├── evergreen.glsl
│ │ ├── freud.glsl
│ │ ├── healthy.glsl
│ │ ├── hefe.glsl
│ │ ├── hudson.glsl
│ │ ├── inkwell.glsl
│ │ ├── kevin_new.glsl
│ │ ├── latte.glsl
│ │ ├── lomo.glsl
│ │ ├── n1977.glsl
│ │ ├── nashville.glsl
│ │ ├── nostalgia.glsl
│ │ ├── pixar.glsl
│ │ ├── rise.glsl
│ │ ├── romance.glsl
│ │ ├── sakura.glsl
│ │ ├── sierra.glsl
│ │ ├── sketch.glsl
│ │ ├── skinwhiten.glsl
│ │ ├── suger_tablets.glsl
│ │ ├── sunrise.glsl
│ │ ├── sunset.glsl
│ │ ├── sutro.glsl
│ │ ├── sweets.glsl
│ │ ├── tender.glsl
│ │ ├── toaster2_filter_shader.glsl
│ │ ├── valencia.glsl
│ │ ├── walden.glsl
│ │ ├── warm.glsl
│ │ ├── whitecat.glsl
│ │ └── xproii_filter_shader.glsl
│ │ ├── values-w820dp
│ │ └── dimens.xml
│ │ └── values
│ │ ├── colors.xml
│ │ ├── dimens.xml
│ │ ├── strings.xml
│ │ └── styles.xml
│ └── test
│ └── java
│ └── com
│ └── example
│ └── cj
│ └── videoeditor
│ └── ExampleUnitTest.java
├── build.gradle
├── gradle.properties
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/workspace.xml
5 | /.idea/libraries
6 | .DS_Store
7 | /build
8 | /captures
9 | .externalNativeBuild
10 |
--------------------------------------------------------------------------------
/.idea/compiler.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/.idea/copyright/profiles_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/.idea/encodings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/gradle.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
18 |
19 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/.idea/runConfigurations.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # VideoEditor-For-Android
2 | 一个Android的视频编辑器,包括了视频录制、剪切、增加bgm、美白、加滤镜、加水印等多种功能
3 |
4 | 基于android硬编码的视频编辑器,不支持4.3以下系统,通过android的api完成视频采集,通过OpenGL,完成视频数据帧的处理,通过android的硬编码器MediaCodec
5 | 对采集到的视频流进行硬编码。
6 | 利用OpenGL完成视频的美白、加滤镜、加水印等功能。利用MediaCodec完成音视频的分离和音频的一些混音处理
7 |
8 | 注:该项目属于是一个半成品项目。并没有直接使用的商业价值。我也看到了很多人提的issues,但是因为作者最近事情比较多,以后会补上剩下的通过OpenGL拼接视频,以及给视频增加bgm等功能,也会解决那些issues。
9 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/app/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | # Sets the minimum version of CMake required to build the native
2 | # library. You should either keep the default value or only pass a
3 | # value of 3.4.0 or lower.
4 |
5 | cmake_minimum_required(VERSION 3.4.1)
6 |
7 | # Creates and names a library, sets it as either STATIC
8 | # or SHARED, and provides the relative paths to its source code.
9 | # You can define multiple libraries, and CMake builds it for you.
10 | # Gradle automatically packages shared libraries with your APK.
11 |
12 | add_library( # Sets the name of the library.
13 | native-lib
14 |
15 | # Sets the library as a shared library.
16 | SHARED
17 |
18 | # Provides a relative path to your source file(s).
19 | # Associated headers in the same location as their source
20 | # file are automatically included.
21 | src/main/cpp/native-lib.cpp )
22 |
23 | # Searches for a specified prebuilt library and stores the path as a
24 | # variable. Because system libraries are included in the search path by
25 | # default, you only need to specify the name of the public NDK library
26 | # you want to add. CMake verifies that the library exists before
27 | # completing its build.
28 |
29 | find_library( # Sets the name of the path variable.
30 | log-lib
31 |
32 | # Specifies the name of the NDK library that
33 | # you want CMake to locate.
34 | log )
35 |
36 | # Specifies libraries CMake should link to your target library. You
37 | # can link multiple libraries, such as libraries you define in the
38 | # build script, prebuilt third-party libraries, or system libraries.
39 |
40 | target_link_libraries( # Specifies the target library.
41 | native-lib
42 |
43 | # Links the target library to the log library
44 | # included in the NDK.
45 | ${log-lib} )
46 |
--------------------------------------------------------------------------------
/app/ProgramInfo:
--------------------------------------------------------------------------------
1 |
2 | github 地址
3 | https://github.com/qqchenjian318/VideoEditor-For-Android
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | android {
4 |     // NOTE(review): compileSdkVersion 25 is lower than the appcompat 26.x dependency
5 |     // below — support libraries usually require compileSdk >= their major version; confirm.
6 |     compileSdkVersion 25
7 |     buildToolsVersion "25.0.3"
8 |     defaultConfig {
9 |         applicationId "com.example.cj.videoeditor"
10 |         minSdkVersion 18
11 |         targetSdkVersion 25
12 |         versionCode 1
13 |         versionName "1.0"
14 |         testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
15 |         externalNativeBuild {
16 |             cmake {
17 |                 cppFlags ""
18 |             }
19 |         }
20 |
21 |     }
22 |     buildTypes {
23 |         release {
24 |             minifyEnabled false
25 |             proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
26 |         }
27 |     }
28 |     // Native build driven by the CMakeLists.txt in the app module root.
29 |     externalNativeBuild {
30 |         cmake {
31 |             path "CMakeLists.txt"
32 |         }
33 |     }
34 | }
35 |
36 | dependencies {
37 |     compile fileTree(include: ['*.jar'], dir: 'libs')
38 |     androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
39 |         exclude group: 'com.android.support', module: 'support-annotations'
40 |     })
41 |     // NOTE(review): mixed support-library versions (appcompat 26.0.0-alpha1 vs
42 |     // cardview 27.0.2) — all com.android.support artifacts should share one version.
43 |     compile 'com.android.support:appcompat-v7:26.0.0-alpha1'
44 |     testCompile 'junit:junit:4.12'
45 |     compile 'com.github.bumptech.glide:glide:3.7.0'
46 |     compile 'com.android.support:cardview-v7:27.0.2'
47 | }
48 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in D:\sdk\sdk/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | # http://developer.android.com/guide/developing/tools/proguard.html
9 |
10 | # Add any project specific keep options here:
11 |
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | # public *;
17 | #}
18 |
--------------------------------------------------------------------------------
/app/src/androidTest/java/com/example/cj/videoeditor/ExampleInstrumentedTest.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor;
2 |
3 | import android.content.Context;
4 | import android.support.test.InstrumentationRegistry;
5 | import android.support.test.runner.AndroidJUnit4;
6 |
7 | import org.junit.Test;
8 | import org.junit.runner.RunWith;
9 |
10 | import static org.junit.Assert.*;
11 |
12 | /**
13 | * Instrumentation test, which will execute on an Android device.
14 | *
15 | * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
16 | */
17 | @RunWith(AndroidJUnit4.class)
18 | public class ExampleInstrumentedTest {
19 | @Test
20 | public void useAppContext() throws Exception {
21 | // Context of the app under test; verifies the instrumented package name.
22 | Context appContext = InstrumentationRegistry.getTargetContext();
23 |
24 | assertEquals("com.example.cj.videoeditor", appContext.getPackageName());
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
8 |
9 |
16 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
26 |
28 |
30 |
32 |
34 |
36 |
38 |
40 |
42 |
44 |
45 |
46 |
--------------------------------------------------------------------------------
/app/src/main/assets/filter/brannan_blowout.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/brannan_blowout.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/brannan_contrast.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/brannan_contrast.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/brannan_luma.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/brannan_luma.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/brannan_process.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/brannan_process.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/brannan_screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/brannan_screen.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/edgeburn.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/edgeburn.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/freud_rand.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/freud_rand.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/hefemap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/hefemap.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/hefemetal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/hefemetal.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/hefesoftlight.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/hefesoftlight.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/hudsonbackground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/hudsonbackground.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/hudsonmap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/hudsonmap.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/inkwellmap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/inkwellmap.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/n1977blowout.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/n1977blowout.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/n1977map.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/n1977map.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/nashvillemap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/nashvillemap.png
--------------------------------------------------------------------------------
/app/src/main/assets/filter/overlaymap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/assets/filter/overlaymap.png
--------------------------------------------------------------------------------
/app/src/main/assets/shader/base_fragment.sh:
--------------------------------------------------------------------------------
1 | precision mediump float;
2 | varying vec2 textureCoordinate; // interpolated UV from the vertex shader
3 | uniform sampler2D vTexture; // input 2D texture
4 | void main() {
5 | gl_FragColor = texture2D( vTexture, textureCoordinate ); // pass-through sample, no color processing
6 | }
--------------------------------------------------------------------------------
/app/src/main/assets/shader/base_record_fragment.sh:
--------------------------------------------------------------------------------
1 | #extension GL_OES_EGL_image_external : require // needed to declare samplerExternalOES
2 | precision mediump float;
3 | varying vec2 vTextureCoord; // interpolated UV from the record vertex shader
4 | uniform samplerExternalOES sTexture; // external image stream (e.g. a SurfaceTexture)
5 | void main() {
6 | gl_FragColor = texture2D(sTexture, vTextureCoord); // pass-through sample
7 | }
--------------------------------------------------------------------------------
/app/src/main/assets/shader/base_record_vertex.sh:
--------------------------------------------------------------------------------
1 | // Vertex shader used while recording: applies the MVP matrix to the position
2 | // and the texture transform (uSTMatrix) to the texture coordinate.
3 | uniform mat4 uMVPMatrix;
4 | uniform mat4 uSTMatrix;
5 | attribute vec4 aPosition;
6 | attribute vec4 aTextureCoord;
7 | varying vec2 vTextureCoord;
8 | void main() {
9 | gl_Position = uMVPMatrix * aPosition;
10 | vTextureCoord = (uSTMatrix * aTextureCoord).xy;
11 | } // removed stray ';' after the brace — an empty global declaration is invalid in strict GLSL ES
--------------------------------------------------------------------------------
/app/src/main/assets/shader/base_vertex.sh:
--------------------------------------------------------------------------------
1 | attribute vec4 vPosition; // vertex position
2 | attribute vec2 vCoord; // texture coordinate
3 | uniform mat4 vMatrix; // combined transform matrix
4 |
5 | varying vec2 textureCoordinate; // passed to the fragment shader
6 |
7 | void main(){
8 | gl_Position = vMatrix*vPosition;
9 | textureCoordinate = vCoord;
10 | }
--------------------------------------------------------------------------------
/app/src/main/assets/shader/oes_base_fragment.sh:
--------------------------------------------------------------------------------
1 | #extension GL_OES_EGL_image_external : require // needed to declare samplerExternalOES
2 | precision mediump float;
3 | varying vec2 textureCoordinate; // interpolated UV from the vertex shader
4 | uniform samplerExternalOES vTexture; // external (OES) texture, e.g. camera preview
5 | void main() {
6 | gl_FragColor = texture2D( vTexture, textureCoordinate ); // pass-through sample
7 | }
--------------------------------------------------------------------------------
/app/src/main/assets/shader/oes_base_vertex.sh:
--------------------------------------------------------------------------------
1 | attribute vec4 vPosition; // vertex position
2 | attribute vec2 vCoord; // texture coordinate
3 | uniform mat4 vMatrix; // combined transform matrix
4 | varying vec2 textureCoordinate; // passed to the fragment shader
5 |
6 | void main(){
7 | gl_Position = vMatrix*vPosition;
8 | textureCoordinate = vCoord;
9 | }
--------------------------------------------------------------------------------
/app/src/main/cpp/native-lib.cpp:
--------------------------------------------------------------------------------
1 | #include <jni.h>
2 | #include <cstdio>
3 | #include <vector>
4 |
5 | extern "C" {
6 |
7 | /*
8 |  * Mixes two 16-bit little-endian PCM buffers (sourceA_, sourceB_) into dst_,
9 |  * scaling each source by its volume factor, and returns a new byte array of
10 |  * the same length as sourceA_. When both samples share a sign a "normalized"
11 |  * mix (a + b - a*b/limit) is used to soften clipping. If sourceB_ is shorter
12 |  * than sourceA_, the tail is sourceA_ alone (scaled).
13 |  */
14 | JNIEXPORT jbyteArray JNICALL
15 | Java_com_example_cj_videoeditor_jni_AudioJniUtils_audioMix(JNIEnv *env, jclass type, jbyteArray sourceA_,
16 |                                                            jbyteArray sourceB_, jbyteArray dst_, jfloat firstVol,
17 |                                                            jfloat secondVol) {
18 |     jbyte *sourceA = env->GetByteArrayElements(sourceA_, NULL);
19 |     jbyte *sourceB = env->GetByteArrayElements(sourceB_, NULL);
20 |     jbyte *dst = env->GetByteArrayElements(dst_, NULL);
21 |     int aL = env->GetArrayLength(sourceA_);
22 |     int bL = env->GetArrayLength(sourceB_);
23 |     int rowA = aL / 2;              // number of 16-bit samples in A
24 |     int rowB = bL / 2;              // number of 16-bit samples in B (may differ)
25 |     // Heap storage instead of variable-length stack arrays: PCM buffers can be
26 |     // large enough to overflow the stack.
27 |     std::vector<short> result(rowA);
28 |     for (int i = 0; i < rowA; ++i) {
29 |         // Decode little-endian samples and apply per-source volume.
30 |         int a = (int) ((short) ((sourceA[i * 2] & 0xff) | (sourceA[i * 2 + 1] & 0xff) << 8) * firstVol);
31 |         int b = 0;
32 |         if (i < rowB) {             // guard: never read past the end of sourceB_
33 |             b = (int) ((short) ((sourceB[i * 2] & 0xff) | (sourceB[i * 2 + 1] & 0xff) << 8) * secondVol);
34 |         }
35 |         int mixed;
36 |         if (a < 0 && b < 0) {
37 |             mixed = a + b - a * b / (-32768);
38 |         } else if (a > 0 && b > 0) {
39 |             mixed = a + b - a * b / 32767;
40 |         } else {
41 |             mixed = a + b;
42 |         }
43 |         // Clamp to the signed 16-bit range (fixes the original off-by-one that
44 |         // compared against 32768 in the both-negative branch).
45 |         if (mixed > 32767) {
46 |             mixed = 32767;
47 |         } else if (mixed < -32768) {
48 |             mixed = -32768;
49 |         }
50 |         result[i] = (short) mixed;
51 |     }
52 |     // Re-encode the mixed samples into dst as little-endian bytes.
53 |     // (Reconstructed: this loop was garbled in the stored source.)
54 |     for (int i = 0; i < rowA; ++i) {
55 |         dst[i * 2 + 1] = (jbyte) ((result[i] & 0xFF00) >> 8);
56 |         dst[i * 2] = (jbyte) (result[i] & 0x00FF);
57 |     }
58 |
59 |     jbyteArray result1 = env->NewByteArray(aL);
60 |     env->SetByteArrayRegion(result1, 0, aL, dst);
61 |
62 |     env->ReleaseByteArrayElements(sourceA_, sourceA, 0);
63 |     env->ReleaseByteArrayElements(sourceB_, sourceB, 0);
64 |     env->ReleaseByteArrayElements(dst_, dst, 0);
65 |
66 |     return result1;
67 | }
68 |
69 | }
70 |
71 | /*
72 |  * Returns a new Java string "From C <info> " built from the argument, or NULL
73 |  * when the input string cannot be read.
74 |  */
75 | extern "C"
76 | JNIEXPORT jstring JNICALL
77 | Java_com_example_cj_videoeditor_jni_AudioJniUtils_putString(JNIEnv *env, jclass type,
78 |                                                             jstring info_) {
79 |     const char *info = env->GetStringUTFChars(info_, 0);
80 |     char buf[128];
81 |     if (info == NULL)
82 |         return NULL;
83 |     // snprintf instead of sprintf: a long input must not overflow buf.
84 |     snprintf(buf, sizeof(buf), "From C %s ", info);
85 |
86 |     env->ReleaseStringUTFChars(info_, info);
87 |
88 |     return env->NewStringUTF(buf);
89 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/Constants.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor;
2 |
3 | import android.content.Context;
4 | import android.os.Environment;
5 | import android.util.DisplayMetrics;
6 |
7 |
8 | import java.io.File;
9 |
10 | /**
11 | * Created by cj on 2017/6/26 .
12 | */
13 |
14 | public class Constants {
15 | /**
16 | * Screen width and height in pixels (filled in by init()).
17 | */
18 | public static int screenWidth;
19 | public static int screenHeight;
20 |
21 | /**
22 | * Aspect-ratio modes for a video: 9:16, 1:1 and 16:9.
23 | */
24 | public static final int MODE_POR_9_16 = 0;
25 | public static final int MODE_POR_1_1 = 1;
26 | public static final int MODE_POR_16_9 = 2;
27 |
28 | /**
29 | * On-screen display sizes for the three aspect modes (set in init()).
30 | */
31 | public static int mode_por_width_9_16;
32 | public static int mode_por_height_9_16;
33 | public static int mode_por_width_1_1;
34 | public static int mode_por_height_1_1;
35 | public static int mode_por_width_16_9;
36 | public static int mode_por_height_16_9;
37 |
38 | /**
39 | * Encoding sizes for the three aspect modes (values modeled on the VUE app).
40 | */
41 | public static final int mode_por_encode_width_9_16 = 540;
42 | public static final int mode_por_encode_height_9_16 = 960;
43 | public static final int mode_por_encode_width_1_1 = 540;
44 | public static final int mode_por_encode_height_1_1 = 540;
45 | public static final int mode_por_encode_width_16_9 = 960;
46 | public static final int mode_por_encode_height_16_9 = 540;
47 |
48 | public static void init(Context context) {
49 | DisplayMetrics mDisplayMetrics = context.getResources()
50 | .getDisplayMetrics();
51 | screenWidth = mDisplayMetrics.widthPixels;
52 | screenHeight = mDisplayMetrics.heightPixels;
53 | mode_por_width_9_16 = screenWidth;
54 | mode_por_height_9_16 = screenHeight;
55 | mode_por_width_1_1 = screenWidth;
56 | mode_por_height_1_1 = screenWidth;
57 | mode_por_width_16_9 = screenWidth;
58 | mode_por_height_16_9 = screenWidth / 16 * 9; // 16:9 frame: height derived from full screen width
59 | }
60 |
61 | public static String getBaseFolder() {
62 | String baseFolder = Environment.getExternalStorageDirectory() + "/Codec/"; // NOTE(review): deprecated on newer APIs — confirm target devices
63 | File f = new File(baseFolder);
64 | if (!f.exists()) {
65 | boolean b = f.mkdirs();
66 | if (!b) {
67 | baseFolder = MyApplication.getContext().getExternalFilesDir(null).getAbsolutePath() + "/"; // fall back to the app-specific dir when /Codec/ cannot be created
68 | }
69 | }
70 | return baseFolder;
71 | }
72 |
73 | // Builds "<base>/<path><fileName>"; falls back to "<base><fileName>" if the sub-directory cannot be created.
74 | public static String getPath(String path, String fileName) {
75 | String p = getBaseFolder() + path;
76 | File f = new File(p);
77 | if (!f.exists() && !f.mkdirs()) {
78 | return getBaseFolder() + fileName;
79 | }
80 | return p + fileName;
81 | }
82 | }
83 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/MyApplication.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor;
2 |
3 | import android.app.Application;
4 | import android.content.Context;
5 | import android.os.Handler;
6 | import android.util.DisplayMetrics;
7 | import android.util.Log;
8 |
9 | import com.example.cj.videoeditor.media.VideoInfo;
10 | import com.example.cj.videoeditor.mediacodec.VideoRunnable;
11 |
12 | import java.io.FileInputStream;
13 | import java.io.FileOutputStream;
14 | import java.nio.ByteBuffer;
15 |
16 | /**
17 | * Created by cj on 2017/8/6.
18 | * Application entry point: exposes a process-wide Context and performs
19 | * one-time initialization of Constants (screen metrics).
20 | */
21 |
22 | public class MyApplication extends Application{
23 | private static Context mContext; // application context — safe to hold statically, lives for the whole process
24 |
25 |
26 |
27 | @Override
28 | public void onCreate() {
29 | super.onCreate();
30 | mContext = this;
31 | Log.e("thread"," 线程值 "+Thread.currentThread()); // debug: which thread the Application is created on
32 | Constants.init(this); // capture screen metrics before any Activity starts
33 | }
34 |
35 | @Override
36 | protected void attachBaseContext(Context base) {
37 | super.attachBaseContext(base);
38 |
39 | }
40 |
41 | public static Context getContext() {
42 | return mContext;
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/MyClassLoader.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor;
2 |
3 | /**
4 | * Created by cj on 2018/1/9.
5 | * Minimal ClassLoader subclass; currently delegates findClass to the parent
6 | * implementation unchanged. (Restored the wildcard generic Class&lt;?&gt;,
7 | * which had been stripped from the stored source.)
8 | */
9 |
10 | public class MyClassLoader extends ClassLoader{
11 | public MyClassLoader(){
12 | super();
13 | }
14 |
15 | @Override
16 | protected Class<?> findClass(String name) throws ClassNotFoundException {
17 | // Delegates to ClassLoader.findClass, which throws ClassNotFoundException.
18 | return super.findClass(name);
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/activity/AudioEditorActivity.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.activity;
2 |
3 | import android.content.Intent;
4 | import android.os.Bundle;
5 | import android.os.Handler;
6 | import android.support.annotation.Nullable;
7 | import android.view.View;
8 | import android.widget.Toast;
9 |
10 | import com.example.cj.videoeditor.Constants;
11 | import com.example.cj.videoeditor.R;
12 | import com.example.cj.videoeditor.mediacodec.AudioCodec;
13 |
14 | import java.io.File;
15 |
16 | /**
17 | * Created by cj on 2017/11/5.
18 | *
19 | */
20 |
21 | public class AudioEditorActivity extends BaseActivity implements View.OnClickListener {
22 | @Override
23 | protected void onCreate(@Nullable Bundle savedInstanceState) {
24 | super.onCreate(savedInstanceState);
25 | setContentView(R.layout.activity_audio);
26 | findViewById(R.id.video_select).setOnClickListener(this);
27 | findViewById(R.id.audio_select).setOnClickListener(this);
28 | findViewById(R.id.pcm_to_audio).setOnClickListener(this);
29 | findViewById(R.id.audio_mix).setOnClickListener(this);
30 |
31 | }
32 |
33 | @Override
34 | public void onClick(View v) {
35 | switch (v.getId()){
36 | case R.id.video_select:
37 | //去选择视频
38 | VideoSelectActivity.openActivity(this);
39 | break;
40 | case R.id.audio_select:
41 | startActivity(new Intent(AudioEditorActivity.this , AudioSelectActivity.class));
42 | break;
43 | case R.id.pcm_to_audio:
44 | //pcm文件转音频
45 | String path = Constants.getPath("audio/outputPCM/", "PCM_1511078423497.pcm");
46 | if (!new File(path).exists()){
47 | Toast.makeText(this,"PCM文件不存在,请设置为本地已有PCM文件",Toast.LENGTH_SHORT).show();
48 | return;
49 | }
50 | final String audioPath = Constants.getPath("audio/outputAudio/", "audio_"+System.currentTimeMillis()+".aac");
51 | showLoading("音频编码中...");
52 | AudioCodec.PCM2Audio(path, audioPath, new AudioCodec.AudioDecodeListener() {
53 | @Override
54 | public void decodeOver() {
55 | Toast.makeText(AudioEditorActivity.this,"数据编码成功 文件保存位置为—>>"+audioPath,Toast.LENGTH_SHORT).show();
56 | endLoading();
57 | }
58 |
59 | @Override
60 | public void decodeFail() {
61 | Toast.makeText(AudioEditorActivity.this,"数据编码失败 maybe same Exception ,please look at logcat "+audioPath,Toast.LENGTH_SHORT).show();
62 | endLoading();
63 | }
64 | });
65 | break;
66 | case R.id.audio_mix:
67 | startActivity(new Intent(this,AudioMixActivity.class));
68 | break;
69 | }
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/activity/BaseActivity.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.activity;
2 |
3 | import android.app.Activity;
4 | import android.os.Build;
5 | import android.os.Looper;
6 | import android.support.v7.app.AppCompatActivity;
7 |
8 | import com.example.cj.videoeditor.widget.LoadingDialog;
9 |
/**
 * Created by cj on 2017/10/16.
 * Base activity owning a shared {@link LoadingDialog}. The show helpers
 * detect whether they run on the main thread and hop to it via
 * runOnUiThread when needed.
 */
public class BaseActivity extends AppCompatActivity {
    // Current loading dialog; recreated by the show* overloads as needed.
    public LoadingDialog loading;

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Dismiss any visible dialog so it does not leak the activity window.
        if (loading != null) {
            loading.dismiss();
        }
    }

    /**
     * Updates the loading message, creating and showing the dialog on first use.
     *
     * @param tips message to display
     */
    public void setLoading(String tips) {
        if (null == loading) {
            loading = new LoadingDialog(this);
            loading.setTips(tips);
            loading.show();
        } else {
            loading.setTips(tips);
        }
    }

    /**
     * Shows the loading dialog with the given message. A new dialog is created
     * when none exists yet or the current one was built with a title
     * (isWithTitle()).
     *
     * @param tips message to display
     */
    public void showLoading(final String tips) {
        if (null == loading || loading.isWithTitle())
            loading = new LoadingDialog(this);

        if (Thread.currentThread() == Looper.getMainLooper().getThread()) {
            loading.setTips(tips);
            loading.show();
        } else {
            // Called off the main thread: post the UI work to it.
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    loading.setTips(tips);
                    loading.show();
                }
            });

        }
    }

    /**
     * Replaces any current dialog with a fresh one.
     *
     * @param tips       message to display
     * @param cancelable whether back can dismiss it (false: no, true: yes)
     */
    public void showLoading(final String tips, final boolean cancelable) {
        // No-op once the activity is destroyed (isDestroyed() requires API 17+).
        if (isDestroyed()) {
            return;
        }
        endLoading();
        if (Thread.currentThread() == Looper.getMainLooper().getThread()) {
            loading = new LoadingDialog(this, tips, cancelable);
            loading.show();
        } else {
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    loading = new LoadingDialog(BaseActivity.this, tips, cancelable);
                    loading.show();
                }
            });
        }
    }

    /**
     * Shows a loading dialog with both a title and a message.
     * NOTE(review): unlike the other overloads this does not hop to the main
     * thread — confirm it is only ever called from the UI thread.
     *
     * @param title dialog title
     * @param tips  message to display
     */
    public void showLoading(String title, String tips) {
        endLoading();
        loading = new LoadingDialog(this, title, tips);
        loading.show();
    }

    /**
     * Sets whether the back key dismisses the dialog.
     *
     * @param cancelable true: dismissible, false: not
     */
    public void setLoadingCancelable(boolean cancelable) {
        if (null != loading) {
            loading.setCancelable(cancelable);
        }
    }

    /**
     * Hides the loading dialog if present.
     */
    public void endLoading() {
        if (null != loading) {
            loading.dismiss();
        }
    }

    /** @return true when a dialog exists and is currently showing. */
    public boolean isLoading() {
        return loading != null && loading.isShowing();
    }
}
123 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/activity/MainActivity.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.activity;
2 |
3 | import android.content.Intent;
4 | import android.os.Bundle;
5 | import android.view.View;
6 | import android.widget.Button;
7 | import android.widget.Toast;
8 |
9 | import com.example.cj.videoeditor.R;
10 |
11 | public class MainActivity extends BaseActivity implements View.OnClickListener {
12 |
13 | @Override
14 | protected void onCreate(Bundle savedInstanceState) {
15 | super.onCreate(savedInstanceState);
16 | setContentView(R.layout.activity_main);
17 |
18 | Button recordBtn = (Button) findViewById(R.id.record_activity);
19 | Button selectBtn = (Button) findViewById(R.id.select_activity);
20 | Button audioBtn = (Button) findViewById(R.id.audio_activity);
21 | Button videoBtn = (Button) findViewById(R.id.video_connect);
22 |
23 | recordBtn.setOnClickListener(this);
24 | selectBtn.setOnClickListener(this);
25 | audioBtn.setOnClickListener(this);
26 | videoBtn.setOnClickListener(this);
27 | }
28 |
29 | @Override
30 | public void onClick(View v) {
31 | switch (v.getId()){
32 | case R.id.record_activity:
33 | startActivity(new Intent(MainActivity.this , RecordedActivity.class));
34 | break;
35 | case R.id.select_activity:
36 | VideoSelectActivity.openActivity(this);
37 | break;
38 | case R.id.audio_activity:
39 | startActivity(new Intent(MainActivity.this , AudioEditorActivity.class));
40 | break;
41 | case R.id.video_connect:
42 | // Toast.makeText(this,"该功能还未完成!!!",Toast.LENGTH_SHORT).show();
43 | startActivity(new Intent(MainActivity.this , VideoConnectActivity.class));
44 | break;
45 | }
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/adapter/AudioAdapter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.adapter;
2 |
3 | import android.content.Context;
4 | import android.util.Log;
5 | import android.view.LayoutInflater;
6 | import android.view.View;
7 | import android.view.ViewGroup;
8 | import android.widget.BaseAdapter;
9 | import android.widget.ImageView;
10 | import android.widget.TextView;
11 |
12 | import com.example.cj.videoeditor.R;
13 | import com.example.cj.videoeditor.bean.Song;
14 | import com.example.cj.videoeditor.utils.DateUtils;
15 |
16 | import java.util.List;
17 |
18 |
19 | /**
20 | * Created by cj on 2017/6/27.
21 | * desc
22 | */
23 |
24 | public class AudioAdapter extends BaseAdapter{
25 | private List mData;
26 | private Context mContext;
27 | public AudioAdapter(Context context, List data){
28 | mContext = context;
29 | mData = data;
30 |
31 | }
32 | public void setData(List data){
33 | mData = data;
34 | }
35 | @Override
36 | public int getCount() {
37 | if (mData != null && mData.size() > 0)
38 | return mData.size();
39 | return 0;
40 | }
41 |
42 | @Override
43 | public Object getItem(int position) {
44 | return mData.get(position);
45 | }
46 |
47 | @Override
48 | public long getItemId(int position) {
49 | return position;
50 | }
51 |
52 | @Override
53 | public View getView(int position, View convertView, ViewGroup parent) {
54 | ViewHolder holder;
55 | if (convertView == null){
56 | holder = new ViewHolder();
57 | convertView = LayoutInflater.from(mContext).inflate(R.layout.item_media_audio , parent,false);
58 | holder.audioType = (ImageView) convertView.findViewById(R.id.audio_type);
59 | holder.audioName = (TextView) convertView.findViewById(R.id.audio_name);
60 | holder.audioSize= (TextView) convertView.findViewById(R.id.audio_size);
61 | holder.audioDuration = (TextView) convertView.findViewById(R.id.audio_duration);
62 | convertView.setTag(holder);
63 | }else {
64 | holder = (ViewHolder) convertView.getTag();
65 | }
66 | Song song = mData.get(position);
67 | holder.audioName.setText(song.getName());
68 |
69 | holder.audioSize.setText(song.getSize());
70 | holder.audioDuration.setText(DateUtils.covertToDate(song.getDuration()));
71 | if ("mp3".equals(song.getType())){
72 | holder.audioType.setImageResource(R.mipmap.img_mp3);
73 | }else if ("aac".equals(song.getType())){
74 | holder.audioType.setImageResource(R.mipmap.img_aac);
75 | }else if ("wma".equals(song.getType())){
76 | holder.audioType.setImageResource(R.mipmap.img_wma);
77 | }else {
78 | holder.audioType.setImageResource(R.mipmap.ic_launcher);
79 | }
80 |
81 | return convertView;
82 | }
83 | class ViewHolder{
84 | ImageView audioType;
85 | TextView audioName;
86 | TextView audioSize;
87 | TextView audioDuration;
88 | }
89 | }
90 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/bean/AudioSettingInfo.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.bean;
2 |
/**
 * Created by cj on 2017/6/28.
 * Settings for mixing a background track (BGM) into a recording.
 */
public class AudioSettingInfo {
    public boolean isSet;   // whether the user has configured audio mixing
    public String filePath; // path of the BGM file
    public int volFirst;    // volume of the original (recorded) audio
    public int volSecond;   // volume of the BGM
}
14 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/bean/CutBean.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.bean;
2 |
3 | import android.os.Parcel;
4 | import android.os.Parcelable;
5 |
6 | /**
7 | * Created by cj on 2017/7/10.
8 | * desc 以video的path和position为关键key
9 | * 记录 当前视频剪切的选择参数
10 | * 便于后面剪切视频
11 | */
12 |
13 | public class CutBean implements Parcelable {
14 | public int position;//这个选择参数 在当前视频集里的位置
15 | public String videoPath;
16 | public long startPoint;//开始剪切的时间点
17 | public long cutDuration;//剪切的时长
18 | public long videoDuration;//video的总长度
19 | public CutBean(){
20 |
21 | }
22 |
23 | protected CutBean(Parcel in) {
24 | position = in.readInt();
25 | videoPath = in.readString();
26 | startPoint = in.readLong();
27 | cutDuration = in.readLong();
28 | videoDuration = in.readLong();
29 | }
30 |
31 | public static final Creator CREATOR = new Creator() {
32 | @Override
33 | public CutBean createFromParcel(Parcel in) {
34 | return new CutBean(in);
35 | }
36 |
37 | @Override
38 | public CutBean[] newArray(int size) {
39 | return new CutBean[size];
40 | }
41 | };
42 |
43 | @Override
44 | public int describeContents() {
45 | return 0;
46 | }
47 |
48 | @Override
49 | public void writeToParcel(Parcel dest, int flags) {
50 | dest.writeInt(position);
51 | dest.writeString(videoPath);
52 | dest.writeLong(startPoint);
53 | dest.writeLong(cutDuration);
54 | dest.writeLong(videoDuration);
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/bean/MediaDecode.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.bean;
2 |
3 | import android.media.MediaExtractor;
4 |
/**
 * Created by cj on 2017/7/11.
 * Audio-decode info: the source path, an already-created MediaExtractor,
 * the cut start point (cutPoint) and the cut length (cutDuration).
 */
public class MediaDecode {
    public String path;              // audio file path
    public MediaExtractor extractor; // extractor for this file
    public int cutPoint;             // where the cut of this segment starts
    public int cutDuration;          // length of the cut
    public int duration;             // total duration (time unit set by the caller — confirm)
}
19 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/bean/Song.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.bean;
2 |
/**
 * Created by cj on 2017/6/27.
 * Plain data holder for one audio track found on the device.
 */
public class Song {
    // Fields are public (legacy direct access); matching accessors exist too.
    public String artist;  // performing artist
    public String path;    // absolute file path
    public String title;   // track title
    public String name;    // display name
    public String size;    // human-readable file size
    public String type;    // file extension, e.g. "mp3" / "aac" / "wma"
    public int duration;   // track length (presumably milliseconds — confirm at caller)

    public String getArtist() { return artist; }

    public void setArtist(String artist) { this.artist = artist; }

    public String getPath() { return path; }

    public void setPath(String path) { this.path = path; }

    public String getTitle() { return title; }

    public void setTitle(String title) { this.title = title; }

    public String getName() { return name; }

    public void setName(String name) { this.name = name; }

    public String getSize() { return size; }

    public void setSize(String size) { this.size = size; }

    public String getType() { return type; }

    public void setType(String type) { this.type = type; }

    public int getDuration() { return duration; }

    public void setDuration(int duration) { this.duration = duration; }
}
74 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/camera/ICamera.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.camera;
2 |
3 | import android.graphics.Point;
4 | import android.graphics.SurfaceTexture;
5 | import android.hardware.Camera;
6 |
/**
 * Created by cj on 2017/8/2.
 * Camera control interface abstracting the legacy android.hardware.Camera.
 */
public interface ICamera {
    /**
     * Opens the camera identified by cameraId.
     */
    void open(int cameraId);

    /**
     * Sets the SurfaceTexture the preview frames are rendered into.
     */
    void setPreviewTexture(SurfaceTexture texture);

    /**
     * Applies the capture configuration.
     */
    void setConfig(Config config);

    /** Registers a callback receiving raw preview frames. */
    void setOnPreviewFrameCallback(PreviewFrameCallback callback);

    /** Starts the preview stream. */
    void preview();

    /** @return the preview size chosen by the implementation. */
    Camera.Size getPreviewSize();

    /**
     * Closes the camera.
     *
     * @return success flag (semantics defined by the implementation)
     */
    boolean close();

    /** Desired capture parameters. */
    class Config {
        public float rate = 1.778f; // desired aspect ratio (width/height)
        public int minPreviewWidth; // minimum acceptable preview width
        public int minPictureWidth; // minimum acceptable picture width
    }

    /** Receives raw preview frames as delivered by the camera. */
    interface PreviewFrameCallback {
        void onPreviewFrame(byte[] bytes, int width, int height);
    }
}
49 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/filter/CameraFilter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.filter;
2 |
3 | import android.content.res.Resources;
4 |
5 |
6 | /**
7 | * Description:
8 | */
9 | public class CameraFilter extends OesFilter {
10 |
11 | public CameraFilter(Resources mRes) {
12 | super(mRes);
13 | }
14 |
15 | @Override
16 | public void setFlag(int flag) {
17 | super.setFlag(flag);
18 | float[] coord;
19 | if(getFlag()==1){ //前置摄像头 顺时针旋转90,并上下颠倒
20 | coord=new float[]{
21 | 1.0f, 1.0f,
22 | 0.0f, 1.0f,
23 | 1.0f, 0.0f,
24 | 0.0f, 0.0f,
25 | };
26 | }else{ //后置摄像头 顺时针旋转90度
27 | coord=new float[]{
28 | 0.0f, 1.0f,
29 | 1.0f, 1.0f,
30 | 0.0f, 0.0f,
31 | 1.0f, 0.0f,
32 | };
33 | }
34 | mTexBuffer.clear();
35 | mTexBuffer.put(coord);
36 | mTexBuffer.position(0);
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/filter/NoFilter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.filter;
2 |
3 | import android.content.res.Resources;
4 | import android.opengl.GLES20;
5 | import android.util.Log;
6 |
/**
 * Pass-through filter drawing a 2D texture with the base vertex/fragment
 * shaders loaded from assets.
 */
public class NoFilter extends AFilter {

    public NoFilter(Resources res) {
        super(res);
    }

    @Override
    protected void onCreate() {
        // Debug trace of the GL thread this filter is created on.
        Log.e("thread", "---初始化NoFilter "+Thread.currentThread());
        createProgramByAssetsFile("shader/base_vertex.sh",
                "shader/base_fragment.sh");
    }

    /**
     * Clears the target; the background defaults to black (zero clear color).
     * NOTE(review): these Log.e calls run on every clear — debug leftovers.
     */
    @Override
    protected void onClear() {
        Log.e("thread", "---onClear? 1 "+Thread.currentThread());

        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
        Log.e("thread", "---onClear? 2 "+Thread.currentThread());
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        Log.e("thread", "---onClear? 3 ");
    }

    /** No size-dependent state to update. */
    @Override
    protected void onSizeChanged(int width, int height) {

    }
}
41 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/filter/OesFilter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.filter;
2 |
3 | import android.content.res.Resources;
4 | import android.opengl.GLES11Ext;
5 | import android.opengl.GLES20;
6 |
/**
 * Filter that samples an external OES texture (e.g. camera frames) using the
 * oes_base shaders loaded from assets.
 */
public class OesFilter extends AFilter{

    public OesFilter(Resources mRes) {
        super(mRes);
    }

    @Override
    protected void onCreate() {
        createProgramByAssetsFile("shader/oes_base_vertex.sh","shader/oes_base_fragment.sh");
    }

    /** Binds the OES texture to the unit selected by getTextureType(). */
    @Override
    protected void onBindTexture() {
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0+getTextureType());
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,getTextureId());
        GLES20.glUniform1i(mHTexture,getTextureType());
    }

    /** No size-dependent state to update. */
    @Override
    protected void onSizeChanged(int width, int height) {

    }

}
34 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/filter/ProcessFilter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.filter;
2 |
3 | import android.content.res.Resources;
4 | import android.opengl.GLES20;
5 |
6 | import com.example.cj.videoeditor.utils.EasyGlUtils;
7 | import com.example.cj.videoeditor.utils.MatrixUtils;
8 |
9 |
10 | /**
11 | * draw并不执行父类的draw方法,所以矩阵对它无效
12 | * Description:
13 | */
14 | public class ProcessFilter extends AFilter {
15 |
16 | private AFilter mFilter;
17 | //创建离屏buffer
18 | private int[] fFrame = new int[1];
19 | private int[] fRender = new int[1];
20 | private int[] fTexture = new int[1];
21 |
22 | private int width;
23 | private int height;
24 |
25 |
26 | public ProcessFilter(Resources mRes) {
27 | super(mRes);
28 | mFilter=new NoFilter(mRes);
29 | float[] OM= MatrixUtils.getOriginalMatrix();
30 | MatrixUtils.flip(OM,false,true);//矩阵上下翻转
31 | mFilter.setMatrix(OM);
32 | }
33 |
34 | @Override
35 | protected void initBuffer() {
36 |
37 | }
38 |
39 | @Override
40 | protected void onCreate() {
41 | mFilter.create();
42 | }
43 |
44 | @Override
45 | public int getOutputTexture() {
46 | return fTexture[0];
47 | }
48 |
49 | @Override
50 | public void draw() {
51 | boolean b= GLES20.glIsEnabled(GLES20.GL_CULL_FACE);
52 | if(b){
53 | GLES20.glDisable(GLES20.GL_CULL_FACE);
54 | }
55 | GLES20.glViewport(0,0,width,height);
56 | EasyGlUtils.bindFrameTexture(fFrame[0],fTexture[0]);
57 | GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT,
58 | GLES20.GL_RENDERBUFFER, fRender[0]);
59 | mFilter.setTextureId(getTextureId());
60 | mFilter.draw();
61 | EasyGlUtils.unBindFrameBuffer();
62 | if(b){
63 | GLES20.glEnable(GLES20.GL_CULL_FACE);
64 | }
65 | }
66 |
67 | @Override
68 | protected void onSizeChanged(int width, int height) {
69 | if(this.width!=width&&this.height!=height){
70 | this.width=width;
71 | this.height=height;
72 | mFilter.setSize(width,height);
73 | deleteFrameBuffer();
74 | GLES20.glGenFramebuffers(1,fFrame,0);
75 | GLES20.glGenRenderbuffers(1,fRender,0);
76 | GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER,fRender[0]);
77 | GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16,
78 | width, height);
79 | GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT,
80 | GLES20.GL_RENDERBUFFER, fRender[0]);
81 | GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER,0);
82 | EasyGlUtils.genTexturesWithParameter(1,fTexture,0, GLES20.GL_RGBA,width,height);
83 | }
84 | }
85 |
86 | private void deleteFrameBuffer() {
87 | GLES20.glDeleteRenderbuffers(1, fRender, 0);
88 | GLES20.glDeleteFramebuffers(1, fFrame, 0);
89 | GLES20.glDeleteTextures(1, fTexture, 0);
90 | }
91 |
92 | }
93 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/filter/RotationOESFilter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.filter;
2 |
3 | import android.content.res.Resources;
4 |
/**
 * Created by Administrator on 2017/6/19 0019.
 * OES filter that rotates the video by rewriting texture coordinates.
 */
public class RotationOESFilter extends OesFilter {
    // Supported rotations, in degrees.
    public static final int ROT_0 = 0;
    public static final int ROT_90 = 90;
    public static final int ROT_180 = 180;
    public static final int ROT_270 = 270;

    public RotationOESFilter(Resources mRes) {
        super(mRes);
    }

    /**
     * Applies the rotation by replacing the texture-coordinate buffer.
     *
     * @param rotation one of ROT_0 / ROT_90 / ROT_180 / ROT_270;
     *                 any other value leaves the buffer untouched
     */
    public void setRotation(int rotation) {
        float[] coord;
        switch (rotation) {
            case ROT_0:
                coord = new float[]{
                        0.0f, 0.0f,
                        0.0f, 1.0f,
                        1.0f, 0.0f,
                        1.0f, 1.0f,
                };
                break;
            case ROT_90:
                coord = new float[]{
                        0.0f, 1.0f,
                        1.0f, 1.0f,
                        0.0f, 0.0f,
                        1.0f, 0.0f
                };
                break;
            case ROT_180:
                coord = new float[]{
                        1.0f, 1.0f,
                        1.0f, 0.0f,
                        0.0f, 1.0f,
                        0.0f, 0.0f,
                };
                break;
            case ROT_270:
                coord = new float[]{
                        1.0f, 0.0f,
                        0.0f, 0.0f,
                        1.0f, 1.0f,
                        0.0f, 1.0f
                };
                break;
            default:
                // Unknown rotation: keep the current coordinates.
                return;
        }
        mTexBuffer.clear();
        mTexBuffer.put(coord);
        mTexBuffer.position(0);
    }
}
67 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/filter/WaterMarkFilter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.filter;
2 |
3 | import android.content.res.Resources;
4 | import android.graphics.Bitmap;
5 | import android.opengl.GLES20;
6 | import android.opengl.GLUtils;
7 |
8 | import com.example.cj.videoeditor.utils.MatrixUtils;
9 |
10 | /**
11 | * Created by qqche_000 on 2017/8/20.
12 | * 水印的Filter
13 | */
14 |
15 | public class WaterMarkFilter extends NoFilter{
16 | /**水印的放置位置和宽高*/
17 | private int x,y,w,h;
18 | /**控件的大小*/
19 | private int width,height;
20 | /**水印图片的bitmap*/
21 | private Bitmap mBitmap;
22 | /***/
23 | private NoFilter mFilter;
24 |
25 | public WaterMarkFilter(Resources mRes) {
26 | super(mRes);
27 | mFilter=new NoFilter(mRes){
28 | @Override
29 | protected void onClear() {
30 | }
31 | };
32 | }
33 | public void setWaterMark(Bitmap bitmap){
34 | if(this.mBitmap!=null){
35 | this.mBitmap.recycle();
36 | }
37 | this.mBitmap=bitmap;
38 | }
39 | @Override
40 | public void draw() {
41 | super.draw();
42 | GLES20.glViewport(x,y,w == 0 ? mBitmap.getWidth():w,h==0?mBitmap.getHeight():h);
43 | GLES20.glDisable(GLES20.GL_DEPTH_TEST);
44 | GLES20.glEnable(GLES20.GL_BLEND);
45 | GLES20.glBlendFunc(GLES20.GL_SRC_COLOR, GLES20.GL_DST_ALPHA);
46 | mFilter.draw();
47 | GLES20.glDisable(GLES20.GL_BLEND);
48 | GLES20.glViewport(0,0,width,height);
49 | }
50 |
51 | @Override
52 | protected void onCreate() {
53 | super.onCreate();
54 | mFilter.create();
55 | createTexture();
56 | }
57 | private int[] textures=new int[1];
58 | private void createTexture() {
59 | if(mBitmap!=null){
60 | //生成纹理
61 | GLES20.glGenTextures(1,textures,0);
62 | //生成纹理
63 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,textures[0]);
64 | //设置缩小过滤为使用纹理中坐标最接近的一个像素的颜色作为需要绘制的像素颜色
65 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
66 | //设置放大过滤为使用纹理中坐标最接近的若干个颜色,通过加权平均算法得到需要绘制的像素颜色
67 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
68 | //设置环绕方向S,截取纹理坐标到[1/2n,1-1/2n]。将导致永远不会与border融合
69 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
70 | //设置环绕方向T,截取纹理坐标到[1/2n,1-1/2n]。将导致永远不会与border融合
71 | GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
72 | GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, mBitmap, 0);
73 | //对画面进行矩阵旋转
74 | MatrixUtils.flip(mFilter.getMatrix(),false,true);
75 |
76 | mFilter.setTextureId(textures[0]);
77 | }
78 | }
79 |
80 | @Override
81 | protected void onSizeChanged(int width, int height) {
82 | this.width=width;
83 | this.height=height;
84 | mFilter.setSize(width,height);
85 | }
86 | public void setPosition(int x,int y,int width,int height){
87 | this.x=x;
88 | this.y=y;
89 | this.w=width;
90 | this.h=height;
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/gpufilter/filter/MagicBeautyFilter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.gpufilter.filter;
2 |
3 | import android.opengl.GLES20;
4 |
5 |
6 | import com.example.cj.videoeditor.R;
7 | import com.example.cj.videoeditor.gpufilter.basefilter.GPUImageFilter;
8 | import com.example.cj.videoeditor.gpufilter.utils.OpenGlUtils;
9 |
10 |
11 |
12 | /**
13 | * Created by cj on 2017/5/22.
14 | * 美白的filter
15 | */
16 | public class MagicBeautyFilter extends GPUImageFilter {
17 | private int mSingleStepOffsetLocation;
18 | private int mParamsLocation;
19 | private int mLevel;
20 |
21 | public MagicBeautyFilter(){
22 | super(NO_FILTER_VERTEX_SHADER ,
23 | OpenGlUtils.readShaderFromRawResource(R.raw.beauty));
24 | }
25 |
26 | protected void onInit() {
27 | super.onInit();
28 | mSingleStepOffsetLocation = GLES20.glGetUniformLocation(getProgram(), "singleStepOffset");
29 | mParamsLocation = GLES20.glGetUniformLocation(getProgram(), "params");
30 | setBeautyLevel(3);//beauty Level
31 | }
32 |
33 | private void setTexelSize(final float w, final float h) {
34 | setFloatVec2(mSingleStepOffsetLocation, new float[] {2.0f / w, 2.0f / h});
35 | }
36 |
37 | @Override
38 | public void onInputSizeChanged(final int width, final int height) {
39 | super.onInputSizeChanged(width, height);
40 | setTexelSize(width, height);
41 | }
42 |
43 | public void setBeautyLevel(int level){
44 | mLevel=level;
45 | switch (level) {
46 | case 1:
47 | setFloat(mParamsLocation, 1.0f);
48 | break;
49 | case 2:
50 | setFloat(mParamsLocation, 0.8f);
51 | break;
52 | case 3:
53 | setFloat(mParamsLocation,0.6f);
54 | break;
55 | case 4:
56 | setFloat(mParamsLocation, 0.4f);
57 | break;
58 | case 5:
59 | setFloat(mParamsLocation,0.33f);
60 | break;
61 | default:
62 | break;
63 | }
64 | }
65 | public int getBeautyLevel(){
66 | return mLevel;
67 | }
68 | public void onBeautyLevelChanged(){
69 | setBeautyLevel(3);//beauty level
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/gpufilter/filter/MagicBrannanFilter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.gpufilter.filter;
2 |
3 | import android.opengl.GLES20;
4 |
5 | import com.example.cj.videoeditor.MyApplication;
6 | import com.example.cj.videoeditor.R;
7 | import com.example.cj.videoeditor.gpufilter.basefilter.GPUImageFilter;
8 | import com.example.cj.videoeditor.gpufilter.utils.OpenGlUtils;
9 |
10 |
/**
 * "Brannan" style filter: the raw/brannan fragment shader blends the frame
 * with five helper textures (process/blowout/contrast/luma/screen) loaded
 * from assets and bound to texture units 3..7.
 */
public class MagicBrannanFilter extends GPUImageFilter {
    // GL texture names of the 5 asset images; -1 (NO_TEXTURE) = not loaded yet.
    private int[] inputTextureHandles = {-1,-1,-1,-1,-1};
    // Uniform locations for inputImageTexture2 .. inputImageTexture6.
    private int[] inputTextureUniformLocations = {-1,-1,-1,-1,-1};
    private int mGLStrengthLocation;

    public MagicBrannanFilter(){
        super(NO_FILTER_VERTEX_SHADER, OpenGlUtils.readShaderFromRawResource(R.raw.brannan));
    }

    /** Deletes the helper textures and marks every handle unloaded. */
    protected void onDestroy() {
        super.onDestroy();
        GLES20.glDeleteTextures(inputTextureHandles.length, inputTextureHandles, 0);
        for(int i = 0; i < inputTextureHandles.length; i++)
            inputTextureHandles[i] = -1;
    }

    /** Unbinds the helper textures from units 3+ after the draw call. */
    protected void onDrawArraysAfter(){
        // Loop stops at the first unloaded handle, mirroring onDrawArraysPre.
        for(int i = 0; i < inputTextureHandles.length
                && inputTextureHandles[i] != OpenGlUtils.NO_TEXTURE; i++){
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + (i+3));
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        }
    }

    /** Binds each loaded helper texture to unit i+3 and points its uniform there. */
    protected void onDrawArraysPre(){
        for(int i = 0; i < inputTextureHandles.length
                && inputTextureHandles[i] != OpenGlUtils.NO_TEXTURE; i++){
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + (i+3) );
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, inputTextureHandles[i]);
            GLES20.glUniform1i(inputTextureUniformLocations[i], (i+3));
        }
    }

    /** Caches the uniform locations for the helper textures and the strength knob. */
    protected void onInit(){
        super.onInit();
        for(int i=0; i < inputTextureUniformLocations.length; i++)
            inputTextureUniformLocations[i] = GLES20.glGetUniformLocation(getProgram(), "inputImageTexture"+(2+i));
        mGLStrengthLocation = GLES20.glGetUniformLocation(mGLProgId,
                "strength");
    }

    /** Sets full strength and defers loading of the asset textures via runOnDraw. */
    protected void onInitialized(){
        super.onInitialized();
        setFloat(mGLStrengthLocation, 1.0f);
        runOnDraw(new Runnable(){
            public void run(){
                inputTextureHandles[0] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/brannan_process.png");
                inputTextureHandles[1] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/brannan_blowout.png");
                inputTextureHandles[2] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/brannan_contrast.png");
                inputTextureHandles[3] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/brannan_luma.png");
                inputTextureHandles[4] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/brannan_screen.png");
            }
        });
    }
}
67 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/gpufilter/filter/MagicFreudFilter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.gpufilter.filter;
2 |
3 | import android.opengl.GLES20;
4 |
5 | import com.example.cj.videoeditor.MyApplication;
6 | import com.example.cj.videoeditor.R;
7 | import com.example.cj.videoeditor.gpufilter.basefilter.GPUImageFilter;
8 | import com.example.cj.videoeditor.gpufilter.utils.OpenGlUtils;
9 |
10 |
11 | public class MagicFreudFilter extends GPUImageFilter {
12 | private int mTexelHeightUniformLocation;
13 | private int mTexelWidthUniformLocation;
14 | private int[] inputTextureHandles = {-1};
15 | private int[] inputTextureUniformLocations = {-1};
16 | private int mGLStrengthLocation;
17 |
18 | public MagicFreudFilter(){
19 | super(NO_FILTER_VERTEX_SHADER, OpenGlUtils.readShaderFromRawResource(R.raw.freud));
20 | }
21 |
22 | protected void onDestroy() {
23 | super.onDestroy();
24 | GLES20.glDeleteTextures(1, inputTextureHandles, 0);
25 | for(int i = 0; i < inputTextureHandles.length; i++)
26 | inputTextureHandles[i] = -1;
27 | }
28 |
29 | protected void onDrawArraysAfter(){
30 | for(int i = 0; i < inputTextureHandles.length
31 | && inputTextureHandles[i] != OpenGlUtils.NO_TEXTURE; i++){
32 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + (i+3));
33 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
34 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
35 | }
36 | }
37 |
38 | protected void onDrawArraysPre(){
39 | for(int i = 0; i < inputTextureHandles.length
40 | && inputTextureHandles[i] != OpenGlUtils.NO_TEXTURE; i++){
41 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + (i+3) );
42 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, inputTextureHandles[i]);
43 | GLES20.glUniform1i(inputTextureUniformLocations[i], (i+3));
44 | }
45 | }
46 |
47 | protected void onInit(){
48 | super.onInit();
49 | inputTextureUniformLocations[0] = GLES20.glGetUniformLocation(getProgram(), "inputImageTexture2");
50 |
51 | mTexelWidthUniformLocation = GLES20.glGetUniformLocation(getProgram(), "inputImageTextureWidth");
52 | mTexelHeightUniformLocation = GLES20.glGetUniformLocation(getProgram(), "inputImageTextureHeight");
53 |
54 | mGLStrengthLocation = GLES20.glGetUniformLocation(mGLProgId,
55 | "strength");
56 | }
57 |
58 | protected void onInitialized(){
59 | super.onInitialized();
60 | setFloat(mGLStrengthLocation, 1.0f);
61 | runOnDraw(new Runnable(){
62 | public void run(){
63 | inputTextureHandles[0] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/freud_rand.png");
64 | }
65 | });
66 | }
67 |
68 | public void onInputSizeChanged(final int width, final int height) {
69 | super.onInputSizeChanged(width, height);
70 | runOnDraw(new Runnable() {
71 | @Override
72 | public void run() {
73 | GLES20.glUniform1f(mTexelWidthUniformLocation, (float)width);
74 | GLES20.glUniform1f(mTexelHeightUniformLocation, (float)height);
75 | }
76 | });
77 | }
78 | }
79 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/gpufilter/filter/MagicHefeFilter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.gpufilter.filter;
2 |
3 | import android.opengl.GLES20;
4 |
5 | import com.example.cj.videoeditor.MyApplication;
6 | import com.example.cj.videoeditor.R;
7 | import com.example.cj.videoeditor.gpufilter.basefilter.GPUImageFilter;
8 | import com.example.cj.videoeditor.gpufilter.utils.OpenGlUtils;
9 |
10 |
public class MagicHefeFilter extends GPUImageFilter {
    // Handles for the four lookup textures loaded in onInitialized();
    // -1 (NO_TEXTURE) means not loaded yet.
    private int[] inputTextureHandles = {-1,-1,-1,-1};
    // Locations of the sampler uniforms inputImageTexture2..inputImageTexture5.
    private int[] inputTextureUniformLocations = {-1,-1,-1,-1};
    private int mGLStrengthLocation;

    public MagicHefeFilter(){
        super(NO_FILTER_VERTEX_SHADER, OpenGlUtils.readShaderFromRawResource(R.raw.hefe));
    }

    // Releases the lookup textures and marks their handles invalid.
    protected void onDestroy() {
        super.onDestroy();
        GLES20.glDeleteTextures(inputTextureHandles.length, inputTextureHandles, 0);
        for(int i = 0; i < inputTextureHandles.length; i++)
            inputTextureHandles[i] = -1;
    }

    // Unbinds the lookup textures after drawing and restores texture unit 0.
    // Stops at the first handle still equal to NO_TEXTURE.
    protected void onDrawArraysAfter(){
        for(int i = 0; i < inputTextureHandles.length
                && inputTextureHandles[i] != OpenGlUtils.NO_TEXTURE; i++){
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + (i+3));
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        }
    }

    // Binds lookup texture i to unit i+3 and points its sampler uniform there.
    protected void onDrawArraysPre(){
        for(int i = 0; i < inputTextureHandles.length
                && inputTextureHandles[i] != OpenGlUtils.NO_TEXTURE; i++){
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + (i+3) );
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, inputTextureHandles[i]);
            GLES20.glUniform1i(inputTextureUniformLocations[i], (i+3));
        }
    }

    // Resolves the sampler uniforms (inputImageTexture2..5) and "strength".
    protected void onInit(){
        super.onInit();
        for(int i=0; i < inputTextureUniformLocations.length; i++)
            inputTextureUniformLocations[i] = GLES20.glGetUniformLocation(getProgram(), "inputImageTexture"+(2+i));
        mGLStrengthLocation = GLES20.glGetUniformLocation(mGLProgId,
                "strength");
    }

    // Sets full strength and queues loading of the four hefe lookup textures
    // via runOnDraw, deferring GL texture creation until drawing.
    protected void onInitialized(){
        super.onInitialized();
        setFloat(mGLStrengthLocation, 1.0f);
        runOnDraw(new Runnable(){
            public void run(){
                inputTextureHandles[0] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/edgeburn.png");
                inputTextureHandles[1] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/hefemap.png");
                inputTextureHandles[2] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/hefemetal.png");
                inputTextureHandles[3] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/hefesoftlight.png");
            }
        });
    }
}
66 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/gpufilter/filter/MagicHudsonFilter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.gpufilter.filter;
2 |
3 | import android.opengl.GLES20;
4 |
5 |
6 | import com.example.cj.videoeditor.MyApplication;
7 | import com.example.cj.videoeditor.R;
8 | import com.example.cj.videoeditor.gpufilter.basefilter.GPUImageFilter;
9 | import com.example.cj.videoeditor.gpufilter.utils.OpenGlUtils;
10 |
11 |
12 |
public class MagicHudsonFilter extends GPUImageFilter {
    // Handles for the three lookup textures loaded in onInitialized();
    // -1 (NO_TEXTURE) means not loaded yet.
    private int[] inputTextureHandles = {-1,-1,-1};
    // Locations of the sampler uniforms inputImageTexture2..inputImageTexture4.
    private int[] inputTextureUniformLocations = {-1,-1,-1};
    private int mGLStrengthLocation;

    public MagicHudsonFilter(){
        super(NO_FILTER_VERTEX_SHADER, OpenGlUtils.readShaderFromRawResource(R.raw.hudson));
    }

    // Releases the lookup textures and marks their handles invalid.
    protected void onDestroy() {
        super.onDestroy();
        GLES20.glDeleteTextures(inputTextureHandles.length, inputTextureHandles, 0);
        for(int i = 0; i < inputTextureHandles.length; i++)
            inputTextureHandles[i] = -1;
    }

    // Unbinds the lookup textures after drawing and restores texture unit 0.
    // Stops at the first handle still equal to NO_TEXTURE.
    protected void onDrawArraysAfter(){
        for(int i = 0; i < inputTextureHandles.length
                && inputTextureHandles[i] != OpenGlUtils.NO_TEXTURE; i++){
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + (i+3));
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        }
    }

    // Binds lookup texture i to unit i+3 and points its sampler uniform there.
    protected void onDrawArraysPre(){
        for(int i = 0; i < inputTextureHandles.length
                && inputTextureHandles[i] != OpenGlUtils.NO_TEXTURE; i++){
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + (i+3) );
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, inputTextureHandles[i]);
            GLES20.glUniform1i(inputTextureUniformLocations[i], (i+3));
        }
    }

    // Resolves the sampler uniforms (inputImageTexture2..4) and "strength".
    protected void onInit(){
        super.onInit();
        for(int i=0; i < inputTextureUniformLocations.length; i++)
            inputTextureUniformLocations[i] = GLES20.glGetUniformLocation(getProgram(), "inputImageTexture"+(2+i));
        mGLStrengthLocation = GLES20.glGetUniformLocation(mGLProgId,
                "strength");
    }

    // Sets full strength and queues loading of the three hudson lookup
    // textures via runOnDraw, deferring GL texture creation until drawing.
    protected void onInitialized(){
        super.onInitialized();
        setFloat(mGLStrengthLocation, 1.0f);
        runOnDraw(new Runnable(){
            public void run(){
                inputTextureHandles[0] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/hudsonbackground.png");
                inputTextureHandles[1] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/overlaymap.png");
                inputTextureHandles[2] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/hudsonmap.png");
            }
        });
    }
}
67 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/gpufilter/filter/MagicInkwellFilter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.gpufilter.filter;
2 |
3 | import android.opengl.GLES20;
4 |
5 | import com.example.cj.videoeditor.MyApplication;
6 | import com.example.cj.videoeditor.R;
7 | import com.example.cj.videoeditor.gpufilter.basefilter.GPUImageFilter;
8 | import com.example.cj.videoeditor.gpufilter.utils.OpenGlUtils;
9 |
10 |
11 | public class MagicInkwellFilter extends GPUImageFilter {
12 | private int[] inputTextureHandles = {-1};
13 | private int[] inputTextureUniformLocations = {-1};
14 | private int mGLStrengthLocation;
15 |
16 | public MagicInkwellFilter(){
17 | super(NO_FILTER_VERTEX_SHADER, OpenGlUtils.readShaderFromRawResource(R.raw.inkwell));
18 | }
19 |
20 | public void onDestroy() {
21 | super.onDestroy();
22 | GLES20.glDeleteTextures(1, inputTextureHandles, 0);
23 | for(int i = 0; i < inputTextureHandles.length; i++)
24 | inputTextureHandles[i] = -1;
25 | }
26 |
27 | protected void onDrawArraysAfter(){
28 | for(int i = 0; i < inputTextureHandles.length
29 | && inputTextureHandles[i] != OpenGlUtils.NO_TEXTURE; i++){
30 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + (i+3));
31 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
32 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
33 | }
34 | }
35 |
36 | protected void onDrawArraysPre(){
37 | for(int i = 0; i < inputTextureHandles.length
38 | && inputTextureHandles[i] != OpenGlUtils.NO_TEXTURE; i++){
39 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + (i+3) );
40 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, inputTextureHandles[i]);
41 | GLES20.glUniform1i(inputTextureUniformLocations[i], (i+3));
42 | }
43 | }
44 |
45 | protected void onInit(){
46 | super.onInit();
47 | for(int i=0; i < inputTextureUniformLocations.length; i++)
48 | inputTextureUniformLocations[i] = GLES20.glGetUniformLocation(getProgram(), "inputImageTexture"+(2+i));
49 | mGLStrengthLocation = GLES20.glGetUniformLocation(mGLProgId,
50 | "strength");
51 | }
52 |
53 | protected void onInitialized(){
54 | super.onInitialized();
55 | setFloat(mGLStrengthLocation, 1.0f);
56 | runOnDraw(new Runnable(){
57 | public void run(){
58 | inputTextureHandles[0] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/inkwellmap.png");
59 | }
60 | });
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/gpufilter/filter/MagicN1977Filter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.gpufilter.filter;
2 |
3 | import android.opengl.GLES20;
4 |
5 | import com.example.cj.videoeditor.MyApplication;
6 | import com.example.cj.videoeditor.R;
7 | import com.example.cj.videoeditor.gpufilter.basefilter.GPUImageFilter;
8 | import com.example.cj.videoeditor.gpufilter.utils.OpenGlUtils;
9 |
10 |
11 | public class MagicN1977Filter extends GPUImageFilter {
12 | private int[] inputTextureHandles = {-1,-1};
13 | private int[] inputTextureUniformLocations = {-1,-1};
14 | private int mGLStrengthLocation;
15 |
16 | public MagicN1977Filter(){
17 | super(NO_FILTER_VERTEX_SHADER, OpenGlUtils.readShaderFromRawResource(R.raw.n1977));
18 | }
19 |
20 | protected void onDrawArraysAfter(){
21 | for(int i = 0; i < inputTextureHandles.length
22 | && inputTextureHandles[i] != OpenGlUtils.NO_TEXTURE; i++){
23 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + (i+3));
24 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
25 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
26 | }
27 | }
28 |
29 | protected void onDrawArraysPre(){
30 | for(int i = 0; i < inputTextureHandles.length
31 | && inputTextureHandles[i] != OpenGlUtils.NO_TEXTURE; i++){
32 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + (i+3) );
33 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, inputTextureHandles[i]);
34 | GLES20.glUniform1i(inputTextureUniformLocations[i], (i+3));
35 | }
36 | }
37 |
38 | protected void onInit(){
39 | super.onInit();
40 | for(int i=0; i < inputTextureUniformLocations.length; i++)
41 | inputTextureUniformLocations[i] = GLES20.glGetUniformLocation(getProgram(), "inputImageTexture"+(2+i));
42 | mGLStrengthLocation = GLES20.glGetUniformLocation(mGLProgId,
43 | "strength");
44 | }
45 |
46 | protected void onInitialized(){
47 | super.onInitialized();
48 | setFloat(mGLStrengthLocation, 1.0f);
49 | runOnDraw(new Runnable(){
50 | public void run(){
51 | inputTextureHandles[0] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/n1977map.png");
52 | inputTextureHandles[1] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/n1977blowout.png");
53 | }
54 | });
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/gpufilter/filter/MagicNashvilleFilter.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.gpufilter.filter;
2 |
3 | import android.opengl.GLES20;
4 |
5 | import com.example.cj.videoeditor.MyApplication;
6 | import com.example.cj.videoeditor.R;
7 | import com.example.cj.videoeditor.gpufilter.basefilter.GPUImageFilter;
8 | import com.example.cj.videoeditor.gpufilter.utils.OpenGlUtils;
9 |
10 |
11 | public class MagicNashvilleFilter extends GPUImageFilter {
12 | private int[] inputTextureHandles = {-1};
13 | private int[] inputTextureUniformLocations = {-1};
14 | private int mGLStrengthLocation;
15 |
16 | public MagicNashvilleFilter(){
17 | super(NO_FILTER_VERTEX_SHADER, OpenGlUtils.readShaderFromRawResource(R.raw.nashville));
18 | }
19 |
20 | public void onDestroy() {
21 | super.onDestroy();
22 | GLES20.glDeleteTextures(1, inputTextureHandles, 0);
23 | for(int i = 0; i < inputTextureHandles.length; i++)
24 | inputTextureHandles[i] = -1;
25 | }
26 |
27 | protected void onDrawArraysAfter(){
28 | for(int i = 0; i < inputTextureHandles.length
29 | && inputTextureHandles[i] != OpenGlUtils.NO_TEXTURE; i++){
30 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + (i+3));
31 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
32 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
33 | }
34 | }
35 |
36 | protected void onDrawArraysPre(){
37 | for(int i = 0; i < inputTextureHandles.length
38 | && inputTextureHandles[i] != OpenGlUtils.NO_TEXTURE; i++){
39 | GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + (i+3) );
40 | GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, inputTextureHandles[i]);
41 | GLES20.glUniform1i(inputTextureUniformLocations[i], (i+3));
42 | }
43 | }
44 |
45 | public void onInit(){
46 | super.onInit();
47 | for(int i=0; i < inputTextureUniformLocations.length; i++)
48 | inputTextureUniformLocations[i] = GLES20.glGetUniformLocation(getProgram(), "inputImageTexture"+(2+i));
49 | mGLStrengthLocation = GLES20.glGetUniformLocation(mGLProgId,
50 | "strength");
51 | }
52 |
53 | public void onInitialized(){
54 | super.onInitialized();
55 | setFloat(mGLStrengthLocation, 1.0f);
56 | runOnDraw(new Runnable(){
57 | public void run(){
58 | inputTextureHandles[0] = OpenGlUtils.loadTexture(MyApplication.getContext(), "filter/nashvillemap.png");
59 | }
60 | });
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/gpufilter/helper/MagicFilterFactory.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.gpufilter.helper;
2 |
3 |
4 | import com.example.cj.videoeditor.gpufilter.basefilter.GPUImageFilter;
5 | import com.example.cj.videoeditor.gpufilter.filter.MagicAntiqueFilter;
6 | import com.example.cj.videoeditor.gpufilter.filter.MagicBrannanFilter;
7 | import com.example.cj.videoeditor.gpufilter.filter.MagicCoolFilter;
8 | import com.example.cj.videoeditor.gpufilter.filter.MagicFreudFilter;
9 | import com.example.cj.videoeditor.gpufilter.filter.MagicHefeFilter;
10 | import com.example.cj.videoeditor.gpufilter.filter.MagicHudsonFilter;
11 | import com.example.cj.videoeditor.gpufilter.filter.MagicInkwellFilter;
12 | import com.example.cj.videoeditor.gpufilter.filter.MagicN1977Filter;
13 | import com.example.cj.videoeditor.gpufilter.filter.MagicNashvilleFilter;
14 |
15 | public class MagicFilterFactory {
16 |
17 | private static MagicFilterType filterType = MagicFilterType.NONE;
18 |
19 | public static GPUImageFilter initFilters(MagicFilterType type) {
20 | if (type == null) {
21 | return null;
22 | }
23 | filterType = type;
24 | switch (type) {
25 | case ANTIQUE:
26 | return new MagicAntiqueFilter();
27 | case BRANNAN:
28 | return new MagicBrannanFilter();
29 | case FREUD:
30 | return new MagicFreudFilter();
31 | case HEFE:
32 | return new MagicHefeFilter();
33 | case HUDSON:
34 | return new MagicHudsonFilter();
35 | case INKWELL:
36 | return new MagicInkwellFilter();
37 | case N1977:
38 | return new MagicN1977Filter();
39 | case NASHVILLE:
40 | return new MagicNashvilleFilter();
41 | case COOL:
42 | return new MagicCoolFilter();
43 | case WARM:
44 | return new MagicWarmFilter();
45 | default:
46 | return null;
47 | }
48 | }
49 |
50 | public MagicFilterType getCurrentFilterType() {
51 | return filterType;
52 | }
53 |
54 | private static class MagicWarmFilter extends GPUImageFilter {
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/gpufilter/helper/MagicFilterType.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.gpufilter.helper;
2 |
/**
 * Created by why8222 on 2016/2/25.
 * Identifiers for the filters that MagicFilterFactory can create.
 * NONE means "no filter selected". Do not reorder: ordinal values may be
 * relied on by callers — TODO confirm before any reordering.
 */
public enum MagicFilterType {
    NONE,
    WARM,
    ANTIQUE,
    COOL,
    BRANNAN,
    FREUD,
    HEFE,
    HUDSON,
    INKWELL,
    N1977,
    NASHVILLE,
}
19 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/gpufilter/utils/Rotation.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2012 CyberAgent
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.example.cj.videoeditor.gpufilter.utils;
18 |
public enum Rotation {
    NORMAL, ROTATION_90, ROTATION_180, ROTATION_270;

    /**
     * Retrieves the int representation of this Rotation.
     *
     * @return 0, 90, 180 or 270
     */
    public int asInt() {
        if (this == NORMAL) {
            return 0;
        }
        if (this == ROTATION_90) {
            return 90;
        }
        if (this == ROTATION_180) {
            return 180;
        }
        if (this == ROTATION_270) {
            return 270;
        }
        // Unreachable for the four declared constants.
        throw new IllegalStateException("Unknown Rotation!");
    }

    /**
     * Creates a Rotation from an integer. Accepts 0, 90, 180, 270 and 360
     * (360 is treated as NORMAL).
     *
     * @param rotation 0, 90, 180, 270 or 360
     * @return the matching Rotation
     */
    public static Rotation fromInt(int rotation) {
        if (rotation == 0 || rotation == 360) {
            return NORMAL;
        }
        if (rotation == 90) {
            return ROTATION_90;
        }
        if (rotation == 180) {
            return ROTATION_180;
        }
        if (rotation == 270) {
            return ROTATION_270;
        }
        throw new IllegalStateException(
                rotation + " is an unknown rotation. Needs to be either 0, 90, 180 or 270!");
    }
}
55 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/gpufilter/utils/TextureRotationUtil.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2012 CyberAgent
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.example.cj.videoeditor.gpufilter.utils;
18 |
19 | public class TextureRotationUtil {
20 |
21 | public static final float TEXTURE_NO_ROTATION[] = {
22 | 0.0f, 1.0f,
23 | 1.0f, 1.0f,
24 | 0.0f, 0.0f,
25 | 1.0f, 0.0f,
26 | };
27 |
28 | public static final float TEXTURE_ROTATED_90[] = {
29 | 1.0f, 1.0f,
30 | 1.0f, 0.0f,
31 | 0.0f, 1.0f,
32 | 0.0f, 0.0f,
33 | };
34 | public static final float TEXTURE_ROTATED_180[] = {
35 | 1.0f, 0.0f,
36 | 0.0f, 0.0f,
37 | 1.0f, 1.0f,
38 | 0.0f, 1.0f,
39 | };
40 | public static final float TEXTURE_ROTATED_270[] = {
41 | 0.0f, 0.0f,
42 | 0.0f, 1.0f,
43 | 1.0f, 0.0f,
44 | 1.0f, 1.0f,
45 | };
46 |
47 | public static final float CUBE[] = {
48 | -1.0f, -1.0f,
49 | 1.0f, -1.0f,
50 | -1.0f, 1.0f,
51 | 1.0f, 1.0f,
52 | };
53 |
54 | private TextureRotationUtil() {}
55 |
56 | public static float[] getRotation(final Rotation rotation, final boolean flipHorizontal,
57 | final boolean flipVertical) {
58 | float[] rotatedTex;
59 | switch (rotation) {
60 | case ROTATION_90:
61 | rotatedTex = TEXTURE_ROTATED_90;
62 | break;
63 | case ROTATION_180:
64 | rotatedTex = TEXTURE_ROTATED_180;
65 | break;
66 | case ROTATION_270:
67 | rotatedTex = TEXTURE_ROTATED_270;
68 | break;
69 | case NORMAL:
70 | default:
71 | rotatedTex = TEXTURE_NO_ROTATION;
72 | break;
73 | }
74 | if (flipHorizontal) {
75 | rotatedTex = new float[]{
76 | flip(rotatedTex[0]), rotatedTex[1],
77 | flip(rotatedTex[2]), rotatedTex[3],
78 | flip(rotatedTex[4]), rotatedTex[5],
79 | flip(rotatedTex[6]), rotatedTex[7],
80 | };
81 | }
82 | if (flipVertical) {
83 | rotatedTex = new float[]{
84 | rotatedTex[0], flip(rotatedTex[1]),
85 | rotatedTex[2], flip(rotatedTex[3]),
86 | rotatedTex[4], flip(rotatedTex[5]),
87 | rotatedTex[6], flip(rotatedTex[7]),
88 | };
89 | }
90 | return rotatedTex;
91 | }
92 |
93 |
94 | private static float flip(final float i) {
95 | if (i == 0.0f) {
96 | return 1.0f;
97 | }
98 | return 0.0f;
99 | }
100 | }
101 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/jni/AudioJniUtils.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.jni;
2 |
3 | /**
4 | * Created by cj on 2017/10/11.
5 | * desc
6 | */
7 |
// JNI bridge to the project's native audio code
// (implemented in app/src/main/cpp/native-lib.cpp).
public class AudioJniUtils {


    // Load the shared library that provides the native methods below.
    static {
        System.loadLibrary("native-lib");
    }
    // Mixes two audio byte buffers into dst with per-source volumes.
    // Buffer format (PCM layout, sample size) is defined by the native side —
    // TODO confirm in native-lib.cpp.
    public static native byte[] audioMix(byte[] sourceA,byte[] sourceB,byte[] dst,float firstVol , float secondVol);

    // Passes a string to native code and returns the native result.
    public static native String putString(String info);


}
20 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/media/MediaCodecInfo.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.media;
2 |
3 | import android.media.MediaExtractor;
4 |
/**
 * Created by cj on 2017/7/11.
 * Info holder for audio decoding: bundles the audio path, the audio's
 * MediaExtractor, the cut start point (cutPoint) and the cut length
 * (cutDuration).
 */

public class MediaCodecInfo {
    public String path;               // source audio file path
    public MediaExtractor extractor;  // extractor for this audio source
    public int cutPoint;              // start point of the cut segment (ms — TODO confirm unit)
    public int cutDuration;           // length of the cut segment (same unit as cutPoint)
    public int duration;              // total duration of the audio
}
19 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/media/VideoInfo.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.media;
2 |
3 | import java.io.Serializable;
4 |
/**
 * Created by Administrator on 2017/6/29 0029.
 * Video metadata bean (Serializable so it can be passed between activities).
 */

public class VideoInfo implements Serializable{
    public String path;//file path
    public int rotation;//rotation angle in degrees
    public int width;//width
    public int height;//height
    public int bitRate;//bit rate
    public int frameRate;//frame rate
    public int frameInterval;//key-frame interval
    public int duration;//duration

    public int expWidth;//expected (target) width
    public int expHeight;//expected (target) height
    public int cutPoint;//start point of the cut
    public int cutDuration;//length of the cut
}
25 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/utils/DateUtils.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.utils;
2 |
3 | import java.text.DateFormat;
4 | import java.text.SimpleDateFormat;
5 | import java.util.Date;
6 |
7 | /**
8 | * Created by cj on 2017/6/28.
9 | * desc
10 | */
11 |
public class DateUtils {

    /**
     * Formats a duration given in milliseconds as "mm:ss".
     *
     * Fixed: the previous SimpleDateFormat-based implementation formatted the
     * duration as an epoch Date, so on devices in time zones with a
     * non-whole-hour offset (e.g. UTC+5:30) the minutes were skewed by the
     * zone offset. Computing the fields arithmetically is time-zone free.
     * Minutes wrap at 60, matching the old "mm" field behaviour.
     *
     * @param duration duration in milliseconds (non-negative)
     * @return zero-padded "mm:ss" string
     */
    public static String covertToDate(long duration){
        long totalSeconds = duration / 1000;
        long minutes = (totalSeconds / 60) % 60;  // wrap like the old "mm" field
        long seconds = totalSeconds % 60;
        return (minutes < 10 ? "0" + minutes : String.valueOf(minutes))
                + ":"
                + (seconds < 10 ? "0" + seconds : String.valueOf(seconds));
    }
}
20 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/utils/DensityUtils.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.utils;
2 |
3 | import android.content.Context;
4 | import android.util.DisplayMetrics;
5 | import android.util.TypedValue;
6 | import android.view.WindowManager;
7 |
8 | public class DensityUtils {
9 |
10 | private DensityUtils() {
11 | throw new UnsupportedOperationException("cannot be instantiated");
12 | }
13 |
14 | /**
15 | * dp转px
16 | */
17 | public static int dp2px(Context context, float dpVal) {
18 | return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,
19 | dpVal, context.getResources().getDisplayMetrics());
20 | }
21 |
22 | /**
23 | * sp转px
24 | */
25 | public static int sp2px(Context context, float spVal) {
26 | return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_SP,
27 | spVal, context.getResources().getDisplayMetrics());
28 | }
29 |
30 | /**
31 | * px转dp
32 | */
33 | public static float px2dp(Context context, float pxVal) {
34 | final float scale = context.getResources().getDisplayMetrics().density;
35 | return (pxVal / scale);
36 | }
37 |
38 | /**
39 | * px转sp
40 | */
41 | public static float px2sp(Context context, float pxVal) {
42 | return (pxVal / context.getResources().getDisplayMetrics().scaledDensity);
43 | }
44 | /**
45 | * 获取屏幕宽度
46 | * */
47 | public static int getScreenWidth(Context context) {
48 | int width = 0;
49 | WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
50 | DisplayMetrics dm = new DisplayMetrics();
51 | wm.getDefaultDisplay().getMetrics(dm);
52 | width = dm.widthPixels;
53 | return width;
54 | }
55 |
56 | }
57 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/utils/EasyGlUtils.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.utils;
2 |
3 | import android.opengl.GLES20;
4 |
5 | /**
6 | * Description:
7 | */
// Small helpers for common GLES20 texture/framebuffer setup.
// Declared as an empty enum so it cannot be instantiated.
public enum EasyGlUtils {
    ;
    EasyGlUtils(){

    }

    // Applies default sampling/wrap parameters to the currently bound
    // GL_TEXTURE_2D texture.
    public static void useTexParameter(){
        // Minification: GL_NEAREST — use the single closest texel's color.
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        // Magnification: GL_LINEAR — weighted average of the nearest texels.
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        // Wrap S: clamp coordinates to [1/2n, 1-1/2n]; never samples the border.
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        // Wrap T: clamp coordinates to [1/2n, 1-1/2n]; never samples the border.
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    }

    // Same as above but with caller-supplied wrap and filter parameters.
    public static void useTexParameter(int gl_wrap_s,int gl_wrap_t,int gl_min_filter,
                                       int gl_mag_filter){
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,gl_wrap_s);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,gl_wrap_t);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,gl_min_filter);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,gl_mag_filter);
    }

    // Generates `size` textures into textures[start..], allocates width x height
    // storage for each with the given format, and applies default parameters.
    // Leaves texture 0 bound afterwards.
    // Note: iterates textures[0..size-1] regardless of `start` — assumes
    // callers pass start == 0; TODO confirm.
    public static void genTexturesWithParameter(int size,int[] textures,int start,
                                                int gl_format,int width,int height){
        GLES20.glGenTextures(size, textures, start);
        for (int i = 0; i < size; i++) {
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0,gl_format, width, height,
                    0, gl_format, GLES20.GL_UNSIGNED_BYTE, null);
            useTexParameter();
        }
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,0);
    }

    // Binds the framebuffer and attaches textureId as its color attachment 0.
    public static void bindFrameTexture(int frameBufferId,int textureId){
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
                GLES20.GL_TEXTURE_2D, textureId, 0);
    }

    // Rebinds the default (screen) framebuffer.
    public static void unBindFrameBuffer(){
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,0);
    }

}
56 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/utils/OpenGlUtils.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.utils;
2 |
3 | import android.content.res.Resources;
4 |
5 | import com.example.cj.videoeditor.MyApplication;
6 |
7 | import java.io.InputStream;
8 |
9 | /**
10 | * Created by qqche_000 on 2018/6/3.
11 | * 用于OpenGl的工具类
12 | */
13 |
14 | public class OpenGlUtils {
15 | //通过资源路径加载shader脚本文件
16 | public static String uRes(String path) {
17 | Resources resources = MyApplication.getContext().getResources();
18 | StringBuilder result = new StringBuilder();
19 | try {
20 | InputStream is = resources.getAssets().open(path);
21 | int ch;
22 | byte[] buffer = new byte[1024];
23 | while (-1 != (ch = is.read(buffer))) {
24 | result.append(new String(buffer, 0, ch));
25 | }
26 | } catch (Exception e) {
27 | return null;
28 | }
29 | return result.toString().replaceAll("\\r\\n", "\n");
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/utils/TimeFormatUtils.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.utils;
2 |
3 | /**
4 | * Created by Administrator on 2017/6/30 0030.
5 | */
6 |
public class TimeFormatUtils {
    /**
     * Formats milliseconds as "HH:mm:ss".
     *
     * Fixed: the original ternaries appended the ":" separator only on the
     * zero-padded (&lt; 10) branch, so any field &gt;= 10 lost its separator,
     * e.g. 10 hours produced "1000:01" instead of "10:00:01".
     *
     * @param millisec duration in milliseconds
     * @return zero-padded "HH:mm:ss" string
     */
    public static String formatMillisec(int millisec){
        int sec = millisec / 1000;
        int min = sec / 60;
        int hour = min / 60;
        min = min % 60;
        sec = sec % 60;
        return pad(hour) + ":" + pad(min) + ":" + pad(sec);
    }

    /**
     * Formats milliseconds as "mm:ss" (minutes are not wrapped at 60,
     * matching the original behaviour).
     *
     * @param millisec duration in milliseconds
     * @return zero-padded "mm:ss" string
     */
    public static String formatMillisecWithoutHours(int millisec){
        int sec = millisec / 1000;
        int min = sec / 60;
        sec = sec % 60;
        return pad(min) + ":" + pad(sec);
    }

    // Zero-pads a non-negative field to at least two digits.
    private static String pad(int value){
        return value >= 10 ? String.valueOf(value) : "0" + value;
    }
}
41 |
--------------------------------------------------------------------------------
/app/src/main/java/com/example/cj/videoeditor/widget/FocusImageView.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor.widget;
2 |
3 |
4 | import android.content.Context;
5 | import android.content.res.TypedArray;
6 | import android.graphics.Point;
7 | import android.os.Handler;
8 | import android.util.AttributeSet;
9 | import android.view.View;
10 | import android.view.animation.Animation;
11 | import android.view.animation.AnimationUtils;
12 | import android.widget.ImageView;
13 | import android.widget.RelativeLayout;
14 |
15 | import com.example.cj.videoeditor.R;
16 |
17 |
18 | /**
19 | *create by GH 相机对焦控件
20 | */
21 | public class FocusImageView extends ImageView {
22 | public final static String TAG = "FocusImageView";
23 | private static final int NO_ID = -1;
24 | private int mFocusImg = NO_ID;
25 | private int mFocusSucceedImg = NO_ID;
26 | private int mFocusFailedImg = NO_ID;
27 | private Animation mAnimation;
28 | private Handler mHandler;
29 |
30 | public FocusImageView(Context context) {
31 | super(context);
32 | mAnimation = AnimationUtils.loadAnimation(getContext(), R.anim.focusview_show);
33 | setVisibility(View.GONE);
34 | mHandler = new Handler();
35 | }
36 |
37 | public FocusImageView(Context context, AttributeSet attrs) {
38 | super(context, attrs);
39 | mAnimation = AnimationUtils.loadAnimation(getContext(), R.anim.focusview_show);
40 | mHandler = new Handler();
41 |
42 | TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.FocusImageView);
43 | mFocusImg = a.getResourceId(R.styleable.FocusImageView_focus_focusing_id, NO_ID);
44 | mFocusSucceedImg = a.getResourceId(R.styleable.FocusImageView_focus_success_id, NO_ID);
45 | mFocusFailedImg = a.getResourceId(R.styleable.FocusImageView_focus_fail_id, NO_ID);
46 | a.recycle();
47 |
48 | }
49 |
50 | public void startFocus(Point point) {
51 | RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) getLayoutParams();
52 | params.topMargin = point.y - getHeight() / 2;
53 | params.leftMargin = point.x - getWidth() / 2;
54 | setLayoutParams(params);
55 | setVisibility(View.VISIBLE);
56 | setImageResource(mFocusImg);
57 | startAnimation(mAnimation);
58 | mHandler.postDelayed(new Runnable() {
59 | @Override
60 | public void run() {
61 | setVisibility(View.GONE);
62 | }
63 | }, 3500);
64 | }
65 |
66 | public void onFocusSuccess() {
67 | setImageResource(mFocusSucceedImg);
68 | mHandler.removeCallbacks(null, null);
69 | mHandler.postDelayed(new Runnable() {
70 | @Override
71 | public void run() {
72 | setVisibility(View.GONE);
73 | }
74 | }, 1000);
75 |
76 | }
77 |
78 | public void onFocusFailed() {
79 | setImageResource(mFocusFailedImg);
80 | mHandler.removeCallbacks(null, null);
81 | mHandler.postDelayed(new Runnable() {
82 | @Override
83 | public void run() {
84 | setVisibility(View.GONE);
85 | }
86 | }, 1000);
87 | }
88 |
89 | public void setFocusImg(int focus) {
90 | this.mFocusImg = focus;
91 | }
92 |
93 | public void setFocusSucceedImg(int focusSucceed) {
94 | this.mFocusSucceedImg = focusSucceed;
95 | }
96 | }
97 |
--------------------------------------------------------------------------------
/app/src/main/res/anim/focusview_show.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
13 |
17 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/app_back_selector.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/bg_circle_yellow.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
7 |
8 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/bt_capture.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | -
4 |
5 |
-
6 |
7 |
8 |
9 |
10 | -
12 |
13 |
14 |
15 |
16 |
17 |
18 | -
19 |
20 |
-
21 |
22 |
23 |
24 |
25 | -
27 |
28 |
29 |
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/btn_camera_beauty.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/btn_camera_filter.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/selector_beauty.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/selector_btn_image_choose.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/selector_rewardcamera.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/selector_title_bar_btn.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | -
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/setting_text_color_selector.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/t_progressbar.xml:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/toast_shape.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_audio.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
12 |
19 |
26 |
33 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_audio_mix.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
15 |
22 |
28 |
35 |
36 |
43 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_audio_preview.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
11 |
12 |
15 |
16 |
21 |
22 |
30 |
31 |
32 |
39 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_connect.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
12 |
13 |
14 |
17 |
18 |
25 |
31 |
32 |
39 |
40 |
41 |
42 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
15 |
16 |
23 |
30 |
37 |
38 |
45 |
46 |
47 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_media_select_video.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
14 |
15 |
22 |
23 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_music_select.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
11 |
18 |
24 |
25 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_recorde.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
11 |
14 |
22 |
29 |
36 |
42 |
49 |
50 |
51 |
59 |
60 |
61 |
62 |
63 |
64 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_video_connect.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
16 |
17 |
24 |
25 |
33 |
34 |
41 |
42 |
49 |
50 |
51 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_video_preview.xml:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
11 |
12 |
15 |
16 |
21 |
22 |
30 |
31 |
32 |
39 |
45 |
51 |
58 |
59 |
60 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_video_select.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
11 |
17 |
25 |
26 |
27 |
28 |
35 |
36 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/item_media_audio.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
13 |
14 |
24 |
29 |
38 |
47 |
48 |
49 |
50 |
51 |
52 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/item_media_video.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
7 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
29 |
30 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/item_video_select.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
11 |
22 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/t_loading.xml:
--------------------------------------------------------------------------------
1 |
2 |
10 |
11 |
19 |
20 |
27 |
28 |
40 |
41 |
42 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/btn_back_n.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/btn_back_n.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/btn_back_p.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/btn_back_p.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/btn_back_white_n.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/btn_back_white_n.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/editor_beauty_normal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/editor_beauty_normal.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/editor_beauty_pressed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/editor_beauty_pressed.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/editor_img_confirm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/editor_img_confirm.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/editor_img_def_video.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/editor_img_def_video.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/editor_record_back.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/editor_record_back.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/editor_record_close.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/editor_record_close.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/focus_focus_failed.9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/focus_focus_failed.9.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/focus_focused.9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/focus_focused.9.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/focus_focusing.9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/focus_focusing.9.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_camera_beauty.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/ic_camera_beauty.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_camera_beauty_pressed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/ic_camera_beauty_pressed.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_camera_filter.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/ic_camera_filter.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/icon_choice_nor.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/icon_choice_nor.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/icon_choice_selected.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/icon_choice_selected.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/img_aac.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/img_aac.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/img_mp3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/img_mp3.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/img_wma.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/img_wma.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/record_camera_switch_normal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/record_camera_switch_normal.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/record_camera_switch_press.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/record_camera_switch_press.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/t_loading_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/t_loading_2.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/watermark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxhdpi/watermark.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/raw/amaro.glsl:
--------------------------------------------------------------------------------
// Amaro-style filter: remaps each RGB channel through blowout/overlay lookup
// textures, tone-maps each channel from its own strip of the map texture,
// then blends the result with the original by `strength`.
precision mediump float;

varying mediump vec2 textureCoordinate;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2; //blowout;
uniform sampler2D inputImageTexture3; //overlay;
uniform sampler2D inputImageTexture4; //map

uniform float strength; // 0.0 = original image, 1.0 = full filter effect

void main()
{
    vec4 originColor = texture2D(inputImageTexture, textureCoordinate);
    vec4 texel = texture2D(inputImageTexture, textureCoordinate); // same sample as originColor; kept for clarity
    vec3 bbTexel = texture2D(inputImageTexture2, textureCoordinate).rgb;

    // 2D overlay lookup per channel: blowout value picks the x coordinate,
    // the current channel value picks the y coordinate.
    texel.r = texture2D(inputImageTexture3, vec2(bbTexel.r, texel.r)).r;
    texel.g = texture2D(inputImageTexture3, vec2(bbTexel.g, texel.g)).g;
    texel.b = texture2D(inputImageTexture3, vec2(bbTexel.b, texel.b)).b;

    // Tone-map each channel from its own horizontal row of the map texture
    // (rows at y = 1/6, 1/2, 5/6).
    vec4 mapped;
    mapped.r = texture2D(inputImageTexture4, vec2(texel.r, .16666)).r;
    mapped.g = texture2D(inputImageTexture4, vec2(texel.g, .5)).g;
    mapped.b = texture2D(inputImageTexture4, vec2(texel.b, .83333)).b;
    mapped.a = 1.0;

    // Blend the filtered color with the untouched original.
    mapped.rgb = mix(originColor.rgb, mapped.rgb, strength);

    gl_FragColor = mapped;
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/antique.glsl:
--------------------------------------------------------------------------------
// Antique filter: curve lookup per channel, partial desaturation toward the
// channel mean, a second curve pass through the alpha channel of `curve`,
// then a warm multiply-style overlay tint.
varying highp vec2 textureCoordinate;

precision highp float;

uniform sampler2D inputImageTexture;
uniform sampler2D curve; // 1D lookup table texture sampled along x

void main()
{
    highp vec4 textureColor;
    highp vec4 textureColorRes;
    // Saturation kept after desaturation step (65%).
    highp float satVal = 65.0 / 100.0;

    float xCoordinate = textureCoordinate.x;
    float yCoordinate = textureCoordinate.y;

    highp float redCurveValue;
    highp float greenCurveValue;
    highp float blueCurveValue;

    textureColor = texture2D( inputImageTexture, vec2(xCoordinate, yCoordinate));
    textureColorRes = textureColor;

    // Pass 1: per-channel curve lookup (r/g/b rows of the LUT).
    redCurveValue = texture2D(curve, vec2(textureColor.r, 0.0)).r;
    greenCurveValue = texture2D(curve, vec2(textureColor.g, 0.0)).g;
    blueCurveValue = texture2D(curve, vec2(textureColor.b, 0.0)).b;

    // Mean of the three curved channels, used as the grey reference.
    highp float G = (redCurveValue + greenCurveValue + blueCurveValue);
    G = G / 3.0;

    // Blend each channel toward the grey reference by (1 - satVal).
    redCurveValue = ((1.0 - satVal) * G + satVal * redCurveValue);
    greenCurveValue = ((1.0 - satVal) * G + satVal * greenCurveValue);
    blueCurveValue = ((1.0 - satVal) * G + satVal * blueCurveValue);
    // Manual clamp to [0, 1] per channel.
    redCurveValue = (((redCurveValue) > (1.0)) ? (1.0) : (((redCurveValue) < (0.0)) ? (0.0) : (redCurveValue)));
    greenCurveValue = (((greenCurveValue) > (1.0)) ? (1.0) : (((greenCurveValue) < (0.0)) ? (0.0) : (greenCurveValue)));
    blueCurveValue = (((blueCurveValue) > (1.0)) ? (1.0) : (((blueCurveValue) < (0.0)) ? (0.0) : (blueCurveValue)));

    // Pass 2: second curve stored in the LUT's alpha channel.
    redCurveValue = texture2D(curve, vec2(redCurveValue, 0.0)).a;
    greenCurveValue = texture2D(curve, vec2(greenCurveValue, 0.0)).a;
    blueCurveValue = texture2D(curve, vec2(blueCurveValue, 0.0)).a;

    highp vec4 base = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);
    // Warm cream tint (250, 227, 193) applied as a multiply...
    highp vec4 overlayer = vec4(250.0/255.0, 227.0/255.0, 193.0/255.0, 1.0);

    textureColor = overlayer * base;
    // ...mixed back toward the untinted base at ~85% strength.
    base = (textureColor - base) * 0.850980 + base;
    textureColor = base;

    gl_FragColor = vec4(textureColor.r, textureColor.g, textureColor.b, 1.0);
}
51 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/blackcat.glsl:
--------------------------------------------------------------------------------
// "Blackcat" filter: curve-remaps RGB, boosts saturation in HSV space for
// selected hue ranges (with linear falloff at the range edges), then applies
// two more curve lookups before output.
varying highp vec2 textureCoordinate;
precision highp float;

uniform sampler2D inputImageTexture;
uniform sampler2D curve; // multi-row lookup texture; row selected by the y coordinate

// Branchless RGB -> HSV conversion (standard mix/step formulation).
vec3 rgb2hsv(vec3 c)
{
    vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
    vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
    vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));

    float d = q.x - min(q.w, q.y);
    float e = 1.0e-10; // epsilon guards against division by zero
    return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}

// HSV -> RGB conversion (inverse of the above).
vec3 hsv2rgb(vec3 c) {
    vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
    vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
    return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}

void main()
{
    float GreyVal;          // unused in this shader
    lowp vec4 textureColor;
    lowp vec4 textureColorOri; // unused in this shader
    float xCoordinate = textureCoordinate.x;
    float yCoordinate = textureCoordinate.y;

    highp float redCurveValue;
    highp float greenCurveValue;
    highp float blueCurveValue;
    textureColor = texture2D( inputImageTexture, vec2(xCoordinate, yCoordinate));
    // step1 curve: per-channel lookup in row y = 0.
    redCurveValue = texture2D(curve, vec2(textureColor.r, 0.0)).r;
    greenCurveValue = texture2D(curve, vec2(textureColor.g, 0.0)).g;
    blueCurveValue = texture2D(curve, vec2(textureColor.b, 0.0)).b;


    //textureColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);
    vec3 tColor = vec3(redCurveValue, greenCurveValue, blueCurveValue);
    tColor = rgb2hsv(tColor);

    // Global 20% saturation boost (tColor: x=hue, y=saturation, z=value).
    tColor.g = tColor.g * 1.2;

    float dStrength = 1.0;     // falloff factor, 0..1
    float dSatStrength = 0.3;  // extra saturation for the targeted hue ranges

    float dGap = 0.0;

    // Full extra saturation for hues in [0, 0.417) and (0.958, 1.0]...
    if( tColor.r >= 0.0 && tColor.r < 0.417)
    {
        tColor.g = tColor.g + (tColor.g * dSatStrength);
    }
    else if( tColor.r > 0.958 && tColor.r <= 1.0)
    {
        tColor.g = tColor.g + (tColor.g * dSatStrength);
    }
    // ...and linear ramp-in across the adjacent transition bands.
    else if( tColor.r >= 0.875 && tColor.r <= 0.958)
    {
        dGap = abs(tColor.r - 0.875);
        dStrength = (dGap / 0.0833);

        tColor.g = tColor.g + (tColor.g * dSatStrength * dStrength);
    }
    else if( tColor.r >= 0.0417 && tColor.r <= 0.125)
    {
        dGap = abs(tColor.r - 0.125);
        dStrength = (dGap / 0.0833);

        tColor.g = tColor.g + (tColor.g * dSatStrength * dStrength);
    }

    tColor = hsv2rgb(tColor);
    tColor = clamp(tColor, 0.0, 1.0);

    // Second curve pass in row y = 1.0. NOTE(review): all three channels read
    // the LUT's .r component here — presumably intentional (a shared curve in
    // the red channel of that row), but verify against the LUT asset.
    redCurveValue = texture2D(curve, vec2(tColor.r, 1.0)).r;
    greenCurveValue = texture2D(curve, vec2(tColor.g, 1.0)).r;
    blueCurveValue = texture2D(curve, vec2(tColor.b, 1.0)).r;

    // Third curve pass through the same row's green channel.
    redCurveValue = texture2D(curve, vec2(redCurveValue, 1.0)).g;
    greenCurveValue = texture2D(curve, vec2(greenCurveValue, 1.0)).g;
    blueCurveValue = texture2D(curve, vec2(blueCurveValue, 1.0)).g;

    textureColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);

    gl_FragColor = vec4(textureColor.r, textureColor.g, textureColor.b, 1.0);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/brannan.glsl:
--------------------------------------------------------------------------------
// Brannan filter: chained per-channel LUT lookups (process -> blowout with
// radial vignette blend -> contrast -> luma -> screen), with a saturation
// matrix in between, blended with the original by `strength`.
precision mediump float;

varying mediump vec2 textureCoordinate;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2; //process
uniform sampler2D inputImageTexture3; //blowout
uniform sampler2D inputImageTexture4; //contrast
uniform sampler2D inputImageTexture5; //luma
uniform sampler2D inputImageTexture6; //screen

// Saturation-boost matrix applied after the process LUT.
mat3 saturateMatrix = mat3(
    1.105150, -0.044850,-0.046000,
    -0.088050,1.061950,-0.089200,
    -0.017100,-0.017100,1.132900);

// Luminance weights used to index the luma LUT.
vec3 luma = vec3(.3, .59, .11);

uniform float strength; // 0.0 = original image, 1.0 = full filter effect

void main()
{
    vec4 originColor = texture2D(inputImageTexture, textureCoordinate);
    vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;

    // All LUTs are sampled along x at the fixed row y = 0.5.
    vec2 lookup;
    lookup.y = 0.5;
    lookup.x = texel.r;
    texel.r = texture2D(inputImageTexture2, lookup).r;
    lookup.x = texel.g;
    texel.g = texture2D(inputImageTexture2, lookup).g;
    lookup.x = texel.b;
    texel.b = texture2D(inputImageTexture2, lookup).b;

    texel = saturateMatrix * texel;

    // Radial vignette: d is squared distance from the frame center in the
    // [-1, 1] coordinate space; blowout applies more strongly near the center.
    vec2 tc = (2.0 * textureCoordinate) - 1.0;
    float d = dot(tc, tc);
    vec3 sampled;
    lookup.y = 0.5;
    lookup.x = texel.r;
    sampled.r = texture2D(inputImageTexture3, lookup).r;
    lookup.x = texel.g;
    sampled.g = texture2D(inputImageTexture3, lookup).g;
    lookup.x = texel.b;
    sampled.b = texture2D(inputImageTexture3, lookup).b;
    float value = smoothstep(0.0, 1.0, d);
    texel = mix(sampled, texel, value);

    // Contrast LUT.
    lookup.x = texel.r;
    texel.r = texture2D(inputImageTexture4, lookup).r;
    lookup.x = texel.g;
    texel.g = texture2D(inputImageTexture4, lookup).g;
    lookup.x = texel.b;
    texel.b = texture2D(inputImageTexture4, lookup).b;

    // Luma LUT indexed by weighted luminance, mixed in at 50%.
    lookup.x = dot(texel, luma);
    texel = mix(texture2D(inputImageTexture5, lookup).rgb, texel, .5);

    // Screen LUT.
    lookup.x = texel.r;
    texel.r = texture2D(inputImageTexture6, lookup).r;
    lookup.x = texel.g;
    texel.g = texture2D(inputImageTexture6, lookup).g;
    lookup.x = texel.b;
    texel.b = texture2D(inputImageTexture6, lookup).b;

    // Blend the filtered color with the untouched original.
    texel = mix(originColor.rgb, texel.rgb, strength);

    gl_FragColor = vec4(texel, 1.0);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/calm.glsl:
--------------------------------------------------------------------------------
varying highp vec2 textureCoordinate;

precision highp float;

uniform sampler2D inputImageTexture;
uniform sampler2D grey1Frame;
uniform sampler2D grey2Frame;
uniform sampler2D curve;

// Rec. 709-style luma weights used by the desaturation step.
const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);

void main()
{
    // "Calm" filter:
    //   1. halve the saturation,
    //   2. run each channel through two rows of the curve LUT,
    //   3. apply two overlay blends masked by the grey frames.
    lowp float satura = 0.5;
    float GreyVal;
    lowp vec4 textureColor;
    lowp vec4 textureColorRes;

    highp float redCurveValue;
    highp float greenCurveValue;
    highp float blueCurveValue;

    vec4 grey1Color;
    vec4 grey2Color;

    float xCoordinate = textureCoordinate.x;
    float yCoordinate = textureCoordinate.y;

    textureColor = texture2D( inputImageTexture, vec2(xCoordinate, yCoordinate));
    textureColorRes = textureColor;

    grey1Color = texture2D(grey1Frame, vec2(xCoordinate, yCoordinate));
    grey2Color = texture2D(grey2Frame, vec2(xCoordinate, yCoordinate));

    // step 1. saturation
    lowp float luminance = dot(textureColor.rgb, luminanceWeighting);
    lowp vec3 greyScaleColor = vec3(luminance);

    textureColor = vec4(mix(greyScaleColor, textureColor.rgb, satura), textureColor.w);

    // step 2. level, blur curve, rgb curve
    // Each channel: one lookup in LUT row y=0.0, then one in row y=0.5.
    redCurveValue = texture2D(curve, vec2(textureColor.r, 0.0)).r;
    redCurveValue = texture2D(curve, vec2(redCurveValue, 1.0/2.0)).r;

    greenCurveValue = texture2D(curve, vec2(textureColor.g, 0.0)).g;
    greenCurveValue = texture2D(curve, vec2(greenCurveValue, 1.0/2.0)).g;

    blueCurveValue = texture2D(curve, vec2(textureColor.b, 0.0)).b;
    blueCurveValue = texture2D(curve, vec2(blueCurveValue, 1.0/2.0)).b;
    // FIX: a stray third lookup for blue (reading the .g plane) used to
    // follow here and overwrote the value above. Red and green each perform
    // exactly two lookups, so the duplicate line was removed to restore
    // channel symmetry.

    lowp vec4 base = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);

    // Overlay values come from LUT row y=1.0, red plane.
    redCurveValue = texture2D(curve, vec2(redCurveValue, 1.0)).r;
    greenCurveValue = texture2D(curve, vec2(greenCurveValue, 1.0)).r;
    blueCurveValue = texture2D(curve, vec2(blueCurveValue, 1.0)).r;
    lowp vec4 overlayer = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);
    //gl_FragColor = base * (1.0 - grey1Color.r) + overlayer * grey1Color.r;
    // Blend toward the overlay where the grey1 mask is bright.
    base = (base - overlayer) * (1.0 - grey1Color.r) + overlayer;

    // Second overlay pass: LUT row y=1.0, green plane, masked by grey2.
    redCurveValue = texture2D(curve, vec2(base.r, 1.0)).g;
    greenCurveValue = texture2D(curve, vec2(base.g, 1.0)).g;
    blueCurveValue = texture2D(curve, vec2(base.b, 1.0)).g;
    overlayer = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);

    textureColor = (base - overlayer) * (1.0 - grey2Color.r) + overlayer;
    //base * (grey2Color.r) + overlayer * (1.0 - grey2Color.r);

    gl_FragColor = vec4(textureColor.r, textureColor.g, textureColor.b, 1.0);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/cool.glsl:
--------------------------------------------------------------------------------
varying highp vec2 textureCoordinate;
precision highp float;

uniform sampler2D inputImageTexture;
uniform sampler2D curve;

void main()
{
    // "Cool" filter: per-channel tone curve, level remap via the curve
    // texture's alpha plane, a fixed brightness/contrast stretch, then a
    // 54.9% blend back toward the original pixel.
    lowp vec4 sourceColor = texture2D(inputImageTexture, textureCoordinate);

    // Stage 1: tone curve (row y = 0); each channel reads its own plane.
    highp float r = texture2D(curve, vec2(sourceColor.r, 0.0)).r;
    highp float g = texture2D(curve, vec2(sourceColor.g, 0.0)).g;
    highp float b = texture2D(curve, vec2(sourceColor.b, 0.0)).b;

    // Stage 2: level adjustment stored in the alpha plane of the same row.
    r = texture2D(curve, vec2(r, 0.0)).a;
    g = texture2D(curve, vec2(g, 0.0)).a;
    b = texture2D(curve, vec2(b, 0.0)).a;

    // Stage 3: linear brightness/contrast stretch (slope 1.25, offset -32/255).
    vec3 adjusted = vec3(r, g, b) * 1.25 - 0.12549;

    // Stage 4: normal blend — 54.9% original, 45.1% adjusted.
    vec3 blended = (sourceColor.rgb - adjusted) * 0.549 + adjusted;

    gl_FragColor = vec4(blended, 1.0);
}
42 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/crayon.glsl:
--------------------------------------------------------------------------------
varying highp vec2 textureCoordinate;
precision mediump float;

uniform sampler2D inputImageTexture;
// One texel step in texture coordinates; scales the 5x5 neighbourhood reach.
uniform vec2 singleStepOffset;
// Exponent applied to the grey level in YIQ space; higher values darken strokes.
uniform float strength;

// Rec. 601 luma weights.
const highp vec3 W = vec3(0.299,0.587,0.114);

// RGB -> YIQ. NOTE: GLSL mat3 constructors are column-major, so the literals
// below land as the transpose of the usual row-major YIQ matrix; the
// vector-times-matrix products in main() (v * M == transpose(M) * v)
// compensate, producing the intended transform.
const mat3 rgb2yiqMatrix = mat3(
    0.299, 0.587, 0.114,
    0.596,-0.275,-0.321,
    0.212,-0.523, 0.311);

// YIQ -> RGB (same column-major/transpose convention as above).
const mat3 yiq2rgbMatrix = mat3(
    1.0, 0.956, 0.621,
    1.0,-0.272,-1.703,
    1.0,-1.106, 0.0);


void main()
{
    vec4 oralColor = texture2D(inputImageTexture, textureCoordinate);

    // Per-channel maximum over the 5x5 neighbourhood (morphological dilation).
    vec3 maxValue = vec3(0.,0.,0.);

    for(int i = -2; i<=2; i++)
    {
        for(int j = -2; j<=2; j++)
        {
            vec4 tempColor = texture2D(inputImageTexture, textureCoordinate+singleStepOffset*vec2(i,j));
            maxValue.r = max(maxValue.r,tempColor.r);
            maxValue.g = max(maxValue.g,tempColor.g);
            maxValue.b = max(maxValue.b,tempColor.b);
        }
    }

    // Divide by the local maximum to flatten shading and expose edges.
    // NOTE(review): maxValue components are 0 in all-black neighbourhoods,
    // making this 0/0 — confirm inputs can never be fully black here.
    vec3 textureColor = oralColor.rgb / maxValue;

    float gray = dot(textureColor, W);
    // k = 57/255: below this grey level the effect fades back to the original.
    float k = 0.223529;
    float alpha = min(gray,k)/k;

    textureColor = textureColor * alpha + (1.-alpha)*oralColor.rgb;

    // Swap the luma for a strength-powered grey in YIQ space, keeping chroma.
    vec3 yiqColor = textureColor * rgb2yiqMatrix;

    yiqColor.r = max(0.0,min(1.0,pow(gray,strength)));

    textureColor = yiqColor * yiq2rgbMatrix;

    gl_FragColor = vec4(textureColor, oralColor.w);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/default_vertex.glsl:
--------------------------------------------------------------------------------
// Pass-through vertex shader: applies the texture transform to the incoming
// texture coordinate and forwards the vertex position unchanged.
attribute vec4 position;
attribute vec4 inputTextureCoordinate;

uniform mat4 textureTransform;
varying vec2 textureCoordinate;

void main()
{
    // Only the xy of the transformed coordinate is consumed downstream.
    textureCoordinate = (textureTransform * inputTextureCoordinate).xy;
    gl_Position = position;
}
12 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/earlybird.glsl:
--------------------------------------------------------------------------------
precision mediump float;

varying mediump vec2 textureCoordinate;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2; //earlyBirdCurves
uniform sampler2D inputImageTexture3; //earlyBirdOverlay
uniform sampler2D inputImageTexture4; //vig
uniform sampler2D inputImageTexture5; //earlyBirdBlowout
uniform sampler2D inputImageTexture6; //earlyBirdMap

// Saturation-boost matrix (literals listed in GLSL's column-major order).
const mat3 saturate = mat3(
    1.210300,
    -0.089700,
    -0.091000,
    -0.176100,
    1.123900,
    -0.177400,
    -0.034200,
    -0.034200,
    1.265800);
// NOTE(review): rgbPrime is never referenced in this shader.
const vec3 rgbPrime = vec3(0.25098, 0.14640522, 0.0);
// Luma weights used to desaturate for the overlay mix.
const vec3 desaturate = vec3(.3, .59, .11);

void main()
{
    // "Earlybird" filter pipeline:
    //   1. per-channel curve LUT (texture2),
    //   2. 50/50 mix with a grey-keyed overlay lookup (texture3), saturated,
    //   3. radial vignette lookup keyed on distance from centre (texture4),
    //   4. blowout blend weighted by that distance (texture5),
    //   5. final per-channel tone map (texture6).

    vec3 texel = texture2D(inputImageTexture, textureCoordinate).rgb;


    // Stage 1: curve LUT, sampled along the strip's vertical mid-line.
    vec2 lookup;
    lookup.y = 0.5;

    lookup.x = texel.r;
    texel.r = texture2D(inputImageTexture2, lookup).r;

    lookup.x = texel.g;
    texel.g = texture2D(inputImageTexture2, lookup).g;

    lookup.x = texel.b;
    texel.b = texture2D(inputImageTexture2, lookup).b;

    // Stage 2: overlay lookup keyed on the desaturated (grey) value.
    float desaturatedColor;
    vec3 result;
    desaturatedColor = dot(desaturate, texel);


    lookup.x = desaturatedColor;
    result.r = texture2D(inputImageTexture3, lookup).r;
    lookup.x = desaturatedColor;
    result.g = texture2D(inputImageTexture3, lookup).g;
    lookup.x = desaturatedColor;
    result.b = texture2D(inputImageTexture3, lookup).b;

    texel = saturate * mix(texel, result, .5);

    // d = squared distance from the frame centre (0 at centre, 2 in corners).
    vec2 tc = (2.0 * textureCoordinate) - 1.0;
    float d = dot(tc, tc);

    vec3 sampled;
    lookup.y = .5;

    /*
    lookup.x = texel.r;
    sampled.r = texture2D(inputImageTexture4, lookup).r;

    lookup.x = texel.g;
    sampled.g = texture2D(inputImageTexture4, lookup).g;

    lookup.x = texel.b;
    sampled.b = texture2D(inputImageTexture4, lookup).b;

    float value = smoothstep(0.0, 1.25, pow(d, 1.35)/1.65);
    texel = mix(texel, sampled, value);
    */

    //--- Stage 3: vignette LUT — x = distance, y = channel value.

    lookup = vec2(d, texel.r);
    texel.r = texture2D(inputImageTexture4, lookup).r;
    lookup.y = texel.g;
    texel.g = texture2D(inputImageTexture4, lookup).g;
    lookup.y = texel.b;
    texel.b = texture2D(inputImageTexture4, lookup).b;
    float value = smoothstep(0.0, 1.25, pow(d, 1.35)/1.65);

    //--- Stage 4: blowout, blended in by the distance-based weight.
    // NOTE(review): lookup.y still holds the pre-vignette blue value from
    // stage 3 here and is never reset to 0.5 as in the earlier stages —
    // confirm whether this is intentional.

    lookup.x = texel.r;
    sampled.r = texture2D(inputImageTexture5, lookup).r;
    lookup.x = texel.g;
    sampled.g = texture2D(inputImageTexture5, lookup).g;
    lookup.x = texel.b;
    sampled.b = texture2D(inputImageTexture5, lookup).b;
    texel = mix(sampled, texel, value);

    // Stage 5: final per-channel tone map.
    lookup.x = texel.r;
    texel.r = texture2D(inputImageTexture6, lookup).r;
    lookup.x = texel.g;
    texel.g = texture2D(inputImageTexture6, lookup).g;
    lookup.x = texel.b;
    texel.b = texture2D(inputImageTexture6, lookup).b;

    gl_FragColor = vec4(texel, 1.0);
}
107 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/emerald.glsl:
--------------------------------------------------------------------------------
varying highp vec2 textureCoordinate;
precision highp float;

uniform sampler2D inputImageTexture;
uniform sampler2D curve;

// RGB -> hue/saturation/value. Despite the HSL name, this is the standard
// branchless RGB->HSV formula (.x = hue, .y = saturation, .z = max channel).
vec3 RGBtoHSL(vec3 c) {
    vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
    vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
    vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));

    float d = q.x - min(q.w, q.y);
    float e = 1.0e-10; // avoids division by zero for grey/black pixels
    return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}

// Inverse conversion (HSV -> RGB).
vec3 HSLtoRGB(vec3 c) {
    vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
    vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
    return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}

void main() {
    // "Emerald" filter: curve LUT pass, hue/saturation shaping of blue hues,
    // then two more curve LUT passes.
    float GreyVal; // NOTE(review): unused
    highp vec4 textureColor;
    float xCoordinate = textureCoordinate.x;
    float yCoordinate = textureCoordinate.y;

    highp float redCurveValue;
    highp float greenCurveValue;
    highp float blueCurveValue;

    textureColor = texture2D( inputImageTexture, vec2(xCoordinate, yCoordinate));

    // step1 curve (LUT row y = 0; each channel reads its own plane)
    redCurveValue = texture2D(curve, vec2(textureColor.r, 0.0)).r;
    greenCurveValue = texture2D(curve, vec2(textureColor.g, 0.0)).g;
    blueCurveValue = texture2D(curve, vec2(textureColor.b, 0.0)).b;
    vec3 tColor = vec3(redCurveValue, greenCurveValue, blueCurveValue);
    tColor = RGBtoHSL(tColor);
    tColor = clamp(tColor, 0.0, 1.0);

    // Global saturation boost (tColor.g is the saturation component here).
    tColor.g = tColor.g * 1.5;

    float dStrength = 1.0;
    float dSatStrength = 0.15;
    float dHueStrength = 0.08;

    float dGap = 0.0;

    // Core blue band (hue 0.625..0.708, i.e. ~225 deg - 255 deg):
    // shift hue down and boost saturation at full strength.
    if( tColor.r >= 0.625 && tColor.r <= 0.708)
    {
        tColor.r = tColor.r - (tColor.r * dHueStrength);
        tColor.g = tColor.g + (tColor.g * dSatStrength);
    }
    // Lower ramp (~195 deg - 225 deg): strength fades in toward the core band.
    else if( tColor.r >= 0.542 && tColor.r < 0.625)
    {
        dGap = abs(tColor.r - 0.542);
        dStrength = (dGap / 0.0833);

        tColor.r = tColor.r + (tColor.r * dHueStrength * dStrength);
        tColor.g = tColor.g + (tColor.g * dSatStrength * dStrength);
    }
    // Upper ramp (~255 deg - 285 deg): strength fades out past the core band.
    else if( tColor.r > 0.708 && tColor.r <= 0.792)
    {
        dGap = abs(tColor.r - 0.792);
        dStrength = (dGap / 0.0833);

        tColor.r = tColor.r + (tColor.r * dHueStrength * dStrength);
        tColor.g = tColor.g + (tColor.g * dSatStrength * dStrength);
    }

    tColor = HSLtoRGB(tColor);
    tColor = clamp(tColor, 0.0, 1.0);

    // step2: two more LUT passes on row y = 1 (.r plane, then .g plane).
    redCurveValue = texture2D(curve, vec2(tColor.r, 1.0)).r;
    greenCurveValue = texture2D(curve, vec2(tColor.g, 1.0)).r;
    blueCurveValue = texture2D(curve, vec2(tColor.b, 1.0)).r;

    redCurveValue = texture2D(curve, vec2(redCurveValue, 1.0)).g;
    greenCurveValue = texture2D(curve, vec2(greenCurveValue, 1.0)).g;
    blueCurveValue = texture2D(curve, vec2(blueCurveValue, 1.0)).g;

    textureColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);
    gl_FragColor = vec4(textureColor.r, textureColor.g, textureColor.b, 1.0);
}
87 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/evergreen.glsl:
--------------------------------------------------------------------------------
varying highp vec2 textureCoordinate;
precision highp float;

uniform sampler2D inputImageTexture;
uniform sampler2D curve;

// RGB -> hue/saturation/value. Despite the HSL name, this is the standard
// branchless RGB->HSV formula (.x = hue, .y = saturation, .z = max channel).
vec3 RGBtoHSL(vec3 c)
{
    vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
    vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
    vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));

    float d = q.x - min(q.w, q.y);
    float e = 1.0e-10; // avoids division by zero for grey/black pixels
    return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}

// Inverse conversion (HSV -> RGB).
vec3 HSLtoRGB(vec3 c)
{
    vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
    vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
    return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}

void main()
{
    // "Evergreen" filter: boost the saturation of green hues, then remap the
    // channels through the curve LUT.
    float GreyVal; // NOTE(review): unused
    lowp vec4 textureColor;
    float xCoordinate = textureCoordinate.x;
    float yCoordinate = textureCoordinate.y;

    highp float redCurveValue;
    highp float greenCurveValue;
    highp float blueCurveValue;

    textureColor = texture2D( inputImageTexture, vec2(xCoordinate, yCoordinate));

    vec3 tColor = vec3(textureColor.r, textureColor.g, textureColor.b);

    tColor = RGBtoHSL(tColor);
    tColor = clamp(tColor, 0.0, 1.0);


    // Global saturation boost (tColor.g is the saturation component here).
    tColor.g = tColor.g * 1.3;

    float dStrength = 1.0;
    float dSatStrength = 0.5;
    float dGap = 0.0;


    // Core green band (hue 0.292..0.375, i.e. ~105 deg - 135 deg): full boost.
    if( tColor.r >= 0.292 && tColor.r <= 0.375)
    {
        tColor.g = tColor.g + (tColor.g * dSatStrength);
    }
    // Lower ramp (~75 deg - 105 deg): boost fades in toward the core band.
    else if( tColor.r >= 0.208 && tColor.r < 0.292)
    {
        dGap = abs(tColor.r - 0.208);
        dStrength = (dGap / 0.0833);

        tColor.g = tColor.g + (tColor.g * dSatStrength * dStrength);
    }
    // Upper ramp (~135 deg - 165 deg): boost fades out past the core band.
    else if( tColor.r > 0.375 && tColor.r <= 0.458)
    {
        dGap = abs(tColor.r - 0.458);
        dStrength = (dGap / 0.0833);

        tColor.g = tColor.g + (tColor.g * dSatStrength * dStrength);
    }
    tColor = HSLtoRGB(tColor);
    tColor = clamp(tColor, 0.0, 1.0);

    // Curve LUT: first map every channel through the .b plane of row 0.
    redCurveValue = texture2D(curve, vec2(tColor.r, 0.0)).b;
    greenCurveValue = texture2D(curve, vec2(tColor.g, 0.0)).b;
    blueCurveValue = texture2D(curve, vec2(tColor.b, 0.0)).b;
    // NOTE(review): the second-stage lookups are asymmetric — red re-maps
    // through the .r plane, blue through the .g plane, and green gets no
    // second lookup at all. Verify against the LUT asset whether this is
    // intentional or a dropped/miscopied line.
    redCurveValue = texture2D(curve, vec2(redCurveValue, 0.0)).r;
    blueCurveValue = texture2D(curve, vec2(blueCurveValue, 0.0)).g;

    textureColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);

    gl_FragColor = vec4(textureColor.r, textureColor.g, textureColor.b, 1.0);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/hefe.glsl:
--------------------------------------------------------------------------------
precision mediump float;

varying mediump vec2 textureCoordinate;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2; //edgeBurn
uniform sampler2D inputImageTexture3; //hefeMap
uniform sampler2D inputImageTexture4; //hefeGradientMap
uniform sampler2D inputImageTexture5; //hefeSoftLight
uniform sampler2D inputImageTexture6; //hefeMetal

uniform float strength;

// Per-channel soft-light LUT lookup: x comes from the blend source,
// y from the base colour.
vec3 softLightLookup(vec3 blend, vec3 base)
{
    return vec3(
        texture2D(inputImageTexture5, vec2(blend.r, base.r)).r,
        texture2D(inputImageTexture5, vec2(blend.g, base.g)).g,
        texture2D(inputImageTexture5, vec2(blend.b, base.b)).b);
}

void main()
{
    // "Hefe" filter: edge-burn, per-channel tone map, then soft-light the
    // metal texture over the result, faded by `strength`.
    vec4 sourceColor = texture2D(inputImageTexture, textureCoordinate);

    // Darken the frame edges with the edge-burn overlay.
    vec3 color = sourceColor.rgb * texture2D(inputImageTexture2, textureCoordinate).rgb;

    // Tone-map each channel through its own horizontal band of the hefe map.
    color = vec3(
        texture2D(inputImageTexture3, vec2(color.r, .16666)).r,
        texture2D(inputImageTexture3, vec2(color.g, .5)).g,
        texture2D(inputImageTexture3, vec2(color.b, .83333)).b);

    // Soft-light a luma-driven gradient sample over the toned colour.
    vec3 luma = vec3(.30, .59, .11);
    vec3 gradSample = texture2D(inputImageTexture4, vec2(dot(luma, color), .5)).rgb;
    // (This value is not used by the final output in the original shader;
    // it is kept for exact behavioural parity.)
    vec3 final = softLightLookup(gradSample, color);

    // Soft-light the metal texture over the same base, then fade by strength.
    vec3 metal = texture2D(inputImageTexture6, textureCoordinate).rgb;
    vec3 metaled = softLightLookup(metal, color);
    metaled = mix(sourceColor.rgb, metaled, strength);

    gl_FragColor = vec4(metaled, 1.0);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/hudson.glsl:
--------------------------------------------------------------------------------
precision mediump float;

varying mediump vec2 textureCoordinate;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2; //blowout;
uniform sampler2D inputImageTexture3; //overlay;
uniform sampler2D inputImageTexture4; //map

uniform float strength;

void main()
{
    // "Hudson" filter: a blowout mask drives an overlay LUT, a band map
    // tones each channel, and `strength` fades the result toward the source.
    vec4 sourceColor = texture2D(inputImageTexture, textureCoordinate);
    vec3 blowout = texture2D(inputImageTexture2, textureCoordinate).rgb;

    // Overlay LUT lookup per channel: x = blowout mask, y = source value.
    vec3 overlaid;
    overlaid.r = texture2D(inputImageTexture3, vec2(blowout.r, sourceColor.r)).r;
    overlaid.g = texture2D(inputImageTexture3, vec2(blowout.g, sourceColor.g)).g;
    overlaid.b = texture2D(inputImageTexture3, vec2(blowout.b, sourceColor.b)).b;

    // Tone map: each channel lives in its own horizontal band of the map.
    vec4 mapped;
    mapped.r = texture2D(inputImageTexture4, vec2(overlaid.r, .16666)).r;
    mapped.g = texture2D(inputImageTexture4, vec2(overlaid.g, .5)).g;
    mapped.b = texture2D(inputImageTexture4, vec2(overlaid.b, .83333)).b;
    mapped.a = 1.0;

    mapped.rgb = mix(sourceColor.rgb, mapped.rgb, strength);

    gl_FragColor = mapped;
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/inkwell.glsl:
--------------------------------------------------------------------------------
precision mediump float;

varying mediump vec2 textureCoordinate;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;

void main()
{
    // "Inkwell" filter: desaturate with 0.3/0.6/0.1 weights, then map the
    // grey level through the top band of the tone-curve texture.
    vec3 color = texture2D(inputImageTexture, textureCoordinate).rgb;
    float grey = dot(vec3(0.3, 0.6, 0.1), color);
    float mapped = texture2D(inputImageTexture2, vec2(grey, .16666)).r;
    gl_FragColor = vec4(vec3(mapped), 1.0);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/kevin_new.glsl:
--------------------------------------------------------------------------------
precision mediump float;

varying mediump vec2 textureCoordinate;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;

void main()
{
    // "Kevin" filter: remap each channel through its own plane of the 1-D
    // curve strip, sampled along the strip's vertical mid-line.
    vec3 color = texture2D(inputImageTexture, textureCoordinate).rgb;

    color = vec3(
        texture2D(inputImageTexture2, vec2(color.r, .5)).r,
        texture2D(inputImageTexture2, vec2(color.g, .5)).g,
        texture2D(inputImageTexture2, vec2(color.b, .5)).b);

    gl_FragColor = vec4(color, 1.0);
}
26 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/lomo.glsl:
--------------------------------------------------------------------------------
precision mediump float;

varying mediump vec2 textureCoordinate;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
uniform sampler2D inputImageTexture3;

uniform float strength;

void main()
{
    // NOTE(review): the base colour is hard-coded and neither
    // inputImageTexture nor inputImageTexture2 is ever sampled, so this
    // shader produces a constant-colour vignette regardless of the input
    // frame. A lomo filter would normally start from
    // texture2D(inputImageTexture, textureCoordinate) — confirm whether this
    // is a deliberate placeholder or a dropped line.
    vec4 originColor = vec4(0.2,0.6,0.9,1.0);

    vec3 texel;
    // d = squared distance from the frame centre (0 at centre, 2 in the
    // corners); it indexes the x axis of the vignette LUT.
    vec2 tc = (2.0 * textureCoordinate) - 1.0;
    float d = dot(tc, tc);
    vec2 lookup = vec2(d, originColor.r);
    texel.r = texture2D(inputImageTexture3, lookup).r;
    lookup.y = originColor.g;
    texel.g = texture2D(inputImageTexture3, lookup).g;
    lookup.y = originColor.b;
    texel.b = texture2D(inputImageTexture3, lookup).b;

    // Fade the vignetted colour back toward the base colour by strength.
    texel.rgb = mix(originColor.rgb, texel.rgb, strength);

    gl_FragColor = vec4(texel,1.0);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/n1977.glsl:
--------------------------------------------------------------------------------
precision mediump float;

varying mediump vec2 textureCoordinate;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;

void main()
{
    // "1977" filter: map each channel through its dedicated horizontal band
    // of the curve texture.
    vec3 color = texture2D(inputImageTexture, textureCoordinate).rgb;

    color.r = texture2D(inputImageTexture2, vec2(color.r, .16666)).r;
    color.g = texture2D(inputImageTexture2, vec2(color.g, .5)).g;
    color.b = texture2D(inputImageTexture2, vec2(color.b, .83333)).b;

    gl_FragColor = vec4(color, 1.0);
}
20 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/nashville.glsl:
--------------------------------------------------------------------------------
precision mediump float;

varying mediump vec2 textureCoordinate;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;

void main()
{
    // "Nashville" filter: each channel reads its own horizontal band of the
    // nashville map texture.
    vec3 src = texture2D(inputImageTexture, textureCoordinate).rgb;

    float r = texture2D(inputImageTexture2, vec2(src.r, .16666)).r;
    float g = texture2D(inputImageTexture2, vec2(src.g, .5)).g;
    float b = texture2D(inputImageTexture2, vec2(src.b, .83333)).b;

    gl_FragColor = vec4(r, g, b, 1.0);
}
17 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/rise.glsl:
--------------------------------------------------------------------------------
precision mediump float;

varying mediump vec2 textureCoordinate;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2; //blowout;
uniform sampler2D inputImageTexture3; //overlay;
uniform sampler2D inputImageTexture4; //map

uniform float strength;

void main()
{
    // "Rise" filter: overlay blend driven by the blowout mask, then a
    // per-channel tone map, faded toward the source by `strength`.
    vec4 source = texture2D(inputImageTexture, textureCoordinate);
    vec3 blown = texture2D(inputImageTexture2, textureCoordinate).rgb;

    // Overlay LUT: x = blowout mask, y = source channel value.
    vec3 overlaid = vec3(
        texture2D(inputImageTexture3, vec2(blown.r, source.r)).r,
        texture2D(inputImageTexture3, vec2(blown.g, source.g)).g,
        texture2D(inputImageTexture3, vec2(blown.b, source.b)).b);

    // Tone map: each channel has its own horizontal band in the map texture.
    vec4 toned = vec4(
        texture2D(inputImageTexture4, vec2(overlaid.r, .16666)).r,
        texture2D(inputImageTexture4, vec2(overlaid.g, .5)).g,
        texture2D(inputImageTexture4, vec2(overlaid.b, .83333)).b,
        1.0);

    toned.rgb = mix(source.rgb, toned.rgb, strength);

    gl_FragColor = toned;
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/romance.glsl:
--------------------------------------------------------------------------------
varying highp vec2 textureCoordinate;
precision highp float;

uniform sampler2D inputImageTexture;
uniform sampler2D curve;

void main()
{
    // "Romance" filter: screen the image onto itself, apply a per-channel
    // curve, then boost saturation by 15%.
    lowp vec4 textureColor;
    lowp vec4 textureColorRes; // NOTE(review): written once, never read
    lowp vec4 textureColorOri;
    vec4 grey1Color; // NOTE(review): unused
    vec4 layerColor; // NOTE(review): unused
    mediump float satVal = 115.0 / 100.0; // saturation factor; >1 boosts

    float xCoordinate = textureCoordinate.x;
    float yCoordinate = textureCoordinate.y;

    highp float redCurveValue;
    highp float greenCurveValue;
    highp float blueCurveValue;

    textureColor = texture2D( inputImageTexture, vec2(xCoordinate, yCoordinate));
    textureColorRes = textureColor;
    textureColorOri = textureColor;

    // step1. screen blending (image screened onto itself)
    textureColor = 1.0 - ((1.0 - textureColorOri) * (1.0 - textureColorOri));
    // NOTE(review): the line below is algebraically a no-op
    // ((x - y) + y == x); it looks like a blend factor was dropped here.
    textureColor = (textureColor - textureColorOri) + textureColorOri;

    // step2. curve (LUT row y = 0; each channel reads its own plane)
    redCurveValue = texture2D(curve, vec2(textureColor.r, 0.0)).r;
    greenCurveValue = texture2D(curve, vec2(textureColor.g, 0.0)).g;
    blueCurveValue = texture2D(curve, vec2(textureColor.b, 0.0)).b;

    // step3. saturation: lerp each channel away from the channel mean G;
    // with satVal > 1 this pushes past the original, increasing saturation.
    highp float G = (redCurveValue + greenCurveValue + blueCurveValue);
    G = G / 3.0;

    redCurveValue = ((1.0 - satVal) * G + satVal * redCurveValue);
    greenCurveValue = ((1.0 - satVal) * G + satVal * greenCurveValue);
    blueCurveValue = ((1.0 - satVal) * G + satVal * blueCurveValue);

    textureColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);

    gl_FragColor = vec4(textureColor.r, textureColor.g, textureColor.b, 1.0);
}
48 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/sakura.glsl:
--------------------------------------------------------------------------------
precision mediump float;

uniform sampler2D inputImageTexture;
uniform sampler2D curve;
uniform float texelWidthOffset;
uniform float texelHeightOffset;

varying mediump vec2 textureCoordinate;

// 7-tap Gaussian blur around the current fragment. Step direction comes from
// the texelWidth/HeightOffset uniforms. Weights sum to ~0.99999 (normalised);
// the fractional offsets look like the bilinear two-texels-per-fetch
// optimisation — TODO confirm against how the caller binds the offsets.
vec4 gaussianBlur(sampler2D sampler) {
    lowp float strength = 1.; // local multiplier on the tap spacing
    vec4 color = vec4(0.);
    vec2 step = vec2(0.);

    // centre tap
    color += texture2D(sampler,textureCoordinate)* 0.25449 ;

    // symmetric tap pairs at increasing offsets
    step.x = 1.37754 * texelWidthOffset * strength;
    step.y = 1.37754 * texelHeightOffset * strength;
    color += texture2D(sampler,textureCoordinate+step) * 0.24797;
    color += texture2D(sampler,textureCoordinate-step) * 0.24797;

    step.x = 3.37754 * texelWidthOffset * strength;
    step.y = 3.37754 * texelHeightOffset * strength;
    color += texture2D(sampler,textureCoordinate+step) * 0.09122;
    color += texture2D(sampler,textureCoordinate-step) * 0.09122;

    step.x = 5.37754 * texelWidthOffset * strength;
    step.y = 5.37754 * texelHeightOffset * strength;
    color += texture2D(sampler,textureCoordinate+step) * 0.03356;
    color += texture2D(sampler,textureCoordinate-step) * 0.03356;

    return color;
}

// Per-channel overlay blend of c2 onto c1 (alpha forced to 1).
vec4 overlay(vec4 c1, vec4 c2){
    vec4 r1 = vec4(0.,0.,0.,1.);

    r1.r = c1.r < 0.5 ? 2.0*c1.r*c2.r : 1.0 - 2.0*(1.0-c1.r)*(1.0-c2.r);
    r1.g = c1.g < 0.5 ? 2.0*c1.g*c2.g : 1.0 - 2.0*(1.0-c1.g)*(1.0-c2.g);
    r1.b = c1.b < 0.5 ? 2.0*c1.b*c2.b : 1.0 - 2.0*(1.0-c1.b)*(1.0-c2.b);

    return r1;
}

// Remap each channel through the .r plane of row y=0 of the curve LUT.
vec4 level0c(vec4 color, sampler2D sampler) {
    color.r = texture2D(sampler, vec2(color.r, 0.)).r;
    color.g = texture2D(sampler, vec2(color.g, 0.)).r;
    color.b = texture2D(sampler, vec2(color.b, 0.)).r;
    return color;
}

// "Normal" blend: lerp from c1 toward c2 by alpha.
vec4 normal(vec4 c1, vec4 c2, float alpha) {
    return (c2-c1) * alpha + c1;
}

// Screen blend: 1 - (1-c1)(1-c2). NOTE(review): unused in this shader.
vec4 screen(vec4 c1, vec4 c2) {
    vec4 r1 = vec4(1.) - ((vec4(1.) - c1) * (vec4(1.) - c2));
    return r1;
}

void main() {
    // "Sakura" skin filter: overlay a curve-levelled blur of the frame onto
    // the source, then blend only 15% of that result back into the source.
    // naver skin
    lowp vec4 c0 = texture2D(inputImageTexture, textureCoordinate);
    lowp vec4 c1 = gaussianBlur(inputImageTexture);
    lowp vec4 c2 = overlay(c0, level0c(c1, curve));
    lowp vec4 c3 = normal(c0,c2,0.15);

    gl_FragColor = c3;
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/sierra.glsl:
--------------------------------------------------------------------------------
precision mediump float;

varying mediump vec2 textureCoordinate;

uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2; //blowout;
uniform sampler2D inputImageTexture3; //overlay;
uniform sampler2D inputImageTexture4; //map

uniform float strength;

void main()
{
    // "Sierra" filter: blowout-driven overlay followed by a per-channel tone
    // map, faded against the untouched source by `strength`.
    vec4 src = texture2D(inputImageTexture, textureCoordinate);
    vec3 mask = texture2D(inputImageTexture2, textureCoordinate).rgb;

    // Overlay LUT lookup per channel: x = blowout mask, y = source value.
    float r = texture2D(inputImageTexture3, vec2(mask.r, src.r)).r;
    float g = texture2D(inputImageTexture3, vec2(mask.g, src.g)).g;
    float b = texture2D(inputImageTexture3, vec2(mask.b, src.b)).b;

    // Tone map: channels live in separate horizontal bands of the map.
    vec4 outColor = vec4(
        texture2D(inputImageTexture4, vec2(r, .16666)).r,
        texture2D(inputImageTexture4, vec2(g, .5)).g,
        texture2D(inputImageTexture4, vec2(b, .83333)).b,
        1.0);

    outColor.rgb = mix(src.rgb, outColor.rgb, strength);
    gl_FragColor = outColor;
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/sketch.glsl:
--------------------------------------------------------------------------------
varying highp vec2 textureCoordinate;
precision mediump float;

uniform sampler2D inputImageTexture;
uniform vec2 singleStepOffset;
uniform float strength;

// Rec. 601 luma weights.
const highp vec3 W = vec3(0.299,0.587,0.114);


void main()
{
    // Pencil-sketch effect: divide the pixel's grey level by the local 5x5
    // maximum grey level (a morphological dilation), then fade the contour
    // back toward plain grey in regions darker than the local average.
    float threshold = 0.0;
    //pic1
    vec4 oralColor = texture2D(inputImageTexture, textureCoordinate);

    //pic2: per-channel maximum over the 5x5 neighbourhood; also accumulate
    //the neighbourhood luma to build the average threshold.
    vec3 maxValue = vec3(0.,0.,0.);

    for(int i = -2; i<=2; i++)
    {
        for(int j = -2; j<=2; j++)
        {
            vec4 tempColor = texture2D(inputImageTexture, textureCoordinate+singleStepOffset*vec2(i,j));
            maxValue.r = max(maxValue.r,tempColor.r);
            maxValue.g = max(maxValue.g,tempColor.g);
            maxValue.b = max(maxValue.b,tempColor.b);
            threshold += dot(tempColor.rgb, W);
        }
    }
    //pic3
    float gray1 = dot(oralColor.rgb, W);

    //pic4
    float gray2 = dot(maxValue, W);

    //pic5 — guard against 0/0 in all-black neighbourhoods.
    float contour = gray1 / max(gray2, 1e-5);

    threshold = threshold / 25.;
    // FIX: this used max(1.0, ...), which always evaluates to 1.0 because the
    // ternary never exceeds 1.0 — that made the dark-region fade below dead
    // code and left `threshold` unused. min() implements the intended clamp:
    // alpha is 1 for pixels at least as bright as the local average and
    // ramps down proportionally below it.
    float alpha = min(1.0, gray1 > threshold ? 1.0 : (gray1 / threshold));

    float result = contour * alpha + (1.0-alpha)*gray1;

    gl_FragColor = vec4(vec3(result,result,result), oralColor.w);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/skinwhiten.glsl:
--------------------------------------------------------------------------------
1 | precision highp float;
2 |
3 | uniform sampler2D inputImageTexture;
4 | uniform sampler2D curve;
5 |
6 | uniform float texelWidthOffset;
7 | uniform float texelHeightOffset;
8 |
9 | varying mediump vec2 textureCoordinate;
10 |
11 | const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
12 |
// 7-tap Gaussian blur around the current fragment. The step direction comes
// from the texelWidth/HeightOffset uniforms. The weights
// (0.25449 + 2*(0.24797 + 0.09122 + 0.03356)) sum to ~0.99999, i.e. the
// kernel is effectively normalised. The fractional tap offsets look like the
// bilinear two-texels-per-fetch optimisation — TODO confirm against how the
// caller binds the offsets.
vec4 gaussianBlur(sampler2D sampler) {
    lowp float strength = 1.; // local multiplier on the tap spacing
    vec4 color = vec4(0.);
    vec2 step = vec2(0.);

    // centre tap
    color += texture2D(sampler,textureCoordinate)* 0.25449 ;

    // symmetric tap pairs at increasing offsets
    step.x = 1.37754 * texelWidthOffset * strength;
    step.y = 1.37754 * texelHeightOffset * strength;
    color += texture2D(sampler,textureCoordinate+step) * 0.24797;
    color += texture2D(sampler,textureCoordinate-step) * 0.24797;

    step.x = 3.37754 * texelWidthOffset * strength;
    step.y = 3.37754 * texelHeightOffset * strength;
    color += texture2D(sampler,textureCoordinate+step) * 0.09122;
    color += texture2D(sampler,textureCoordinate-step) * 0.09122;

    step.x = 5.37754 * texelWidthOffset * strength;
    step.y = 5.37754 * texelHeightOffset * strength;

    color += texture2D(sampler,textureCoordinate+step) * 0.03356;
    color += texture2D(sampler,textureCoordinate-step) * 0.03356;

    return color;
}
38 |
// Skin whitening: blurs and partially desaturates the image, pushes the
// sharp original through a brightening curve LUT, then overlay-blends the
// curve result (base) with the blurred layer (overlay).
void main() {
    vec4 blurColor;
    lowp vec4 textureColor;
    // Small negative bias subtracted from the curve output (~ -0.5/255).
    lowp float strength = -1.0 / 510.0;

    // Saturation retained in the blurred layer (0 = grey, 1 = unchanged).
    lowp float satura = 0.7;

    // naver skin
    textureColor = texture2D(inputImageTexture, textureCoordinate);
    blurColor = gaussianBlur(inputImageTexture);

    // Partially desaturate the blurred layer toward its own luminance.
    lowp float luminance = dot(blurColor.rgb, luminanceWeighting);
    lowp vec3 greyScaleColor = vec3(luminance);
    blurColor = vec4(mix(greyScaleColor, blurColor.rgb, satura), blurColor.w);

    // Remap each channel of the sharp image through the 1D curve texture.
    lowp float redCurveValue = texture2D(curve, vec2(textureColor.r, 0.0)).r;
    lowp float greenCurveValue = texture2D(curve, vec2(textureColor.g, 0.0)).r;
    lowp float blueCurveValue = texture2D(curve, vec2(textureColor.b, 0.0)).r;

    redCurveValue = min(1.0, redCurveValue + strength);
    greenCurveValue = min(1.0, greenCurveValue + strength);
    blueCurveValue = min(1.0, blueCurveValue + strength);

    mediump vec4 overlay = blurColor;
    mediump vec4 base = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);

    // Per-channel overlay blend: multiply below 0.5, screen above.
    mediump float ra;
    if (base.r < 0.5) {
        ra = overlay.r * base.r * 2.0;
    } else {
        ra = 1.0 - ((1.0 - base.r) * (1.0 - overlay.r) * 2.0);
    }

    mediump float ga;
    if (base.g < 0.5) {
        ga = overlay.g * base.g * 2.0;
    } else {
        ga = 1.0 - ((1.0 - base.g) * (1.0 - overlay.g) * 2.0);
    }

    mediump float ba;
    if (base.b < 0.5) {
        ba = overlay.b * base.b * 2.0;
    } else {
        ba = 1.0 - ((1.0 - base.b) * (1.0 - overlay.b) * 2.0);
    }

    // (Removed unused xCoordinate/yCoordinate locals and the redundant
    // intermediate copy of the blended colour.)
    gl_FragColor = vec4(ra, ga, ba, 1.0);
}
97 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/suger_tablets.glsl:
--------------------------------------------------------------------------------
1 | varying mediump vec2 textureCoordinate;
2 | varying mediump vec2 textureCoordinate2; // TODO: This is not used
3 |
4 | uniform sampler2D inputImageTexture;
5 | uniform sampler2D inputImageTexture2; // lookup texture
6 | uniform mediump float strength;
7 |
// 8x8x8 colour lookup-table (LUT) filter: maps the input colour through a
// 512x512 LUT laid out as 64 blue slices in an 8x8 grid, interpolating
// between the two nearest slices, then blends with the original by
// `strength`.
void main()
{
    mediump vec4 originColor = texture2D(inputImageTexture, textureCoordinate);
    // Reuse the sample instead of fetching the same texel a second time.
    mediump vec4 textureColor = originColor;

    // Blue channel selects the LUT slice index (0..63).
    mediump float blueColor = textureColor.b * 63.0;

    // Grid coordinates of the two neighbouring slices.
    mediump vec2 quad1;
    quad1.y = floor(floor(blueColor) / 8.0);
    quad1.x = floor(blueColor) - (quad1.y * 8.0);

    mediump vec2 quad2;
    quad2.y = floor(ceil(blueColor) / 8.0);
    quad2.x = ceil(blueColor) - (quad2.y * 8.0);

    // Red/green index into each 64x64 slice, inset by half a texel to
    // avoid bleeding across slice borders.
    mediump vec2 texPos1;
    texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);
    texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);

    mediump vec2 texPos2;
    texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);
    texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);

    mediump vec4 newColor1 = texture2D(inputImageTexture2, texPos1);
    mediump vec4 newColor2 = texture2D(inputImageTexture2, texPos2);

    // Interpolate between the two slices by the fractional blue index.
    mediump vec4 newColor = mix(newColor1, newColor2, fract(blueColor));

    newColor.rgb = mix(originColor.rgb, newColor.rgb, strength);

    gl_FragColor = vec4(newColor.rgb, textureColor.w);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/sunset.glsl:
--------------------------------------------------------------------------------
1 | varying highp vec2 textureCoordinate;
2 |
3 | precision highp float;
4 | uniform sampler2D inputImageTexture;
5 | uniform sampler2D curve;
6 |
7 | uniform sampler2D grey1Frame;
8 | uniform sampler2D grey2Frame;
9 |
// Sunset filter: masked curve remap, a second remap via the curve's alpha
// channel, a 60% exclusion blend with a dark purple, two overlay-blend
// passes, and a final mask-weighted darkening.
void main()
{
    // NOTE(review): GreyVal is declared but never used.
    float GreyVal;
    lowp vec4 textureColor;
    lowp vec4 textureColorOri;
    float xCoordinate = textureCoordinate.x;
    float yCoordinate = textureCoordinate.y;

    highp float redCurveValue;
    highp float greenCurveValue;
    highp float blueCurveValue;

    vec4 grey1Color;
    vec4 grey2Color;

    textureColor = texture2D( inputImageTexture, vec2(xCoordinate, yCoordinate));
    // NOTE(review): grey1Color samples grey2Frame and vice versa — confirm
    // the two mask frames are intentionally crossed here.
    grey1Color = texture2D(grey2Frame, vec2(xCoordinate, yCoordinate));
    grey2Color = texture2D(grey1Frame, vec2(xCoordinate, yCoordinate));

    // step1 normal blending with original: per-channel curve lookup,
    // mixed with the original weighted by the first mask's red channel.
    redCurveValue = texture2D(curve, vec2(textureColor.r, 0.0)).r;
    greenCurveValue = texture2D(curve, vec2(textureColor.g, 0.0)).g;
    blueCurveValue = texture2D(curve, vec2(textureColor.b, 0.0)).b;

    textureColorOri = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);
    textureColor = (textureColorOri - textureColor) * grey1Color.r + textureColor;

    // step2: second remap through the curve texture's alpha channel.
    redCurveValue = texture2D(curve, vec2(textureColor.r, 0.0)).a;
    greenCurveValue = texture2D(curve, vec2(textureColor.g, 0.0)).a;
    blueCurveValue = texture2D(curve, vec2(textureColor.b, 0.0)).a;

    //textureColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);

    // step3 60% opacity ExclusionBlending with a dark purple constant.
    textureColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);
    mediump vec4 textureColor2 = vec4(0.08627, 0.03529, 0.15294, 1.0);
    textureColor2 = textureColor + textureColor2 - (2.0 * textureColor2 * textureColor);

    textureColor = (textureColor2 - textureColor) * 0.6784 + textureColor;


    // Warm grey constant used for the first overlay-blend pass.
    mediump vec4 overlay = vec4(0.6431, 0.5882, 0.5803, 1.0);
    mediump vec4 base = textureColor;

    // overlay blending (per channel: multiply below 0.5, screen above)
    mediump float ra;
    if (base.r < 0.5) {
        ra = overlay.r * base.r * 2.0;
    } else {
        ra = 1.0 - ((1.0 - base.r) * (1.0 - overlay.r) * 2.0);
    }

    mediump float ga;
    if (base.g < 0.5) {
        ga = overlay.g * base.g * 2.0;
    } else {
        ga = 1.0 - ((1.0 - base.g) * (1.0 - overlay.g) * 2.0);
    }

    mediump float ba;
    if (base.b < 0.5) {
        ba = overlay.b * base.b * 2.0;
    } else {
        ba = 1.0 - ((1.0 - base.b) * (1.0 - overlay.b) * 2.0);
    }

    textureColor = vec4(ra, ga, ba, 1.0);
    // NOTE(review): (textureColor - base) + base just copies textureColor
    // into base (up to rounding) — looks like a leftover opacity blend.
    base = (textureColor - base) + base;

    // again overlay blending, this time against black (darkens)
    overlay = vec4(0.0, 0.0, 0.0, 1.0);

    // overlay blending
    if (base.r < 0.5) {
        ra = overlay.r * base.r * 2.0;
    } else {
        ra = 1.0 - ((1.0 - base.r) * (1.0 - overlay.r) * 2.0);
    }

    if (base.g < 0.5) {
        ga = overlay.g * base.g * 2.0;
    } else {
        ga = 1.0 - ((1.0 - base.g) * (1.0 - overlay.g) * 2.0);
    }

    if (base.b < 0.5) {
        ba = overlay.b * base.b * 2.0;
    } else {
        ba = 1.0 - ((1.0 - base.b) * (1.0 - overlay.b) * 2.0);
    }

    textureColor = vec4(ra, ga, ba, 1.0);
    // Darkening pass weighted per-channel by the second mask * 0.549.
    textureColor = (textureColor - base) * (grey2Color * 0.549) + base;

    gl_FragColor = vec4(textureColor.r, textureColor.g, textureColor.b, 1.0);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/sutro.glsl:
--------------------------------------------------------------------------------
1 | precision mediump float;
2 |
3 | varying mediump vec2 textureCoordinate;
4 |
5 | uniform sampler2D inputImageTexture;
6 | uniform sampler2D inputImageTexture2; //sutroMap;
7 | uniform sampler2D inputImageTexture3; //sutroMetal;
8 | uniform sampler2D inputImageTexture4; //softLight
9 | uniform sampler2D inputImageTexture5; //sutroEdgeburn
10 | uniform sampler2D inputImageTexture6; //sutroCurves
11 |
12 | uniform float strength;
13 |
// Sutro filter: radial tone lookup, a pull toward dark red by luma, a
// soft-light blend against the metal texture, an edge-burn vignette and a
// final RGB curve, mixed with the original frame by `strength`.
void main()
{
    vec4 originColor = texture2D(inputImageTexture, textureCoordinate);
    vec3 texel = originColor.rgb;

    // Squared distance from the frame centre (0 at centre, ~2 in corners).
    vec2 centered = (2.0 * textureCoordinate) - 1.0;
    float dist = dot(centered, centered);

    // Tone-map each channel through sutroMap, indexed by (distance, value).
    vec2 lookup = vec2(dist, texel.r);
    texel.r = texture2D(inputImageTexture2, lookup).r;
    lookup.y = texel.g;
    texel.g = texture2D(inputImageTexture2, lookup).g;
    lookup.y = texel.b;
    texel.b = texture2D(inputImageTexture2, lookup).b;

    // Pull colours 32% toward a luma-offset dark red.
    vec3 rgbPrime = vec3(0.1019, 0.0, 0.0);
    float m = dot(vec3(.3, .59, .11), texel.rgb) - 0.03058;
    texel = mix(texel, rgbPrime + m, 0.32);

    // Soft-light blend against the metal texture, channel by channel.
    vec3 metal = texture2D(inputImageTexture3, textureCoordinate).rgb;
    texel.r = texture2D(inputImageTexture4, vec2(metal.r, texel.r)).r;
    texel.g = texture2D(inputImageTexture4, vec2(metal.g, texel.g)).g;
    texel.b = texture2D(inputImageTexture4, vec2(metal.b, texel.b)).b;

    // Multiply in the edge-burn vignette.
    texel *= texture2D(inputImageTexture5, textureCoordinate).rgb;

    // Final per-channel curves (rows 1/6, 1/2, 5/6 of the curve strip).
    texel.r = texture2D(inputImageTexture6, vec2(texel.r, .16666)).r;
    texel.g = texture2D(inputImageTexture6, vec2(texel.g, .5)).g;
    texel.b = texture2D(inputImageTexture6, vec2(texel.b, .83333)).b;

    gl_FragColor = vec4(mix(originColor.rgb, texel, strength), 1.0);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/tender.glsl:
--------------------------------------------------------------------------------
1 | varying highp vec2 textureCoordinate;
2 | precision highp float;
3 |
4 | uniform sampler2D inputImageTexture;
5 | uniform sampler2D curve;
6 | uniform sampler2D grey1Frame;
7 |
// Tender filter: curve remap, 30% exclusion blend with a navy tint, then a
// darkening overlay pass weighted by the grey mask frame.
void main()
{
    mediump vec4 textureColor;
    mediump vec4 textureColorRes;
    vec4 grey1Color;
    // Blend parameters: saturation amount and the navy exclusion tint.
    mediump float satVal = 65.0 / 100.0;
    mediump float mask1R = 29.0 / 255.0;
    mediump float mask1G = 43.0 / 255.0;
    mediump float mask1B = 95.0 / 255.0;

    highp float xCoordinate = textureCoordinate.x;
    highp float yCoordinate = textureCoordinate.y;

    highp float redCurveValue;
    highp float greenCurveValue;
    highp float blueCurveValue;

    textureColor = texture2D( inputImageTexture, vec2(xCoordinate, yCoordinate));
    // NOTE(review): textureColorRes is assigned but never read afterwards.
    textureColorRes = textureColor;

    grey1Color = texture2D(grey1Frame, vec2(xCoordinate, yCoordinate));

    // step1. saturation
    highp float G = (textureColor.r + textureColor.g + textureColor.b);
    G = G / 3.0;

    // NOTE(review): these three saturation results are immediately
    // overwritten by the curve lookups in step2, so step1 is dead code —
    // confirm whether the curve was meant to read the saturated values.
    redCurveValue = ((1.0 - satVal) * G + satVal * textureColor.r);
    greenCurveValue = ((1.0 - satVal) * G + satVal * textureColor.g);
    blueCurveValue = ((1.0 - satVal) * G + satVal * textureColor.b);

    // step2 curve: per-channel remap through curve row 0.
    redCurveValue = texture2D(curve, vec2(textureColor.r, 0.0)).r;
    greenCurveValue = texture2D(curve, vec2(textureColor.g, 0.0)).g;
    blueCurveValue = texture2D(curve, vec2(textureColor.b, 0.0)).b;

    // step3 30% opacity ExclusionBlending with the navy constant.
    textureColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);
    mediump vec4 textureColor2 = vec4(mask1R, mask1G, mask1B, 1.0);
    textureColor2 = textureColor + textureColor2 - (2.0 * textureColor2 * textureColor);

    textureColor = (textureColor2 - textureColor) * 0.3 + textureColor;

    // Overlay blend against black (darkens); applied below via the mask.
    mediump vec4 overlay = vec4(0, 0, 0, 1.0);
    mediump vec4 base = vec4(textureColor.r, textureColor.g, textureColor.b, 1.0);

    // step4 overlay blending (per channel: multiply below 0.5, screen above)
    mediump float ra;
    if (base.r < 0.5)
    {
        ra = overlay.r * base.r * 2.0;
    }
    else
    {
        ra = 1.0 - ((1.0 - base.r) * (1.0 - overlay.r) * 2.0);
    }

    mediump float ga;
    if (base.g < 0.5)
    {
        ga = overlay.g * base.g * 2.0;
    }
    else
    {
        ga = 1.0 - ((1.0 - base.g) * (1.0 - overlay.g) * 2.0);
    }

    mediump float ba;
    if (base.b < 0.5)
    {
        ba = overlay.b * base.b * 2.0;
    }
    else
    {
        ba = 1.0 - ((1.0 - base.b) * (1.0 - overlay.b) * 2.0);
    }

    textureColor = vec4(ra, ga, ba, 1.0);
    // Mix the darkened result in, weighted by half the mask's red channel.
    base = (textureColor - base) * (grey1Color.r/2.0) + base;

    gl_FragColor = vec4(base.r, base.g, base.b, 1.0);
}
89 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/toaster2_filter_shader.glsl:
--------------------------------------------------------------------------------
1 | precision mediump float;
2 |
3 | varying mediump vec2 textureCoordinate;
4 |
5 | uniform sampler2D inputImageTexture;
6 | uniform sampler2D inputImageTexture2; //toaster_metal
7 | uniform sampler2D inputImageTexture3; //toaster_soft_light
8 | uniform sampler2D inputImageTexture4; //toaster_curves
9 | uniform sampler2D inputImageTexture5; //toaster_overlay_map_warm
10 | uniform sampler2D inputImageTexture6; //toaster_color_shift
11 |
// Toaster filter (rewritten from decompiler output with readable names):
// soft-light blend against a metal texture, per-channel tone curves, a
// radial warm overlay, and a final colour shift.
void main()
{
    mediump vec4 source = texture2D (inputImageTexture, textureCoordinate);
    mediump vec4 metal = texture2D (inputImageTexture2, textureCoordinate);
    mediump vec3 texel = source.xyz;

    // Soft-light each channel: x = metal value, y = source value.
    texel.x = texture2D (inputImageTexture3, vec2(metal.x, source.x)).x;
    texel.y = texture2D (inputImageTexture3, vec2(metal.y, source.y)).y;
    texel.z = texture2D (inputImageTexture3, vec2(metal.z, source.z)).z;

    // Per-channel tone curves (rows 1/6, 1/2, 5/6 of the curve strip).
    texel.x = texture2D (inputImageTexture4, vec2(texel.x, 0.16666)).x;
    texel.y = texture2D (inputImageTexture4, vec2(texel.y, 0.5)).y;
    texel.z = texture2D (inputImageTexture4, vec2(texel.z, 0.833333)).z;

    // Radial warm overlay indexed by squared distance from the centre.
    mediump vec2 centered = ((2.0 * textureCoordinate) - 1.0);
    mediump float dist = dot (centered, centered);
    texel.x = texture2D (inputImageTexture5, vec2(dist, texel.x)).x;
    texel.y = texture2D (inputImageTexture5, vec2(dist, texel.y)).y;
    texel.z = texture2D (inputImageTexture5, vec2(dist, texel.z)).z;

    // Final colour shift (same per-channel rows as the tone curves).
    texel.x = texture2D (inputImageTexture6, vec2(texel.x, 0.16666)).x;
    texel.y = texture2D (inputImageTexture6, vec2(texel.y, 0.5)).y;
    texel.z = texture2D (inputImageTexture6, vec2(texel.z, 0.833333)).z;

    mediump vec4 outColor;
    outColor.w = 1.0;
    outColor.xyz = texel;
    gl_FragColor = outColor;
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/valencia.glsl:
--------------------------------------------------------------------------------
1 | precision mediump float;
2 |
3 | varying mediump vec2 textureCoordinate;
4 |
5 | uniform sampler2D inputImageTexture;
6 | uniform sampler2D inputImageTexture2; //map
7 | uniform sampler2D inputImageTexture3; //gradMap
8 |
// Saturation-boost matrix applied after the tone-curve lookup.
// GLSL ES 1.00 only guarantees initializers on `const` globals, so declare
// these as compile-time constants (also prevents accidental mutation).
const mat3 saturateMatrix = mat3(
    1.1402,
    -0.0598,
    -0.061,
    -0.1174,
    1.0826,
    -0.1186,
    -0.0228,
    -0.0228,
    1.1772);

// Luma coefficients (Rec. 601 weights) used to index the gradient map.
const vec3 lumaCoeffs = vec3(.3, .59, .11);
21 |
22 | uniform float strength;
23 |
// Valencia filter: per-channel tone curves, a saturation matrix, then a
// luma-indexed gradient-map lookup, blended with the original by `strength`.
void main()
{
    vec4 originColor = texture2D(inputImageTexture, textureCoordinate);
    vec3 color = originColor.rgb;

    // Tone-map each channel (rows 1/6, 1/2, 5/6 of the map strip).
    color.r = texture2D(inputImageTexture2, vec2(color.r, .1666666)).r;
    color.g = texture2D(inputImageTexture2, vec2(color.g, .5)).g;
    color.b = texture2D(inputImageTexture2, vec2(color.b, .8333333)).b;

    // Boost saturation, then remap every channel by overall luma.
    color = saturateMatrix * color;
    float luma = dot(lumaCoeffs, color);
    color.r = texture2D(inputImageTexture3, vec2(luma, color.r)).r;
    color.g = texture2D(inputImageTexture3, vec2(luma, color.g)).g;
    color.b = texture2D(inputImageTexture3, vec2(luma, color.b)).b;

    gl_FragColor = vec4(mix(originColor.rgb, color, strength), 1.0);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/walden.glsl:
--------------------------------------------------------------------------------
1 | precision mediump float;
2 |
3 | varying mediump vec2 textureCoordinate;
4 |
5 | uniform sampler2D inputImageTexture;
6 | uniform sampler2D inputImageTexture2; //map
7 | uniform sampler2D inputImageTexture3; //vigMap
8 |
9 | uniform float strength;
10 |
// Walden filter: per-channel tone curves followed by a radial vignette
// lookup, blended with the untouched frame by `strength`.
void main()
{
    vec4 originColor = texture2D(inputImageTexture, textureCoordinate);
    vec3 color = originColor.rgb;

    // Tone-map each channel (rows 1/6, 1/2, 5/6 of the map strip).
    color.r = texture2D(inputImageTexture2, vec2(color.r, .16666)).r;
    color.g = texture2D(inputImageTexture2, vec2(color.g, .5)).g;
    color.b = texture2D(inputImageTexture2, vec2(color.b, .83333)).b;

    // Vignette: index by squared distance from the frame centre.
    vec2 centered = (2.0 * textureCoordinate) - 1.0;
    float dist = dot(centered, centered);
    color.r = texture2D(inputImageTexture3, vec2(dist, color.r)).r;
    color.g = texture2D(inputImageTexture3, vec2(dist, color.g)).g;
    color.b = texture2D(inputImageTexture3, vec2(dist, color.b)).b;

    gl_FragColor = vec4(mix(originColor.rgb, color, strength), 1.0);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/warm.glsl:
--------------------------------------------------------------------------------
1 | varying highp vec2 textureCoordinate;
2 | precision highp float;
3 |
4 | uniform sampler2D inputImageTexture;
5 | uniform sampler2D curve;
6 | uniform sampler2D greyFrame;
7 | uniform sampler2D layerImage;
8 |
// Warm filter: two curve remaps crossfaded by a grey mask, a 60% screen
// blend with a layer image, and a final curve pass from row 1.
void main()
{
    lowp vec4 textureColor;
    vec4 greyColor;
    vec4 layerColor;

    float xCoordinate = textureCoordinate.x;
    float yCoordinate = textureCoordinate.y;

    highp float redCurveValue;
    highp float greenCurveValue;
    highp float blueCurveValue;

    textureColor = texture2D( inputImageTexture, vec2(xCoordinate, yCoordinate));

    greyColor = texture2D(greyFrame, vec2(xCoordinate, yCoordinate));
    layerColor = texture2D(layerImage, vec2(xCoordinate, yCoordinate));

    // step1 curve: per-channel remap through curve row 0 (rgb channels).
    redCurveValue = texture2D(curve, vec2(textureColor.r, 0.0)).r;
    greenCurveValue = texture2D(curve, vec2(textureColor.g, 0.0)).g;
    blueCurveValue = texture2D(curve, vec2(textureColor.b, 0.0)).b;

    // step2 curve with mask: second remap via the curve's alpha channel.
    textureColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);

    redCurveValue = texture2D(curve, vec2(textureColor.r, 0.0)).a;
    greenCurveValue = texture2D(curve, vec2(textureColor.g, 0.0)).a;
    blueCurveValue = texture2D(curve, vec2(textureColor.b, 0.0)).a;

    lowp vec4 textureColor2 = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);

    // step3 screen with 60%: crossfade the two remaps by the grey mask,
    // then screen-blend the layer image on top at 60% opacity.
    lowp vec4 base = vec4(mix(textureColor.rgb, textureColor2.rgb, 1.0 - greyColor.r), textureColor.a);
    lowp vec4 overlayer = vec4(layerColor.r, layerColor.g, layerColor.b, 1.0);

    // screen blending
    textureColor = 1.0 - ((1.0 - base) * (1.0 - overlayer));
    textureColor = (textureColor - base) * 0.6 + base;

    // Final per-channel remap through curve row 1.
    redCurveValue = texture2D(curve, vec2(textureColor.r, 1.0)).r;
    greenCurveValue = texture2D(curve, vec2(textureColor.g, 1.0)).g;
    blueCurveValue = texture2D(curve, vec2(textureColor.b, 1.0)).b;
    textureColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, 1.0);

    gl_FragColor = vec4(textureColor.r, textureColor.g, textureColor.b, 1.0);
}
--------------------------------------------------------------------------------
/app/src/main/res/raw/whitecat.glsl:
--------------------------------------------------------------------------------
1 | varying highp vec2 textureCoordinate;
2 | precision highp float;
3 | uniform sampler2D inputImageTexture;
4 | uniform sampler2D curve;
5 |
// Converts an RGB colour to HSV (all components in [0,1]).
// Widely-used branchless formulation: mix()/step() select the channel
// ordering instead of if-chains, which suits GPU execution.
vec3 rgb2hsv(vec3 c)
{
    vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
    // Order the channels so q.x holds the maximum, carrying the matching
    // hue offsets from K along with each candidate.
    vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
    vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));

    // d = chroma (max - min); e guards against division by zero for greys.
    float d = q.x - min(q.w, q.y);
    float e = 1.0e-10;
    return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}
16 |
// Converts an HSV colour (components in [0,1]) back to RGB.
vec3 hsv2rgb(vec3 c)
{
    vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
    // Per-channel triangular waves derived from the hue.
    vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
    // Blend white toward the pure hue by saturation, scale by value.
    return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}
23 |
// Whitecat filter: 20% self-exclusion pre-lightening, a three-pass curve
// remap, saturation reduction in HSV space, and a faint warm overlay.
void main()
{
    lowp vec4 textureColor = texture2D( inputImageTexture, textureCoordinate);

    highp float redCurveValue;
    highp float greenCurveValue;
    highp float blueCurveValue;

    // step1 20% opacity ExclusionBlending of the image with itself.
    mediump vec4 textureColor2 = textureColor;
    textureColor2 = textureColor + textureColor2 - (2.0 * textureColor2 * textureColor);

    textureColor = (textureColor2 - textureColor) * 0.2 + textureColor;

    // step2 curve: row 0 per-channel, then row 1's red component applied
    // to all channels, then row 1's green component.
    redCurveValue = texture2D(curve, vec2(textureColor.r, 0.0)).r;
    greenCurveValue = texture2D(curve, vec2(textureColor.g, 0.0)).g;
    blueCurveValue = texture2D(curve, vec2(textureColor.b, 0.0)).b;

    redCurveValue = texture2D(curve, vec2(redCurveValue, 1.0)).r;
    greenCurveValue = texture2D(curve, vec2(greenCurveValue, 1.0)).r;
    blueCurveValue = texture2D(curve, vec2(blueCurveValue, 1.0)).r;

    redCurveValue = texture2D(curve, vec2(redCurveValue, 1.0)).g;
    greenCurveValue = texture2D(curve, vec2(greenCurveValue, 1.0)).g;
    blueCurveValue = texture2D(curve, vec2(blueCurveValue, 1.0)).g;

    // Reduce saturation by 35% in HSV space, then clamp back to [0,1].
    vec3 tColor = vec3(redCurveValue, greenCurveValue, blueCurveValue);
    tColor = rgb2hsv(tColor);

    tColor.g = tColor.g * 0.65;

    tColor = hsv2rgb(tColor);
    tColor = clamp(tColor, 0.0, 1.0);

    mediump vec4 base = vec4(tColor, 1.0);
    mediump vec4 overlay = vec4(0.62, 0.6, 0.498, 1.0);
    // step6 overlay blending with a warm grey, applied at 10% opacity.
    // (Removed unused locals GreyVal and textureColorOri.)
    mediump float ra;
    if (base.r < 0.5)
    {
        ra = overlay.r * base.r * 2.0;
    } else
    {
        ra = 1.0 - ((1.0 - base.r) * (1.0 - overlay.r) * 2.0);
    }

    mediump float ga;
    if (base.g < 0.5)
    {
        ga = overlay.g * base.g * 2.0;
    } else
    {
        ga = 1.0 - ((1.0 - base.g) * (1.0 - overlay.g) * 2.0);
    }

    mediump float ba;
    if (base.b < 0.5)
    {
        ba = overlay.b * base.b * 2.0;
    } else
    {
        ba = 1.0 - ((1.0 - base.b) * (1.0 - overlay.b) * 2.0);
    }
    textureColor = vec4(ra, ga, ba, 1.0);
    textureColor = (textureColor - base) * 0.1 + base;

    gl_FragColor = vec4(textureColor.r, textureColor.g, textureColor.b, 1.0);
}
100 |
--------------------------------------------------------------------------------
/app/src/main/res/raw/xproii_filter_shader.glsl:
--------------------------------------------------------------------------------
1 | precision mediump float;
2 |
3 | varying mediump vec2 textureCoordinate;
4 |
5 | uniform sampler2D inputImageTexture;
6 | uniform sampler2D inputImageTexture2; //map
7 | uniform sampler2D inputImageTexture3; //vigMap
8 |
9 | uniform float strength;
10 |
// X-Pro II filter: radial vignette lookup followed by per-channel tone
// curves, blended with the untouched frame by `strength`.
void main()
{
    vec4 originColor = texture2D(inputImageTexture, textureCoordinate);
    vec3 color = originColor.rgb;

    // Vignette: index by squared distance from the frame centre.
    vec2 centered = (2.0 * textureCoordinate) - 1.0;
    float dist = dot(centered, centered);
    color.r = texture2D(inputImageTexture3, vec2(dist, color.r)).r;
    color.g = texture2D(inputImageTexture3, vec2(dist, color.g)).g;
    color.b = texture2D(inputImageTexture3, vec2(dist, color.b)).b;

    // Tone-map each channel (rows 1/6, 1/2, 5/6 of the map strip).
    color.r = texture2D(inputImageTexture2, vec2(color.r, 0.16666)).r;
    color.g = texture2D(inputImageTexture2, vec2(color.g, 0.5)).g;
    color.b = texture2D(inputImageTexture2, vec2(color.b, .83333)).b;

    gl_FragColor = vec4(mix(originColor.rgb, color, strength), 1.0);

}
--------------------------------------------------------------------------------
/app/src/main/res/values-w820dp/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 | 64dp
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #3F51B5
4 | #303F9F
5 | #FF4081
6 | #848484
7 | #7f333336
8 | #f0b017
9 |
10 |
--------------------------------------------------------------------------------
/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 16dp
4 | 16dp
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | VideoEditor
3 | 视频
4 | 录制
5 | 本地视频美颜
6 | 音频处理
7 | 视频拼接
8 | 选择视频
9 | 选择音频
10 | PCM转音频
11 | 音频混合
12 | 选择第一个音频
13 | 选择第一个视频
14 | 选择第二个音频
15 | 选择第二个视频
16 | 左右滑动切换滤镜哟...
17 | 点击确定分离音频
18 |
19 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
--------------------------------------------------------------------------------
/app/src/test/java/com/example/cj/videoeditor/ExampleUnitTest.java:
--------------------------------------------------------------------------------
1 | package com.example.cj.videoeditor;
2 |
3 | import org.junit.Test;
4 |
5 | import static org.junit.Assert.*;
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 | * @see Testing documentation
11 | */
12 | public class ExampleUnitTest {
13 | @Test
14 | public void addition_isCorrect() throws Exception {
15 | assertEquals(4, 2 + 2);
16 | }
17 | }
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    repositories {
        // NOTE(review): JCenter has been sunset to read-only mode; consider
        // migrating to mavenCentral() — verify all dependencies resolve
        // there before switching.
        jcenter()
        maven {
            url 'https://maven.google.com/'
            name 'Google'
        }
    }
    dependencies {
        // Android Gradle plugin shared by all modules.
        classpath 'com.android.tools.build:gradle:3.6.3'

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

// Dependency repositories shared by every module.
allprojects {
    repositories {
        jcenter()
        maven {
            url 'https://maven.google.com/'
            name 'Google'
        }
    }
}

// `gradlew clean` deletes the root build directory.
task clean(type: Delete) {
    delete rootProject.buildDir
}
32 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | ## Project-wide Gradle settings.
2 | #
3 | # For more details on how to configure your build environment visit
4 | # http://www.gradle.org/docs/current/userguide/build_environment.html
5 | #
6 | # Specifies the JVM arguments used for the daemon process.
7 | # The setting is particularly useful for tweaking memory settings.
8 | # Default value: -Xmx1024m -XX:MaxPermSize=256m
9 | # org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
10 | #
11 | # When configured, Gradle will run in incubating parallel mode.
12 | # This option should only be used with decoupled projects. More details, visit
13 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
14 | # org.gradle.parallel=true
15 | #Tue Jun 05 21:09:17 CST 2018
16 | systemProp.http.proxyHost=127.0.0.1
17 | org.gradle.jvmargs=-Xmx1536m
18 | systemProp.http.proxyPort=1080
19 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qqchenjian318/VideoEditor-For-Android/e116fda60c6bc271d8babfd62c04998e46d292c1/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Sun Jul 16 12:13:53 CST 2017
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants

if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

set CMD_LINE_ARGS=%*
goto execute

:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega
91 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
// Modules that are part of this Gradle build.
include ':app'
2 |
--------------------------------------------------------------------------------