├── library
│   ├── .gitignore
│   ├── src
│   │   ├── main
│   │   │   ├── res
│   │   │   │   └── values
│   │   │   │       └── strings.xml
│   │   │   ├── AndroidManifest.xml
│   │   │   └── java
│   │   │       └── com
│   │   │           └── lmy
│   │   │               └── lymedia
│   │   │                   ├── media
│   │   │                   │   ├── render
│   │   │                   │   │   ├── Filter.java
│   │   │                   │   │   ├── BaseFilter.java
│   │   │                   │   │   ├── FFmpegFilter.java
│   │   │                   │   │   └── AcvFilter.java
│   │   │                   │   ├── Player.java
│   │   │                   │   ├── AudioDevice.java
│   │   │                   │   ├── VideoRender.java
│   │   │                   │   ├── VideoPlayer.java
│   │   │                   │   └── FFmpegPlayer.java
│   │   │                   ├── utils
│   │   │                   │   ├── Util.java
│   │   │                   │   └── FrameUtil.java
│   │   │                   └── widget
│   │   │                       ├── VideoSurfaceView.java
│   │   │                       └── VideoPlayerGLSurfaceView.java
│   │   └── androidTest
│   │       └── java
│   │           └── com
│   │               └── lmy
│   │                   └── lymedia
│   │                       └── ApplicationTest.java
│   ├── proguard-rules.pro
│   └── build.gradle
├── samples
│   ├── .gitignore
│   ├── src
│   │   ├── main
│   │   │   ├── res
│   │   │   │   ├── mipmap-hdpi
│   │   │   │   │   └── ic_launcher.png
│   │   │   │   ├── mipmap-mdpi
│   │   │   │   │   └── ic_launcher.png
│   │   │   │   ├── mipmap-xhdpi
│   │   │   │   │   └── ic_launcher.png
│   │   │   │   ├── mipmap-xxhdpi
│   │   │   │   │   └── ic_launcher.png
│   │   │   │   ├── mipmap-xxxhdpi
│   │   │   │   │   └── ic_launcher.png
│   │   │   │   ├── values
│   │   │   │   │   ├── strings.xml
│   │   │   │   │   ├── colors.xml
│   │   │   │   │   ├── dimens.xml
│   │   │   │   │   └── styles.xml
│   │   │   │   ├── values-v21
│   │   │   │   │   └── styles.xml
│   │   │   │   ├── values-w820dp
│   │   │   │   │   └── dimens.xml
│   │   │   │   └── layout
│   │   │   │       ├── activity_render2.xml
│   │   │   │       ├── activity_video_record.xml
│   │   │   │       ├── activity_main.xml
│   │   │   │       ├── activity_video_play.xml
│   │   │   │       └── activity_render.xml
│   │   │   ├── java
│   │   │   │   └── com
│   │   │   │       └── lmy
│   │   │   │           └── samples
│   │   │   │               ├── camera
│   │   │   │               │   ├── recorder
│   │   │   │               │   │   ├── CONSTANTS.java
│   │   │   │               │   │   ├── Frame.java
│   │   │   │               │   │   ├── Util.java
│   │   │   │               │   │   ├── AudioRecorder.java
│   │   │   │               │   │   ├── VideoRecorderWrapper.java
│   │   │   │               │   │   └── VideoRecordManager.java
│   │   │   │               │   ├── ImageUtil.java
│   │   │   │               │   ├── ShotGLSurfaceView.java
│   │   │   │               │   ├── MyCameraGLSurfaceView.java
│   │   │   │               │   └── CameraInstance.java
│   │   │   │               └── ui
│   │   │   │                   ├── MainActivity.java
│   │   │   │                   ├── VideoPlayActivity.java
│   │   │   │                   ├── RenderActivity2.java
│   │   │   │                   ├── VideoRecordActivity.java
│   │   │   │                   └── RenderActivity.java
│   │   │   └── AndroidManifest.xml
│   │   └── androidTest
│   │       └── java
│   │           └── com
│   │               └── lmy
│   │                   └── samples
│   │                       └── ApplicationTest.java
│   ├── proguard-rules.pro
│   └── build.gradle
├── settings.gradle
├── README.md
├── .gitignore
├── gradle.properties
├── gradlew.bat
├── gradlew
└── LICENSE
/library/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/samples/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':library', ':samples'
2 |
--------------------------------------------------------------------------------
/library/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <!-- reconstructed: markup was stripped in the export; the string name is assumed -->
3 |     <string name="app_name">Library</string>
4 | </resources>
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # LyMedia
2 | This is a library for playing video, playing audio, and rendering video frames. It depends on org.bytedeco:javacpp and org.bytedeco:javacv.
3 |
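4 | ## Usage
5 |
6 | A minimal playback sketch, adapted from `samples/src/main/java/com/lmy/samples/ui/VideoPlayActivity.java`; the media and `.acv` (Photoshop curves preset) file paths are placeholders:
7 |
8 | ```java
9 | // In an Activity whose layout contains a com.lmy.lymedia.widget.VideoSurfaceView (id "surface"):
10 | VideoSurfaceView surfaceView = (VideoSurfaceView) findViewById(R.id.surface);
11 | surfaceView.initPlayer(Util.getSdcardPath() + "/test.mp4");                 // open a media file
12 | surfaceView.setFilter(new AcvFilter(Util.getSdcardPath() + "/curves.acv")); // optional per-frame filter
13 | surfaceView.play();
14 | // ...and in onDestroy():
15 | surfaceView.releasePlayer();
16 | ```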
--------------------------------------------------------------------------------
/samples/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imalimin/LyMedia/HEAD/samples/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/samples/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imalimin/LyMedia/HEAD/samples/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/samples/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imalimin/LyMedia/HEAD/samples/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/samples/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imalimin/LyMedia/HEAD/samples/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/samples/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imalimin/LyMedia/HEAD/samples/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/samples/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <!-- reconstructed: markup was stripped in the export; string names are assumed -->
3 |     <string name="app_name">Samples</string>
4 |     <string name="title_activity_video_record">VideoRecordActivity</string>
5 | </resources>
--------------------------------------------------------------------------------
/samples/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 |     <!-- reconstructed: color names assumed from the stock template; values are original -->
4 |     <color name="colorPrimary">#3F51B5</color>
5 |     <color name="colorPrimaryDark">#303F9F</color>
6 |     <color name="colorAccent">#FF4081</color>
7 | </resources>
--------------------------------------------------------------------------------
/library/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <!-- reconstructed default library manifest; markup was stripped in the export -->
3 | <manifest xmlns:android="http://schemas.android.com/apk/res/android"
4 |     package="com.lmy.lymedia">
5 |
6 |     <application android:allowBackup="true"
7 |         android:label="@string/app_name" />
8 |
9 | </manifest>
--------------------------------------------------------------------------------
/samples/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <!-- reconstructed: dimen names assumed from the stock template; values are original -->
3 |     <dimen name="activity_horizontal_margin">16dp</dimen>
4 |     <dimen name="activity_vertical_margin">16dp</dimen>
5 |     <dimen name="fab_margin">16dp</dimen>
6 | </resources>
--------------------------------------------------------------------------------
/samples/src/main/java/com/lmy/samples/camera/recorder/CONSTANTS.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.camera.recorder;
2 |
3 | public class CONSTANTS {
4 | public final static int RESOLUTION_HIGH_VALUE = 2;
5 | public final static int RESOLUTION_MEDIUM_VALUE = 1;
6 | public final static int RESOLUTION_LOW_VALUE = 0;
7 | }
8 |
--------------------------------------------------------------------------------
/samples/src/main/res/values-v21/styles.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <!-- reconstructed: a single v21 style block was stripped; stock template assumed -->
3 |     <style name="AppTheme.NoActionBar">
4 |         <item name="android:windowDrawsSystemBarBackgrounds">true</item>
5 |         <item name="android:statusBarColor">@android:color/transparent</item>
6 |     </style>
7 | </resources>
--------------------------------------------------------------------------------
/samples/src/main/res/values-w820dp/dimens.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <!-- Example customization of dimensions originally defined in res/values/dimens.xml
3 |          (such as screen margins) for screens with more than 820dp of available width. -->
4 |     <dimen name="activity_horizontal_margin">64dp</dimen>
5 | </resources>
--------------------------------------------------------------------------------
/library/src/main/java/com/lmy/lymedia/media/render/Filter.java:
--------------------------------------------------------------------------------
1 | package com.lmy.lymedia.media.render;
2 |
3 | import org.bytedeco.javacv.Frame;
4 |
5 | /**
6 | * Created by lmy on 2016/4/8.
7 | */
8 | public interface Filter {
9 | Frame filter(Frame frame);
10 |
11 | void onStart();
12 |
13 | void onStop();
14 |
15 | boolean isStarting();
16 |
17 | void onCreate(int width, int height);
18 | }
19 |
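20 | // Illustration only (not part of the library): a minimal pass-through filter.
21 | // BaseFilter supplies onCreate()/isStarting(); a subclass implements the rest,
22 | // as FFmpegFilter and AcvFilter do.
23 | //
24 | //   public class NoopFilter extends BaseFilter {
25 | //       @Override public Frame filter(Frame frame) { return frame; }
26 | //       @Override public void onStart() { }
27 | //       @Override public void onStop() { starting = false; }
28 | //   }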
--------------------------------------------------------------------------------
/library/src/androidTest/java/com/lmy/lymedia/ApplicationTest.java:
--------------------------------------------------------------------------------
1 | package com.lmy.lymedia;
2 |
3 | import android.app.Application;
4 | import android.test.ApplicationTestCase;
5 |
6 | /**
7 | * Testing Fundamentals
8 | */
9 | public class ApplicationTest extends ApplicationTestCase<Application> {
10 | public ApplicationTest() {
11 | super(Application.class);
12 | }
13 | }
--------------------------------------------------------------------------------
/samples/src/androidTest/java/com/lmy/samples/ApplicationTest.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples;
2 |
3 | import android.app.Application;
4 | import android.test.ApplicationTestCase;
5 |
6 | /**
7 | * Testing Fundamentals
8 | */
9 | public class ApplicationTest extends ApplicationTestCase<Application> {
10 | public ApplicationTest() {
11 | super(Application.class);
12 | }
13 | }
--------------------------------------------------------------------------------
/library/src/main/java/com/lmy/lymedia/utils/Util.java:
--------------------------------------------------------------------------------
1 | package com.lmy.lymedia.utils;
2 |
3 | /**
4 | * Created by 李明艺 on 2016/3/21.
5 | *
6 | * @author lrlmy@foxmail.com
7 | */
8 | public class Util {
9 |
10 | public static String getSdcardPath() {
11 | if (android.os.Environment.getExternalStorageState().equals(
12 | android.os.Environment.MEDIA_MOUNTED)) {
13 | return android.os.Environment.getExternalStorageDirectory().getAbsolutePath();
14 | } else {
15 | return "/storage/sdcard0";
16 | }
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/library/src/main/java/com/lmy/lymedia/media/render/BaseFilter.java:
--------------------------------------------------------------------------------
1 | package com.lmy.lymedia.media.render;
2 |
3 | /**
4 | * Created by lmy on 2016/4/8.
5 | */
6 | public abstract class BaseFilter implements Filter {
7 | protected boolean starting = false;
8 | protected int width, height;
9 |
10 | @Override
11 | public void onCreate(int width, int height) {
12 | this.width = width;
13 | this.height = height;
14 | onStart();
15 | this.starting = true;
16 | }
17 |
18 | @Override
19 | public boolean isStarting() {
20 | return starting;
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /local.properties
2 | /.idea
3 | .DS_Store
4 | *.iml
5 |
6 | # Built application files
7 | *.apk
8 | *.ap_
9 |
10 | # Files for the Dalvik VM
11 | *.dex
12 |
13 | # Java class files
14 | *.class
15 |
16 | # Generated files
17 | bin/
18 | gen/
19 |
20 | # Gradle files
21 | .gradle/
22 | gradle/
23 | build/
24 |
25 | # Local configuration file (sdk path, etc)
26 | local.properties
27 |
28 | # Proguard folder generated by Eclipse
29 | proguard/
30 |
31 | # Log Files
32 | *.log
33 |
34 | # Android Studio Navigation editor temp files
35 | .navigation/
36 |
37 | # Android Studio captures folder
38 | captures/
39 |
40 | # jniLibs
41 | jniLibs/
--------------------------------------------------------------------------------
/library/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in D:\Android\sdk/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | # http://developer.android.com/guide/developing/tools/proguard.html
9 |
10 | # Add any project specific keep options here:
11 |
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | # public *;
17 | #}
18 |
--------------------------------------------------------------------------------
/samples/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in D:\Android\sdk/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | # http://developer.android.com/guide/developing/tools/proguard.html
9 |
10 | # Add any project specific keep options here:
11 |
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | # public *;
17 | #}
18 |
--------------------------------------------------------------------------------
/samples/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <!-- reconstructed: markup was stripped in the export; this is the stock AppCompat
3 |          template that the color names and activity code suggest -->
4 |     <!-- Base application theme. -->
5 |     <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
6 |         <item name="colorPrimary">@color/colorPrimary</item>
7 |         <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
8 |         <item name="colorAccent">@color/colorAccent</item>
9 |     </style>
10 |
11 |     <style name="AppTheme.NoActionBar">
12 |         <item name="windowActionBar">false</item>
13 |         <item name="windowNoTitle">true</item>
14 |     </style>
15 |
16 |     <style name="AppTheme.AppBarOverlay" parent="ThemeOverlay.AppCompat.Dark.ActionBar" />
17 |
18 |     <style name="AppTheme.PopupOverlay" parent="ThemeOverlay.AppCompat.Light" />
19 | </resources>
--------------------------------------------------------------------------------
/samples/src/main/java/com/lmy/samples/camera/recorder/Frame.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.camera.recorder;
2 |
3 | import org.bytedeco.javacpp.opencv_core;
4 |
5 | import static org.bytedeco.javacpp.opencv_core.cvCreateImage;
6 |
7 | /**
8 | * Created by 李明艺 on 2016/3/1.
9 | *
10 | * @author lrlmy@foxmail.com
11 | */
12 | public class Frame {
13 | public opencv_core.IplImage image;
14 | public long frameTimeMillis;
15 | public long frameNanoTime;
16 |
17 | public static Frame create(int width, int height, int depth, int channels) {
18 | return new Frame(cvCreateImage(new opencv_core.CvSize(width, height), depth, channels), 0, 0);
19 | }
20 |
21 | public Frame(opencv_core.IplImage img, long timeMill, long timeNano) {
22 | image = img;
23 | frameTimeMillis = timeMill;
24 | frameNanoTime = timeNano;
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/samples/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | android {
4 | compileSdkVersion 22
5 | buildToolsVersion "22.0.1"
6 |
7 | defaultConfig {
8 | applicationId "com.lmy.samples"
9 | minSdkVersion 15
10 | targetSdkVersion 22
11 | versionCode 1
12 | versionName "1.0"
13 | }
14 | buildTypes {
15 | release {
16 | minifyEnabled false
17 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
18 | }
19 | }
20 | }
21 |
22 | dependencies {
23 | compile fileTree(dir: 'libs', include: ['*.jar'])
24 | testCompile 'junit:junit:4.12'
25 | compile project(':library')
26 | compile 'com.android.support:appcompat-v7:22.2.1'
27 | compile 'com.lmy.lycommon:library:0.1.22'
28 | compile 'com.android.support:design:22.2.1'
29 | }
30 |
--------------------------------------------------------------------------------
/library/src/main/java/com/lmy/lymedia/media/render/FFmpegFilter.java:
--------------------------------------------------------------------------------
1 | package com.lmy.lymedia.media.render;
2 |
3 | import org.bytedeco.javacv.FFmpegFrameFilter;
4 | import org.bytedeco.javacv.FrameFilter;
5 |
6 | /**
7 | * Created by lmy on 2016/4/8.
8 | */
9 | public abstract class FFmpegFilter extends BaseFilter {
10 |     protected FFmpegFrameFilter mFilter; // assigned by subclasses in onStart(); visible to them so onStop() releases the right instance
11 | protected String filter;
12 |
13 | public FFmpegFilter(String filter) {
14 | this.filter = filter;
15 | }
16 |
17 | @Override
18 | public void onStop() {
19 | starting = false;
20 | try {
21 | if (mFilter != null) {
22 | mFilter.stop();
23 | mFilter.release();
24 | mFilter = null;
25 | }
26 | } catch (FrameFilter.Exception e) {
27 | e.printStackTrace();
28 | }
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 |
3 | # IDE (e.g. Android Studio) users:
4 | # Gradle settings configured through the IDE *will override*
5 | # any settings specified in this file.
6 |
7 | # For more details on how to configure your build environment visit
8 | # http://www.gradle.org/docs/current/userguide/build_environment.html
9 |
10 | # Specifies the JVM arguments used for the daemon process.
11 | # The setting is particularly useful for tweaking memory settings.
12 | # Default value: -Xmx1024m -XX:MaxPermSize=256m
13 | # org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
14 |
15 | # When configured, Gradle will run in incubating parallel mode.
16 | # This option should only be used with decoupled projects. More details, visit
17 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
18 | # org.gradle.parallel=true
--------------------------------------------------------------------------------
/samples/src/main/res/layout/activity_render2.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <!-- reconstructed: markup was stripped; widget types and IDs are from RenderActivity2, other attributes are assumed -->
3 | <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
4 |     android:layout_width="match_parent"
5 |     android:layout_height="match_parent"
6 |     android:orientation="vertical">
7 |
8 |     <Button
9 |         android:id="@+id/start"
10 |         android:layout_width="match_parent"
11 |         android:layout_height="wrap_content" />
12 |
13 |     <TextView
14 |         android:id="@+id/text"
15 |         android:layout_width="match_parent"
16 |         android:layout_height="wrap_content" />
17 |
18 | </LinearLayout>
--------------------------------------------------------------------------------
/samples/src/main/res/layout/activity_video_record.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <!-- reconstructed: markup was stripped; only the two widgets referenced from VideoRecordActivity are restored, other attributes are assumed -->
3 | <RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
4 |     android:layout_width="match_parent"
5 |     android:layout_height="match_parent">
6 |
7 |     <com.lmy.samples.camera.MyCameraGLSurfaceView
8 |         android:id="@+id/glview"
9 |         android:layout_width="match_parent"
10 |         android:layout_height="match_parent" />
11 |
12 |     <Button
13 |         android:id="@+id/button"
14 |         android:layout_width="wrap_content"
15 |         android:layout_height="wrap_content"
16 |         android:layout_alignParentBottom="true"
17 |         android:layout_centerHorizontal="true" />
18 |
19 | </RelativeLayout>
--------------------------------------------------------------------------------
/samples/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <!-- reconstructed: markup was stripped; widget types and IDs are from MainActivity, other attributes are assumed -->
3 | <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
4 |     android:layout_width="match_parent"
5 |     android:layout_height="match_parent"
6 |     android:orientation="vertical">
7 |
8 |     <Button
9 |         android:id="@+id/play"
10 |         android:layout_width="match_parent"
11 |         android:layout_height="wrap_content" />
12 |
13 |     <Button
14 |         android:id="@+id/record"
15 |         android:layout_width="match_parent"
16 |         android:layout_height="wrap_content" />
17 |
18 |     <Button
19 |         android:id="@+id/render"
20 |         android:layout_width="match_parent"
21 |         android:layout_height="wrap_content" />
22 |
23 | </LinearLayout>
--------------------------------------------------------------------------------
/library/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.library'
2 |
3 | android {
4 | compileSdkVersion 23
5 | buildToolsVersion "23.0.2"
6 |
7 | defaultConfig {
8 | minSdkVersion 15
9 | targetSdkVersion 23
10 | versionCode 1
11 | versionName "1.0"
12 | }
13 | buildTypes {
14 | release {
15 | minifyEnabled false
16 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
17 | }
18 | }
19 | lintOptions {
20 | abortOnError false
21 | }
22 | }
23 |
24 | // Helper function: target is the output jar file name, classDir is the folder containing the compiled .class files
25 | def makeJar(String target, String classDir) {
26 |     exec {
27 |         executable "jar" // invoke the JDK jar tool
28 |         args "cvf", target
29 |         args "-C", classDir
30 |         args "."
31 |     }
32 | }
33 | // Task buildLib depends on the built-in build task; run with: gradlew buildLib
34 | task buildLib(dependsOn: ['build']) << {
35 |     makeJar("build/lymedia.jar", "build/intermediates/classes/release")
36 | }
37 |
38 | dependencies {
39 | compile fileTree(dir: 'libs', include: ['*.jar'])
40 | compile 'org.bytedeco:javacpp:1.1'
41 | compile 'org.bytedeco:javacv:1.1'
42 | }
43 |
--------------------------------------------------------------------------------
/library/src/main/java/com/lmy/lymedia/media/Player.java:
--------------------------------------------------------------------------------
1 | package com.lmy.lymedia.media;
2 |
3 | /**
4 | * Created by lmy on 2016/3/24.
5 | */
6 | public abstract class Player {
7 | protected int curFrameNumber = 0;
8 | protected long rate = 0;
9 | protected boolean play = false;
10 |     // playback settings
11 | protected boolean looping;
12 | protected boolean autoPlay = true;
13 |
14 |
15 | public void seek(int number) {
16 | this.curFrameNumber = number;
17 | }
18 |
19 | public void play() {
20 | this.play = true;
21 | }
22 |
23 | public void pause() {
24 | this.play = false;
25 | }
26 |
27 | public void stop() {
28 | this.play = false;
29 | }
30 |
31 | public abstract int getWidth();
32 |
33 | public abstract int getHeight();
34 |
35 | public boolean isLooping() {
36 | return looping;
37 | }
38 |
39 | public void setLooping(boolean looping) {
40 | this.looping = looping;
41 | }
42 |
43 | public boolean isAutoPlay() {
44 | return autoPlay;
45 | }
46 |
47 | public void setAutoPlay(boolean autoPlay) {
48 | this.autoPlay = autoPlay;
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/samples/src/main/res/layout/activity_video_play.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <!-- reconstructed: markup was stripped; only the widgets referenced from VideoPlayActivity are restored, other attributes are assumed -->
3 | <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
4 |     android:layout_width="match_parent"
5 |     android:layout_height="match_parent"
6 |     android:orientation="vertical">
7 |
8 |     <com.lmy.lymedia.widget.VideoSurfaceView
9 |         android:id="@+id/surface"
10 |         android:layout_width="match_parent"
11 |         android:layout_height="wrap_content" />
12 |
13 |     <Button
14 |         android:id="@+id/button1"
15 |         android:layout_width="match_parent"
16 |         android:layout_height="wrap_content" />
17 |
18 |     <Button
19 |         android:id="@+id/button2"
20 |         android:layout_width="match_parent"
21 |         android:layout_height="wrap_content" />
22 |
23 | </LinearLayout>
--------------------------------------------------------------------------------
/library/src/main/java/com/lmy/lymedia/media/render/AcvFilter.java:
--------------------------------------------------------------------------------
1 | package com.lmy.lymedia.media.render;
2 |
3 | import android.util.Log;
4 |
5 | import com.lmy.lymedia.utils.Util;
6 |
7 | import org.bytedeco.javacv.FFmpegFrameFilter;
8 | import org.bytedeco.javacv.Frame;
9 | import org.bytedeco.javacv.FrameFilter;
10 |
11 | import static org.bytedeco.javacpp.avutil.AV_PIX_FMT_RGBA;
12 |
13 | /**
14 | * Created by lmy on 2016/4/8.
15 | */
16 | public class AcvFilter extends FFmpegFilter {
17 | private final static String FILTER = "curves=psfile='%s'";
18 |     // the FFmpegFrameFilter instance lives in the FFmpegFilter base class (field mFilter)
19 |
20 | public AcvFilter(String filter) {
21 | super(filter);
22 | }
23 |
24 | @Override
25 | public Frame filter(Frame frame) {
26 | try {
27 | mFilter.push(frame);
28 |             while ((frame = mFilter.pull()) == null) { // busy-wait until the filter graph yields a frame
29 | Log.v("AcvFilter", "wait render");
30 | }
31 | } catch (FrameFilter.Exception e) {
32 | e.printStackTrace();
33 | }
34 | return frame;
35 | }
36 |
37 | @Override
38 | public void onStart() {
39 | mFilter = new FFmpegFrameFilter(String.format(FILTER, filter), width, height);
40 | mFilter.setPixelFormat(AV_PIX_FMT_RGBA);
41 | try {
42 | mFilter.start();
43 | } catch (FrameFilter.Exception e) {
44 | e.printStackTrace();
45 | }
46 | }
47 |
48 | }
49 |
--------------------------------------------------------------------------------
/samples/src/main/java/com/lmy/samples/camera/recorder/Util.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.camera.recorder;
2 |
3 | /**
4 | * Created by 李明艺 on 2016/3/1.
5 | *
6 | * @author lrlmy@foxmail.com
7 | */
8 | public class Util {
9 | public static int getTimeStampInNsFromSampleCounted(int paramInt) {
10 | return (int) (paramInt / 0.0441D);
11 | }
12 |
13 | public static String root() {
14 | if (android.os.Environment.getExternalStorageState().equals(
15 | android.os.Environment.MEDIA_MOUNTED)) {
16 | return android.os.Environment.getExternalStorageDirectory().getAbsolutePath();
17 | }
18 | return "/storage/sdcard0";
19 | }
20 |
21 | public static VideoRecorderWrapper.RecorderParameters getRecorderParameter(int currentResolution) {
22 | VideoRecorderWrapper.RecorderParameters parameters = new VideoRecorderWrapper.RecorderParameters();
23 | if (currentResolution == CONSTANTS.RESOLUTION_HIGH_VALUE) {
24 | parameters.setAudioBitrate(128000);
25 | parameters.setVideoQuality(1);
26 | } else if (currentResolution == CONSTANTS.RESOLUTION_MEDIUM_VALUE) {
27 | parameters.setAudioBitrate(128000);
28 | parameters.setVideoQuality(10);
29 | } else if (currentResolution == CONSTANTS.RESOLUTION_LOW_VALUE) {
30 | parameters.setAudioBitrate(96000);
31 | parameters.setVideoQuality(20);
32 | }
33 | return parameters;
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/samples/src/main/java/com/lmy/samples/ui/MainActivity.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.ui;
2 |
3 | import android.content.Intent;
4 | import android.support.v7.app.AppCompatActivity;
5 | import android.os.Bundle;
6 | import android.view.View;
7 | import android.widget.Button;
8 |
9 | import com.lmy.lymedia.utils.Util;
10 | import com.lmy.lymedia.widget.VideoSurfaceView;
11 | import com.lmy.samples.R;
12 |
13 | public class MainActivity extends AppCompatActivity {
14 |
15 | @Override
16 | protected void onCreate(Bundle savedInstanceState) {
17 | super.onCreate(savedInstanceState);
18 | setContentView(R.layout.activity_main);
19 |         initView();
20 | }
21 |
22 |     private void initView() {
23 | findViewById(R.id.play).setOnClickListener(onClickListener);
24 | findViewById(R.id.record).setOnClickListener(onClickListener);
25 | findViewById(R.id.render).setOnClickListener(onClickListener);
26 | }
27 |
28 | private View.OnClickListener onClickListener = new View.OnClickListener() {
29 | @Override
30 | public void onClick(View view) {
31 | switch (view.getId()) {
32 | case R.id.play:
33 | startActivity(new Intent(MainActivity.this, VideoPlayActivity.class));
34 | break;
35 | case R.id.record:
36 | startActivity(new Intent(MainActivity.this, VideoRecordActivity.class));
37 | break;
38 | case R.id.render:
39 | startActivity(new Intent(MainActivity.this, RenderActivity2.class));
40 | break;
41 | }
42 | }
43 | };
44 | }
45 |
--------------------------------------------------------------------------------
/samples/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <!-- reconstructed: markup was stripped in the export; the activity list comes from the source tree,
3 |      and the permissions are assumptions implied by the camera/recording/sdcard code -->
4 | <manifest xmlns:android="http://schemas.android.com/apk/res/android"
5 |     package="com.lmy.samples">
6 |
7 |     <uses-permission android:name="android.permission.CAMERA" />
8 |     <uses-permission android:name="android.permission.RECORD_AUDIO" />
9 |     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
10 |
11 |     <application
12 |         android:allowBackup="true"
13 |         android:icon="@mipmap/ic_launcher"
14 |         android:label="@string/app_name"
15 |         android:theme="@style/AppTheme">
16 |         <activity android:name=".ui.MainActivity">
17 |             <intent-filter>
18 |                 <action android:name="android.intent.action.MAIN" />
19 |                 <category android:name="android.intent.category.LAUNCHER" />
20 |             </intent-filter>
21 |         </activity>
22 |         <activity android:name=".ui.VideoPlayActivity" />
23 |         <activity android:name=".ui.VideoRecordActivity" />
24 |         <activity android:name=".ui.RenderActivity" />
25 |         <activity android:name=".ui.RenderActivity2" />
26 |     </application>
27 |
28 | </manifest>
--------------------------------------------------------------------------------
/samples/src/main/java/com/lmy/samples/ui/VideoPlayActivity.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.ui;
2 |
3 | import android.support.v7.app.AppCompatActivity;
4 | import android.os.Bundle;
5 | import android.view.View;
6 | import android.widget.Button;
7 |
8 | import com.lmy.lymedia.media.render.AcvFilter;
9 | import com.lmy.lymedia.utils.Util;
10 | import com.lmy.lymedia.widget.VideoSurfaceView;
11 | import com.lmy.samples.R;
12 |
13 | public class VideoPlayActivity extends AppCompatActivity {
14 | private VideoSurfaceView mSurfaceView;
15 | private Button oneBtn;
16 | private Button twoBtn;
17 |
18 | @Override
19 | protected void onCreate(Bundle savedInstanceState) {
20 | super.onCreate(savedInstanceState);
21 | setContentView(R.layout.activity_video_play);
22 | mSurfaceView = (VideoSurfaceView) findViewById(R.id.surface);
23 | oneBtn = (Button) findViewById(R.id.button1);
24 | twoBtn = (Button) findViewById(R.id.button2);
25 | mSurfaceView.initPlayer(Util.getSdcardPath() + "/test.f4v");
26 | mSurfaceView.setFilter(new AcvFilter(Util.getSdcardPath() + "/test_filters/FA_Curves3.acv"));
27 | mSurfaceView.play();
28 | oneBtn.setOnClickListener(new View.OnClickListener() {
29 | @Override
30 | public void onClick(View v) {
31 |                 mSurfaceView.releasePlayer();
32 | mSurfaceView.initPlayer(Util.getSdcardPath() + "/test.mp4");
33 | }
34 | });
35 | twoBtn.setOnClickListener(new View.OnClickListener() {
36 | @Override
37 | public void onClick(View v) {
38 |                 mSurfaceView.releasePlayer();
39 | mSurfaceView.initPlayer(Util.getSdcardPath() + "/test.f4v");
40 | }
41 | });
42 | }
43 |
44 | @Override
45 | protected void onDestroy() {
46 | super.onDestroy();
47 |         mSurfaceView.releasePlayer();
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/samples/src/main/java/com/lmy/samples/camera/ImageUtil.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.camera;
2 |
3 | import android.graphics.Bitmap;
4 | import android.os.Environment;
5 | import android.util.Log;
6 |
7 | import java.io.BufferedOutputStream;
8 | import java.io.File;
9 | import java.io.FileOutputStream;
10 | import java.io.IOException;
11 |
12 | /**
13 | * Created by 李明艺 on 2016/2/29.
14 | *
15 | * @author lrlmy@foxmail.com
16 | */
17 | public class ImageUtil {
18 | public static final String TAG = "ImageUtil";
19 | public static final File parentPath = Environment.getExternalStorageDirectory();
20 | public static String storagePath = null;
21 | public static final String DST_FOLDER = "lava";
22 |
23 | public static String getPath() {
24 | if (storagePath == null) {
25 | storagePath = parentPath.getAbsolutePath() + "/" + DST_FOLDER;
26 | File file = new File(storagePath);
27 | if (!file.exists()) {
28 | file.mkdir();
29 | }
30 | }
31 |
32 | return storagePath;
33 | }
34 |
35 | public static void saveBitmap(Bitmap bmp) {
36 | String path = getPath();
37 | long currentTime = System.currentTimeMillis();
38 | String filename = path + "/" + currentTime + ".jpg";
39 | saveBitmap(bmp, filename);
40 | }
41 |
42 | public static void saveBitmap(Bitmap bmp, String filename) {
43 | Log.i(TAG, "saving Bitmap : " + filename);
44 | try {
45 | FileOutputStream fileout = new FileOutputStream(filename);
46 | BufferedOutputStream bufferOutStream = new BufferedOutputStream(fileout);
47 | bmp.compress(Bitmap.CompressFormat.JPEG, 100, bufferOutStream);
48 | bufferOutStream.flush();
49 | bufferOutStream.close();
50 | } catch (IOException e) {
51 | Log.e(TAG, "Err when saving bitmap...");
52 | e.printStackTrace();
53 | return;
54 | }
55 | Log.i(TAG, "Bitmap " + filename + " saved!");
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/samples/src/main/java/com/lmy/samples/ui/RenderActivity2.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.ui;
2 |
3 | import android.os.Bundle;
4 | import android.support.v7.app.AppCompatActivity;
5 | import android.view.View;
6 | import android.widget.Button;
7 | import android.widget.TextView;
8 | import android.widget.Toast;
9 |
10 | import com.lmy.lymedia.media.VideoRender;
11 | import com.lmy.lymedia.media.render.AcvFilter;
12 | import com.lmy.lymedia.utils.Util;
13 | import com.lmy.samples.R;
14 |
15 | public class RenderActivity2 extends AppCompatActivity {
16 | private Button mStartBtn;
17 | private TextView mTextView;
18 | private VideoRender mVideoRender;
19 |
20 | @Override
21 | protected void onCreate(Bundle savedInstanceState) {
22 | super.onCreate(savedInstanceState);
23 | setContentView(R.layout.activity_render2);
24 | initView();
25 | }
26 |
27 | private void initView() {
28 | mStartBtn = (Button) findViewById(R.id.start);
29 | mTextView = (TextView) findViewById(R.id.text);
30 | mVideoRender = new VideoRender(Util.getSdcardPath() + "/test.mp4", Util.getSdcardPath() + "/test_render.mp4");
31 | if (mVideoRender.init()) {
32 | mVideoRender.setFilter(new AcvFilter(Util.getSdcardPath() + "/test_filters/FA_Curves3.acv"));
33 | mVideoRender.setRenderListener(new VideoRender.RenderListener() {
34 | @Override
35 | public void onProgress(int progress) {
36 | mTextView.setText(String.valueOf(progress));
37 | }
38 | });
39 | mStartBtn.setOnClickListener(new View.OnClickListener() {
40 | @Override
41 | public void onClick(View v) {
42 | mVideoRender.start();
43 | }
44 | });
45 | } else {
46 |             Toast.makeText(this, "Initialization failed", Toast.LENGTH_SHORT).show();
47 | }
48 | }
49 |
50 | @Override
51 | protected void onDestroy() {
52 | super.onDestroy();
53 | if (mVideoRender != null)
54 | mVideoRender.stop();
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/library/src/main/java/com/lmy/lymedia/widget/VideoSurfaceView.java:
--------------------------------------------------------------------------------
1 | package com.lmy.lymedia.widget;
2 |
3 | import android.annotation.TargetApi;
4 | import android.app.Activity;
5 | import android.content.Context;
6 | import android.os.Build;
7 | import android.util.AttributeSet;
8 | import android.util.Log;
9 | import android.view.Display;
10 | import android.view.SurfaceHolder;
11 | import android.view.SurfaceView;
12 |
13 | import com.lmy.lymedia.media.FFmpegPlayer;
14 | import com.lmy.lymedia.media.render.Filter;
15 |
16 | /**
17 | * Created by Administrator on 2016/3/21.
18 | */
19 | public class VideoSurfaceView extends SurfaceView implements SurfaceHolder.Callback {
20 | private FFmpegPlayer mPlayer;
21 | private OnPreparedListener onPreparedListener;
22 |
23 | public VideoSurfaceView(Context context) {
24 | super(context);
25 | init();
26 | }
27 |
28 | public VideoSurfaceView(Context context, AttributeSet attrs) {
29 | super(context, attrs);
30 | init();
31 | }
32 |
33 | public VideoSurfaceView(Context context, AttributeSet attrs, int defStyleAttr) {
34 | super(context, attrs, defStyleAttr);
35 | init();
36 | }
37 |
38 | @TargetApi(Build.VERSION_CODES.LOLLIPOP)
39 | public VideoSurfaceView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
40 | super(context, attrs, defStyleAttr, defStyleRes);
41 | init();
42 | }
43 |
44 | private void init() {
45 | getHolder().addCallback(this);
46 | }
47 |
48 | public void initPlayer(String path) {
49 | mPlayer = FFmpegPlayer.create(getHolder(), path);
50 | mPlayer.setLooping(true);
51 | }
52 |
53 | public void play() {
54 | mPlayer.play();
55 | }
56 |
57 | public void setFilter(Filter filter) {
58 | mPlayer.setFilter(filter);
59 | }
60 |
61 | private void initLayout(int width, int height) {
62 | Display display = ((Activity) getContext()).getWindowManager().getDefaultDisplay();
63 | float scale = width / (float) height;
64 | getLayoutParams().height = (int) (display.getWidth() / scale);
65 | requestLayout();
66 | }
67 |
68 | @Override
69 | public void surfaceCreated(SurfaceHolder holder) {
70 | Log.v("SurfaceView", "surfaceCreated...");
71 | initLayout(mPlayer.getWidth(), mPlayer.getHeight());
72 | play();
73 | if (onPreparedListener != null)
74 | onPreparedListener.onPrepared();
75 | }
76 |
77 | @Override
78 | public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
79 | Log.v("SurfaceView", "surfaceChanged...");
80 | }
81 |
82 | @Override
83 | public void surfaceDestroyed(SurfaceHolder holder) {
84 | Log.v("SurfaceView", "surfaceDestroyed...");
85 | mPlayer.pause();
86 | }
87 |
88 |     public void releasePlayer() {
89 | mPlayer.stop();
90 | }
91 |
92 | public void setOnPreparedListener(OnPreparedListener onPreparedListener) {
93 | this.onPreparedListener = onPreparedListener;
94 | }
95 |
96 | public interface OnPreparedListener {
97 | void onPrepared();
98 | }
99 | }
100 |
--------------------------------------------------------------------------------
/samples/src/main/java/com/lmy/samples/ui/VideoRecordActivity.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.ui;
2 |
3 | import android.app.ProgressDialog;
4 | import android.os.Bundle;
5 | import android.support.design.widget.FloatingActionButton;
6 | import android.support.design.widget.Snackbar;
7 | import android.support.v7.app.AppCompatActivity;
8 | import android.support.v7.widget.Toolbar;
9 | import android.view.KeyEvent;
10 | import android.view.MotionEvent;
11 | import android.view.View;
12 | import android.widget.Button;
13 |
14 | import com.lmy.samples.R;
15 | import com.lmy.samples.camera.MyCameraGLSurfaceView;
16 |
17 | public class VideoRecordActivity extends AppCompatActivity {
18 | private MyCameraGLSurfaceView mGLSurfaceView;
19 | private Button mButton;
20 |
21 | @Override
22 | protected void onCreate(Bundle savedInstanceState) {
23 | super.onCreate(savedInstanceState);
24 | setContentView(R.layout.activity_video_record);
25 | initView();
26 | }
27 |
28 | private void initView() {
29 | mGLSurfaceView = (MyCameraGLSurfaceView) findViewById(R.id.glview);
30 | mButton = (Button) findViewById(R.id.button);
31 |
32 | mGLSurfaceView.setFitVideoSize(true);
33 | mGLSurfaceView.initecoder();
34 | mButton.setOnTouchListener(new View.OnTouchListener() {
35 | @Override
36 | public boolean onTouch(View v, MotionEvent event) {
37 | if (MotionEvent.ACTION_DOWN == event.getAction() && !mGLSurfaceView.isRecording())
38 | mGLSurfaceView.startRecording();
39 | else if (MotionEvent.ACTION_UP == event.getAction() && mGLSurfaceView.isRecording())
40 | mGLSurfaceView.pauseRecording();
41 | return false;
42 | }
43 | });
44 | }
45 |
46 | private void waitForEncoding(final boolean shouldFinish) {
47 | final ProgressDialog pd = new ProgressDialog(this);
48 | pd.show();
49 | new Thread(new Runnable() {
50 | @Override
51 | public void run() {
52 | while (!mGLSurfaceView.getRecoderManager().encodeCompeleted()) {
53 | try {
54 | Thread.sleep(1000);
55 | } catch (InterruptedException e) {
56 | e.printStackTrace();
57 | }
58 | }
59 | runOnUiThread(new Runnable() {
60 | @Override
61 | public void run() {
62 | pd.dismiss();
63 | if (shouldFinish) {
64 | finish();
65 | } else {
66 | }
67 | }
68 | });
69 | }
70 | }).start();
71 | }
72 |
73 | @Override
74 | protected void onResume() {
75 | super.onResume();
76 | mGLSurfaceView.onResume();
77 | }
78 |
79 | @Override
80 | protected void onPause() {
81 | super.onPause();
82 | mGLSurfaceView.onPause();
83 | }
84 |
85 | @Override
86 | public boolean onKeyDown(int keyCode, KeyEvent event) {
87 | if (keyCode == KeyEvent.KEYCODE_BACK) {
88 | if (mGLSurfaceView.getRecoderManager().isStarting() && !mGLSurfaceView.getRecoderManager().encodeCompeleted()) {
89 | waitForEncoding(true);
90 | return true;
91 | }
92 | }
93 | return super.onKeyDown(keyCode, event);
94 | }
95 |
96 | @Override
97 | public void onDestroy() {
98 | super.onDestroy();
99 | mGLSurfaceView.stopRecording();
100 | }
101 | }
102 |
--------------------------------------------------------------------------------
/library/src/main/java/com/lmy/lymedia/utils/FrameUtil.java:
--------------------------------------------------------------------------------
1 | package com.lmy.lymedia.utils;
2 |
3 | import android.util.Log;
4 |
5 | import org.bytedeco.javacv.Frame;
6 |
7 | import java.nio.Buffer;
8 | import java.nio.ByteBuffer;
9 | import java.nio.FloatBuffer;
10 |
11 | /**
12 | * Created by Administrator on 2016/3/21.
13 | */
14 | public class FrameUtil {
15 | public static int[] RGB242RGB565(int[] rgb) {
16 | rgb[0] = (rgb[0] << 8) & 0xF800;
17 | rgb[1] = (rgb[1] << 3) & 0x7E0;
18 | rgb[2] = rgb[2] >> 3;
19 | return rgb;
20 | }
21 |
22 |     public static ByteBuffer RGB24ToRGB565(ByteBuffer buffer, int length) {
23 |         // RGB565 packs each pixel into 2 bytes, so write into a fresh buffer
24 |         // rather than in place (3 input bytes become 2 output bytes per pixel).
25 |         ByteBuffer out = ByteBuffer.allocateDirect(length / 3 * 2).order(buffer.order());
26 |         for (int i = 0; i + 2 < length; i += 3) {
27 |             int r = buffer.get(i) & 0xFF, g = buffer.get(i + 1) & 0xFF, b = buffer.get(i + 2) & 0xFF;
28 |             out.putShort((short) (((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3)));
29 |         }
30 |         out.rewind();
31 |         return out;
32 |     }
33 |
34 |
35 | public static int frameType(Frame frame) {
36 | if (frame == null) return -1;
37 | if (frame.image != null) return 0;
38 | else if (frame.samples != null) return 1;
39 | else return -1;
40 | }
41 |
42 | public static Frame copy(Frame frame) {
43 | Frame tmp = new Frame();
44 | int type = frameType(frame);
45 | if (type == 0) {
46 | tmp = new Frame(frame.imageWidth, frame.imageHeight, frame.imageDepth, frame.imageChannels);
47 | tmp.image[0] = copy((ByteBuffer) frame.image[0]);
48 | } else {
49 | tmp.sampleRate = frame.sampleRate;
50 | tmp.audioChannels = frame.audioChannels;
51 |             tmp.samples = new Buffer[frame.samples.length]; // allocate; tmp.samples would otherwise be null
52 |             for (int i = 0; i < frame.samples.length; i++)
53 | tmp.samples[i] = copy((FloatBuffer) frame.samples[i].position(0));
54 | }
55 | return tmp;
56 | }
57 |
58 | public static ByteBuffer copy(ByteBuffer src) {
59 | // Create the clone buffer with same capacity as the original
60 | ByteBuffer cloneBuffer = ByteBuffer.allocateDirect(src.capacity());
61 | //ByteBuffer cloneBuffer = deepCopy(originalByteBuffer);
62 |
63 | // Save parameters from the original byte buffer
64 | int position = src.position();
65 | int limit = src.limit();
66 |
67 | // Set range to the entire buffer
68 | src.position(0).limit(src.capacity());
69 |
70 | // Read from original and put into clone
71 | cloneBuffer.put(src);
72 |
73 | // Set the order same as original
74 | cloneBuffer.order(src.order());
75 |
76 | // Set clone position to 0 and set the range as the original
77 | cloneBuffer.position(0);
78 | cloneBuffer.position(position).limit(limit);
79 |
80 | return cloneBuffer;
81 | }
82 |
83 | public static FloatBuffer copy(FloatBuffer src) {
84 | // Create the clone buffer with same capacity as the original
85 | FloatBuffer cloneBuffer = FloatBuffer.allocate(src.capacity());
86 | //ByteBuffer cloneBuffer = deepCopy(originalByteBuffer);
87 |
88 | // Save parameters from the original byte buffer
89 | int position = src.position();
90 | int limit = src.limit();
91 |
92 | // Set range to the entire buffer
93 | src.position(0).limit(src.capacity());
94 |
95 | // Read from original and put into clone
96 | cloneBuffer.put(src);
97 |
98 | // Set the order same as original
99 | // cloneBuffer.order(src.order());
100 |
101 | // Set clone position to 0 and set the range as the original
102 | cloneBuffer.position(0);
103 | cloneBuffer.position(position).limit(limit);
104 |
105 | return cloneBuffer;
106 | }
107 | }
108 |
--------------------------------------------------------------------------------
/library/src/main/java/com/lmy/lymedia/media/AudioDevice.java:
--------------------------------------------------------------------------------
1 | package com.lmy.lymedia.media;
2 |
3 | import android.media.AudioFormat;
4 | import android.media.AudioManager;
5 | import android.media.AudioTrack;
6 | import android.os.Build;
7 | import android.util.Log;
8 |
9 | import java.nio.Buffer;
10 | import java.nio.FloatBuffer;
11 |
12 | /**
13 | * Created by Administrator on 2016/3/23.
14 | */
15 | public class AudioDevice {
16 |     private int streamType = AudioManager.STREAM_MUSIC; // stream type
17 |     private int sampleRateInHz = 44100; // sample rate of the audio data
18 |     private int channelConfig = AudioFormat.CHANNEL_OUT_STEREO; // CHANNEL_OUT_MONO is single channel
19 |     private int audioFormat = AudioFormat.ENCODING_PCM_16BIT; // sample encoding
20 |     private int minBufSize = 0;
21 |     private int mode = AudioTrack.MODE_STREAM; // streaming playback mode
22 | private AudioTrack audioTrack;
23 | private short[] buffer = new short[1024];
24 | private float[] floatBuffer = new float[1024];
25 |
26 | public AudioDevice(int rate, int channels) {
27 | sampleRateInHz = rate;
28 | channelConfig = channels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO;
29 | audioFormat = Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_FLOAT;
30 | minBufSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
31 | audioTrack = new AudioTrack(streamType, sampleRateInHz, channelConfig, audioFormat, minBufSize, mode);
32 | audioTrack.play();
33 | Log.v("AudioDevice", toString());
34 | }
35 |
36 | public void writeSamples(Buffer[] buffers) {
37 | if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
38 | fillByteBuffer(buffers);
39 | audioTrack.write(buffer, 0, buffer.length);
40 | } else {
41 | fillFloatBuffer(buffers);
42 | audioTrack.write(floatBuffer, 0, floatBuffer.length, AudioTrack.WRITE_BLOCKING);
43 | }
44 | }
45 |
46 |     private void fillByteBuffer(Buffer[] buffers) {
47 |         if (buffers.length == 1) { // mono: a single planar buffer
48 |             FloatBuffer b = (FloatBuffer) buffers[0];
49 |             b.rewind();
50 |             if (buffer.length < b.capacity())
51 |                 buffer = new short[b.capacity()];
52 |             for (int i = 0; i < b.capacity(); i++)
53 |                 buffer[i] = (short) (b.get(i) * Short.MAX_VALUE); // scale [-1,1] floats to 16-bit PCM
54 |         } else { // stereo: interleave the left and right planes
55 |             FloatBuffer b1 = (FloatBuffer) buffers[0];
56 |             FloatBuffer b2 = (FloatBuffer) buffers[1];
57 |             if (buffer.length < b1.capacity() + b2.capacity())
58 |                 buffer = new short[b1.capacity() + b2.capacity()];
59 |             for (int i = 0; i < b1.capacity(); i++) {
60 |                 buffer[2 * i] = (short) (b1.get(i) * Short.MAX_VALUE);
61 |                 buffer[2 * i + 1] = (short) (b2.get(i) * Short.MAX_VALUE);
62 |             }
63 |         }
64 |     }
65 |
66 |     private void fillFloatBuffer(Buffer[] buffers) {
67 |         if (buffers.length == 1) { // mono: a single planar buffer
68 |             FloatBuffer b = (FloatBuffer) buffers[0];
69 |             b.rewind();
70 |             if (floatBuffer.length < b.capacity())
71 |                 floatBuffer = new float[b.capacity()];
72 |             for (int i = 0; i < b.capacity(); i++)
73 |                 floatBuffer[i] = b.get(i); // ENCODING_PCM_FLOAT expects samples in [-1,1], so no scaling
74 |         } else { // stereo: interleave the left and right planes
75 |             FloatBuffer b1 = (FloatBuffer) buffers[0];
76 |             FloatBuffer b2 = (FloatBuffer) buffers[1];
77 |             if (floatBuffer.length < b1.capacity() + b2.capacity())
78 |                 floatBuffer = new float[b1.capacity() + b2.capacity()];
79 |             for (int i = 0; i < b1.capacity(); i++) {
80 |                 floatBuffer[2 * i] = b1.get(i);
81 |                 floatBuffer[2 * i + 1] = b2.get(i);
82 |             }
83 |         }
84 |     }
85 |
86 | public void release() {
87 | if (audioTrack != null) {
88 | audioTrack.stop();
89 | audioTrack.release();
90 | audioTrack = null;
91 | }
92 | }
93 |
94 | @Override
95 | public String toString() {
96 | return "AudioDevice{" +
97 | "streamType=" + streamType +
98 | ", sampleRateInHz=" + sampleRateInHz +
99 | ", channelConfig=" + channelConfig +
100 | ", audioFormat=" + audioFormat +
101 | ", minBufSize=" + minBufSize +
102 | ", mode=" + mode +
103 | '}';
104 | }
105 | }
106 |
--------------------------------------------------------------------------------
/samples/src/main/java/com/lmy/samples/camera/recorder/AudioRecorder.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.camera.recorder;
2 |
3 | import android.media.AudioFormat;
4 | import android.media.AudioRecord;
5 | import android.media.MediaRecorder;
6 |
7 | import java.nio.ShortBuffer;
8 |
9 | /**
10 | * Created by 李明艺 on 2015/12/29.
11 | *
12 | * @author 李明艺
13 | */
14 | public class AudioRecorder {
15 |     // the platform audio capture class
16 |     private AudioRecord audioRecord;
17 |     // thread that reads audio from the recorder
18 |     private AudioRecordRunnable audioRecordRunnable;
19 |     private Thread audioThread;
20 |     // flag that starts and stops the audio thread
21 |     volatile boolean runAudioThread = true;
22 |
23 |     // audio sample rate; recorderParameters supplies a default
24 |     private int sampleRate = 44100;
25 |     // audio timestamp
26 |     private volatile long mAudioTimestamp = 0L;
27 |     private volatile long mAudioTimeRecorded;
28 |     private boolean recording = false;
29 |
30 | private DateFeedback dateFeedback;
31 | private TimestampUpdate timestampUpdate;
32 |
33 | public AudioRecorder() {
34 | init();
35 | }
36 |
37 | private void init() {
38 | audioRecordRunnable = new AudioRecordRunnable();
39 | audioThread = new Thread(audioRecordRunnable);
40 | audioThread.start();
41 | }
42 |
43 |     /**
44 |      * Thread that captures audio from the microphone
45 |      *
46 |      * @author QD
47 |      */
48 | class AudioRecordRunnable implements Runnable {
49 | int bufferSize;
50 | short[] audioData;
51 | int bufferReadResult;
52 |         // uses the enclosing AudioRecorder's audioRecord field (a duplicate field here would shadow it and break destroy())
53 | public volatile boolean isInitialized;
54 | private int mCount = 0;
55 |
56 | private AudioRecordRunnable() {
57 | bufferSize = AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
58 | audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
59 | AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
60 | audioData = new short[bufferSize];
61 | }
62 |
63 |         /**
64 |          * shortBuffer contains the audio data and its start position
65 |          *
66 |          * @param shortBuffer
67 |          */
68 | private void record(ShortBuffer shortBuffer) {
69 | this.mCount += shortBuffer.limit();
70 | feedback(shortBuffer);
71 | }
72 |
73 |         /**
74 |          * Update the audio timestamp
75 |          */
76 | private void updateTimestamp() {
77 | int i = Util.getTimeStampInNsFromSampleCounted(this.mCount);
78 | if (mAudioTimestamp != i) {
79 | mAudioTimestamp = i;
80 | mAudioTimeRecorded = System.nanoTime();
81 | timestampUpdate(mAudioTimestamp, mAudioTimeRecorded);
82 | }
83 | }
84 |
85 | @Override
86 | public void run() {
87 | android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
88 | this.isInitialized = false;
89 | if (audioRecord != null) {
90 |                 // wait until the AudioRecord reports it is initialized
91 |                 while (audioRecord.getState() == AudioRecord.STATE_UNINITIALIZED) {
92 | try {
93 | Thread.sleep(100L);
94 | } catch (InterruptedException localInterruptedException) {
95 | }
96 | }
97 | this.isInitialized = true;
98 |                 audioRecord.startRecording();
99 |                 while (runAudioThread) {
100 |                     updateTimestamp();
101 |                     bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
102 |                     if (recording && bufferReadResult > 0)
103 |                         record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
104 |                 }
105 |                 audioRecord.stop();
106 |                 audioRecord.release(); audioRecord = null; // mark released so destroy() becomes a no-op
107 | }
108 | }
109 | }
110 |
111 | public void startRecording() {
112 | recording = true;
113 | }
114 |
115 | public void pauseRecording() {
116 | recording = false;
117 | }
118 |
119 | public void stopRecording() {
120 | recording = false;
121 | runAudioThread = false;
122 | }
123 |
124 | public void destroy() {
125 | if (this.audioRecord != null) {
126 | this.audioRecord.stop();
127 | this.audioRecord.release();
128 | this.audioRecord = null;
129 | }
130 | }
131 |
132 | public void feedback(ShortBuffer buffer) {
133 | if (dateFeedback == null) return;
134 | dateFeedback.feedback(buffer);
135 | }
136 |
137 | public void timestampUpdate(long timestamp, long timeRecorded) {
138 | if (timestampUpdate == null) return;
139 | timestampUpdate.update(timestamp, timeRecorded);
140 | }
141 |
142 | public DateFeedback getDateFeedback() {
143 | return dateFeedback;
144 | }
145 |
146 | public void setDateFeedback(DateFeedback dateFeedback) {
147 | this.dateFeedback = dateFeedback;
148 | }
149 |
150 | public TimestampUpdate getTimestampUpdate() {
151 | return timestampUpdate;
152 | }
153 |
154 | public void setTimestampUpdate(TimestampUpdate timestampUpdate) {
155 | this.timestampUpdate = timestampUpdate;
156 | }
157 |
158 | public interface DateFeedback {
159 | void feedback(ShortBuffer buffer);
160 | }
161 |
162 | public interface TimestampUpdate {
163 | void update(long timestamp, long timeRecorded);
164 | }
165 | }
166 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # Attempt to set APP_HOME
46 | # Resolve links: $0 may be a link
47 | PRG="$0"
48 | # Need this for relative symlinks.
49 | while [ -h "$PRG" ] ; do
50 | ls=`ls -ld "$PRG"`
51 | link=`expr "$ls" : '.*-> \(.*\)$'`
52 | if expr "$link" : '/.*' > /dev/null; then
53 | PRG="$link"
54 | else
55 | PRG=`dirname "$PRG"`"/$link"
56 | fi
57 | done
58 | SAVED="`pwd`"
59 | cd "`dirname \"$PRG\"`/" >/dev/null
60 | APP_HOME="`pwd -P`"
61 | cd "$SAVED" >/dev/null
62 |
63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
64 |
65 | # Determine the Java command to use to start the JVM.
66 | if [ -n "$JAVA_HOME" ] ; then
67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
68 | # IBM's JDK on AIX uses strange locations for the executables
69 | JAVACMD="$JAVA_HOME/jre/sh/java"
70 | else
71 | JAVACMD="$JAVA_HOME/bin/java"
72 | fi
73 | if [ ! -x "$JAVACMD" ] ; then
74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
75 |
76 | Please set the JAVA_HOME variable in your environment to match the
77 | location of your Java installation."
78 | fi
79 | else
80 | JAVACMD="java"
81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
82 |
83 | Please set the JAVA_HOME variable in your environment to match the
84 | location of your Java installation."
85 | fi
86 |
87 | # Increase the maximum file descriptors if we can.
88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
89 | MAX_FD_LIMIT=`ulimit -H -n`
90 | if [ $? -eq 0 ] ; then
91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
92 | MAX_FD="$MAX_FD_LIMIT"
93 | fi
94 | ulimit -n $MAX_FD
95 | if [ $? -ne 0 ] ; then
96 | warn "Could not set maximum file descriptor limit: $MAX_FD"
97 | fi
98 | else
99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
100 | fi
101 | fi
102 |
103 | # For Darwin, add options to specify how the application appears in the dock
104 | if $darwin; then
105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
106 | fi
107 |
108 | # For Cygwin, switch paths to Windows format before running java
109 | if $cygwin ; then
110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
112 | JAVACMD=`cygpath --unix "$JAVACMD"`
113 |
114 | # We build the pattern for arguments to be converted via cygpath
115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
116 | SEP=""
117 | for dir in $ROOTDIRSRAW ; do
118 | ROOTDIRS="$ROOTDIRS$SEP$dir"
119 | SEP="|"
120 | done
121 | OURCYGPATTERN="(^($ROOTDIRS))"
122 | # Add a user-defined pattern to the cygpath arguments
123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
125 | fi
126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
127 | i=0
128 | for arg in "$@" ; do
129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
131 |
132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
134 | else
135 | eval `echo args$i`="\"$arg\""
136 | fi
137 | i=$((i+1))
138 | done
139 | case $i in
140 | (0) set -- ;;
141 | (1) set -- "$args0" ;;
142 | (2) set -- "$args0" "$args1" ;;
143 | (3) set -- "$args0" "$args1" "$args2" ;;
144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
150 | esac
151 | fi
152 |
153 | # Split up the JVM_OPTS and GRADLE_OPTS values into an array, following the shell quoting and substitution rules
154 | function splitJvmOpts() {
155 | JVM_OPTS=("$@")
156 | }
157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
159 |
160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
161 |
--------------------------------------------------------------------------------
/samples/src/main/java/com/lmy/samples/camera/ShotGLSurfaceView.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.camera;
2 |
3 | import android.app.Activity;
4 | import android.content.Context;
5 | import android.graphics.Bitmap;
6 | import android.graphics.SurfaceTexture;
7 | import android.hardware.Camera;
8 | import android.opengl.GLES20;
9 | import android.os.AsyncTask;
10 | import android.os.Handler;
11 | import android.os.Looper;
12 | import android.os.Message;
13 | import android.util.AttributeSet;
14 | import android.view.Display;
15 | import android.view.SurfaceHolder;
16 |
17 | import com.lmy.lycommon.camera.CameraInstance;
18 | import com.lmy.lycommon.gles.widget.CameraGLSurfaceView;
19 | import com.lmy.samples.camera.recorder.Util;
20 |
21 | import org.bytedeco.javacpp.opencv_core;
22 |
23 | import javax.microedition.khronos.egl.EGLConfig;
24 | import javax.microedition.khronos.opengles.GL10;
25 |
26 | import static org.bytedeco.javacpp.opencv_core.cvCreateImage;
27 | import static org.bytedeco.javacpp.opencv_core.cvFlip;
28 |
29 | /**
30 | * Created by 李明艺 on 2016/3/28.
31 | *
32 | * @author lrlmy@foxmail.com
33 | *
34 |  * Photo capture widget.
35 | */
36 | public class ShotGLSurfaceView extends CameraGLSurfaceView {
37 | private final static int RESIZE_FIT = 0x0000;
38 | private int mWidth, mHeight;
39 | private float scale;
40 | private String path;
41 | private ShotListener shotListener;
42 |
43 | public ShotGLSurfaceView(Context context) {
44 | super(context);
45 | }
46 |
47 | public ShotGLSurfaceView(Context context, AttributeSet attrs) {
48 | super(context, attrs);
49 | }
50 |
51 | public void init(float scale) {
52 | this.scale = scale;
53 | this.path = Util.root() + "/" + System.currentTimeMillis() + ".jpg";
54 | Display d = ((Activity) getContext()).getWindowManager().getDefaultDisplay();
55 | mWidth = d.getWidth();
56 | mHeight = (int) (mWidth / scale);
57 | }
58 |
59 | @Override
60 | protected void init() {
61 | this.setEGLContextClientVersion(2);
62 | this.setEGLConfigChooser(8, 8, 8, 8, 8, 0);
63 |         this.getHolder().setFormat(-3);// -3 == PixelFormat.TRANSLUCENT
64 | this.setRenderer(this);
65 |         this.setRenderMode(0);// 0 == RENDERMODE_WHEN_DIRTY
66 | }
67 |
68 | private Handler mHandler = new Handler(Looper.getMainLooper()) {
69 | @Override
70 | public void handleMessage(Message msg) {
71 | super.handleMessage(msg);
72 | if (msg.what == RESIZE_FIT)
73 | reSize(mWidth, mHeight);
74 | }
75 | };
76 |
77 | public void switchCamera() {
78 | int cameraID = 0;
79 | if (cameraInstance().getFacing() == Camera.CameraInfo.CAMERA_FACING_BACK)
80 | cameraID = Camera.CameraInfo.CAMERA_FACING_FRONT;
81 | else
82 | cameraID = Camera.CameraInfo.CAMERA_FACING_BACK;
83 | cameraInstance().tryOpenCamera(new CameraInstance.CameraOpenCallback() {
84 | @Override
85 | public void cameraReady() {
86 |                 if (cameraInstance().isPreviewing()) {// stop any active preview before restarting it
87 | cameraInstance().stopPreview();
88 | }
89 | cameraInstance().startPreview(getSurfaceTexture());
90 | }
91 | }, cameraID);
92 | }
93 |
94 | @Override
95 | public CameraInstance cameraInstance() {
96 | return CameraInstance.getInstance(1920, 1080);
97 | }
98 |
99 | private void fitView() {
100 | mHandler.sendEmptyMessage(RESIZE_FIT);
101 | }
102 |
103 | @Override
104 | public void onFrameAvailable(SurfaceTexture surfaceTexture) {
105 | super.onFrameAvailable(surfaceTexture);
106 | }
107 |
108 | @Override
109 | public void onSurfaceCreated(GL10 gl, EGLConfig config) {
110 | super.onSurfaceCreated(gl, config);
111 | }
112 |
113 | @Override
114 | public void onSurfaceChanged(GL10 gl, int width, int height) {
115 | super.onSurfaceChanged(gl, width, height);
116 | fitView();
117 | }
118 |
119 | @Override
120 | public void surfaceDestroyed(SurfaceHolder holder) {
121 | super.surfaceDestroyed(holder);
122 | }
123 |
124 | @Override
125 | public void onDrawFrame(GL10 gl) {
126 | super.onDrawFrame(gl);
127 | if (take)
128 | takeShotFunc();
129 | }
130 |
131 | private boolean take = false;
132 | private opencv_core.IplImage mCacheImage;
133 |
134 | public void takeShot() {
135 | this.take = true;
136 | }
137 |
138 | private void takeShotFunc() {
139 | this.take = false;
140 | mCacheImage = cvCreateImage(new opencv_core.CvSize(mWidth, mHeight), opencv_core.IPL_DEPTH_8U, 4);
141 | GLES20.glReadPixels(getDrawViewport().x, (getDrawViewport().height - mHeight) / 2 + getDrawViewport().y, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mCacheImage.getByteBuffer());
142 |         new AsyncTask<Void, Void, String>() {
143 | @Override
144 | protected String doInBackground(Void... params) {
145 | cvFlip(mCacheImage, mCacheImage, 0);
146 | Bitmap bmp = Bitmap.createBitmap(mCacheImage.cvSize().width(), mCacheImage.cvSize().height(), Bitmap.Config.ARGB_8888);
147 | bmp.copyPixelsFromBuffer(mCacheImage.getByteBuffer());
148 | ImageUtil.saveBitmap(bmp, path);
149 | return path;
150 | }
151 |
152 | @Override
153 | protected void onPostExecute(String s) {
154 | super.onPostExecute(s);
155 | if (shotListener != null)
156 | shotListener.onShot(s);
157 | }
158 | }.execute();
159 | }
160 |
161 | public String getPath() {
162 | return path;
163 | }
164 |
165 | public void setShotListener(ShotListener shotListener) {
166 | this.shotListener = shotListener;
167 | }
168 |
169 | public interface ShotListener {
170 | void onShot(String path);
171 | }
172 | }
173 |
--------------------------------------------------------------------------------
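Usage note: a minimal sketch of embedding ShotGLSurfaceView in a host Activity. The Activity, layout and view id below are hypothetical placeholders rather than part of the samples:

public class ShotActivity extends android.app.Activity {
    private ShotGLSurfaceView shotView;

    @Override
    protected void onCreate(android.os.Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_shot);                      // hypothetical layout
        shotView = (ShotGLSurfaceView) findViewById(R.id.shot_view); // hypothetical id
        shotView.init(16f / 9f);                                     // aspect ratio of the capture region
        shotView.setShotListener(new ShotGLSurfaceView.ShotListener() {
            @Override
            public void onShot(String path) {
                // Runs on the main thread once the JPEG has been written.
                android.util.Log.i("Shot", "saved to " + path);
            }
        });
    }

    // Wire to a shutter button: pixels are grabbed on the next onDrawFrame().
    public void onShutter(android.view.View v) {
        shotView.takeShot();
    }
}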
/library/src/main/java/com/lmy/lymedia/media/VideoRender.java:
--------------------------------------------------------------------------------
1 | package com.lmy.lymedia.media;
2 |
3 | import android.os.AsyncTask;
4 | import android.util.Log;
5 |
6 | import com.lmy.lymedia.media.render.Filter;
7 | import com.lmy.lymedia.utils.FrameUtil;
8 |
9 | import org.bytedeco.javacv.FFmpegFrameGrabber;
10 | import org.bytedeco.javacv.FFmpegFrameRecorder;
11 | import org.bytedeco.javacv.Frame;
12 | import org.bytedeco.javacv.FrameGrabber;
13 | import org.bytedeco.javacv.FrameRecorder;
14 |
15 | import static org.bytedeco.javacpp.avutil.AV_PIX_FMT_RGB565;
16 | import static org.bytedeco.javacpp.avutil.AV_PIX_FMT_RGBA;
17 |
18 | /**
19 | * Created by Administrator on 2016/3/28.
20 | */
21 | public class VideoRender {
22 | private FFmpegFrameGrabber mFrameGrabber;
23 | private FFmpegFrameRecorder mFrameRecorder;
24 | private String srcPath;
25 | private String dstPath;
26 | private Frame mFrame;
27 | private RenderTask mRenderTask;
28 | private RenderListener renderListener;
29 | private Filter mFilter;
30 |
31 | public VideoRender(String srcPath, String dstPath) {
32 | this.srcPath = srcPath;
33 | this.dstPath = dstPath;
34 | this.mRenderTask = new RenderTask();
35 | }
36 |
37 | public boolean init() {
38 | return initGrabber() && initRecorder(getWidth(), getHeight());
39 | }
40 |
41 | private boolean initGrabber() {
42 | try {
43 |             if (mFrameGrabber != null) {// if an instance already exists, release it before re-initializing
44 | mFrameGrabber.stop();
45 | mFrameGrabber.release();
46 | mFrameGrabber = null;
47 | }
48 | mFrameGrabber = FFmpegFrameGrabber.createDefault(srcPath);
49 | mFrameGrabber.setPixelFormat(AV_PIX_FMT_RGBA);
50 | mFrameGrabber.start();
51 | } catch (FrameGrabber.Exception e) {
52 | e.printStackTrace();
53 | return false;
54 | }
55 | return true;
56 | }
57 |
58 | private void setPixelFormat(int fmt) {
59 | mFrameGrabber.setPixelFormat(fmt);
60 | }
61 |
62 | private boolean initRecorder(int width, int height) {
63 | try {
64 |             if (mFrameRecorder != null) {// if an instance already exists, release it before re-initializing
65 | mFrameRecorder.stop();
66 | mFrameRecorder.release();
67 | mFrameRecorder = null;
68 | }
69 | mFrameRecorder = FFmpegFrameRecorder.createDefault(dstPath, mFrameGrabber.getImageWidth(), mFrameGrabber.getImageHeight());
70 | mFrameRecorder.setFormat(mFrameGrabber.getFormat());
71 | mFrameRecorder.setSampleRate(mFrameGrabber.getSampleRate());
72 | mFrameRecorder.setFrameRate(mFrameGrabber.getFrameRate());
73 | mFrameRecorder.setVideoCodec(mFrameGrabber.getVideoCodec());
74 | // mFrameRecorder.setVideoQuality(1);
75 | // mFrameRecorder.setAudioQuality(1);
76 | mFrameRecorder.setAudioCodec(mFrameGrabber.getAudioCodec());
77 | mFrameRecorder.setVideoBitrate(mFrameGrabber.getVideoBitrate());
78 | mFrameRecorder.setAudioBitrate(mFrameGrabber.getAudioBitrate());
79 | mFrameRecorder.setAudioChannels(mFrameGrabber.getAudioChannels());
80 | mFrameRecorder.setImageWidth(width);
81 | mFrameRecorder.setImageHeight(height);
82 | mFrameRecorder.start();
83 | } catch (FrameRecorder.Exception e) {
84 | e.printStackTrace();
85 | return false;
86 | }
87 | return true;
88 | }
89 |
90 | public void start() {
91 | mRenderTask.execute();
92 | }
93 |
94 | public void stop() {
95 | mRenderTask.stopFunc();
96 | }
97 |
98 | private void release() {
99 | if (mFilter != null) {
100 | mFilter.onStop();
101 | mFilter = null;
102 | }
103 | try {
104 |             if (mFrameGrabber != null) {// release the grabber if present
105 | mFrameGrabber.stop();
106 | mFrameGrabber.release();
107 | mFrameGrabber = null;
108 | }
109 |
110 |             if (mFrameRecorder != null) {// release the recorder if present
111 | mFrameRecorder.stop();
112 | mFrameRecorder.release();
113 | mFrameRecorder = null;
114 | }
115 | } catch (FrameGrabber.Exception e) {
116 | e.printStackTrace();
117 | } catch (FrameRecorder.Exception e) {
118 | e.printStackTrace();
119 | }
120 | }
121 |
122 | public void setRenderListener(RenderListener renderListener) {
123 | this.renderListener = renderListener;
124 | }
125 |
126 | public void setFilter(Filter filter) {
127 | this.mFilter = filter;
128 |         if (mFilter != null) mFilter.onCreate(getWidth(), getHeight());// guard against a null filter
129 | }
130 |
131 |     private class RenderTask extends AsyncTask<Void, Integer, Integer> {
132 | private boolean run = false;
133 |
134 | private Frame filter(Frame frame) {
135 | if (mFilter != null)
136 | return mFilter.filter(frame);
137 | else return frame;
138 | }
139 |
140 | private void stopFunc() {
141 | this.run = false;
142 | }
143 |
144 | @Override
145 | protected Integer doInBackground(Void... params) {
146 | run = true;
147 | int mFrameNumber = 0;
148 | publishProgress(mFrameNumber * 100 / mFrameGrabber.getLengthInFrames());
149 | Log.w("VideoRender", "Frame lenght: " + mFrameGrabber.getLengthInFrames());
150 | while (run && mFrameNumber <= mFrameGrabber.getLengthInFrames()) {
151 | try {
152 | mFrame = mFrameGrabber.grab();
153 | int type = FrameUtil.frameType(mFrame);
154 | if (type == 0) {
155 | mFrame = filter(mFrame);
156 | mFrameRecorder.setTimestamp(mFrameGrabber.getTimestamp());
157 | mFrameRecorder.record(mFrame);
158 | ++mFrameNumber;
159 | publishProgress(mFrameNumber * 100 / mFrameGrabber.getLengthInFrames());
160 | Log.w("VideoRender", "Frame num=" + mFrameNumber + ", time: " + mFrameGrabber.getTimestamp());
161 | } else if (type == 1) {
162 | mFrameRecorder.recordSamples(mFrame.samples);
163 | } else break;
164 | } catch (FrameGrabber.Exception e) {
165 | e.printStackTrace();
166 | ++mFrameNumber;
167 | Log.w("VideoRender", "Frame missed!");
168 | } catch (FrameRecorder.Exception e) {
169 | e.printStackTrace();
170 | }
171 | }
172 | Log.w("VideoRender", "release");
173 | release();
174 | publishProgress(100);
175 | return 0;
176 | }
177 |
178 | @Override
179 | protected void onProgressUpdate(Integer... values) {
180 | super.onProgressUpdate(values);
181 | if (renderListener != null)
182 | renderListener.onProgress(values[0]);
183 | }
184 |
185 | @Override
186 | protected void onPostExecute(Integer integer) {
187 | super.onPostExecute(integer);
188 | }
189 | }
190 |
191 | public int getWidth() {
192 | return mFrameGrabber.getImageWidth();
193 | }
194 |
195 | public int getHeight() {
196 | return mFrameGrabber.getImageHeight();
197 | }
198 |
199 | public interface RenderListener {
200 | void onProgress(int progress);
201 | }
202 | }
203 |
--------------------------------------------------------------------------------
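Usage note: a minimal sketch of driving VideoRender off-screen; the source and destination paths are hypothetical, and setFilter() is optional (any com.lmy.lymedia.media.render.Filter implementation):

VideoRender render = new VideoRender("/sdcard/in.mp4", "/sdcard/out.mp4"); // hypothetical paths
if (render.init()) {                     // opens the grabber, then mirrors its parameters into the recorder
    render.setRenderListener(new VideoRender.RenderListener() {
        @Override
        public void onProgress(int progress) {
            android.util.Log.d("Render", progress + "%");  // 0..100; reaches 100 after release()
        }
    });
    // render.setFilter(filter);         // optional per-frame filter, installed before start()
    render.start();                      // runs the grab-filter-record loop on an AsyncTask
}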
/samples/src/main/java/com/lmy/samples/camera/recorder/VideoRecorderWrapper.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.camera.recorder;
2 |
3 | import android.os.Build;
4 | import android.util.Log;
5 |
6 | import org.bytedeco.javacpp.avcodec;
7 | import org.bytedeco.javacv.FFmpegFrameGrabber;
8 | import org.bytedeco.javacv.FFmpegFrameRecorder;
9 | import org.bytedeco.javacv.FrameRecorder;
10 |
11 | import java.nio.Buffer;
12 | import java.nio.ShortBuffer;
13 |
14 | /**
15 | * Created by 李明艺 on 2016/3/1.
16 | *
17 | * @author lrlmy@foxmail.com
18 | */
19 | public class VideoRecorderWrapper implements AudioRecorder.DateFeedback {
20 |     // video file width/height
21 |     private int videoWidth = -1, videoHeight = -1;
22 |     // whether recording is in progress
23 |     private boolean recording = false;
24 |     // whether recording has started
25 |     private boolean starting = false;
26 |     // frame rate
27 |     private int frameRate = 30;
28 | private AudioRecorder audioRecorder;
29 |
30 | private FFmpegFrameRecorder mFrameEncoder;
31 | private String path;
32 |
33 | // static {
34 | // System.loadLibrary("checkneon");
35 | // }
36 | //
37 | // public native static int checkNeonFromJNI();
38 |
39 | public VideoRecorderWrapper(int videoWidth, int videoHeight, int frameRate, String path) {
40 | this.videoWidth = videoWidth;
41 | this.videoHeight = videoHeight;
42 | this.frameRate = frameRate;
43 | this.path = path;
44 | init();
45 | }
46 |
47 | private void init() {
48 | audioRecorder = new AudioRecorder();
49 | audioRecorder.setDateFeedback(this);
50 | cacheFrame = new org.bytedeco.javacv.Frame(videoWidth, videoHeight, org.bytedeco.javacv.Frame.DEPTH_UBYTE, 4);
51 | initVideoRecorder(videoWidth, videoHeight);
52 | }
53 |
54 | public void initVideoRecorder(int width, int height) {
55 | this.videoWidth = width;
56 | this.videoHeight = height;
57 | // String path = Util.getSdcardPath() + "/lava/" + System.currentTimeMillis() + ".mp4";
58 | RecorderParameters recorderParameters = Util.getRecorderParameter(CONSTANTS.RESOLUTION_MEDIUM_VALUE);
59 | recorderParameters.setVideoFrameRate(frameRate);
60 |         // TODO: video recording resolution
61 | try {
62 | mFrameEncoder = FFmpegFrameRecorder.createDefault(path, width, height);
63 | mFrameEncoder.setFormat(recorderParameters.getVideoOutputFormat());
64 | mFrameEncoder.setSampleRate(recorderParameters.getAudioSamplingRate());
65 | mFrameEncoder.setFrameRate(recorderParameters.getVideoFrameRate());
66 | mFrameEncoder.setVideoCodec(recorderParameters.getVideoCodec());
67 | mFrameEncoder.setVideoQuality(recorderParameters.getVideoQuality());
68 | mFrameEncoder.setAudioQuality(recorderParameters.getVideoQuality());
69 | mFrameEncoder.setAudioCodec(recorderParameters.getAudioCodec());
70 | mFrameEncoder.setVideoBitrate(recorderParameters.getVideoBitrate());
71 | mFrameEncoder.setAudioBitrate(recorderParameters.getAudioBitrate());
72 | mFrameEncoder.setAudioChannels(recorderParameters.getAudioChannel());
73 | mFrameEncoder.setImageWidth(width);
74 | mFrameEncoder.setImageHeight(height);
75 | mFrameEncoder.start();
76 | } catch (FrameRecorder.Exception e) {
77 | e.printStackTrace();
78 | }
79 | Log.v("000", "initVideoRecorder width=" + width + ", height=" + height);
80 | }
81 |
82 | private org.bytedeco.javacv.Frame cacheFrame;
83 |
84 | public void write(Frame frame) {
85 |         mFrameEncoder.setTimestamp(frame.frameTimeMillis);// microseconds, despite the field name
86 | try {
87 | cacheFrame.image[0] = frame.image.getByteBuffer();
88 | mFrameEncoder.record(cacheFrame);
89 | } catch (FrameRecorder.Exception e) {
90 | e.printStackTrace();
91 | }
92 | }
93 |
94 | public void startRecording() {
95 | recording = true;
96 | if (!starting) {
97 | starting = true;
98 | }
99 | audioRecorder.startRecording();
100 | }
101 |
102 | public void pauseRecording() {
103 | recording = false;
104 | audioRecorder.pauseRecording();
105 | }
106 |
107 | public void stopRecording() {
108 | recording = false;
109 | audioRecorder.stopRecording();
110 | release();
111 | }
112 |
113 | public void release() {
114 | if (mFrameEncoder != null) {
115 | try {
116 | mFrameEncoder.stop();
117 | mFrameEncoder.release();
118 | } catch (FrameRecorder.Exception e) {
119 | e.printStackTrace();
120 | }
121 | mFrameEncoder = null;
122 | }
123 | }
124 |
125 | public boolean isRecording() {
126 | return recording;
127 | }
128 |
129 | public boolean isStarting() {
130 | return starting;
131 | }
132 |
133 | @Override
134 | public void feedback(ShortBuffer buffer) {
135 | try {
136 | mFrameEncoder.recordSamples(new Buffer[]{buffer});
137 | } catch (FrameRecorder.Exception e) {
138 | e.printStackTrace();
139 | }
140 | }
141 |
142 | public void setTimestampUpdate(AudioRecorder.TimestampUpdate timestampUpdate) {
143 | if (audioRecorder != null)
144 | audioRecorder.setTimestampUpdate(timestampUpdate);
145 | }
146 |
147 | public static class RecorderParameters {
148 | private static boolean AAC_SUPPORTED = Build.VERSION.SDK_INT >= 10;
149 | // private int videoCodec = avcodec.AV_CODEC_ID_H264;
150 | private int videoCodec = avcodec.AV_CODEC_ID_MPEG4;
151 | private int videoFrameRate = 30;
152 | //private int videoBitrate = 500 *1000;
153 | private int videoQuality = 2;
154 | private int audioCodec = AAC_SUPPORTED ? avcodec.AV_CODEC_ID_AAC : avcodec.AV_CODEC_ID_AMR_NB;
155 | private int audioChannel = 1;
156 | private int audioBitrate = 96000;//192000;//AAC_SUPPORTED ? 96000 : 12200;
157 | private int videoBitrate = 3500000;
158 | private int audioSamplingRate = AAC_SUPPORTED ? 44100 : 8000;
159 | private String videoOutputFormat = AAC_SUPPORTED ? "mp4" : "3gp";
160 |
161 |
162 | public static boolean isAAC_SUPPORTED() {
163 | return AAC_SUPPORTED;
164 | }
165 |
166 | public static void setAAC_SUPPORTED(boolean aAC_SUPPORTED) {
167 | AAC_SUPPORTED = aAC_SUPPORTED;
168 | }
169 |
170 | public String getVideoOutputFormat() {
171 | return videoOutputFormat;
172 | }
173 |
174 | public void setVideoOutputFormat(String videoOutputFormat) {
175 | this.videoOutputFormat = videoOutputFormat;
176 | }
177 |
178 | public int getAudioSamplingRate() {
179 | return audioSamplingRate;
180 | }
181 |
182 | public void setAudioSamplingRate(int audioSamplingRate) {
183 | this.audioSamplingRate = audioSamplingRate;
184 | }
185 |
186 | public int getVideoCodec() {
187 | return videoCodec;
188 | }
189 |
190 | public void setVideoCodec(int videoCodec) {
191 | this.videoCodec = videoCodec;
192 | }
193 |
194 | public int getVideoFrameRate() {
195 | return videoFrameRate;
196 | }
197 |
198 | public void setVideoFrameRate(int videoFrameRate) {
199 | this.videoFrameRate = videoFrameRate;
200 | }
201 |
202 |
203 | public int getVideoQuality() {
204 | return videoQuality;
205 | }
206 |
207 | public void setVideoQuality(int videoQuality) {
208 | this.videoQuality = videoQuality;
209 | }
210 |
211 | public int getAudioCodec() {
212 | return audioCodec;
213 | }
214 |
215 | public void setAudioCodec(int audioCodec) {
216 | this.audioCodec = audioCodec;
217 | }
218 |
219 | public int getAudioChannel() {
220 | return audioChannel;
221 | }
222 |
223 | public void setAudioChannel(int audioChannel) {
224 | this.audioChannel = audioChannel;
225 | }
226 |
227 | public int getAudioBitrate() {
228 | return audioBitrate;
229 | }
230 |
231 | public void setAudioBitrate(int audioBitrate) {
232 | this.audioBitrate = audioBitrate;
233 | }
234 |
235 | public int getVideoBitrate() {
236 | return videoBitrate;
237 | }
238 |
239 | public void setVideoBitrate(int videoBitrate) {
240 | this.videoBitrate = videoBitrate;
241 | }
242 | }
243 | }
244 |
--------------------------------------------------------------------------------
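Usage note: a lifecycle sketch for VideoRecorderWrapper, assuming a writable output path. Video frames are pushed separately via write(Frame) from the capture side, while the internal AudioRecorder feeds samples back through the DateFeedback callback:

VideoRecorderWrapper recorder = new VideoRecorderWrapper(
        720, 480, 30, Util.root() + "/demo.mp4");  // the constructor opens the FFmpeg encoder immediately
recorder.startRecording();    // starts audio capture; feedback() then drives recordSamples()
// ... the GL/camera side calls recorder.write(frame) for each captured frame ...
recorder.pauseRecording();    // pauses audio capture; the encoder stays open
recorder.startRecording();    // resumes
recorder.stopRecording();     // stops audio, then stops and releases the encoder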
/samples/src/main/java/com/lmy/samples/camera/MyCameraGLSurfaceView.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.camera;
2 |
3 | import android.content.Context;
4 | import android.graphics.Bitmap;
5 | import android.graphics.SurfaceTexture;
6 | import android.opengl.GLES20;
7 | import android.os.Handler;
8 | import android.os.Looper;
9 | import android.os.Message;
10 | import android.util.AttributeSet;
11 | import android.util.Log;
12 | import android.view.SurfaceHolder;
13 |
14 | import com.lmy.lycommon.camera.CameraInstance;
15 | import com.lmy.lycommon.gles.widget.CameraGLSurfaceView;
16 | import com.lmy.samples.camera.recorder.Frame;
17 | import com.lmy.samples.camera.recorder.Util;
18 | import com.lmy.samples.camera.recorder.VideoRecordManager;
19 |
20 | import org.bytedeco.javacpp.opencv_core;
21 |
22 | import javax.microedition.khronos.egl.EGLConfig;
23 | import javax.microedition.khronos.opengles.GL10;
24 |
25 | import static org.bytedeco.javacpp.opencv_core.cvCopy;
26 | import static org.bytedeco.javacpp.opencv_core.cvCreateImage;
27 | import static org.bytedeco.javacpp.opencv_core.cvFlip;
28 | import static org.bytedeco.javacpp.opencv_imgproc.cvResize;
29 |
30 | /**
31 | * Created by 李明艺 on 2016/2/29.
32 | *
33 | * @author lrlmy@foxmail.com
34 | *
35 |  * Camera capture widget rendered with OpenGL.
36 | */
37 | public class MyCameraGLSurfaceView extends CameraGLSurfaceView {
38 | private final static String TAG = "MyCameraGLSurfaceView";
39 | private final static int RESIZE_FIT = 0x0000;
40 |
41 | public int videoWidth = 720, videoHeight = 480;
42 | public int mWidth = 1080, mHeight = 720;
43 |     private opencv_core.IplImage srcImage;// frame buffer
44 |     private long maxLenght = 30000000;// max recording length, compared against frameTimeMillis (microseconds), i.e. 30 s
45 | private VideoRecordManager recoderManager;
46 | private boolean fitVideoSize = false;
47 | private String path;
48 | private String coverPath;
49 |
50 | public MyCameraGLSurfaceView(Context context) {
51 | super(context);
52 | }
53 |
54 | public MyCameraGLSurfaceView(Context context, AttributeSet attrs) {
55 | super(context, attrs);
56 | }
57 |
58 | @Override
59 | public CameraInstance cameraInstance() {
60 | return CameraInstance.getInstance(1080, 720);
61 | }
62 |
63 | private Handler mHandler = new Handler(Looper.getMainLooper()) {
64 | @Override
65 | public void handleMessage(Message msg) {
66 | super.handleMessage(msg);
67 | if (msg.what == RESIZE_FIT)
68 | reSize(mWidth, mHeight);
69 | }
70 | };
71 |
72 | public void initecoder() {
73 | initecoder(Util.root() + "/test.mp4");
74 | }
75 |
76 | public void initecoder(String path) {
77 | this.path = path;
78 | recoderManager = new VideoRecordManager(videoWidth, videoHeight, path);
79 | }
80 |
81 | private void fitVideo() {
82 | mWidth = viewWidth;
83 | mHeight = (int) (mWidth * videoHeight / (float) videoWidth);
84 | if (fitVideoSize)
85 | mHandler.sendEmptyMessage(RESIZE_FIT);
86 | }
87 |
88 | @Override
89 | public void onFrameAvailable(SurfaceTexture surfaceTexture) {
90 | super.onFrameAvailable(surfaceTexture);
91 | }
92 |
93 | @Override
94 | public void onSurfaceCreated(GL10 gl, EGLConfig config) {
95 | Log.v(TAG, "onSurfaceCreated...");
96 | super.onSurfaceCreated(gl, config);
97 | }
98 |
99 | @Override
100 | public void onSurfaceChanged(GL10 gl, int width, int height) {
101 | Log.i(TAG, "onSurfaceChanged...");
102 | super.onSurfaceChanged(gl, width, height);
103 | fitVideo();
104 |         // resolution changed; re-create the cached frame
105 | this.srcImage = cvCreateImage(new opencv_core.CvSize(mWidth, mHeight), opencv_core.IPL_DEPTH_8U, 4);
106 | }
107 |
108 | @Override
109 | public void surfaceDestroyed(SurfaceHolder holder) {
110 | super.surfaceDestroyed(holder);
111 | }
112 |
113 | private long htime = 0;
114 |
115 | @Override
116 | public void onDrawFrame(GL10 gl) {
117 | super.onDrawFrame(gl);
118 | // Log.i(TAG, "onDrawFrame..., time=" + (System.currentTimeMillis() - htime));
119 | // htime = System.currentTimeMillis();
120 | recordFrame();
121 | mLastStamp = getSurfaceTexture().getTimestamp();
122 | // mTextureDrawer.draw(mtx);
123 | }
124 |
125 | private long mLastStamp = 0;
126 | private long mStampCount = -1;
127 |
128 | private void recordFrame() {
129 | synchronized (recoderManager.mRecordStateLock) {
130 | if (recoderManager.mShouldRecord) {// && mVideoRecorder != null && mVideoRecorder.isRecording()
131 |                 mStampCount += (mStampCount > -1 ? (getSurfaceTexture().getTimestamp() - mLastStamp) : 1);// accumulate the timestamp (ns)
132 | long time = System.currentTimeMillis();
133 | Frame frame = recoderManager.getImageCache();
134 | if (frame == null)
135 | frame = Frame.create(videoWidth, videoHeight, opencv_core.IPL_DEPTH_8U, 4);
136 | if (frame != null) {
137 | GLES20.glReadPixels(getDrawViewport().x, (getDrawViewport().height - mHeight) / 2 + getDrawViewport().y, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, srcImage.getByteBuffer());
138 | cvResize(srcImage, frame.image);
139 | cvFlip(frame.image, frame.image, 0);
140 |                 // takeShot(frame.image);// save the cover image
141 | // frame.image = cvEncodeImage(".jpg",flippedImage.asCvMat()).asIplImage();
142 | frame.frameTimeMillis = mStampCount / 1000;
143 | recoderManager.pushCachedFrame(frame);
144 | Log.i("000", String.format("frame: time=%d, timestamp=%d, frameSize=%d", System.currentTimeMillis() - time, frame.frameTimeMillis, frame.image.imageSize()));
145 | if (recoderManager.getRecordingThread() != null) {
146 | synchronized (recoderManager.getRecordingThread()) {
147 | try {
148 | recoderManager.getRecordingThread().notifyAll();
149 | } catch (Exception e) {
150 | Log.e(TAG, "Notify failed: " + e.getMessage());
151 | }
152 | }
153 | }
154 | } else {
155 | Log.d(TAG, "Frame loss...");
156 | }
157 | if (frame.frameTimeMillis >= maxLenght)
158 | pauseRecording();
159 | }
160 | }
161 | }
162 |
163 | public void takeShot(final opencv_core.IplImage image) {
164 | if (coverPath == null) {
165 | this.coverPath = path + ".jpg";
166 | new Thread(new Runnable() {
167 | @Override
168 | public void run() {
169 | opencv_core.IplImage t;
170 | synchronized (image) {
171 | t = cvCreateImage(new opencv_core.CvSize(videoWidth, videoHeight), opencv_core.IPL_DEPTH_8U, 4);
172 | }
173 | cvCopy(image, t);
174 | Bitmap bmp = Bitmap.createBitmap(videoWidth, videoHeight, Bitmap.Config.ARGB_8888);
175 | bmp.copyPixelsFromBuffer(t.getByteBuffer());
176 | ImageUtil.saveBitmap(bmp, coverPath);
177 | }
178 | }).start();
179 | }
180 | }
181 |
182 | public synchronized void startRecording() {
183 | if (mStampCount < maxLenght * 1000)
184 | recoderManager.startRecording();
185 | }
186 |
187 | public synchronized void pauseRecording() {
188 | recoderManager.pauseRecording();
189 | }
190 |
191 | public synchronized void stopRecording() {
192 | recoderManager.endRecording();
193 | }
194 |
195 | public synchronized boolean isRecording() {
196 | return recoderManager.mShouldRecord;
197 | }
198 |
199 | public String getPath() {
200 | return path;
201 | }
202 |
203 | public String getCoverPath() {
204 | return coverPath;
205 | }
206 |
207 | public VideoRecordManager getRecoderManager() {
208 | return recoderManager;
209 | }
210 |
211 | public long getMaxLenght() {
212 | return maxLenght;
213 | }
214 |
215 | public void setMaxLenght(long maxLenght) {
216 | if (mStampCount < 0)
217 | this.maxLenght = maxLenght;
218 | }
219 |
220 | public boolean isFitVideoSize() {
221 | return fitVideoSize;
222 | }
223 |
224 | public void setFitVideoSize(boolean fitVideoSize) {
225 | this.fitVideoSize = fitVideoSize;
226 | }
227 |
228 | }
229 |
--------------------------------------------------------------------------------
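Usage note: a wiring sketch for MyCameraGLSurfaceView from a host Activity; the view id is hypothetical. The length cap is compared against frameTimeMillis, which the timestamp arithmetic above puts in microseconds:

MyCameraGLSurfaceView cameraView = (MyCameraGLSurfaceView) findViewById(R.id.camera_view); // hypothetical id
cameraView.setFitVideoSize(true);     // resize the view to the 720x480 video aspect on surface change
cameraView.initecoder();              // records to Util.root() + "/test.mp4"; initecoder(path) overrides
cameraView.setMaxLenght(15000000L);   // 15 s cap in microseconds; only effective before recording starts
cameraView.startRecording();
// ... user taps stop ...
cameraView.stopRecording();           // finalizes the file via VideoRecordManager.endRecording()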
/samples/src/main/res/layout/activity_render.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/samples/src/main/java/com/lmy/samples/camera/recorder/VideoRecordManager.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.camera.recorder;
2 |
3 | import android.os.AsyncTask;
4 | import android.os.Handler;
5 | import android.os.Looper;
6 | import android.os.Message;
7 | import android.util.Log;
8 |
9 | import org.bytedeco.javacpp.opencv_core;
10 |
11 | import java.util.LinkedList;
12 | import java.util.Queue;
13 |
14 | import static org.bytedeco.javacpp.opencv_core.cvCreateImage;
15 |
16 | /**
17 | * Created by 李明艺 on 2016/3/2.
18 | *
19 | * @author lrlmy@foxmail.com
20 |  * Video-recording cache layer wrapper.
21 | */
22 | public class VideoRecordManager {
23 | private final static String TAG = "VideoRecordManager";
24 | private static final int MAX_CACHED_FRAMES = 15;
25 | private static final int MAX_ENCODE_FRAMES = 5;
26 | private final static int RECORDING_PROGRESS = 0x0001;
27 | private final static int RECORDING_CONTROL = 0x0002;
28 | private int taskPoolSize = 0;
29 | public int[] mRecordStateLock = new int[0];
30 | public boolean mShouldRecord = false;
31 |     private LinkedList<Frame> mImageList;// frame recycle queue
32 |     private Queue<Frame> mFrameQueue;// frame queue
33 |
34 | public RecordingThread mRecordingThread;
35 | private VideoRecorderWrapper mVideoRecorder;
36 | public int mWidth = 1080, mHeight = 720;
37 | private String path;
38 | private EncodeListener encodeListener;
39 | private RecordingLintener recordingLintener;
40 | private boolean hasInit = false;
41 |
42 | public VideoRecordManager(int width, int height, String path) {
43 | this.mWidth = width;
44 | this.mHeight = height;
45 | this.path = path;
46 | init();
47 | }
48 |
49 | private Handler mHandler = new Handler(Looper.getMainLooper()) {
50 | @Override
51 | public void handleMessage(Message msg) {
52 | super.handleMessage(msg);
53 | if (msg.what == RECORDING_PROGRESS) {
54 | if (recordingLintener != null)
55 | recordingLintener.onProgress(msg.arg1);
56 | } else if (msg.what == RECORDING_CONTROL) {
57 | if (recordingLintener != null)
58 | recordingLintener.onControl(msg.arg1);
59 | }
60 | }
61 | };
62 |
63 | public void init() {
64 | this.hasInit = false;
65 | if (mVideoRecorder != null) {
66 | mVideoRecorder.stopRecording();
67 | mVideoRecorder = null;
68 | }
69 | initCache();
70 | mRecordingThread = new RecordingThread();
71 | mRecordingThread.start();
72 |         // NOTE: initialize the recorder
73 | mVideoRecorder = new VideoRecorderWrapper(mWidth, mHeight, 30, path);
74 |
75 | }
76 |
77 | private void initCache() {
78 | mImageList = new LinkedList<>();
79 | mFrameQueue = new LinkedList<>();
80 | for (int i = 0; i != MAX_CACHED_FRAMES; ++i) {
81 | mImageList.add(Frame.create(mWidth, mHeight, opencv_core.IPL_DEPTH_8U, 4));
82 | }
83 |
84 | this.hasInit = true;
85 | }
86 |
87 | public void pushCachedFrame(Frame frame) {
88 | synchronized (mFrameQueue) {
89 |             mHandler.sendMessage(createMessage(RECORDING_PROGRESS, (int) frame.frameTimeMillis));// time callback
90 | mFrameQueue.offer(frame);
91 | }
92 | }
93 |
94 | public Frame getCachedFrame() {
95 | synchronized (mFrameQueue) {
96 | return mFrameQueue.poll();
97 | }
98 | }
99 |
100 | public long getCachedSize() {
101 | synchronized (mFrameQueue) {
102 | if (mFrameQueue.size() <= 0) return 0;
103 | return mFrameQueue.size() * mFrameQueue.peek().image.imageSize();
104 | }
105 | }
106 |
107 | public long getRecycleCachedSize() {
108 | synchronized (mImageList) {
109 | if (mImageList.size() <= 0) return 0;
110 | return mImageList.size() * mImageList.peek().image.imageSize();
111 | }
112 | }
113 |
114 |     // recycle a used cached frame
115 | public void recycleCachedFrame(Frame frame) {
116 | synchronized (mImageList) {
117 | if (mShouldRecord)
118 | mImageList.offer(frame);
119 | else {
120 | if (mImageList.size() < MAX_CACHED_FRAMES)
121 | mImageList.offer(frame);
122 | else {
123 | frame.image.release();
124 | }
125 | }
126 | }
127 | }
128 |
129 |     // fetch an idle cached frame
130 | public Frame getImageCache() {
131 | synchronized (mImageList) {
132 | return mImageList.poll();
133 | }
134 | }
135 |
136 | private int totalFrameTemp = 0;
137 |
138 | public class RecordingThread extends Thread {
139 | private boolean isStart = true;
140 |
141 | public void stopRun() {
142 | isStart = false;
143 | }
144 |
145 | public boolean isStart() {
146 | return isStart;
147 | }
148 |
149 | @Override
150 | public void run() {
151 | super.run();
152 | while (isStart) {
153 |                 // wait
154 | // while (taskPoolSize >= MAX_ENCODE_FRAMES) {
155 | //// stopRun();
156 | //// return;
157 | // synchronized (this) {
158 | // try {
159 | // this.wait(30);
160 | // } catch (InterruptedException e) {
161 | // Log.e(TAG, "Recording runnable wait() : " + e.getMessage());
162 | // }
163 | // }
164 | // }
165 | if (mShouldRecord) totalFrameTemp = mFrameQueue.size();
166 | Frame frame = getCachedFrame();
167 |
168 | if (frame == null) {
169 | synchronized (this) {
170 | try {
171 | this.wait(30);
172 | } catch (InterruptedException e) {
173 | Log.e(TAG, "Recording runnable wait() : " + e.getMessage());
174 | }
175 | }
176 | continue;
177 | }
178 |                 // NOTE: encode the frame
179 | if (frame != null) {
180 | // fiveTimeCount = System.currentTimeMillis();
181 | // ++taskPoolSize;
182 | // new EncodeThread(frame).start();
183 | // Log.v(TAG, "task start, frame:" + frame.frameTimeMillis);
184 | // new EncodeTask().execute(frame);
185 | if (mVideoRecorder != null && mVideoRecorder.isStarting()) {
186 | long time = System.currentTimeMillis();
187 | mVideoRecorder.write(frame);
188 | if (totalFrameTemp > 0)
189 | onEncodeProgress((totalFrameTemp - mFrameQueue.size()) * 100 / totalFrameTemp, false);
190 | Log.v(TAG, String.format("frame:" + frame.frameTimeMillis + ", end. taskCount: %d, consume: %d, cacheSize: %d, recycleCachedSize: %d", taskPoolSize, (System.currentTimeMillis() - time), getCachedSize(), getRecycleCachedSize()));
191 | }
192 | // frame.image.release();
193 | recycleCachedFrame(frame);
194 | }
195 | }
196 | }
197 | }
198 |
199 | private long fiveTimeCount = 0;
200 |
201 |     private class EncodeTask extends AsyncTask<Frame, Integer, Boolean> {
202 |
203 | @Override
204 | protected Boolean doInBackground(Frame... params) {
205 |             // NOTE: encode the frame
206 | if (params != null) {
207 | if (mVideoRecorder != null && mVideoRecorder.isStarting()) {
208 | // Log.v(TAG, "frame:" + params[0].frameTimeMillis + ", start");
209 | // long time = System.currentTimeMillis();
210 | mVideoRecorder.write(params[0]);
211 | // Log.v(TAG, String.format("frame:" + params[0].frameTimeMillis + ", end. taskCount: %d, consume: %d, cacheSize: %d, recycleCachedSize: %d", taskPoolSize, (System.currentTimeMillis() - time), getCachedSize(), getRecycleCachedSize()));
212 | }
213 | // frame.image.release();
214 | recycleCachedFrame(params[0]);
215 | }
216 | --taskPoolSize;
217 | return true;
218 | }
219 |
220 | @Override
221 | protected void onPostExecute(Boolean result) {
222 | if (taskPoolSize == 0)
223 | Log.v(TAG, String.format("5 consume: %d", System.currentTimeMillis() - fiveTimeCount));
224 | }
225 |
226 | @Override
227 | protected void onProgressUpdate(Integer... values) {
228 | }
229 | }
230 |
231 | private Message createMessage(int what, int arg1) {
232 | Message msg = new Message();
233 | msg.what = what;
234 | msg.arg1 = arg1;
235 | return msg;
236 | }
237 |
238 | public synchronized void startRecording() {
239 | if (!hasInit) return;
240 | if (!mShouldRecord) {
241 | synchronized (mRecordStateLock) {
242 | mShouldRecord = true;
243 | mVideoRecorder.startRecording();
244 | mHandler.sendMessage(createMessage(RECORDING_CONTROL, 1));
245 | }
246 | }
247 | }
248 |
249 | public synchronized void pauseRecording() {
250 | if (!hasInit) return;
251 | if (mShouldRecord) {
252 | synchronized (mRecordStateLock) {
253 | mShouldRecord = false;
254 | mVideoRecorder.pauseRecording();
255 | mHandler.sendMessage(createMessage(RECORDING_CONTROL, 0));
256 | }
257 | }
258 | }
259 |
260 | public synchronized void endRecording() {
261 | if (!hasInit) return;
262 | Log.i(TAG, "notify quit...");
263 | synchronized (mRecordStateLock) {
264 | mShouldRecord = false;
265 | mVideoRecorder.stopRecording();
266 | }
267 |
268 | synchronized (mRecordingThread) {
269 | mRecordingThread.stopRun();
270 | }
271 |
272 | Log.i(TAG, "joining thread...");
273 | try {
274 | mRecordingThread.join();
275 | } catch (InterruptedException e) {
276 | Log.e(TAG, "Join recording thread err: " + e.getMessage());
277 | }
278 |
279 | mRecordingThread = null;
280 |
281 | Log.i(TAG, "saving recoring...");
282 |
283 | mImageList.clear();
284 | mImageList = null;
285 | mFrameQueue.clear();
286 | mFrameQueue = null;
287 | mVideoRecorder = null;
288 | onEncodeProgress(100, true);
289 | this.hasInit = false;
290 |
291 | Log.i(TAG, "recording OK");
292 | }
293 |
294 | public RecordingThread getRecordingThread() {
295 | return mRecordingThread;
296 | }
297 |
298 |     // whether recording has started
299 | public boolean isStarting() {
300 | if (!hasInit) return false;
301 | return mVideoRecorder.isStarting();
302 | }
303 |
304 |     // whether encoding has drained the cache (the progress value is not guaranteed to increase monotonically)
305 |     public boolean encodeCompeleted() {
306 |         return getCachedSize() <= 0;
307 | }
308 |
309 |     // update the encode progress
310 | private void onEncodeProgress(int progress, boolean saved) {
311 | if (encodeListener == null) return;
312 | encodeListener.onProgress(progress, saved);
313 | }
314 |
315 | public void setEncodeListener(EncodeListener encodeListener) {
316 | this.encodeListener = encodeListener;
317 | }
318 |
319 | public void setRecordingLintener(RecordingLintener recordingLintener) {
320 | this.recordingLintener = recordingLintener;
321 | }
322 |
323 | public interface EncodeListener {
324 | void onProgress(int progress, boolean saved);
325 | }
326 |
327 | public interface RecordingLintener {
328 | void onProgress(long time);
329 |
330 | void onControl(int state);
331 | }
332 | }
333 |
--------------------------------------------------------------------------------
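Usage note: a listener-wiring sketch for VideoRecordManager. In the samples it is created by MyCameraGLSurfaceView.initecoder(), so the standalone construction here is illustrative only, and the output path is hypothetical:

VideoRecordManager manager = new VideoRecordManager(720, 480, Util.root() + "/clip.mp4");
manager.setRecordingLintener(new VideoRecordManager.RecordingLintener() {
    @Override
    public void onProgress(long time) { }    // frame timestamp, posted to the main thread
    @Override
    public void onControl(int state) { }     // 1 = recording started, 0 = paused
});
manager.setEncodeListener(new VideoRecordManager.EncodeListener() {
    @Override
    public void onProgress(int progress, boolean saved) {
        if (saved) android.util.Log.i("Record", "file finalized");  // fired by endRecording()
    }
});
manager.startRecording();   // frames flow in from the GL thread: getImageCache() -> fill -> pushCachedFrame()
// ...
manager.endRecording();     // joins RecordingThread, clears the caches, reports (100, true)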
/library/src/main/java/com/lmy/lymedia/media/VideoPlayer.java:
--------------------------------------------------------------------------------
1 | package com.lmy.lymedia.media;
2 |
3 | import android.graphics.Bitmap;
4 | import android.graphics.Canvas;
5 | import android.graphics.Rect;
6 | import android.util.Log;
7 | import android.view.SurfaceHolder;
8 |
9 | import com.lmy.lymedia.utils.FrameUtil;
10 |
11 | import org.bytedeco.javacv.AndroidFrameConverter; import org.bytedeco.javacv.FFmpegFrameGrabber;
12 | import org.bytedeco.javacv.Frame;
13 | import org.bytedeco.javacv.FrameGrabber;
14 |
15 | import java.util.LinkedList;
16 | import java.util.Queue;
17 |
18 | import static org.bytedeco.javacpp.avutil.AV_PIX_FMT_RGB565;
19 |
20 | /**
21 | * Created by lmy on 2016/3/24.
22 | */
23 | public class VideoPlayer extends Player {
24 | private final static String TAG = "VideoPlayer";
25 | private SurfaceHolder mHolder;
26 |     private FFmpegFrameGrabber mFrameGrabber;// decoder
27 | private AudioDevice audioDevice;
28 | private AndroidFrameConverter mFrameConverter;
29 | private DecodeThread mDecodeThread;
30 | private PlayImageThread mPlayImageThread;
31 | private PlaySampleThread mPlaySampleThread;
32 | private String sourcePath;
33 |     // state flags
34 | private boolean hasInit = false;
35 |
36 | public static VideoPlayer create(SurfaceHolder mHolder) {
37 | return new VideoPlayer(mHolder);
38 | }
39 |
40 | public static VideoPlayer create(SurfaceHolder mHolder, String path) {
41 | return new VideoPlayer(mHolder, path);
42 | }
43 |
44 | public VideoPlayer(SurfaceHolder mHolder) {
45 | this.mHolder = mHolder;
46 | }
47 |
48 | public VideoPlayer(SurfaceHolder mHolder, String path) {
49 | this.mHolder = mHolder;
50 | setDataSource(path);
51 | }
52 |
53 | public void setDataSource(String path) {
54 | this.sourcePath = path;
55 | this.curFrameNumber = 0;
56 | this.hasInit = false;
57 | this.mFrameConverter = new AndroidFrameConverter();
58 | try {
59 |             if (mFrameGrabber != null) {// if an instance already exists, release it before re-initializing
60 | mFrameGrabber.stop();
61 | mFrameGrabber.release();
62 | mFrameGrabber = null;
63 | }
64 | if (audioDevice != null) {
65 | audioDevice.release();
66 | audioDevice = null;
67 | }
68 | mFrameGrabber = FFmpegFrameGrabber.createDefault(path);
69 | mFrameGrabber.setPixelFormat(AV_PIX_FMT_RGB565);
70 | mFrameGrabber.start();
71 |             // initialize derived playback info
72 | this.rate = Math.round(1000d / mFrameGrabber.getFrameRate());
73 |
74 | } catch (FrameGrabber.Exception e) {
75 | e.printStackTrace();
76 | }
77 | audioDevice = new AudioDevice(mFrameGrabber.getSampleRate(), mFrameGrabber.getAudioChannels());
78 | mDecodeThread = new DecodeThread();
79 | mDecodeThread.start();
80 | mPlayImageThread = new PlayImageThread();
81 | mPlaySampleThread = new PlaySampleThread();
82 |         this.hasInit = true;// mark initialization complete
83 |         if (isAutoPlay()) play();// start immediately if auto-play is enabled
84 | }
85 |
86 | @Override
87 | public void play() {
88 | if (!this.hasInit) return;
89 | super.play();
90 | }
91 |
92 | @Override
93 | public void pause() {
94 | if (!this.hasInit) return;
95 | super.pause();
96 | }
97 |
98 | @Override
99 | public void stop() {
100 | if (!this.hasInit) return;
101 | super.stop();
102 | mDecodeThread.stopRun();
103 | mPlayImageThread.stopRun();
104 | mPlaySampleThread.stopRun();
105 | try {
106 | synchronized (mFrameGrabber) {
107 | if (mFrameGrabber != null) {
108 | mFrameGrabber.stop();
109 | mFrameGrabber.release();
110 | mFrameGrabber = null;
111 | }
112 | }
113 | } catch (FrameGrabber.Exception e) {
114 | e.printStackTrace();
115 | }
116 | if (imageQueue != null) {
117 | imageQueue.clear();
118 | imageQueue = null;
119 | }
120 | if (sampleQueue != null) {
121 | sampleQueue.clear();
122 | sampleQueue = null;
123 | }
124 | }
125 |
126 | @Override
127 | public void seek(int number) {
128 | if (number > mFrameGrabber.getLengthInFrames()) return;
129 | super.seek(number);
130 | synchronized (mFrameGrabber) {
131 | try {
132 | mFrameGrabber.setFrameNumber(curFrameNumber);
133 | } catch (FrameGrabber.Exception e) {
134 | e.printStackTrace();
135 | }
136 | }
137 |
138 | }
139 |
140 | @Override
141 | public int getWidth() {
142 | if (!this.hasInit) return -1;
143 | return mFrameGrabber.getImageWidth();
144 | }
145 |
146 | @Override
147 | public int getHeight() {
148 | if (!this.hasInit) return -1;
149 | return mFrameGrabber.getImageHeight();
150 | }
151 |
152 | private boolean draw(Frame frame) {
153 | if (frame == null || frame.image == null) {
154 | return false;
155 | }
156 | Bitmap bmp = mFrameConverter.convert(frame);
157 | if (bmp == null) return false;
158 | synchronized (mHolder) {
159 | Canvas canvas = mHolder.lockCanvas();
160 | if (canvas == null) return true;
161 | canvas.drawBitmap(bmp, null, new Rect(0, 0, canvas.getWidth(), frame.imageHeight * canvas.getWidth() / frame.imageWidth), null);
162 | mHolder.unlockCanvasAndPost(canvas);
163 | }
164 | return true;
165 | }
166 |
167 | private class BaseThread extends Thread {
168 | protected boolean run = false;
169 |
170 | protected void sleepFunc(long time) {
171 | try {
172 | Thread.sleep(time);
173 | } catch (InterruptedException e) {
174 | e.printStackTrace();
175 | }
176 | }
177 |
178 | @Override
179 | public void run() {
180 | super.run();
181 | run = true;
182 | }
183 |
184 | public boolean isRun() {
185 | return run;
186 | }
187 |
188 | public void stopRun() {
189 | this.run = false;
190 | }
191 | }
192 |
193 | private final static int FRAME_CACHE_LIMIT = 15;
194 | private final static int SAMPLE_CACHE_LIMIT = 50;
195 |     private Queue<Frame> imageQueue;
196 |     private Queue<Frame> sampleQueue;
197 |
198 | private boolean offerImage(Frame frame) {
199 | if (imageQueue == null)
200 | imageQueue = new LinkedList<>();
201 | synchronized (imageQueue) {
202 | if (imageQueue.size() >= FRAME_CACHE_LIMIT) return false;
203 | imageQueue.offer(frame);
204 | }
205 | return true;
206 | }
207 |
208 | private Frame pollImage() {
209 | synchronized (imageQueue) {
210 | return imageQueue.poll();
211 | }
212 | }
213 |
214 | private int imageQueueSize() {
215 | synchronized (imageQueue) {
216 | return imageQueue.size();
217 | }
218 | }
219 |
220 | private boolean offerSample(Frame frame) {
221 | if (sampleQueue == null)
222 | sampleQueue = new LinkedList<>();
223 | synchronized (sampleQueue) {
224 | if (sampleQueue.size() >= SAMPLE_CACHE_LIMIT) return false;
225 | sampleQueue.offer(frame);
226 | }
227 | return true;
228 | }
229 |
230 | private Frame pollSample() {
231 | synchronized (sampleQueue) {
232 | return sampleQueue.poll();
233 | }
234 | }
235 |
236 | private int sampleQueueSize() {
237 | synchronized (sampleQueue) {
238 | return sampleQueue.size();
239 | }
240 | }
241 |
242 | private void tryPlay() {
243 | synchronized (mPlayImageThread) {
244 | if (!mPlayImageThread.isAlive())
245 | mPlayImageThread.start();
246 | }
247 | // synchronized (mPlaySampleThread) {
248 | // if (!mPlaySampleThread.isAlive())
249 | // mPlaySampleThread.start();
250 | // }
251 | }
252 |
253 | private class DecodeThread extends BaseThread {
254 | @Override
255 | public void run() {
256 | super.run();
257 | synchronized (mFrameGrabber) {
258 | imageQueue = new LinkedList<>();
259 | sampleQueue = new LinkedList<>();
260 | seek(0);
261 | try {
262 | Frame image = mFrameGrabber.grabImage();
263 | Frame sample = mFrameGrabber.grabSamples();
264 | while (run && curFrameNumber < mFrameGrabber.getLengthInFrames() - 5) {
265 | if (!play) {
266 | sleepFunc(rate);
267 | continue;
268 | }
269 | long time = System.currentTimeMillis();
270 | if (FrameUtil.frameType(image) == 0) {
271 | if (offerImage(FrameUtil.copy(image))) {
272 | // Log.v(TAG, "offer image!");
273 | image = mFrameGrabber.grabImage();
274 | }
275 | }
276 | if (FrameUtil.frameType(sample) == 1) {
277 | // if (offerSample(sample)) {
278 | // Log.v(TAG, "offer sample!");
279 | // sample = mFrameGrabber.grabSamples();
280 | // }
281 | if (play) {
282 | // Log.v(TAG, "offer sample!");
283 | audioDevice.writeSamples(sample.samples);
284 | sample = mFrameGrabber.grabSamples();
285 | }
286 | }
287 | Log.v(TAG, "time=" + (System.currentTimeMillis() - time));
288 | if (imageQueueSize() >= FRAME_CACHE_LIMIT)
289 | tryPlay();
290 |
291 | // Log.v(TAG, "time=" + (System.currentTimeMillis() - time));
292 | // Frame frame = mFrameGrabber.grab();
293 | // int type = frameType(frame);
294 | // if (type == 0) {
295 | // frame = copy(frame);
296 | // while (isRun() && !offerImage(frame)) {
297 | //// if (sampleQueueSize() >= SAMPLE_CACHE_LIMIT)
298 | // tryPlay();
299 | // Log.v(TAG, "try offer image!");
300 | // sleepFunc(2);
301 | // }
302 | // } else if (type == 1) {
303 | // frame = copy(frame);
304 | // while (isRun() && !offerSample(frame)) {
305 | //// if (imageQueueSize() >= FRAME_CACHE_LIMIT)
306 | // tryPlay();
307 | //// Log.v(TAG, "try offer sample!");
308 | // sleepFunc(2);
309 | // }
310 | // }
311 | }
312 | } catch (FrameGrabber.Exception e) {
313 | e.printStackTrace();
314 | }
315 | }
316 | }
317 | }
318 |
319 | long lastTime = 0;
320 |
321 | private class PlayImageThread extends BaseThread {
322 | @Override
323 | public void run() {
324 | super.run();
325 | while (run && curFrameNumber < mFrameGrabber.getLengthInFrames() - 5) {
326 | if (!play) {
327 | lastTime = 0;
328 | sleepFunc(rate);
329 | continue;
330 | }
331 | // long wait = rate - System.currentTimeMillis() + lastTime;
332 | // Log.v(TAG, "wait=" + wait);
333 | // if (lastTime == 0 || (wait > -1000000 && wait < 10)) {
334 | // sleepFunc(wait < 0 ? 0 : wait);
335 | // lastTime = System.currentTimeMillis();
336 | // if (imageQueueSize() > 0)
337 | // if (draw(pollImage())) {
338 | //// Log.v(TAG, "draw image");
339 | //// long wait = rate - System.currentTimeMillis() + lastTime;
340 | //// sleepFunc(wait < 0 ? 0 : wait);
341 | // }
342 | // }
343 | // if (sampleQueueSize() > 0)
344 | // audioDevice.writeSamples(pollSample().samples);
345 |
346 | long time = System.currentTimeMillis();
347 | if (imageQueueSize() > 0)
348 | if (draw(pollImage())) {
349 | ++curFrameNumber;
350 | // Log.v(TAG, "draw image");
351 | long wait = rate - System.currentTimeMillis() + time;
352 | sleepFunc(wait < 0 ? 0 : wait);
353 | }
354 | }
355 | }
356 | }
357 |
358 | private class PlaySampleThread extends BaseThread {
359 |
360 | @Override
361 | public void run() {
362 | super.run();
363 | while (run && curFrameNumber < mFrameGrabber.getLengthInFrames() - 5) {
364 | if (!play) {
365 | sleepFunc(rate);
366 | continue;
367 | }
368 | if (sampleQueueSize() > 0)
369 | audioDevice.writeSamples(pollSample().samples);
370 | // else
371 | // sleepFunc(5);
372 | }
373 | }
374 | }
375 | }
376 |
--------------------------------------------------------------------------------
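
Note: the player above hands frames from a decode thread to playback threads through LinkedLists guarded by synchronized blocks and capped at FRAME_CACHE_LIMIT / SAMPLE_CACHE_LIMIT. Below is a minimal sketch of the same producer/consumer hand-off using java.util.concurrent instead; the class and member names (FramePump, CAPACITY) are illustrative only, not part of the library.

import org.bytedeco.javacv.Frame;

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

class FramePump {
    private static final int CAPACITY = 15; // mirrors FRAME_CACHE_LIMIT above

    // ArrayBlockingQueue is bounded and thread-safe, so no explicit
    // synchronized blocks or size checks are needed.
    private final BlockingQueue<Frame> images = new ArrayBlockingQueue<>(CAPACITY);

    // Producer side (decode thread): blocks while the cache is full,
    // replacing the offer()-and-retry loop.
    void push(Frame decoded) throws InterruptedException {
        images.put(decoded);
    }

    // Consumer side (render thread): blocks until a frame is available.
    Frame next() throws InterruptedException {
        return images.take();
    }
}

--------------------------------------------------------------------------------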
/library/src/main/java/com/lmy/lymedia/media/FFmpegPlayer.java:
--------------------------------------------------------------------------------
1 | package com.lmy.lymedia.media;
2 |
3 | import android.graphics.Bitmap;
4 | import android.graphics.Canvas;
5 | import android.graphics.Color;
6 | import android.graphics.Rect;
7 | import android.util.Log;
8 | import android.view.SurfaceHolder;
9 |
10 | import com.lmy.lymedia.media.render.Filter;
11 |
12 | import org.bytedeco.javacv.AndroidFrameConverter;
13 | import org.bytedeco.javacv.FFmpegFrameFilter;
14 | import org.bytedeco.javacv.FFmpegFrameGrabber;
15 | import org.bytedeco.javacv.Frame;
16 | import org.bytedeco.javacv.FrameGrabber;
17 | import org.bytedeco.javacv.OpenCVFrameConverter;
18 |
19 | import java.nio.Buffer;
20 | import java.nio.FloatBuffer;
21 | import java.util.LinkedList;
22 | import java.util.Queue;
23 |
24 | import static org.bytedeco.javacpp.avutil.AV_PIX_FMT_RGB565;
25 | import static org.bytedeco.javacpp.avutil.AV_PIX_FMT_RGBA;
26 |
27 | /**
28 | * Created by Administrator on 2016/3/23.
29 | */
30 | public class FFmpegPlayer extends Player {
31 | private final static String TAG = "FFmpegPlayer";
32 | private SurfaceHolder mHolder;
33 |     private FFmpegFrameGrabber mFrameGrabber;//video decoder
34 |     private FFmpegFrameGrabber mAudioGrabber;//audio decoder
35 |     private AndroidFrameConverter mFrameConverter;
36 |     private Frame cacheFrame;//cached frame
37 | private PlayerThread mPlayerThread;
38 | private AudioThread audioThread;
39 |
40 |     //state flags
41 | private boolean hasInit = false;
42 |
43 |     //configuration
44 | private String sourcePath;
45 | private Filter mFilter;
46 |
47 | public static FFmpegPlayer create(SurfaceHolder mHolder) {
48 | return new FFmpegPlayer(mHolder);
49 | }
50 |
51 | public static FFmpegPlayer create(SurfaceHolder mHolder, String path) {
52 | return new FFmpegPlayer(mHolder, path);
53 | }
54 |
55 | public FFmpegPlayer(SurfaceHolder mHolder) {
56 | this.mHolder = mHolder;
57 | }
58 |
59 | public FFmpegPlayer(SurfaceHolder mHolder, String path) {
60 | this.mHolder = mHolder;
61 | setDataSource(path);
62 | }
63 |
64 | public void setDataSource(String path) {
65 | this.sourcePath = path;
66 | this.curFrameNumber = 0;
67 | this.hasInit = false;
68 | clearView();
69 | mFrameConverter = new AndroidFrameConverter();
70 | try {
71 |             //If an instance already exists, release its resources before re-initializing
72 | if (mFrameGrabber != null) {
73 | mFrameGrabber.stop();
74 | mFrameGrabber.release();
75 | mFrameGrabber = null;
76 | }
77 | if (mAudioGrabber != null) {
78 | mAudioGrabber.stop();
79 | mAudioGrabber.release();
80 | mAudioGrabber = null;
81 | }
82 | mFrameGrabber = FFmpegFrameGrabber.createDefault(path);
83 | mFrameGrabber.setPixelFormat(AV_PIX_FMT_RGBA);
84 | mAudioGrabber = FFmpegFrameGrabber.createDefault(path);
85 | mFrameGrabber.start();
86 | this.rate = Math.round(1000d / mFrameGrabber.getFrameRate());
87 |             Log.v("000", String.format("width=%d, height=%d, delay=%d, frame length=%d", mFrameGrabber.getImageWidth(), mFrameGrabber.getImageHeight(), rate, mFrameGrabber.getLengthInFrames()));
88 |             Log.v("init", "AudioChannels=" + mFrameGrabber.getAudioChannels() + ", SampleRate=" + mFrameGrabber.getSampleRate());
89 | mAudioGrabber.start();
90 | } catch (FrameGrabber.Exception e) {
91 | e.printStackTrace();
92 | }
93 | mPlayerThread = new PlayerThread();
94 | audioThread = new AudioThread();
95 | audioThread.initTrack(mFrameGrabber.getSampleRate(), mFrameGrabber.getAudioChannels());
96 | mPlayerThread.start();
97 | this.hasInit = true;
98 | if (isAutoPlay()) play();
99 | }
100 |
101 | private int frameType(Frame frame) {
102 | if (frame == null) return -1;
103 | if (frame.image != null) return 0;
104 | else if (frame.samples != null) return 1;
105 | else return -1;
106 | }
107 |
108 | private class AudioThread extends BaseThread {
109 | private AudioDevice audioDevice;
110 |         private Queue<Buffer> buffers;
111 |
112 | public AudioThread() {
113 | buffers = new LinkedList<>();
114 | }
115 |
116 | public void initTrack(int sampleRate, int channels) {
117 | audioDevice = new AudioDevice(sampleRate, channels);
118 | }
119 |
120 | public AudioDevice getAudioDevice() {
121 | return audioDevice;
122 | }
123 |
124 | @Override
125 | public void run() {
126 | super.run();
127 | sleepFunc(rate);
128 | while (run) {
129 | if (!play) {//|| size() <= 1
130 | sleepFunc(rate);
131 | continue;
132 | }
133 | synchronized (mAudioGrabber) {
134 | if (mAudioGrabber.getFrameNumber() >= curFrameNumber + 1) {
135 | sleepFunc(rate);
136 | continue;
137 | }
138 | try {
139 | if (!play) continue;
140 | Frame f = mAudioGrabber.grabSamples();
141 | if (frameType(f) == 1) {
142 | audioDevice.writeSamples(f.samples);
143 | } else {
144 | sleepFunc(5);
145 | }
146 | } catch (FrameGrabber.Exception e) {
147 | e.printStackTrace();
148 | }
149 | }
150 | }
151 | }
152 |
153 | public int size() {
154 | synchronized (this.buffers) {
155 | return this.buffers.size();
156 | }
157 | }
158 |
159 | public Buffer poll() {
160 | synchronized (this.buffers) {
161 | return this.buffers.poll();
162 | }
163 | }
164 |
165 | public void offer(Buffer buffer) {
166 | synchronized (this.buffers) {
167 | this.buffers.offer(buffer);
168 | }
169 | }
170 |
171 | public void write(Buffer[] buffers) {
172 | synchronized (this.buffers) {
173 | for (int i = 0; i < buffers.length; i++) {
174 |                 FloatBuffer fb = (FloatBuffer) buffers[i];
175 | fb.rewind();
176 | float[] data = new float[fb.capacity()];
177 | fb.get(data);
178 | offer(FloatBuffer.wrap(data));
179 | }
180 | }
181 | }
182 |
183 | @Override
184 | public void stopRun() {
185 | super.stopRun();
186 | audioDevice.release();
187 | }
188 | }
189 |
190 | private void clearView() {
191 | synchronized (mHolder) {
192 | Canvas canvas = mHolder.lockCanvas();
193 | if (canvas == null) return;
194 | canvas.drawColor(Color.BLACK);
195 | mHolder.unlockCanvasAndPost(canvas);
196 | }
197 | }
198 |
199 |
200 | private Frame filter(Frame frame) {
201 | if (mFilter != null && mFilter.isStarting())
202 | return mFilter.filter(frame);
203 | return frame;
204 | }
205 |
206 | private boolean draw(Frame frame) {
207 | if (frame == null || frame.image == null) {
208 | return false;
209 | }
210 | // try {
211 | // mFilter.push(frame);
212 | // while ((frame = mFilter.pull()) != null) {
213 | //
214 | // }
215 | // } catch (FrameFilter.Exception e) {
216 | // e.printStackTrace();
217 | // }
218 | frame = filter(frame);
219 | Bitmap bmp = mFrameConverter.convert(frame);
220 | if (bmp == null) return false;
221 | synchronized (mHolder) {
222 | Canvas canvas = mHolder.lockCanvas();
223 | if (canvas == null) return true;
224 | canvas.drawBitmap(bmp, null, new Rect(0, 0, canvas.getWidth(), frame.imageHeight * canvas.getWidth() / frame.imageWidth), null);
225 | mHolder.unlockCanvasAndPost(canvas);
226 | }
227 | return true;
228 | }
229 |
230 | private class PlayerThread extends BaseThread {
231 |
232 | @Override
233 | public void run() {
234 | super.run();
235 | try {
236 | seek(5);
237 | audioThread.start();
238 | synchronized (mFrameGrabber) {
239 | while (run && curFrameNumber < mFrameGrabber.getLengthInFrames() - 5) {
240 | if (!play) {
241 | sleepFunc(rate);
242 | continue;
243 | }
244 | long time = System.currentTimeMillis();
245 | if (!play) continue;
246 | cacheFrame = mFrameGrabber.grabImage();
247 | long time2 = System.currentTimeMillis();
248 | if (draw(cacheFrame)) {//frameType(cacheFrame) == 0
249 | ++curFrameNumber;
250 |                             if (isLooping() && curFrameNumber >= mFrameGrabber.getLengthInFrames() - 5) {//loop playback
251 | Log.w(TAG, "rePlay!!!");
252 | seek(0);
253 | continue;
254 | }
255 | long wait = rate - System.currentTimeMillis() + time;
256 | // Log.v(TAG, "grabber time=" + (time2 - time) + ", draw time=" + (rate - wait - time2 + time) + ", wait=" + wait);
257 | // if (wait < 0)
258 | // Log.w(TAG, "wait=" + wait + ", Rendering time is low!");
259 | sleepFunc(wait < 0 ? 0 : wait);
260 |                         } else {//force a frame skip
261 | ++curFrameNumber;
262 | seek(curFrameNumber);
263 | }
264 | }
265 | // else if (frameType(cacheFrame) == 1) {
266 | // audioThread.getAudioDevice().writeSamples(cacheFrame.samples);
267 | // audioThread.write(cacheFrame.samples);
268 | // }
269 | }
270 | } catch (FrameGrabber.Exception e) {
271 | e.printStackTrace();
272 | }
273 | }
274 | }
275 |
276 | private class BaseThread extends Thread {
277 |         protected volatile boolean run = false;//written by stopRun() from another thread
278 |
279 | protected void sleepFunc(long time) {
280 | try {
281 | Thread.sleep(time);
282 | } catch (InterruptedException e) {
283 | e.printStackTrace();
284 | }
285 | }
286 |
287 | @Override
288 | public void run() {
289 | super.run();
290 | run = true;
291 | }
292 |
293 | public boolean isRun() {
294 | return run;
295 | }
296 |
297 | public void stopRun() {
298 | this.run = false;
299 | }
300 | }
301 |
302 | @Override
303 | public void seek(int number) {
304 | synchronized (mFrameGrabber) {
305 | if (number > mFrameGrabber.getLengthInFrames()) return;
306 | super.seek(number);
307 | this.curFrameNumber = number;
308 | try {
309 | mFrameGrabber.setFrameNumber(curFrameNumber);
310 | synchronized (mAudioGrabber) {
311 | mAudioGrabber.setFrameNumber(curFrameNumber);
312 | }
313 | } catch (FrameGrabber.Exception e) {
314 | e.printStackTrace();
315 | }
316 | }
317 |
318 | }
319 |
320 | @Override
321 | public void play() {
322 | if (!hasInit) return;
323 | super.play();
324 | }
325 |
326 | @Override
327 | public void pause() {
328 | if (!hasInit) return;
329 | super.pause();
330 | }
331 |
332 | @Override
333 | public void stop() {
334 | if (!hasInit) return;
335 | super.stop();
336 | mPlayerThread.stopRun();
337 | audioThread.stopRun();
338 | if (mFilter != null) {
339 | mFilter.onStop();
340 | mFilter = null;
341 | }
342 | try {
343 |             if (mFrameGrabber != null) {
344 |                 synchronized (mFrameGrabber) {
345 |                     mFrameGrabber.stop();
346 |                     mFrameGrabber.release();
347 |                     mFrameGrabber = null;
348 |                 }
349 |             }
350 |             if (mAudioGrabber != null) {
351 |                 synchronized (mAudioGrabber) {
352 |                     mAudioGrabber.stop();
353 |                     mAudioGrabber.release();
354 |                     mAudioGrabber = null;
355 |                 }
356 |             }
357 | } catch (FrameGrabber.Exception e) {
358 | e.printStackTrace();
359 | }
360 | }
361 |
362 | public void setFilter(Filter filter) {
363 | this.mFilter = filter;
364 | mFilter.onCreate(getWidth(), getHeight());
365 | }
366 |
367 | @Override
368 | public int getWidth() {
369 | return mFrameGrabber.getImageWidth();
370 | }
371 |
372 | @Override
373 | public int getHeight() {
374 | return mFrameGrabber.getImageHeight();
375 | }
376 |
377 | @Override
378 | public boolean isLooping() {
379 | return looping;
380 | }
381 |
382 | @Override
383 | public void setLooping(boolean looping) {
384 | this.looping = looping;
385 | }
386 |
387 | @Override
388 | public boolean isAutoPlay() {
389 | return autoPlay;
390 | }
391 |
392 | @Override
393 | public void setAutoPlay(boolean autoPlay) {
394 | this.autoPlay = autoPlay;
395 | }
396 |
397 | public String getSourcePath() {
398 | return sourcePath;
399 | }
400 | }
401 |
--------------------------------------------------------------------------------
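
Note: FFmpegPlayer above drives playback by grabbing decoded frames, converting them to Bitmaps, and sleeping away the remainder of each frame interval. Below is a stripped-down, self-contained sketch of that grab/convert/pace loop, assuming a placeholder file path and omitting the SurfaceHolder drawing, audio, and seek/loop handling.

import android.graphics.Bitmap;

import org.bytedeco.javacv.AndroidFrameConverter;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber;

import static org.bytedeco.javacpp.avutil.AV_PIX_FMT_RGBA;

class GrabLoopSketch {
    void play(String path) { // path: placeholder, e.g. a video file on the sdcard
        AndroidFrameConverter converter = new AndroidFrameConverter();
        try {
            FFmpegFrameGrabber grabber = FFmpegFrameGrabber.createDefault(path);
            grabber.setPixelFormat(AV_PIX_FMT_RGBA); // RGBA matches AndroidFrameConverter
            grabber.start();
            long frameMillis = Math.round(1000d / grabber.getFrameRate());
            Frame frame;
            while ((frame = grabber.grabImage()) != null) {
                long begin = System.currentTimeMillis();
                Bitmap bmp = converter.convert(frame); // draw bmp to a SurfaceHolder here
                long wait = frameMillis - (System.currentTimeMillis() - begin);
                if (wait > 0) Thread.sleep(wait); // pace rendering to the source frame rate
            }
            grabber.stop();
            grabber.release();
        } catch (FrameGrabber.Exception | InterruptedException e) {
            e.printStackTrace();
        }
    }
}

--------------------------------------------------------------------------------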
/samples/src/main/java/com/lmy/samples/camera/CameraInstance.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.camera;
2 |
3 | import android.graphics.PixelFormat;
4 | import android.graphics.Rect;
5 | import android.graphics.SurfaceTexture;
6 | import android.hardware.Camera;
7 | import android.os.Build;
8 | import android.util.Log;
9 |
10 | import java.io.IOException;
11 | import java.util.ArrayList;
12 | import java.util.Collections;
13 | import java.util.Comparator;
14 | import java.util.List;
15 |
16 | /**
17 | * Created by wangyang on 15/7/27.
18 | */
19 |
20 |
21 | // Camera should only be used as a singleton
22 | public class CameraInstance {
23 | public static final String TAG = "CameraInstance";
24 |
25 |     private static final String ASSERT_MSG = "CameraDevice is null! Please check.";
26 |
27 | private Camera mCameraDevice;
28 | private Camera.Parameters mParams;
29 |
30 | public static final int DEFAULT_PREVIEW_RATE = 30;
31 | public static final int MAX_PREVIEW_RATE = 60;
32 | public static final int MIN_PREVIEW_RATE = 30;
33 |
34 |
35 | private boolean mIsPreviewing = false;
36 |
37 | private int mDefaultCameraID = -1;
38 |
39 | private static CameraInstance mThisInstance;
40 | private int mPreviewWidth;
41 | private int mPreviewHeight;
42 |
43 | private int mPictureWidth = 1000;
44 | private int mPictureHeight = 1000;
45 |
46 | private int mPreferPreviewWidth = 640;
47 | private int mPreferPreviewHeight = 640;
48 |
49 | private int mFacing = 0;
50 |
51 | private CameraInstance() {
52 | }
53 |
54 | public static synchronized CameraInstance getInstance() {
55 | if (mThisInstance == null) {
56 | mThisInstance = new CameraInstance();
57 | }
58 | return mThisInstance;
59 | }
60 |
61 | public boolean isPreviewing() {
62 | return mIsPreviewing;
63 | }
64 |
65 | public int previewWidth() {
66 | return mPreviewWidth;
67 | }
68 |
69 | public int previewHeight() {
70 | return mPreviewHeight;
71 | }
72 |
73 | public int pictureWidth() {
74 | return mPictureWidth;
75 | }
76 |
77 | public int pictureHeight() {
78 | return mPictureHeight;
79 | }
80 |
81 | public void setPreferPreviewSize(int w, int h) {
82 | mPreferPreviewHeight = w;
83 | mPreferPreviewWidth = h;
84 | }
85 |
86 | public interface CameraOpenCallback {
87 | void cameraReady();
88 | }
89 |
90 | public boolean tryOpenCamera(CameraOpenCallback callback) {
91 | return tryOpenCamera(callback, Camera.CameraInfo.CAMERA_FACING_BACK);
92 | }
93 |
94 | public int getFacing() {
95 | return mFacing;
96 | }
97 |
98 | public synchronized boolean tryOpenCamera(CameraOpenCallback callback, int facing) {
99 | Log.i(TAG, "try open camera...");
100 |
101 | try {
102 | if (Build.VERSION.SDK_INT > Build.VERSION_CODES.FROYO) {
103 | int numberOfCameras = Camera.getNumberOfCameras();
104 |
105 | Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
106 | for (int i = 0; i < numberOfCameras; i++) {
107 | Camera.getCameraInfo(i, cameraInfo);
108 | if (cameraInfo.facing == facing) {
109 | mDefaultCameraID = i;
110 | mFacing = facing;
111 | }
112 | }
113 | }
114 | stopPreview();
115 | if (mCameraDevice != null)
116 | mCameraDevice.release();
117 |
118 | if (mDefaultCameraID >= 0)
119 | mCameraDevice = Camera.open(mDefaultCameraID);
120 | else
121 | mCameraDevice = Camera.open();
122 | } catch (Exception e) {
123 | Log.e(TAG, "Open Camera Failed!");
124 | e.printStackTrace();
125 | mCameraDevice = null;
126 | return false;
127 | }
128 |
129 | if (mCameraDevice != null) {
130 | Log.i(TAG, "Camera opened!");
131 |
132 | try {
133 | initCamera(DEFAULT_PREVIEW_RATE);
134 | } catch (Exception e) {
135 | mCameraDevice.release();
136 | mCameraDevice = null;
137 | return false;
138 | }
139 |
140 | if (callback != null) {
141 | callback.cameraReady();
142 | }
143 |
144 | return true;
145 | }
146 |
147 | return false;
148 | }
149 |
150 | public synchronized void stopCamera() {
151 | if (mCameraDevice != null) {
152 | mIsPreviewing = false;
153 | mCameraDevice.stopPreview();
154 | mCameraDevice.setPreviewCallback(null);
155 | mCameraDevice.release();
156 | mCameraDevice = null;
157 | }
158 | }
159 |
160 | public boolean isCameraOpened() {
161 | return mCameraDevice != null;
162 | }
163 |
164 | public synchronized void startPreview(SurfaceTexture texture) {
165 | Log.i(TAG, "Camera startPreview...");
166 | if (mIsPreviewing) {
167 | Log.e(TAG, "Err: camera is previewing...");
168 | // stopPreview();
169 | return;
170 | }
171 |
172 | if (mCameraDevice != null) {
173 | try {
174 | mCameraDevice.setPreviewTexture(texture);
175 | } catch (IOException e) {
176 | e.printStackTrace();
177 | }
178 |
179 | mCameraDevice.startPreview();
180 | mIsPreviewing = true;
181 | }
182 | }
183 |
184 | public synchronized void stopPreview() {
185 | if (mIsPreviewing && mCameraDevice != null) {
186 | Log.i(TAG, "Camera stopPreview...");
187 | mIsPreviewing = false;
188 | mCameraDevice.stopPreview();
189 | }
190 | }
191 |
192 | public synchronized Camera.Parameters getParams() {
193 | if (mCameraDevice != null)
194 | return mCameraDevice.getParameters();
195 | assert mCameraDevice != null : ASSERT_MSG;
196 | return null;
197 | }
198 |
199 | public synchronized void setParams(Camera.Parameters param) {
200 | if (mCameraDevice != null) {
201 | mParams = param;
202 | mCameraDevice.setParameters(mParams);
203 | }
204 | assert mCameraDevice != null : ASSERT_MSG;
205 | }
206 |
207 | public Camera getCameraDevice() {
208 | return mCameraDevice;
209 | }
210 |
211 |     //sort descending: largest sizes first
212 |     private Comparator<Camera.Size> comparatorBigger = new Comparator<Camera.Size>() {
213 | @Override
214 | public int compare(Camera.Size lhs, Camera.Size rhs) {
215 | int w = rhs.width - lhs.width;
216 | if (w == 0)
217 | return rhs.height - lhs.height;
218 | return w;
219 | }
220 | };
221 |
222 |     //sort ascending: smallest sizes first
223 |     private Comparator<Camera.Size> comparatorSmaller = new Comparator<Camera.Size>() {
224 | @Override
225 | public int compare(Camera.Size lhs, Camera.Size rhs) {
226 | int w = lhs.width - rhs.width;
227 | if (w == 0)
228 | return lhs.height - rhs.height;
229 | return w;
230 | }
231 | };
232 |
233 | public void initCamera(int previewRate) {
234 | if (mCameraDevice == null) {
235 | Log.e(TAG, "initCamera: Camera is not opened!");
236 | return;
237 | }
238 |
239 | mParams = mCameraDevice.getParameters();
240 |         List<Integer> supportedPictureFormats = mParams.getSupportedPictureFormats();
241 |
242 | for (int fmt : supportedPictureFormats) {
243 | Log.i(TAG, String.format("Picture Format: %x", fmt));
244 | }
245 |
246 | mParams.setPictureFormat(PixelFormat.JPEG);
247 |
248 |         List<Camera.Size> picSizes = mParams.getSupportedPictureSizes();
249 | Camera.Size picSz = null;
250 |
251 | Collections.sort(picSizes, comparatorBigger);
252 |
253 | for (Camera.Size sz : picSizes) {
254 | Log.i(TAG, String.format("Supported picture size: %d x %d", sz.width, sz.height));
255 | if (picSz == null || (sz.width >= mPictureWidth && sz.height >= mPictureHeight)) {
256 | picSz = sz;
257 | }
258 | }
259 |
260 |         List<Camera.Size> prevSizes = mParams.getSupportedPreviewSizes();
261 | Camera.Size prevSz = null;
262 |
263 | Collections.sort(prevSizes, comparatorBigger);
264 |
265 | for (Camera.Size sz : prevSizes) {
266 | Log.i(TAG, String.format("Supported preview size: %d x %d", sz.width, sz.height));
267 | if (prevSz == null || (sz.width >= mPreferPreviewWidth && sz.height >= mPreferPreviewHeight)) {
268 | prevSz = sz;
269 | }
270 | }
271 |         //set the camera preview frame rate
272 |         int[] frameRatesRange = new int[2];
273 |         mParams.getPreviewFpsRange(frameRatesRange);
274 |         int maxRate = MAX_PREVIEW_RATE * 1000;
275 |         int minRate = MIN_PREVIEW_RATE * 1000;
276 |         Log.i(TAG, "Supported max frame rate: " + frameRatesRange[1] + ", min frame rate: " + frameRatesRange[0]);
277 |         if (minRate < frameRatesRange[0])
278 |             minRate = frameRatesRange[0];
279 |         if (maxRate > frameRatesRange[1])
280 |             maxRate = frameRatesRange[1];
281 |         mParams.setPreviewFpsRange(minRate, maxRate);
282 |
283 | // List frameRates = mParams.getSupportedPreviewFrameRates();
284 | // int fpsMax = 0;
285 | //
286 | // for (Integer n : frameRates) {
287 | // Log.i(TAG, "Supported frame rate: " + n);
288 | // if (fpsMax < n) {
289 | // fpsMax = n;
290 | // }
291 | // }
292 | // previewRate = fpsMax;
293 | // mParams.setPreviewFrameRate(previewRate); //设置相机预览帧率
294 | // mParams.setPreviewFpsRange(20, 60);
295 |
296 | mParams.setPreviewSize(prevSz.width, prevSz.height);
297 | mParams.setPictureSize(picSz.width, picSz.height);
298 |
299 |         List<String> focusModes = mParams.getSupportedFocusModes();
300 | if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
301 | mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
302 | }
303 |
304 | try {
305 | mCameraDevice.setParameters(mParams);
306 | } catch (Exception e) {
307 | e.printStackTrace();
308 | }
309 |
310 |
311 | mParams = mCameraDevice.getParameters();
312 |
313 | Camera.Size szPic = mParams.getPictureSize();
314 | Camera.Size szPrev = mParams.getPreviewSize();
315 |
316 | mPreviewWidth = szPrev.width;
317 | mPreviewHeight = szPrev.height;
318 |
319 | mPictureWidth = szPic.width;
320 | mPictureHeight = szPic.height;
321 |
322 | Log.i(TAG, String.format("Camera Picture Size: %d x %d", szPic.width, szPic.height));
323 | Log.i(TAG, String.format("Camera Preview Size: %d x %d", szPrev.width, szPrev.height));
324 |         Log.i(TAG, String.format("Camera Preview Min Rate: %d Max Rate: %d", minRate, maxRate));
325 | }
326 |
327 | public synchronized void setFocusMode(String focusMode) {
328 |
329 | if (mCameraDevice == null)
330 | return;
331 |
332 | mParams = mCameraDevice.getParameters();
333 |         List<String> focusModes = mParams.getSupportedFocusModes();
334 | if (focusModes.contains(focusMode)) {
335 | mParams.setFocusMode(focusMode);
336 | }
337 | }
338 |
339 | public synchronized void setPictureSize(int width, int height, boolean isBigger) {
340 |
341 | if (mCameraDevice == null) {
342 | mPictureWidth = width;
343 | mPictureHeight = height;
344 | return;
345 | }
346 |
347 | mParams = mCameraDevice.getParameters();
348 |
349 |
350 |         List<Camera.Size> picSizes = mParams.getSupportedPictureSizes();
351 | Camera.Size picSz = null;
352 |
353 | if (isBigger) {
354 | Collections.sort(picSizes, comparatorBigger);
355 | for (Camera.Size sz : picSizes) {
356 | if (picSz == null || (sz.width >= width && sz.height >= height)) {
357 | picSz = sz;
358 | }
359 | }
360 | } else {
361 | Collections.sort(picSizes, comparatorSmaller);
362 | for (Camera.Size sz : picSizes) {
363 | if (picSz == null || (sz.width <= width && sz.height <= height)) {
364 | picSz = sz;
365 | }
366 | }
367 | }
368 |
369 | mPictureWidth = picSz.width;
370 | mPictureHeight = picSz.height;
371 |
372 | try {
373 | mParams.setPictureSize(mPictureWidth, mPictureHeight);
374 | mCameraDevice.setParameters(mParams);
375 | } catch (Exception e) {
376 | e.printStackTrace();
377 | }
378 | }
379 |
380 | public void focusAtPoint(float x, float y, final Camera.AutoFocusCallback callback) {
381 | focusAtPoint(x, y, 0.2f, callback);
382 | }
383 |
384 | public synchronized void focusAtPoint(float x, float y, float radius, final Camera.AutoFocusCallback callback) {
385 | if (mCameraDevice == null) {
386 | Log.e(TAG, "Error: focus after release.");
387 | return;
388 | }
389 |
390 | mParams = mCameraDevice.getParameters();
391 |
392 | if (mParams.getMaxNumMeteringAreas() > 0) {
393 |
394 | int focusRadius = (int) (radius * 1000.0f);
395 | int left = (int) (x * 2000.0f - 1000.0f) - focusRadius;
396 | int top = (int) (y * 2000.0f - 1000.0f) - focusRadius;
397 |
398 | Rect focusArea = new Rect();
399 | focusArea.left = Math.max(left, -1000);
400 | focusArea.top = Math.max(top, -1000);
401 | focusArea.right = Math.min(left + focusRadius, 1000);
402 | focusArea.bottom = Math.min(top + focusRadius, 1000);
403 |             List<Camera.Area> meteringAreas = new ArrayList<>();
404 | meteringAreas.add(new Camera.Area(focusArea, 800));
405 |
406 | try {
407 | mCameraDevice.cancelAutoFocus();
408 | mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
409 | mParams.setFocusAreas(meteringAreas);
410 | mCameraDevice.setParameters(mParams);
411 | mCameraDevice.autoFocus(callback);
412 | } catch (Exception e) {
413 | Log.e(TAG, "Error: focusAtPoint failed: " + e.toString());
414 | }
415 | } else {
416 | Log.i(TAG, "The device does not support metering areas...");
417 | try {
418 | mCameraDevice.autoFocus(callback);
419 | } catch (Exception e) {
420 | Log.e(TAG, "Error: focusAtPoint failed: " + e.toString());
421 | }
422 | }
423 |
424 | }
425 | }
426 |
--------------------------------------------------------------------------------
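
Note: a minimal usage sketch for the CameraInstance singleton above, e.g. from a GLSurfaceView renderer. The surfaceTexture variable is assumed to be provided by the caller once its GL texture exists; everything else uses only methods defined in the class above.

CameraInstance camera = CameraInstance.getInstance();
camera.setPreferPreviewSize(640, 640);
if (camera.tryOpenCamera(null, android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK)) {
    camera.startPreview(surfaceTexture); // surfaceTexture: caller-provided SurfaceTexture
}
// ...later, when leaving the screen:
camera.stopPreview();
camera.stopCamera();

--------------------------------------------------------------------------------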
/samples/src/main/java/com/lmy/samples/ui/RenderActivity.java:
--------------------------------------------------------------------------------
1 | package com.lmy.samples.ui;
2 |
3 | import android.graphics.Bitmap;
4 | import android.graphics.BitmapFactory;
5 | import android.graphics.Color;
6 | import android.support.v7.app.AppCompatActivity;
7 | import android.os.Bundle;
8 | import android.util.Log;
9 | import android.view.View;
10 | import android.widget.Button;
11 | import android.widget.ImageView;
12 | import android.widget.SeekBar;
13 | import android.widget.TextView;
14 |
15 | import com.lmy.lymedia.utils.FrameUtil;
16 | import com.lmy.lymedia.utils.Util;
17 | import com.lmy.samples.R;
18 |
19 | import org.bytedeco.javacpp.indexer.FloatIndexer;
20 | import org.bytedeco.javacpp.opencv_core;
21 | import org.bytedeco.javacv.AndroidFrameConverter;
22 | import org.bytedeco.javacv.FFmpegFrameFilter;
23 | import org.bytedeco.javacv.FFmpegFrameGrabber;
24 | import org.bytedeco.javacv.Frame;
25 | import org.bytedeco.javacv.FrameFilter;
26 | import org.bytedeco.javacv.FrameGrabber;
27 | import org.bytedeco.javacv.OpenCVFrameConverter;
28 |
29 | import java.nio.ByteBuffer;
30 | import java.text.DecimalFormat;
31 |
32 | import static org.bytedeco.javacpp.avutil.AV_PIX_FMT_RGB24;
33 | import static org.bytedeco.javacpp.avutil.AV_PIX_FMT_RGB32;
34 | import static org.bytedeco.javacpp.avutil.AV_PIX_FMT_RGB4_BYTE;
35 | import static org.bytedeco.javacpp.avutil.AV_PIX_FMT_RGB555;
36 | import static org.bytedeco.javacpp.avutil.AV_PIX_FMT_RGB565;
37 | import static org.bytedeco.javacpp.avutil.AV_PIX_FMT_RGB8;
38 | import static org.bytedeco.javacpp.avutil.AV_PIX_FMT_RGBA;
39 | import static org.bytedeco.javacpp.opencv_core.cvCreateImage;
40 | import static org.bytedeco.javacpp.opencv_core.cvGetSize;
41 | import static org.bytedeco.javacpp.opencv_imgproc.CV_HSV2RGB;
42 | import static org.bytedeco.javacpp.opencv_imgproc.CV_RGB2HSV;
43 | import static org.bytedeco.javacpp.opencv_imgproc.CV_RGB2RGBA;
44 | import static org.bytedeco.javacpp.opencv_imgproc.CV_RGBA2RGB;
45 | import static org.bytedeco.javacpp.opencv_imgproc.cvCvtColor;
46 | import static org.bytedeco.javacpp.opencv_imgproc.filter2D;
47 |
48 | public class RenderActivity extends AppCompatActivity {
49 | private ImageView imageView;
50 | private ImageView renderView;
51 |
52 | private SeekBar mSeekBarRR;
53 | private TextView mTextViewRR;
54 | private SeekBar mSeekBarRG;
55 | private TextView mTextViewRG;
56 | private SeekBar mSeekBarRB;
57 | private TextView mTextViewRB;
58 |
59 | private SeekBar mSeekBarGR;
60 | private TextView mTextViewGR;
61 | private SeekBar mSeekBarGG;
62 | private TextView mTextViewGG;
63 | private SeekBar mSeekBarGB;
64 | private TextView mTextViewGB;
65 |
66 | private SeekBar mSeekBarBR;
67 | private TextView mTextViewBR;
68 | private SeekBar mSeekBarBG;
69 | private TextView mTextViewBG;
70 | private SeekBar mSeekBarBB;
71 | private TextView mTextViewBB;
72 |
73 | private Button defaultBtn;
74 | private Button oldBtn;
75 | // private FFmpegFrameGrabber mFrameGrabber;
76 | private AndroidFrameConverter mFrameConverter;
77 | private FFmpegFrameFilter mFilter;
78 | private OpenCVFrameConverter.ToIplImage converter;
79 | private Frame cacheFrame;
80 | private Bitmap srcBitmap;
81 | private DecimalFormat df;
82 |
83 | @Override
84 | protected void onCreate(Bundle savedInstanceState) {
85 | super.onCreate(savedInstanceState);
86 | setContentView(R.layout.activity_render);
87 | initView();
88 | }
89 |
90 | private void initView() {
91 | df = new DecimalFormat("0.000");
92 | imageView = (ImageView) findViewById(R.id.image);
93 | renderView = (ImageView) findViewById(R.id.image1);
94 | defaultBtn = (Button) findViewById(R.id.default_btn);
95 | oldBtn = (Button) findViewById(R.id.old_btn);
96 |
97 | mSeekBarRR = (SeekBar) findViewById(R.id.seek_bar_rr);
98 | mTextViewRR = (TextView) findViewById(R.id.text_rr);
99 | mSeekBarRG = (SeekBar) findViewById(R.id.seek_bar_rg);
100 | mTextViewRG = (TextView) findViewById(R.id.text_rg);
101 | mSeekBarRB = (SeekBar) findViewById(R.id.seek_bar_rb);
102 | mTextViewRB = (TextView) findViewById(R.id.text_rb);
103 |
104 | mSeekBarGR = (SeekBar) findViewById(R.id.seek_bar_gr);
105 | mTextViewGR = (TextView) findViewById(R.id.text_gr);
106 | mSeekBarGG = (SeekBar) findViewById(R.id.seek_bar_gg);
107 | mTextViewGG = (TextView) findViewById(R.id.text_gg);
108 | mSeekBarGB = (SeekBar) findViewById(R.id.seek_bar_gb);
109 | mTextViewGB = (TextView) findViewById(R.id.text_gb);
110 |
111 | mSeekBarBR = (SeekBar) findViewById(R.id.seek_bar_br);
112 | mTextViewBR = (TextView) findViewById(R.id.text_br);
113 | mSeekBarBG = (SeekBar) findViewById(R.id.seek_bar_bg);
114 | mTextViewBG = (TextView) findViewById(R.id.text_bg);
115 | mSeekBarBB = (SeekBar) findViewById(R.id.seek_bar_bb);
116 | mTextViewBB = (TextView) findViewById(R.id.text_bb);
117 |
118 | mFrameConverter = new AndroidFrameConverter();
119 | converter = new OpenCVFrameConverter.ToIplImage();
120 | srcBitmap = BitmapFactory.decodeFile(Util.getSdcardPath() + "/test.jpg");
121 | cacheFrame = mFrameConverter.convert(srcBitmap);
122 | imageView.setImageBitmap(srcBitmap);
123 | renderView.setImageBitmap(srcBitmap);
124 |
125 | mSeekBarRR.setOnSeekBarChangeListener(onSeekBarChangeListener);
126 | mSeekBarRG.setOnSeekBarChangeListener(onSeekBarChangeListener);
127 | mSeekBarRB.setOnSeekBarChangeListener(onSeekBarChangeListener);
128 | mSeekBarGR.setOnSeekBarChangeListener(onSeekBarChangeListener);
129 | mSeekBarGG.setOnSeekBarChangeListener(onSeekBarChangeListener);
130 | mSeekBarGB.setOnSeekBarChangeListener(onSeekBarChangeListener);
131 | mSeekBarBR.setOnSeekBarChangeListener(onSeekBarChangeListener);
132 | mSeekBarBG.setOnSeekBarChangeListener(onSeekBarChangeListener);
133 | mSeekBarBB.setOnSeekBarChangeListener(onSeekBarChangeListener);
134 |
135 | defaultBtn.setOnClickListener(onClickListener);
136 | oldBtn.setOnClickListener(onClickListener);
137 | imageView.setImageBitmap(srcBitmap);
138 | renderView.setImageBitmap(srcBitmap);
139 |
140 | //
141 | // renderView.setImageBitmap(filter(srcBitmap, value));
142 | // try {
143 | // mFrameGrabber = FFmpegFrameGrabber.createDefault(Util.getSdcardPath() + "/test.mp4");
144 | // mFrameGrabber.setPixelFormat(AV_PIX_FMT_RGBA);
145 | // mFrameGrabber.start();
146 | // mFrameGrabber.setFrameNumber(597);
147 | // Bitmap bmp = mFrameConverter.convert(FrameUtil.copy(mFrameGrabber.grabImage()));
148 | // imageView.setImageBitmap(bmp.copy(bmp.getConfig(), bmp.isMutable()));
149 | // cacheFrame = FrameUtil.copy(mFrameGrabber.grabImage());
150 | // renderView.setImageBitmap(mFrameConverter.convert(filter(FrameUtil.copy(cacheFrame), 1, 1, 1)));
151 | // } catch (FrameGrabber.Exception e) {
152 | // e.printStackTrace();
153 | // }
154 | // try {
155 | // mFilter = new FFmpegFrameFilter("crop=w=200:h=100:x=100:y=100", mFrameGrabber.getImageWidth(), mFrameGrabber.getImageHeight());
156 | // mFilter.start();
157 | // } catch (FrameFilter.Exception e) {
158 | // e.printStackTrace();
159 | // }
160 | }
161 |
162 | private Bitmap filter(Bitmap bmp) {
163 | float[] value = new float[9];
164 | value[0] = mSeekBarRR.getProgress() / (float)mSeekBarRR.getMax();
165 | value[1] = mSeekBarRG.getProgress() / (float)mSeekBarRR.getMax();
166 | value[2] = mSeekBarRB.getProgress() / (float)mSeekBarRR.getMax();
167 |
168 | value[3] = mSeekBarGR.getProgress() / (float)mSeekBarRR.getMax();
169 | value[4] = mSeekBarGG.getProgress() / (float)mSeekBarRR.getMax();
170 | value[5] = mSeekBarGB.getProgress() / (float)mSeekBarRR.getMax();
171 |
172 | value[6] = mSeekBarBR.getProgress() / (float)mSeekBarRR.getMax();
173 | value[7] = mSeekBarBG.getProgress() / (float)mSeekBarRR.getMax();
174 | value[8] = mSeekBarBB.getProgress() / (float)mSeekBarRR.getMax();
175 |
176 | mTextViewRR.setText(String.valueOf(df.format(value[0])));
177 | mTextViewRG.setText(String.valueOf(df.format(value[1])));
178 | mTextViewRB.setText(String.valueOf(df.format(value[2])));
179 |
180 | mTextViewGR.setText(String.valueOf(df.format(value[3])));
181 | mTextViewGG.setText(String.valueOf(df.format(value[4])));
182 | mTextViewGB.setText(String.valueOf(df.format(value[5])));
183 |
184 | mTextViewBR.setText(String.valueOf(df.format(value[6])));
185 | mTextViewBG.setText(String.valueOf(df.format(value[7])));
186 | mTextViewBB.setText(String.valueOf(df.format(value[8])));
187 |
188 | return filter(srcBitmap, value);
189 | }
190 |
191 |     private Bitmap filter(Bitmap bmp, float[] value) {// timing test
192 | long start = System.currentTimeMillis();
193 | int width = bmp.getWidth();
194 | int height = bmp.getHeight();
195 | Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
196 | int pixColor = 0;
197 | int pixR = 0;
198 | int pixG = 0;
199 | int pixB = 0;
200 | int newR = 0;
201 | int newG = 0;
202 | int newB = 0;
203 | int[] pixels = new int[width * height];
204 | bmp.getPixels(pixels, 0, width, 0, 0, width, height);
205 | for (int i = 0; i < height; i++) {
206 | for (int k = 0; k < width; k++) {
207 | pixColor = pixels[width * i + k];
208 | pixR = Color.red(pixColor);
209 | pixG = Color.green(pixColor);
210 | pixB = Color.blue(pixColor);
211 | newR = (int) (value[0] * pixR + value[1] * pixG + value[2] * pixB);
212 | newG = (int) (value[3] * pixR + value[4] * pixG + value[5] * pixB);
213 | newB = (int) (value[6] * pixR + value[7] * pixG + value[8] * pixB);
214 | int newColor = Color.argb(255, newR > 255 ? 255 : newR, newG > 255 ? 255 : newG, newB > 255 ? 255 : newB);
215 | pixels[width * i + k] = newColor;
216 | }
217 | }
218 |
219 | bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
220 | long end = System.currentTimeMillis();
221 | Log.d("may", "used time=" + (end - start));
222 | return bitmap;
223 | }
224 |
225 | private Frame filter(Frame frame) {
226 | float[] value = new float[9];
227 | value[0] = mSeekBarRR.getProgress() / (float)mSeekBarRR.getMax();
228 | value[1] = mSeekBarRG.getProgress() / (float)mSeekBarRR.getMax();
229 | value[2] = mSeekBarRB.getProgress() / (float)mSeekBarRR.getMax();
230 |
231 | value[3] = mSeekBarGR.getProgress() / (float)mSeekBarRR.getMax();
232 | value[4] = mSeekBarGG.getProgress() / (float)mSeekBarRR.getMax();
233 | value[5] = mSeekBarGB.getProgress() / (float)mSeekBarRR.getMax();
234 |
235 | value[6] = mSeekBarBR.getProgress() / (float)mSeekBarRR.getMax();
236 | value[7] = mSeekBarBG.getProgress() / (float)mSeekBarRR.getMax();
237 | value[8] = mSeekBarBB.getProgress() / (float)mSeekBarRR.getMax();
238 |
239 | mTextViewRR.setText(String.valueOf(df.format(value[0])));
240 | mTextViewRG.setText(String.valueOf(df.format(value[1])));
241 | mTextViewRB.setText(String.valueOf(df.format(value[2])));
242 |
243 | mTextViewGR.setText(String.valueOf(df.format(value[3])));
244 | mTextViewGG.setText(String.valueOf(df.format(value[4])));
245 | mTextViewGB.setText(String.valueOf(df.format(value[5])));
246 |
247 | mTextViewBR.setText(String.valueOf(df.format(value[6])));
248 | mTextViewBG.setText(String.valueOf(df.format(value[7])));
249 | mTextViewBB.setText(String.valueOf(df.format(value[8])));
250 |
251 | return filter(frame, value);
252 | }
253 |
254 | private Frame filter(Frame frame, float[] value) {
255 | opencv_core.IplImage src = converter.convertToIplImage(frame);
256 |         opencv_core.IplImage rgb = cvCreateImage(cvGetSize(src), 8, 3);//allocate an RGB image
257 |         opencv_core.IplImage hsv = cvCreateImage(cvGetSize(src), 8, 3);//allocate an HSV image
258 |
259 |         cvCvtColor(src, rgb, CV_RGBA2RGB);
260 | //        cvCvtColor(rgb, hsv, CV_RGB2HSV);//convert RGB to HSV
261 |         ByteBuffer buffer = rgb.getByteBuffer();
262 |         for (int i = 0; i + 2 < buffer.capacity(); i += 3) {
263 |             //read the original channel values before overwriting them in place
264 |             int r = buffer.get(i) & 0xFF;
265 |             int g = buffer.get(i + 1) & 0xFF;
266 |             int b = buffer.get(i + 2) & 0xFF;
267 |             //R
268 |             buffer.put(i, (byte) Math.min(255, (int) (r * value[0] + g * value[1] + b * value[2])));
269 |             //G
270 |             buffer.put(i + 1, (byte) Math.min(255, (int) (r * value[3] + g * value[4] + b * value[5])));
271 |             //B
272 |             buffer.put(i + 2, (byte) Math.min(255, (int) (r * value[6] + g * value[7] + b * value[8])));
273 |
274 |         }
275 | //        cvCvtColor(hsv, rgb, CV_HSV2RGB);//convert HSV back to RGB
276 | cvCvtColor(rgb, src, CV_RGB2RGBA);
277 | return converter.convert(src);
278 | }
279 |
280 | private View.OnClickListener onClickListener = new View.OnClickListener() {
281 | @Override
282 | public void onClick(View v) {
283 | float[] value = new float[9];
284 | switch (v.getId()) {
285 | case R.id.default_btn:
286 |                     mSeekBarRR.setProgress((int) (value[0] * mSeekBarRR.getMax()));
287 |                     mSeekBarRG.setProgress((int) (value[1] * mSeekBarRR.getMax()));
288 |                     mSeekBarRB.setProgress((int) (value[2] * mSeekBarRR.getMax()));
289 |
290 |                     mSeekBarGR.setProgress((int) (value[3] * mSeekBarRR.getMax()));
291 |                     mSeekBarGG.setProgress((int) (value[4] * mSeekBarRR.getMax()));
292 |                     mSeekBarGB.setProgress((int) (value[5] * mSeekBarRR.getMax()));
293 |
294 |                     mSeekBarBR.setProgress((int) (value[6] * mSeekBarRR.getMax()));
295 |                     mSeekBarBG.setProgress((int) (value[7] * mSeekBarRR.getMax()));
296 |                     mSeekBarBB.setProgress((int) (value[8] * mSeekBarRR.getMax()));
297 |
298 | mTextViewRR.setText(String.valueOf(df.format(value[0])));
299 | mTextViewRG.setText(String.valueOf(df.format(value[1])));
300 | mTextViewRB.setText(String.valueOf(df.format(value[2])));
301 |
302 | mTextViewGR.setText(String.valueOf(df.format(value[3])));
303 | mTextViewGG.setText(String.valueOf(df.format(value[4])));
304 | mTextViewGB.setText(String.valueOf(df.format(value[5])));
305 |
306 | mTextViewBR.setText(String.valueOf(df.format(value[6])));
307 | mTextViewBG.setText(String.valueOf(df.format(value[7])));
308 | mTextViewBB.setText(String.valueOf(df.format(value[8])));
309 |
310 | imageView.setImageBitmap(srcBitmap);
311 | renderView.setImageBitmap(srcBitmap);
312 | break;
313 | case R.id.old_btn:
314 | value = new float[]{0.393f, 0.769f, 0.189f, 0.349f, 0.686f, 0.168f, 0.272f, 0.534f, 0.131f};
315 |                     mSeekBarRR.setProgress((int) (value[0] * mSeekBarRR.getMax()));
316 |                     mSeekBarRG.setProgress((int) (value[1] * mSeekBarRR.getMax()));
317 |                     mSeekBarRB.setProgress((int) (value[2] * mSeekBarRR.getMax()));
318 |
319 |                     mSeekBarGR.setProgress((int) (value[3] * mSeekBarRR.getMax()));
320 |                     mSeekBarGG.setProgress((int) (value[4] * mSeekBarRR.getMax()));
321 |                     mSeekBarGB.setProgress((int) (value[5] * mSeekBarRR.getMax()));
322 |
323 |                     mSeekBarBR.setProgress((int) (value[6] * mSeekBarRR.getMax()));
324 |                     mSeekBarBG.setProgress((int) (value[7] * mSeekBarRR.getMax()));
325 |                     mSeekBarBB.setProgress((int) (value[8] * mSeekBarRR.getMax()));
326 |
327 | mTextViewRR.setText(String.valueOf(df.format(value[0])));
328 | mTextViewRG.setText(String.valueOf(df.format(value[1])));
329 | mTextViewRB.setText(String.valueOf(df.format(value[2])));
330 |
331 | mTextViewGR.setText(String.valueOf(df.format(value[3])));
332 | mTextViewGG.setText(String.valueOf(df.format(value[4])));
333 | mTextViewGB.setText(String.valueOf(df.format(value[5])));
334 |
335 | mTextViewBR.setText(String.valueOf(df.format(value[6])));
336 | mTextViewBG.setText(String.valueOf(df.format(value[7])));
337 | mTextViewBB.setText(String.valueOf(df.format(value[8])));
338 | renderView.setImageBitmap(filter(srcBitmap, value));
339 | break;
340 | }
341 | }
342 | };
343 |
344 | private SeekBar.OnSeekBarChangeListener onSeekBarChangeListener = new SeekBar.OnSeekBarChangeListener() {
345 | @Override
346 | public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
347 | // renderView.setImageBitmap(mFrameConverter.convert(filter(mFrameConverter.convert(srcBitmap))));
348 | renderView.setImageBitmap(filter(srcBitmap));
349 | }
350 |
351 | @Override
352 | public void onStartTrackingTouch(SeekBar seekBar) {
353 |
354 | }
355 |
356 | @Override
357 | public void onStopTrackingTouch(SeekBar seekBar) {
358 |
359 | }
360 | };
361 | }
362 |
--------------------------------------------------------------------------------
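
Note: RenderActivity above applies a 3x3 color matrix per pixel in a Java loop (its "old photo" preset is {0.393, 0.769, 0.189, 0.349, 0.686, 0.168, 0.272, 0.534, 0.131}). Below is a sketch of the same transform through the standard android.graphics.ColorMatrix API, which takes a 4x5 matrix; this is an alternative approach, not what the sample itself uses, and the class name is illustrative.

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ColorMatrix;
import android.graphics.ColorMatrixColorFilter;
import android.graphics.Paint;

class ColorMatrixSketch {
    static Bitmap apply(Bitmap src, float[] m) { // m: 9 values, row-major 3x3
        // Pad the 3x3 RGB matrix into Android's 4x5 layout; alpha passes through.
        ColorMatrix cm = new ColorMatrix(new float[]{
                m[0], m[1], m[2], 0, 0,
                m[3], m[4], m[5], 0, 0,
                m[6], m[7], m[8], 0, 0,
                0, 0, 0, 1, 0});
        Bitmap out = Bitmap.createBitmap(src.getWidth(), src.getHeight(), Bitmap.Config.ARGB_8888);
        Paint paint = new Paint();
        paint.setColorFilter(new ColorMatrixColorFilter(cm));
        new Canvas(out).drawBitmap(src, 0, 0, paint); // GPU/2D-pipeline clamps each channel
        return out;
    }
}

--------------------------------------------------------------------------------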
/library/src/main/java/com/lmy/lymedia/widget/VideoPlayerGLSurfaceView.java:
--------------------------------------------------------------------------------
1 | //package com.lmy.lymedia.widget;
2 | //
3 | //import android.content.Context;
4 | //import android.graphics.Bitmap;
5 | //import android.graphics.Canvas;
6 | //import android.graphics.Matrix;
7 | //import android.graphics.PixelFormat;
8 | //import android.graphics.SurfaceTexture;
9 | //import android.media.MediaPlayer;
10 | //import android.net.Uri;
11 | //import android.opengl.GLES20;
12 | //import android.opengl.GLSurfaceView;
13 | //import android.opengl.GLUtils;
14 | //import android.util.AttributeSet;
15 | //import android.util.Log;
16 | //import android.view.Surface;
17 | //
18 | //import com.lmy.player.texture.TextureRenderer;
19 | //import com.lmy.player.texture.TextureRendererDrawOrigin;
20 | //import com.lmy.player.texture.TextureRendererMask;
21 | //import com.lmy.player.texture.Viewport;
22 | //
23 | //import java.nio.IntBuffer;
24 | //
25 | //import javax.microedition.khronos.egl.EGLConfig;
26 | //import javax.microedition.khronos.opengles.GL10;
27 | //
28 | ///**
29 | // * Created by wangyang on 15/8/20.
30 | // */
31 | //public class VideoPlayerGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
32 | // public static final String LOG_TAG = "VideoPlayer";
33 | // private SurfaceTexture mSurfaceTexture;
34 | // private int mVideoTextureID;
35 | // private TextureRenderer mDrawer;
36 | // private Viewport mRenderViewport = new Viewport();
37 | //
38 | // private MediaPlayer mPlayer;
39 | // private Context mContext;
40 | // private Uri mVideoUri;
41 | //
42 | //    //setTextureRenderer must be called on the OpenGL thread!
43 | // public void setTextureRenderer(TextureRenderer drawer) {
44 | // if (mDrawer == null) {
45 | // Log.v(LOG_TAG, "Invalid Drawer!");
46 | // return;
47 | // }
48 | //
49 | // if (mDrawer != drawer) {
50 | // mDrawer.release();
51 | // mDrawer = drawer;
52 | // calcViewport();
53 | // }
54 | // }
55 | //
56 | // private float[] mTransformMatrix = new float[16];
57 | // private boolean mIsUsingMask = false;
58 | //
59 | // public boolean isUsingMask() {
60 | // return mIsUsingMask;
61 | // }
62 | //
63 | // private float mMaskAspectRatio = 1.0f;
64 | // private float mDrawerFlipScaleX = 1.0f;
65 | // private float mDrawerFlipScaleY = 1.0f;
66 | //
67 | // private int mViewWidth = 1000;
68 | // private int mViewHeight = 1000;
69 | //
70 | // public int getViewWidth() {
71 | // return mViewWidth;
72 | // }
73 | //
74 | // public int getViewheight() {
75 | // return mViewHeight;
76 | // }
77 | //
78 | // private int mVideoWidth = 1000;
79 | // private int mVideoHeight = 1000;
80 | //
81 | // private boolean mFitFullView = false;
82 | //
83 | // public void setFitFullView(boolean fit) {
84 | // mFitFullView = fit;
85 | // if (mDrawer != null)
86 | // calcViewport();
87 | // }
88 | //
89 | // public interface PlayerInitializeCallback {
90 | //
91 | //        //Perform initial setup on the player; set listeners that are not enabled by default, e.g. a buffer-update listener.
92 | // void initPlayer(MediaPlayer player);
93 | // }
94 | //
95 | // public void setPlayerInitializeCallback(PlayerInitializeCallback callback) {
96 | // mPlayerInitCallback = callback;
97 | // }
98 | //
99 | // PlayerInitializeCallback mPlayerInitCallback;
100 | //
101 | // public interface PlayPreparedCallback {
102 | // void playPrepared(MediaPlayer player);
103 | // }
104 | //
105 | // PlayPreparedCallback mPreparedCallback;
106 | //
107 | // public interface PlayCompletionCallback {
108 | // void playComplete(MediaPlayer player);
109 | //
110 | //
111 | // /*
112 | //
113 | //    what values: MEDIA_ERROR_UNKNOWN,
114 | //    MEDIA_ERROR_SERVER_DIED
115 | //
116 | //    extra values: MEDIA_ERROR_IO
117 | //    MEDIA_ERROR_MALFORMED
118 | //    MEDIA_ERROR_UNSUPPORTED
119 | //    MEDIA_ERROR_TIMED_OUT
120 | //
121 | // returning false would cause the 'playComplete' to be called
122 | // */
123 | // boolean playFailed(MediaPlayer mp, int what, int extra);
124 | // }
125 | //
126 | // PlayCompletionCallback mPlayCompletionCallback;
127 | //
128 | // public synchronized void setVideoUri(final Uri uri, final PlayPreparedCallback preparedCallback, final PlayCompletionCallback completionCallback) {
129 | //
130 | // mVideoUri = uri;
131 | // mPreparedCallback = preparedCallback;
132 | // mPlayCompletionCallback = completionCallback;
133 | //
134 | // if (mDrawer != null) {
135 | // queueEvent(new Runnable() {
136 | // @Override
137 | // public void run() {
138 | // Log.i(LOG_TAG, "setVideoUri...");
139 | // if (mSurfaceTexture == null || mVideoTextureID == 0) {
140 | // mVideoTextureID = TextureRenderer.createTextureID();
141 | // mSurfaceTexture = new SurfaceTexture(mVideoTextureID);
142 | // mSurfaceTexture.setOnFrameAvailableListener(VideoPlayerGLSurfaceView.this);
143 | // }
144 | // _useUri();
145 | // }
146 | // });
147 | // }
148 | // }
149 | //
150 | //    //The callback invoked depends on the bmp passed in:
151 | //    //if a mask is in use after setting, setMaskOK is called,
152 | //    //otherwise unsetMaskOK is called.
153 | // public interface SetMaskBitmapCallback {
154 | // void setMaskOK(TextureRendererMask renderer);
155 | //
156 | // void unsetMaskOK(TextureRenderer renderer);
157 | // }
158 | //
159 | // public synchronized void setMaskBitmap(final Bitmap bmp, final boolean shouldRecycle) {
160 | // setMaskBitmap(bmp, shouldRecycle, null);
161 | // }
162 | //
163 | // public synchronized void setMaskBitmap(final Bitmap bmp, final boolean shouldRecycle, final SetMaskBitmapCallback callback) {
164 | // queueEvent(new Runnable() {
165 | // @Override
166 | // public void run() {
167 | //
168 | // if (bmp == null) {
169 | // Log.i(LOG_TAG, "Cancel Mask Bitmap!");
170 | //
171 | // setMaskTexture(0, 1.0f);
172 | //
173 | // if (callback != null) {
174 | // callback.unsetMaskOK(mDrawer);
175 | // }
176 | //
177 | // return;
178 | // }
179 | //
180 | // Log.i(LOG_TAG, "Use Mask Bitmap!");
181 | //
182 | // int texID[] = {0};
183 | // GLES20.glGenTextures(1, texID, 0);
184 | // GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texID[0]);
185 | // GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bmp, 0);
186 | // GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
187 | // GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
188 | // GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
189 | // GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
190 | //
191 | // setMaskTexture(texID[0], bmp.getWidth() / (float) bmp.getHeight());
192 | //
193 | // if (callback != null && mDrawer instanceof TextureRendererMask) {
194 | // callback.setMaskOK((TextureRendererMask) mDrawer);
195 | // }
196 | // if (shouldRecycle)
197 | // bmp.recycle();
198 | //
199 | // }
200 | // });
201 | // }
202 | //
203 | // public synchronized void setMaskTexture(int texID, float aspectRatio) {
204 | // Log.i(LOG_TAG, "setMaskTexture... ");
205 | //
206 | // if (texID == 0) {
207 | // if (mDrawer instanceof TextureRendererMask) {
208 | // mDrawer.release();
209 | // mDrawer = TextureRendererDrawOrigin.create(true);
210 | // }
211 | // mIsUsingMask = false;
212 | // } else {
213 | // if (!(mDrawer instanceof TextureRendererMask)) {
214 | // mDrawer.release();
215 | // TextureRendererMask drawer = TextureRendererMask.create(true);
216 | // assert drawer != null : "Drawer Create Failed!";
217 | // drawer.setMaskTexture(texID);
218 | // mDrawer = drawer;
219 | // }
220 | // mIsUsingMask = true;
221 | // }
222 | //
223 | // mMaskAspectRatio = aspectRatio;
224 | // calcViewport();
225 | // }
226 | //
227 | // public synchronized MediaPlayer getPlayer() {
228 | // if (mPlayer == null) {
229 | // Log.e(LOG_TAG, "Player is not initialized!");
230 | // }
231 | // return mPlayer;
232 | // }
233 | //
234 | // public interface OnCreateCallback {
235 | // void createOK();
236 | // }
237 | //
238 | // private OnCreateCallback mOnCreateCallback;
239 | //
240 | //    //Customize some initialization steps
241 | // public void setOnCreateCallback(final OnCreateCallback callback) {
242 | //
243 | //        assert callback != null : "Meaningless operation!";
244 | //
245 | // if (mDrawer == null) {
246 | // mOnCreateCallback = callback;
247 | // } else {
248 | //            // Already created; execute directly
249 | // queueEvent(new Runnable() {
250 | // @Override
251 | // public void run() {
252 | // callback.createOK();
253 | // }
254 | // });
255 | // }
256 | // }
257 | //
258 | // public VideoPlayerGLSurfaceView(Context context, AttributeSet attrs) {
259 | // super(context, attrs);
260 | //
261 | // Log.i(LOG_TAG, "MyGLSurfaceView Construct...");
262 | // mContext = context;
263 | //
264 | // setEGLContextClientVersion(2);
265 | // setEGLConfigChooser(8, 8, 8, 8, 8, 0);
266 | // getHolder().setFormat(PixelFormat.RGBA_8888);
267 | // setRenderer(this);
268 | // setRenderMode(RENDERMODE_WHEN_DIRTY);
269 | // setZOrderOnTop(true);
270 | //
271 | // Log.i(LOG_TAG, "MyGLSurfaceView Construct OK...");
272 | // }
273 | //
274 | // @Override
275 | // public void onSurfaceCreated(GL10 gl, EGLConfig config) {
276 | //
277 | // Log.i(LOG_TAG, "video player onSurfaceCreated...");
278 | //
279 | // GLES20.glDisable(GLES20.GL_DEPTH_TEST);
280 | // GLES20.glDisable(GLES20.GL_STENCIL_TEST);
281 | //
282 | // mDrawer = TextureRendererDrawOrigin.create(true);
283 | // if (mDrawer == null) {
284 | // Log.e(LOG_TAG, "Create Drawer Failed!");
285 | // return;
286 | // }
287 | // if (mOnCreateCallback != null) {
288 | // mOnCreateCallback.createOK();
289 | // }
290 | //
291 | // if (mVideoUri != null && (mSurfaceTexture == null || mVideoTextureID == 0)) {
292 | // mVideoTextureID = TextureRenderer.createTextureID();
293 | // mSurfaceTexture = new SurfaceTexture(mVideoTextureID);
294 | // mSurfaceTexture.setOnFrameAvailableListener(VideoPlayerGLSurfaceView.this);
295 | // _useUri();
296 | // }
297 | // }
298 | //
299 | // @Override
300 | // public void onSurfaceChanged(GL10 gl, int width, int height) {
301 | // GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
302 | //
303 | // mViewWidth = width;
304 | // mViewHeight = height;
305 | //
306 | // calcViewport();
307 | // }
308 | //
309 | // //must be in the OpenGL thread!
310 | // public void release() {
311 | //
312 | // Log.i(LOG_TAG, "Video player view release...");
313 | //
314 | // if (mPlayer != null) {
315 | // queueEvent(new Runnable() {
316 | // @Override
317 | // public void run() {
318 | //
319 | // Log.i(LOG_TAG, "Video player view release run...");
320 | //
321 | // if (mPlayer != null) {
322 | //
323 | // mPlayer.setSurface(null);
324 | // if (mPlayer.isPlaying())
325 | // mPlayer.stop();
326 | // mPlayer.release();
327 | // mPlayer = null;
328 | // }
329 | //
330 | // if (mDrawer != null) {
331 | // mDrawer.release();
332 | // mDrawer = null;
333 | // }
334 | //
335 | // if (mSurfaceTexture != null) {
336 | // mSurfaceTexture.release();
337 | // mSurfaceTexture = null;
338 | // }
339 | //
340 | // if (mVideoTextureID != 0) {
341 | // GLES20.glDeleteTextures(1, new int[]{mVideoTextureID}, 0);
342 | // mVideoTextureID = 0;
343 | // }
344 | //
345 | // mIsUsingMask = false;
346 | // mPreparedCallback = null;
347 | // mPlayCompletionCallback = null;
348 | //
349 | // Log.i(LOG_TAG, "Video player view release OK");
350 | // }
351 | // });
352 | // }
353 | // }
354 | //
355 | // @Override
356 | // public void onPause() {
357 | // Log.i(LOG_TAG, "surfaceview onPause ...");
358 | //
359 | // super.onPause();
360 | // }
361 | //
362 | // @Override
363 | // public void onDrawFrame(GL10 gl) {
364 | //
365 | // GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
366 | // GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
367 | // GLES20.glViewport(0, 0, mViewWidth, mViewHeight);
368 | //
369 | // if (mSurfaceTexture == null) {
370 | // return;
371 | // }
372 | //
373 | // mSurfaceTexture.updateTexImage();
374 | // mSurfaceTexture.getTransformMatrix(mTransformMatrix);
375 | // mDrawer.setTransform(mTransformMatrix);
376 | //
377 | // mDrawer.renderTexture(mVideoTextureID, mRenderViewport);
378 | //
379 | // }
380 | //
381 | // private long mTimeCount2 = 0;
382 | // private long mFramesCount2 = 0;
383 | // private long mLastTimestamp2 = 0;
384 | //
385 | // @Override
386 | // public void onFrameAvailable(SurfaceTexture surfaceTexture) {
387 | // requestRender();
388 | //
389 | // if (mLastTimestamp2 == 0)
390 | // mLastTimestamp2 = System.currentTimeMillis();
391 | //
392 | // long currentTimestamp = System.currentTimeMillis();
393 | //
394 | // ++mFramesCount2;
395 | // mTimeCount2 += currentTimestamp - mLastTimestamp2;
396 | // mLastTimestamp2 = currentTimestamp;
397 | // if (mTimeCount2 >= 1e3) {
398 | //            Log.i(LOG_TAG, String.format("playback frame rate: %d", mFramesCount2));
399 | // mTimeCount2 -= 1e3;
400 | // mFramesCount2 = 0;
401 | // }
402 | // }
403 | //
404 | // private void calcViewport() {
405 | // float scaling;
406 | //
407 | // if (mIsUsingMask) {
408 | // flushMaskAspectRatio();
409 | // scaling = mMaskAspectRatio;
410 | // } else {
411 | // mDrawer.setFlipscale(mDrawerFlipScaleX, mDrawerFlipScaleY);
412 | // scaling = mVideoWidth / (float) mVideoHeight;
413 | // }
414 | //
415 | // float viewRatio = mViewWidth / (float) mViewHeight;
416 | // float s = scaling / viewRatio;
417 | //
418 | // int w, h;
419 | //
420 | // if (mFitFullView) {
421 | // // Fill the whole view (content larger than the view)
422 | // if (s > 1.0) {
423 | // w = (int) (mViewHeight * scaling);
424 | // h = mViewHeight;
425 | // } else {
426 | // w = mViewWidth;
427 | // h = (int) (mViewWidth / scaling);
428 | // }
429 | // } else {
430 | // // Show all of the content (content smaller than the view)
431 | // if (s > 1.0) {
432 | // w = mViewWidth;
433 | // h = (int) (mViewWidth / scaling);
434 | // } else {
435 | // h = mViewHeight;
436 | // w = (int) (mViewHeight * scaling);
437 | // }
438 | // }
439 | //
440 | // mRenderViewport.width = w;
441 | // mRenderViewport.height = h;
442 | // mRenderViewport.x = (mViewWidth - mRenderViewport.width) / 2;
443 | // mRenderViewport.y = (mViewHeight - mRenderViewport.height) / 2;
444 | // Log.i(LOG_TAG, String.format("View port: %d, %d, %d, %d", mRenderViewport.x, mRenderViewport.y, mRenderViewport.width, mRenderViewport.height));
445 | // }
446 | //
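    | // // (Re)binds the MediaPlayer to mVideoUri: stop()/reset() an existing player or
    | // // create a new one, attach a Surface wrapping mSurfaceTexture, register the
    | // // completion/prepared/error listeners, then prepareAsync(). Failures are
    | // // reported on the main thread via View.post().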
447 | // private void _useUri() {
448 | // if (mPlayer != null) {
449 | // mPlayer.stop();
450 | // mPlayer.reset();
451 | // } else {
452 | // mPlayer = new MediaPlayer();
453 | // }
454 | // try {
455 | // mPlayer.setDataSource(mContext, mVideoUri);
456 | //// mPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
457 | // mPlayer.setSurface(new Surface(mSurfaceTexture));
458 | // } catch (Exception e) {
459 | // e.printStackTrace();
460 | // Log.e(LOG_TAG, "useUri failed");
461 | // if (mPlayCompletionCallback != null) {
462 | // this.post(new Runnable() {
463 | // @Override
464 | // public void run() {
465 | // if (mPlayCompletionCallback != null) {
466 | // if (!mPlayCompletionCallback.playFailed(mPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, MediaPlayer.MEDIA_ERROR_UNSUPPORTED))
467 | // mPlayCompletionCallback.playComplete(mPlayer);
468 | // }
469 | // }
470 | // });
471 | // }
472 | // return;
473 | // }
474 | //
475 | // if (mPlayerInitCallback != null) {
476 | // mPlayerInitCallback.initPlayer(mPlayer);
477 | // }
478 | //
479 | // mPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
480 | // @Override
481 | // public void onCompletion(MediaPlayer mp) {
482 | // if (mPlayCompletionCallback != null) {
483 | // mPlayCompletionCallback.playComplete(mPlayer);
484 | // }
485 | // Log.i(LOG_TAG, "Video Play Over");
486 | // }
487 | // });
488 | //
489 | // mPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
490 | // @Override
491 | // public void onPrepared(MediaPlayer mp) {
492 | // mVideoWidth = mp.getVideoWidth();
493 | // mVideoHeight = mp.getVideoHeight();
494 | //
495 | // queueEvent(new Runnable() {
496 | // @Override
497 | // public void run() {
498 | // calcViewport();
499 | // }
500 | // });
501 | //
502 | // if (mPreparedCallback != null) {
503 | // mPreparedCallback.playPrepared(mPlayer);
504 | // } else {
505 | // mp.start();
506 | // }
507 | //// requestRender(); // may render the first frame too early (before the SurfaceTexture is ready)
508 | // Log.i(LOG_TAG, String.format("Video resolution 1: %d x %d", mVideoWidth, mVideoHeight));
509 | // }
510 | // });
511 | //
512 | // mPlayer.setOnErrorListener(new MediaPlayer.OnErrorListener() {
513 | // @Override
514 | // public boolean onError(MediaPlayer mp, int what, int extra) {
515 | //
516 | // if (mPlayCompletionCallback != null)
517 | // return mPlayCompletionCallback.playFailed(mp, what, extra);
518 | // return false;
519 | // }
520 | // });
521 | //
522 | // try {
523 | // mPlayer.prepareAsync();
524 | // } catch (Exception e) {
525 | // Log.i(LOG_TAG, String.format("Error handled: %s, play failure handler would be called!", e.toString()));
526 | // if (mPlayCompletionCallback != null) {
527 | // this.post(new Runnable() {
528 | // @Override
529 | // public void run() {
530 | // if (mPlayCompletionCallback != null) {
531 | // if (!mPlayCompletionCallback.playFailed(mPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, MediaPlayer.MEDIA_ERROR_UNSUPPORTED))
532 | // mPlayCompletionCallback.playComplete(mPlayer);
533 | // }
534 | // }
535 | // });
536 | // }
537 | // }
538 | //
539 | // }
540 | //
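    | // // Compares the video aspect ratio against the mask's and scales one axis of
    | // // the drawer's flip-scale so the video matches the mask's aspect ratio.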
541 | // private void flushMaskAspectRatio() {
542 | //
543 | // float dstRatio = mVideoWidth / (float) mVideoHeight;
544 | //
545 | // float s = dstRatio / mMaskAspectRatio;
546 | //
547 | // if (s > 1.0f) {
548 | // mDrawer.setFlipscale(mDrawerFlipScaleX / s, mDrawerFlipScaleY);
549 | // } else {
550 | // mDrawer.setFlipscale(mDrawerFlipScaleX, s * mDrawerFlipScaleY);
551 | // }
552 | // }
553 | //
554 | // public interface TakeShotCallback {
555 | // // The receiver may recycle the bitmap passed in
556 | // void takeShotOK(Bitmap bmp);
557 | // }
558 | //
559 | // public synchronized void takeShot(final TakeShotCallback callback) {
560 | // assert callback != null : "callback must not be null!";
561 | //
562 | // if (mDrawer == null) {
563 | // Log.e(LOG_TAG, "Drawer not initialized!");
564 | // callback.takeShotOK(null);
565 | // return;
566 | // }
567 | //
568 | // queueEvent(new Runnable() {
569 | // @Override
570 | // public void run() {
571 | //
572 | // IntBuffer buffer = IntBuffer.allocate(mRenderViewport.width * mRenderViewport.height);
573 | //
574 | // GLES20.glReadPixels(mRenderViewport.x, mRenderViewport.y, mRenderViewport.width, mRenderViewport.height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
575 | // Bitmap bmp = Bitmap.createBitmap(mRenderViewport.width, mRenderViewport.height, Bitmap.Config.ARGB_8888);
576 | // bmp.copyPixelsFromBuffer(buffer);
577 | //
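    | // // glReadPixels() returns rows bottom-up while Bitmap is top-down, so the
    | // // canvas matrix below flips the image vertically (y' = height - y).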
578 | // Bitmap bmp2 = Bitmap.createBitmap(mRenderViewport.width, mRenderViewport.height, Bitmap.Config.ARGB_8888);
579 | //
580 | // Canvas canvas = new Canvas(bmp2);
581 | // Matrix mat = new Matrix();
582 | // mat.setTranslate(0.0f, -mRenderViewport.height / 2.0f);
583 | // mat.postScale(1.0f, -1.0f);
584 | // mat.postTranslate(0.0f, mRenderViewport.height / 2.0f);
585 | //
586 | // canvas.drawBitmap(bmp, mat, null);
587 | // bmp.recycle();
588 | //
589 | // callback.takeShotOK(bmp2);
590 | // }
591 | // });
592 | //
593 | // }
594 | //}
595 |
--------------------------------------------------------------------------------