├── .gitignore
├── README.md
├── app
│   ├── .gitignore
│   ├── build.gradle
│   ├── libs
│   │   ├── armeabi-v7a
│   │   │   ├── libffmpeg.so
│   │   │   └── libffmpeginvoke.so
│   │   └── armeabi
│   │       ├── libffmpeg.so
│   │       └── libffmpeginvoke.so
│   ├── proguard-rules.pro
│   └── src
│       └── main
│           ├── AndroidManifest.xml
│           ├── java
│           │   └── com
│           │       └── tangyx
│           │           └── video
│           │               ├── MainActivity.java
│           │               ├── MakeVideoActivity.java
│           │               ├── MultiRecordActivity.java
│           │               ├── MusicActivity.java
│           │               ├── PermissionsActivity.java
│           │               ├── SelectRecordActivity.java
│           │               ├── adapter
│           │               │   └── MusicAdapter.java
│           │               ├── ffmpeg
│           │               │   ├── FFmpegCommands.java
│           │               │   └── FFmpegRun.java
│           │               ├── helper
│           │               │   ├── MediaHelper.java
│           │               │   └── PermissionHelper.java
│           │               ├── model
│           │               │   └── Music.java
│           │               └── utils
│           │                   └── FileUtils.java
│           ├── jni
│           │   ├── Android.mk
│           │   ├── Application.mk
│           │   ├── cmdutils.c
│           │   ├── cmdutils.h
│           │   ├── cmdutils_common_opts.h
│           │   ├── com_tangyx_video_ffmpeg_FFmpegRun.c
│           │   ├── com_tangyx_video_ffmpeg_FFmpegRun.h
│           │   ├── ffmpeg.c
│           │   ├── ffmpeg.h
│           │   ├── ffmpeg_filter.c
│           │   ├── ffmpeg_opt.c
│           │   ├── libffmpeg.so
│           │   └── logjam.h
│           └── res
│               ├── drawable
│               │   ├── progress_color_horizontal.xml
│               │   ├── ripple_circle.xml
│               │   └── video_seekbar.xml
│               ├── layout
│               │   ├── activity_main.xml
│               │   ├── activity_make_video.xml
│               │   ├── activity_music.xml
│               │   ├── activity_permissions.xml
│               │   ├── activity_select_record.xml
│               │   └── adapter_music.xml
│               ├── mipmap-hdpi
│               │   ├── ic_launcher.png
│               │   └── ic_launcher_round.png
│               ├── mipmap-mdpi
│               │   ├── ic_launcher.png
│               │   └── ic_launcher_round.png
│               ├── mipmap-xhdpi
│               │   ├── ic_launcher.png
│               │   └── ic_launcher_round.png
│               ├── mipmap-xxhdpi
│               │   ├── bt_start.png
│               │   ├── ic_launcher.png
│               │   ├── ic_launcher_round.png
│               │   ├── icon_back_white.png
│               │   ├── icon_fanzhuan.png
│               │   ├── icon_video_ing.png
│               │   ├── kaibo_icon_huakuai.png
│               │   └── live_close_icon.png
│               ├── mipmap-xxxhdpi
│               │   ├── ic_launcher.png
│               │   └── ic_launcher_round.png
│               └── values
│                   ├── colors.xml
│                   ├── strings.xml
│                   └── styles.xml
├── build.gradle
├── gradle.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/workspace.xml
5 | /.idea/libraries
6 | .DS_Store
7 | /build
8 | /captures
9 | .externalNativeBuild
10 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # FFmpegVideo
2 | Android short-video recording and editing built on FFmpeg.
3 | For an introduction to the project, usage instructions, and the additional frameworks it supports, see the article below:
4 | https://www.jianshu.com/p/6c51b11550be
5 |
--------------------------------------------------------------------------------
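
The recording and editing flow is driven by the two classes under app/src/main/java/com/tangyx/video/ffmpeg: FFmpegCommands builds argv-style command arrays and FFmpegRun hands them to the bundled native ffmpeg libraries on a background thread. A minimal usage sketch (not a file from the repository; the input and output paths are placeholders):

// Sketch only: mirrors how MakeVideoActivity drives the wrapper classes.
String[] cmd = FFmpegCommands.extractAudio("/sdcard/demo/input.mp4",   // placeholder input
                                           "/sdcard/demo/audio.aac");  // placeholder output
FFmpegRun.execute(cmd, new FFmpegRun.FFmpegRunListener() {
    @Override
    public void onStart() {
        // runs on the main thread before the native call begins
    }

    @Override
    public void onEnd(int result) {
        // result is the return value of the native run(); the activities in this
        // repository treat onEnd as "done" and chain the next command from here
    }
});
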
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | android {
4 | compileSdkVersion 26
5 | buildToolsVersion "26.0.2"
6 | defaultConfig {
7 | applicationId "com.tangyx.video"
8 | minSdkVersion 15
9 | targetSdkVersion 26
10 | versionCode 1
11 | versionName "1.0"
12 | testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
13 | }
14 | buildTypes {
15 | release {
16 | minifyEnabled false
17 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
18 | }
19 | }
20 | sourceSets {
21 | main {
22 | jniLibs.srcDirs = ['libs']
23 | jni.srcDirs = []
24 | }
25 | }
26 | }
27 |
28 | dependencies {
29 | compile fileTree(dir: 'libs', include: ['*.jar'])
30 | androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
31 | exclude group: 'com.android.support', module: 'support-annotations'
32 | })
33 | compile 'com.android.support:appcompat-v7:26.1.0'
34 | compile 'com.android.support.constraint:constraint-layout:1.0.2'
35 | testCompile 'junit:junit:4.12'
36 | }
37 |
--------------------------------------------------------------------------------
/app/libs/armeabi-v7a/libffmpeg.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/libs/armeabi-v7a/libffmpeg.so
--------------------------------------------------------------------------------
/app/libs/armeabi-v7a/libffmpeginvoke.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/libs/armeabi-v7a/libffmpeginvoke.so
--------------------------------------------------------------------------------
/app/libs/armeabi/libffmpeg.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/libs/armeabi/libffmpeg.so
--------------------------------------------------------------------------------
/app/libs/armeabi/libffmpeginvoke.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/libs/armeabi/libffmpeginvoke.so
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in /Users/tangyx/Library/Android/sdk/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | # http://developer.android.com/guide/developing/tools/proguard.html
9 |
10 | # Add any project specific keep options here:
11 |
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | # public *;
17 | #}
18 |
19 | # Uncomment this to preserve the line number information for
20 | # debugging stack traces.
21 | #-keepattributes SourceFile,LineNumberTable
22 |
23 | # If you keep the line number information, uncomment this to
24 | # hide the original source file name.
25 | #-renamesourcefileattribute SourceFile
26 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/java/com/tangyx/video/MainActivity.java:
--------------------------------------------------------------------------------
1 | package com.tangyx.video;
2 |
3 | import android.content.Intent;
4 | import android.os.Handler;
5 | import android.os.Message;
6 | import android.support.v7.app.AppCompatActivity;
7 | import android.os.Bundle;
8 | import android.view.SurfaceView;
9 | import android.view.View;
10 | import android.view.Window;
11 | import android.view.WindowManager;
12 | import android.widget.ImageView;
13 | import android.widget.ProgressBar;
14 | import android.widget.TextView;
15 | import android.widget.Toast;
16 |
17 | import com.tangyx.video.helper.PermissionHelper;
18 | import com.tangyx.video.utils.FileUtils;
19 | import com.tangyx.video.helper.MediaHelper;
20 |
21 | import java.io.File;
22 | import java.util.UUID;
23 |
24 | public class MainActivity extends AppCompatActivity implements View.OnClickListener{
25 | /**
26 | * Camera preview
27 | */
28 | private SurfaceView mSurfaceView;
29 | /**
30 | * Start-recording button
31 | */
32 | private ImageView mStartVideo;
33 | /**
34 | * Button shown while recording; tap it again to stop recording
35 | */
36 | private ImageView mStartVideoIng;
37 | /**
38 | * Recording time
39 | */
40 | private TextView mTime;
41 | /**
42 | * Recording progress bar
43 | */
44 | private ProgressBar mProgress;
45 | private MediaHelper mMediaHelper;
46 | private int mProgressNumber=0;
47 | private PermissionHelper mPermissionHelper;
48 |
49 | @Override
50 | protected void onCreate(Bundle savedInstanceState) {
51 | super.onCreate(savedInstanceState);
52 | requestWindowFeature(Window.FEATURE_NO_TITLE);
53 | WindowManager.LayoutParams p = this.getWindow().getAttributes();
54 | p.flags |= WindowManager.LayoutParams.FLAG_FULLSCREEN; // |= ORs the fullscreen flag into the existing window flags
55 | getWindow().setAttributes(p);
56 | setContentView(R.layout.activity_main);
57 |
58 | mSurfaceView = (SurfaceView) findViewById(R.id.video_surface_view);
59 | mStartVideo = (ImageView) findViewById(R.id.start_video);
60 | mStartVideoIng = (ImageView) findViewById(R.id.start_video_ing);
61 | mProgress = (ProgressBar) findViewById(R.id.progress);
62 | mTime = (TextView) findViewById(R.id.time);
63 | findViewById(R.id.close).setOnClickListener(this);
64 | findViewById(R.id.inversion).setOnClickListener(this);
65 |
66 | mStartVideo.setOnClickListener(this);
67 | mStartVideoIng.setOnClickListener(this);
68 | // Initialize the recording helper
69 | mMediaHelper = new MediaHelper(this);
70 | // Set the root directory where recorded videos are stored
71 | mMediaHelper.setTargetDir(new File(new FileUtils(this).getStorageDirectory()));
72 | // Set the file name of the recorded video
73 | mMediaHelper.setTargetName(UUID.randomUUID() + ".mp4");
74 | mPermissionHelper = new PermissionHelper(this);
75 | }
76 |
77 | @Override
78 | public void onClick(View view) {
79 | switch (view.getId()){
80 | case R.id.close:
81 | mMediaHelper.stopRecordUnSave();
82 | finish();
83 | break;
84 | case R.id.start_video:
85 | mProgressNumber = 0;
86 | mProgress.setProgress(0);
87 | mMediaHelper.record();
88 | startView();
89 | break;
90 | case R.id.start_video_ing:
91 | if(mProgressNumber == 0){
92 | stopView(false);
93 | break;
94 | }
95 |
96 | if (mProgressNumber < 8) {
97 | // Too short; do not save
98 | Toast.makeText(this,"请至少录制到红线位置",Toast.LENGTH_LONG).show();
99 | mMediaHelper.stopRecordUnSave();
100 | stopView(false);
101 | break;
102 | }
103 | // Stop recording
104 | mMediaHelper.stopRecordSave();
105 | stopView(true);
106 | break;
107 | case R.id.inversion:
108 | mMediaHelper.stopRecordUnSave();
109 | stopView(false);
110 | mMediaHelper.autoChangeCamera();
111 | break;
112 | }
113 | }
114 |
115 | private void startView(){
116 | mStartVideo.setVisibility(View.GONE);
117 | mStartVideoIng.setVisibility(View.VISIBLE);
118 | mProgressNumber = 0;
119 | mTime.setText("00:00");
120 | handler.removeMessages(0);
121 | handler.sendMessage(handler.obtainMessage(0));
122 | }
123 |
124 | private void stopView(boolean isSave){
125 | int timer = mProgressNumber;
126 | mProgressNumber = 0;
127 | mProgress.setProgress(0);
128 | handler.removeMessages(0);
129 | mTime.setText("00:00");
130 | if(isSave) {
131 | String path = mMediaHelper.getTargetFilePath();
132 | Intent intent = new Intent(this,MakeVideoActivity.class);
133 | intent.putExtra("path",path);
134 | intent.putExtra("time",timer);
135 | startActivity(intent);
136 | }
137 | mStartVideoIng.setVisibility(View.GONE);
138 | mStartVideo.setVisibility(View.VISIBLE);
139 | }
140 |
141 | Handler handler = new Handler() {
142 | @Override
143 | public void handleMessage(Message msg) {
144 | switch (msg.what) {
145 | case 0:
146 | mProgress.setProgress(mProgressNumber);
147 | mTime.setText("00:"+(mProgressNumber<10?"0"+mProgressNumber:mProgressNumber));
148 | if(mProgress.getProgress() >= mProgress.getMax()){
149 | mMediaHelper.stopRecordSave();
150 | stopView(true);
151 | }else if (mMediaHelper.isRecording()){
152 | mProgressNumber = mProgressNumber + 1;
153 | sendMessageDelayed(handler.obtainMessage(0), 1000);
154 | }
155 | break;
156 | }
157 | }
158 | };
159 |
160 | @Override
161 | protected void onResume() {
162 | super.onResume();
163 | if(mPermissionHelper.lacksPermissions(PermissionsActivity.PERMISSIONS)){
164 | PermissionsActivity.startActivityForResult(this,PermissionsActivity.REQUEST_CODE,PermissionsActivity.PERMISSIONS);
165 | }else{
166 | // Start the camera preview
167 | mMediaHelper.setSurfaceView(mSurfaceView);
168 | }
169 | }
170 |
171 | @Override
172 | protected void onActivityResult(int requestCode, int resultCode, Intent data) {
173 | super.onActivityResult(requestCode, resultCode, data);
174 | if(resultCode == PermissionsActivity.PERMISSIONS_GRANTED){
175 | // Start the camera preview
176 | mMediaHelper.setSurfaceView(mSurfaceView);
177 | }else if(resultCode == -100){
178 | finish();
179 | }
180 | }
181 | }
--------------------------------------------------------------------------------
/app/src/main/java/com/tangyx/video/MakeVideoActivity.java:
--------------------------------------------------------------------------------
1 | package com.tangyx.video;
2 |
3 | import android.annotation.SuppressLint;
4 | import android.content.Intent;
5 | import android.graphics.Color;
6 | import android.media.MediaPlayer;
7 | import android.os.Bundle;
8 | import android.os.Handler;
9 | import android.os.Message;
10 | import android.support.annotation.Nullable;
11 | import android.support.v7.app.AppCompatActivity;
12 | import android.support.v7.widget.AppCompatSeekBar;
13 | import android.util.Log;
14 | import android.view.View;
15 | import android.widget.SeekBar;
16 | import android.widget.TextView;
17 | import android.widget.VideoView;
18 |
19 | import com.tangyx.video.ffmpeg.FFmpegCommands;
20 | import com.tangyx.video.ffmpeg.FFmpegRun;
21 | import com.tangyx.video.utils.FileUtils;
22 |
23 | import java.io.IOException;
24 | import java.util.ArrayList;
25 | import java.util.List;
26 |
27 | /**
28 | * Created by tangyx
29 | * Date 2017/8/2
30 | * email tangyx@live.com
31 | */
32 | public class MakeVideoActivity extends AppCompatActivity implements View.OnClickListener,SeekBar.OnSeekBarChangeListener{
33 | private final static String TAG="SLog";
34 | private VideoView mVideoView;
35 | private TextView mNext;
36 | private AppCompatSeekBar mAudioSeekBar;
37 | private AppCompatSeekBar mMusicSeekBar;
38 | private MediaPlayer mAudioPlayer;
39 | private MediaPlayer mMusicPlayer;
40 | private List<String> mMediaPath;
41 | private String mTargetPath;
42 | private FileUtils mFileUtils;
43 |
44 | @Override
45 | protected void onCreate(@Nullable Bundle savedInstanceState) {
46 | super.onCreate(savedInstanceState);
47 | setContentView(R.layout.activity_make_video);
48 | mVideoView = (VideoView) findViewById(R.id.video);
49 | mAudioSeekBar = (AppCompatSeekBar) findViewById(R.id.video_seek_bar);
50 | mMusicSeekBar = (AppCompatSeekBar) findViewById(R.id.music_seek_bar);
51 | mNext = (TextView) findViewById(R.id.next);
52 | mAudioSeekBar.setOnSeekBarChangeListener(this);
53 | mMusicSeekBar.setOnSeekBarChangeListener(this);
54 | mNext.setOnClickListener(this);
55 | findViewById(R.id.back).setOnClickListener(this);
56 | findViewById(R.id.local_music).setOnClickListener(this);
57 | boolean isPlayer = getIntent().getBooleanExtra("isPlayer", false);
58 | Log.e(TAG,"isPlayer:"+isPlayer);
59 | if (isPlayer) {
60 | findViewById(R.id.title_layout).setVisibility(View.GONE);
61 | findViewById(R.id.editor_layout).setVisibility(View.GONE);
62 | mVideoView.setVideoPath(getIntent().getStringExtra("path"));
63 | mVideoView.start();
64 | }else{
65 | mFileUtils = new FileUtils(this);
66 | mTargetPath = mFileUtils.getStorageDirectory();
67 | extractVideo();
68 | }
69 | }
70 |
71 |
72 | /**
73 | * Extract the video stream (video only, no audio)
74 | */
75 | private void extractVideo() {
76 | final String outVideo = mTargetPath + "/video.mp4";
77 | String[] commands = FFmpegCommands.extractVideo(getIntent().getStringExtra("path"), outVideo);
78 | FFmpegRun.execute(commands, new FFmpegRun.FFmpegRunListener() {
79 | @Override
80 | public void onStart() {
81 | mMediaPath = new ArrayList<>();
82 | Log.e(TAG,"extractVideo ffmpeg start...");
83 | }
84 |
85 | @Override
86 | public void onEnd(int result) {
87 | Log.e(TAG,"extractVideo ffmpeg end...");
88 | mMediaPath.add(outVideo);
89 | extractAudio();
90 | }
91 | });
92 | }
93 |
94 | /**
95 | * Extract the audio stream
96 | */
97 | private void extractAudio() {
98 | final String outVideo = mTargetPath + "/audio.aac";
99 | String[] commands = FFmpegCommands.extractAudio(getIntent().getStringExtra("path"), outVideo);
100 | FFmpegRun.execute(commands, new FFmpegRun.FFmpegRunListener() {
101 | @Override
102 | public void onStart() {
103 | mAudioPlayer = new MediaPlayer();
104 | }
105 |
106 | @Override
107 | public void onEnd(int result) {
108 | Log.e(TAG,"extractAudio ffmpeg end...");
109 | mMediaPath.add(outVideo);
110 | String path = mMediaPath.get(0);
111 | mVideoView.setVideoPath(path);
112 | mVideoView.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
113 | @Override
114 | public void onCompletion(MediaPlayer mediaPlayer) {
115 | mVideoView.start();
116 | }
117 | });
118 | mVideoView.start();
119 | try {
120 | mAudioPlayer.setDataSource(mMediaPath.get(1));
121 | mAudioPlayer.setLooping(true);
122 | mAudioPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
123 | @Override
124 | public void onPrepared(MediaPlayer mediaPlayer) {
125 | mAudioPlayer.setVolume(0.5f, 0.5f);
126 | mAudioPlayer.start();
127 | }
128 | });
129 | mAudioPlayer.prepare();
130 | } catch (IOException e) {
131 | e.printStackTrace();
132 | }
133 | }
134 | });
135 | }
136 |
137 | private void cutSelectMusic(String musicUrl) {
138 | final String musicPath = mTargetPath + "/bgMusic.aac";
139 | long time = getIntent().getIntExtra("time",0);
140 | String[] commands = FFmpegCommands.cutIntoMusic(musicUrl, time, musicPath);
141 | FFmpegRun.execute(commands, new FFmpegRun.FFmpegRunListener() {
142 | @Override
143 | public void onStart() {
144 | Log.e(TAG,"cutSelectMusic ffmpeg start...");
145 | }
146 |
147 | @Override
148 | public void onEnd(int result) {
149 | Log.e(TAG,"cutSelectMusic ffmpeg end...");
150 | if(mMusicPlayer!=null){ // remove the previously selected background music
151 | mMediaPath.remove(mMediaPath.size()-1);
152 | }
153 | mMediaPath.add(musicPath);
154 | stopMediaPlayer();
155 | mMusicPlayer = new MediaPlayer();
156 | try {
157 | mMusicPlayer.setDataSource(musicPath);
158 | mMusicPlayer.setLooping(true);
159 | mMusicPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
160 | @Override
161 | public void onPrepared(MediaPlayer mediaPlayer) {
162 | mediaPlayer.setVolume(0.5f, 0.5f);
163 | mediaPlayer.start();
164 | mMusicSeekBar.setProgress(50);
165 | }
166 | });
167 | mMusicPlayer.prepareAsync();
168 | } catch (IOException e) {
169 | e.printStackTrace();
170 | }
171 | }
172 | });
173 | }
174 |
175 | @Override
176 | public void onClick(View view) {
177 | switch (view.getId()){
178 | case R.id.back:
179 | finish();
180 | mFileUtils.deleteFile(mTargetPath,null);
181 | break;
182 | case R.id.local_music:
183 | Intent intent = new Intent(this,MusicActivity.class);
184 | startActivityForResult(intent,0);
185 | break;
186 | case R.id.next:
187 | composeVideoAudio();
188 | mNext.setTextColor(Color.parseColor("#999999"));
189 | mNext.setEnabled(false);
190 | break;
191 | }
192 | }
193 |
194 | /**
195 | * Process the video's original audio track
196 | */
197 | private void composeVideoAudio() {
198 | int mAudioVol = mAudioSeekBar.getProgress();
199 | String audioUrl = mMediaPath.get(1);
200 | final String audioOutUrl = mTargetPath + "/tempAudio.aac";
201 | String[] common = FFmpegCommands.changeAudioOrMusicVol(audioUrl, mAudioVol * 10, audioOutUrl);
202 | FFmpegRun.execute(common, new FFmpegRun.FFmpegRunListener() {
203 | @Override
204 | public void onStart() {
205 | Log.e(TAG,"changeAudioVol ffmpeg start...");
206 | handler.sendEmptyMessage(0);
207 | }
208 |
209 | @Override
210 | public void onEnd(int result) {
211 | Log.e(TAG,"changeAudioVol ffmpeg end...");
212 | if (mMediaPath.size() == 3) {
213 | composeVideoMusic(audioOutUrl);
214 | } else {
215 | composeMusicAndAudio(audioOutUrl);
216 | }
217 | }
218 | });
219 | }
220 |
221 | /**
222 | * Process the background music
223 | */
224 | private void composeVideoMusic(final String audioUrl) {
225 | final int mMusicVol = mMusicSeekBar.getProgress();
226 | String musicUrl;
227 | if (audioUrl == null) {
228 | musicUrl = mMediaPath.get(1);
229 | } else {
230 | musicUrl = mMediaPath.get(2);
231 | }
232 | final String musicOutUrl = mTargetPath + "/tempMusic.aac";
233 | final String[] common = FFmpegCommands.changeAudioOrMusicVol(musicUrl, mMusicVol * 10, musicOutUrl);
234 | FFmpegRun.execute(common, new FFmpegRun.FFmpegRunListener() {
235 | @Override
236 | public void onStart() {
237 | Log.e(TAG,"changeMusicVol ffmpeg start...");
238 | handler.sendEmptyMessage(0);
239 | }
240 |
241 | @Override
242 | public void onEnd(int result) {
243 | Log.e(TAG,"changeMusicVol ffmpeg end...");
244 | composeAudioAndMusic(audioUrl, musicOutUrl);
245 | }
246 | });
247 | }
248 |
249 | /**
250 | * Mix the original audio with the background music
251 | */
252 | public void composeAudioAndMusic(String audioUrl, String musicUrl) {
253 | if (audioUrl == null) {
254 | composeMusicAndAudio(musicUrl);
255 | } else {
256 | final String musicAudioPath = mTargetPath + "/audioMusic.aac";
257 | String[] common = FFmpegCommands.composeAudio(audioUrl, musicUrl, musicAudioPath);
258 | FFmpegRun.execute(common, new FFmpegRun.FFmpegRunListener() {
259 | @Override
260 | public void onStart() {
261 | Log.e(TAG,"composeAudioAndMusic ffmpeg start...");
262 | handler.sendEmptyMessage(0);
263 | }
264 |
265 | @Override
266 | public void onEnd(int result) {
267 | Log.e(TAG,"composeAudioAndMusic ffmpeg end...");
268 | composeMusicAndAudio(musicAudioPath);
269 | }
270 | });
271 | }
272 | }
273 |
274 | /**
275 | * Merge the video with the mixed audio/background-music track
276 | *
277 | * @param bgMusicAndAudio
278 | */
279 | private void composeMusicAndAudio(String bgMusicAndAudio) {
280 | final String videoAudioPath = mTargetPath + "/videoMusicAudio.mp4";
281 | final String videoUrl = mMediaPath.get(0);
282 | final int time = getIntent().getIntExtra("time",0) - 1;
283 | String[] common = FFmpegCommands.composeVideo(videoUrl, bgMusicAndAudio, videoAudioPath, time);
284 | FFmpegRun.execute(common, new FFmpegRun.FFmpegRunListener() {
285 | @Override
286 | public void onStart() {
287 | Log.e(TAG,"videoAndAudio ffmpeg start...");
288 | handler.sendEmptyMessage(0);
289 | }
290 |
291 | @Override
292 | public void onEnd(int result) {
293 | Log.e(TAG,"videoAndAudio ffmpeg end...");
294 | handleVideoNext(videoAudioPath);
295 | }
296 | });
297 | }
298 |
299 | /**
300 | * Processing finished; move on to the next step
301 | */
302 | private void handleVideoNext(String videoUrl) {
303 | Message message = new Message();
304 | message.what = 1;
305 | message.obj = videoUrl;
306 | handler.sendMessage(message);
307 | }
308 |
309 | @SuppressLint("HandlerLeak")
310 | Handler handler = new Handler() {
311 | @Override
312 | public void handleMessage(Message msg) {
313 | super.handleMessage(msg);
314 | switch (msg.what) {
315 | case 0:
316 | showProgressLoading();
317 | break;
318 | case 1:
319 | dismissProgress();
320 | String videoPath = (String) msg.obj;
321 | Intent intent = new Intent(MakeVideoActivity.this,MakeVideoActivity.class);
322 | intent.putExtra("path",videoPath);
323 | intent.putExtra("isPlayer",true);
324 | startActivity(intent);
325 | finish();
326 | break;
327 | case 2:
328 | dismissProgress();
329 | break;
330 | }
331 | }
332 | };
333 |
334 | private void showProgressLoading(){
335 |
336 | }
337 | private void dismissProgress(){
338 |
339 | }
340 | @Override
341 | protected void onActivityResult(int requestCode, int resultCode, Intent data) {
342 | super.onActivityResult(requestCode, resultCode, data);
343 | if (resultCode == 10000) {
344 | String music = data.getStringExtra("music");
345 | cutSelectMusic(music);
346 | }
347 | }
348 |
349 | @Override
350 | public void onProgressChanged(SeekBar seekBar, int i, boolean b) {
351 | float volume = i / 100f;
352 | if (mAudioSeekBar == seekBar) {
353 | mAudioPlayer.setVolume(volume, volume);
354 | } else if(mMusicPlayer!=null){
355 | mMusicPlayer.setVolume(volume, volume);
356 | }
357 | }
358 |
359 | @Override
360 | public void onStartTrackingTouch(SeekBar seekBar) {
361 |
362 | }
363 |
364 | @Override
365 | public void onStopTrackingTouch(SeekBar seekBar) {
366 |
367 | }
368 |
369 |
370 | private void stopMediaPlayer(){
371 | try {
372 | if (mMusicPlayer != null) {
373 | mMusicPlayer.stop();
374 | mMusicPlayer.release();
375 | mMusicPlayer=null;
376 | }
377 | }catch (Exception e){
378 | e.printStackTrace();
379 | }
380 | }
381 |
382 | @Override
383 | public void onDestroy() {
384 | super.onDestroy();
385 | mVideoView.stopPlayback();
386 | if (mAudioPlayer != null) {
387 | mAudioPlayer.stop();
388 | mAudioPlayer.release();
389 | }
390 | stopMediaPlayer();
391 | }
392 |
393 | @Override
394 | public void onPause() {
395 | super.onPause();
396 | mVideoView.pause();
397 | if (mAudioPlayer != null) {
398 | mAudioPlayer.pause();
399 | }
400 | if (mMusicPlayer != null) {
401 | mMusicPlayer.pause();
402 | }
403 | }
404 |
405 | @Override
406 | public void onResume() {
407 | super.onResume();
408 | mVideoView.start();
409 | if (mAudioPlayer != null) {
410 | mAudioPlayer.start();
411 | }
412 | if (mMusicPlayer != null) {
413 | mMusicPlayer.start();
414 | }
415 | }
416 | }
417 |
--------------------------------------------------------------------------------
/app/src/main/java/com/tangyx/video/MultiRecordActivity.java:
--------------------------------------------------------------------------------
1 | package com.tangyx.video;
2 |
3 | import android.annotation.SuppressLint;
4 | import android.content.Intent;
5 | import android.hardware.Camera;
6 | import android.os.Bundle;
7 | import android.os.Handler;
8 | import android.os.Message;
9 | import android.support.annotation.Nullable;
10 | import android.support.v7.app.AppCompatActivity;
11 | import android.util.Log;
12 | import android.view.SurfaceView;
13 | import android.view.View;
14 | import android.view.Window;
15 | import android.view.WindowManager;
16 | import android.widget.ImageView;
17 | import android.widget.ProgressBar;
18 | import android.widget.TextView;
19 | import android.widget.Toast;
20 |
21 | import com.tangyx.video.ffmpeg.FFmpegCommands;
22 | import com.tangyx.video.ffmpeg.FFmpegRun;
23 | import com.tangyx.video.helper.MediaHelper;
24 | import com.tangyx.video.helper.PermissionHelper;
25 | import com.tangyx.video.utils.FileUtils;
26 |
27 | import java.io.File;
28 | import java.util.ArrayList;
29 | import java.util.List;
30 | /**
31 | * Created by tangyx
32 | * Date 2017/12/4
33 | * email tangyx@live.com
34 | */
35 |
36 | public class MultiRecordActivity extends AppCompatActivity implements View.OnClickListener{
37 | /**
38 | * Camera preview
39 | */
40 | private SurfaceView mSurfaceView;
41 | /**
42 | * Start-recording button
43 | */
44 | private ImageView mStartVideo;
45 | /**
46 | * Button shown while recording; tap it again to stop recording
47 | */
48 | private ImageView mStartVideoIng;
49 | /**
50 | * Recording time
51 | */
52 | private TextView mTime;
53 | /**
54 | * Recording progress bar
55 | */
56 | private ProgressBar mProgress;
57 | /**
58 | * Indicator shown while waiting for video composition to finish
59 | */
60 | private ProgressBar mWait;
61 | /**
62 | * Main recording helper
63 | */
64 | private MediaHelper mMediaHelper;
65 | /**
66 | * Recording progress value
67 | */
68 | private int mProgressNumber=0;
69 | /**
70 | * Sequence number of the current video segment file
71 | */
72 | private int mVideoNumber=1;
73 | private FileUtils mFileUtils;
74 | /**
75 | * Temporarily holds the parameters of each recorded segment
76 | */
77 | private List<Mp4TsVideo> mTsVideo = new ArrayList<>();
78 | /**
79 | * Paths of the TS streams converted from MP4; these are the files that get concatenated
80 | */
81 | private List<String> mTsPath = new ArrayList<>();
82 | /**
83 | * Whether the next step has been cancelled (e.g. the page was closed); if so, skip further background processing and end the task
84 | */
85 | private boolean isCancel;
86 | /**
87 | * Permission handling
88 | */
89 | private PermissionHelper mPermissionHelper;
90 |
91 | @Override
92 | protected void onCreate(@Nullable Bundle savedInstanceState) {
93 | super.onCreate(savedInstanceState);
94 | requestWindowFeature(Window.FEATURE_NO_TITLE);
95 | WindowManager.LayoutParams p = this.getWindow().getAttributes();
96 | p.flags |= WindowManager.LayoutParams.FLAG_FULLSCREEN; // |= ORs the fullscreen flag into the existing window flags
97 | getWindow().setAttributes(p);
98 | setContentView(R.layout.activity_main);
99 |
100 | mSurfaceView = (SurfaceView) findViewById(R.id.video_surface_view);
101 | mStartVideo = (ImageView) findViewById(R.id.start_video);
102 | mStartVideoIng = (ImageView) findViewById(R.id.start_video_ing);
103 | mProgress = (ProgressBar) findViewById(R.id.progress);
104 | mTime = (TextView) findViewById(R.id.time);
105 | mWait = findViewById(R.id.wait);
106 | findViewById(R.id.close).setOnClickListener(this);
107 | findViewById(R.id.inversion).setOnClickListener(this);
108 |
109 | mStartVideo.setOnClickListener(this);
110 | mStartVideoIng.setOnClickListener(this);
111 |
112 | // Delete any leftover files before recording
113 | mFileUtils = new FileUtils(this);
114 | mFileUtils.deleteFile(mFileUtils.getMediaVideoPath(),null);
115 | mFileUtils.deleteFile(mFileUtils.getStorageDirectory(),null);
116 |
117 |
118 | mMediaHelper = new MediaHelper(this);
119 | mMediaHelper.setTargetDir(new File(mFileUtils.getMediaVideoPath()));
120 | // Video segments are numbered starting at 1
121 | mMediaHelper.setTargetName(mVideoNumber + ".mp4");
122 | mPermissionHelper = new PermissionHelper(this);
123 | }
124 |
125 | @Override
126 | public void onClick(View v) {
127 | switch (v.getId()){
128 | case R.id.close:
129 | mMediaHelper.stopRecordUnSave();
130 | finish();
131 | break;
132 | case R.id.start_video:
133 | mProgressNumber = 0;
134 | mProgress.setProgress(0);
135 | mMediaHelper.record();
136 | startView();
137 | break;
138 | case R.id.start_video_ing:
139 | if(mProgressNumber == 0){
140 | stopView(false);
141 | break;
142 | }
143 | Log.e("SLog","mProgressNumber:"+mProgressNumber);
144 | if (mProgressNumber < 8) {
145 | // Too short; do not save
146 | Toast.makeText(this,"请至少录制到红线位置",Toast.LENGTH_LONG).show();
147 | mMediaHelper.stopRecordUnSave();
148 | stopView(false);
149 | break;
150 | }
151 | // Stop recording
152 | mMediaHelper.stopRecordSave();
153 | stopView(true);
154 | break;
155 | case R.id.inversion:
156 | if(mMediaHelper.isRecording()){
157 | mMediaHelper.stopRecordSave();
158 | addMp4Video();
159 | mVideoNumber++;
160 | mMediaHelper.setTargetName(mVideoNumber+".mp4");
161 | mMediaHelper.autoChangeCamera();
162 | mMediaHelper.record();
163 | }else{
164 | mMediaHelper.autoChangeCamera();
165 | }
166 | break;
167 | }
168 | }
169 |
170 | /**
171 | * Record this video segment and start processing it.
172 | */
173 | private void addMp4Video(){
174 | Mp4TsVideo mp4TsVideo = new Mp4TsVideo();
175 | mp4TsVideo.setMp4Path(mMediaHelper.getTargetFilePath());
176 | mp4TsVideo.setTsPath(mFileUtils.getMediaVideoPath()+"/"+mVideoNumber+".ts");
177 | mp4TsVideo.setFlip(mMediaHelper.getPosition()== Camera.CameraInfo.CAMERA_FACING_FRONT);
178 | mTsVideo.add(mp4TsVideo);
179 | mp4ToTs();
180 | }
181 |
182 | /**
183 | * If there are multiple segments, start composing them asynchronously to save waiting time.
184 | * The composition is driven recursively.
185 | */
186 | private void mp4ToTs(){
187 | if(isCancel){
188 | return;
189 | }
190 | if(mTsVideo.size()==0){
191 | if(mTsPath.size()>0 && !mMediaHelper.isRecording()){
192 | showProgressLoading();
193 | concatVideo(mTsPath);
194 | }
195 | return;
196 | }
197 | final Mp4TsVideo mp4TsVideo = mTsVideo.get(0);
198 | Mp4TsVideo mp4TsVideoIng = (Mp4TsVideo) mStartVideo.getTag();
199 | if(mp4TsVideo == mp4TsVideoIng){
200 | return;
201 | }
202 | mStartVideo.setTag(mp4TsVideo);
203 | FFmpegRun.execute(FFmpegCommands.mp4ToTs(mp4TsVideo.getMp4Path(), mp4TsVideo.getTsPath(),mp4TsVideo.isFlip()), new FFmpegRun.FFmpegRunListener() {
204 | @Override
205 | public void onStart() {
206 |
207 | }
208 |
209 | @Override
210 | public void onEnd(int result) {
211 | if(mTsVideo.size() == 0 || isCancel){
212 | return;
213 | }
214 | mTsPath.add(mp4TsVideo.getTsPath());
215 | mTsVideo.remove(mp4TsVideo);
216 | mp4ToTs();
217 | }
218 |
219 | });
220 | }
221 |
222 | private void startView(){
223 | mStartVideo.setVisibility(View.GONE);
224 | mStartVideoIng.setVisibility(View.VISIBLE);
225 | mProgressNumber = 0;
226 | mTime.setText("00:00");
227 | handler.removeMessages(0);
228 | handler.sendMessage(handler.obtainMessage(0));
229 | }
230 |
231 | /**
232 | * Stop recording
233 | * @param isSave
234 | */
235 | private void stopView(boolean isSave){
236 | int timer = mProgressNumber;
237 | mProgressNumber = 0;
238 | mProgress.setProgress(0);
239 | handler.removeMessages(0);
240 | mTime.setText("00:00");
241 | mTime.setTag(timer);
242 | if(isSave) {
243 | String videoPath = mFileUtils.getMediaVideoPath();
244 | final File file = new File(videoPath);
245 | if(!file.exists()){
246 | Toast.makeText(this,"文件已损坏或者被删除,请重试!",Toast.LENGTH_SHORT).show();
247 | return;
248 | }
249 | File[] files = file.listFiles();
250 | if(files.length==1){
251 | startMediaVideo(mMediaHelper.getTargetFilePath());
252 | }else{
253 | showProgressLoading();
254 | addMp4Video();
255 | }
256 | }else{
257 | mFileUtils.deleteFile(mFileUtils.getStorageDirectory(),null);
258 | mFileUtils.deleteFile(mFileUtils.getMediaVideoPath(),null);
259 | mVideoNumber=1;
260 | isCancel = true;
261 | }
262 | mStartVideoIng.setVisibility(View.GONE);
263 | mStartVideo.setVisibility(View.VISIBLE);
264 | }
265 |
266 |
267 | /**
268 | * Concatenate the TS segments into one video
269 | * @param filePaths
270 | */
271 | private void concatVideo(List<String> filePaths){
272 | StringBuilder ts = new StringBuilder();
273 | for (String s:filePaths) {
274 | ts.append(s).append("|");
275 | }
276 | String tsVideo = ts.substring(0,ts.length()-1);
277 | final String videoPath = mFileUtils.getStorageDirectory()+"/video_ts.mp4";
278 | FFmpegRun.execute(FFmpegCommands.concatTsVideo(tsVideo, videoPath), new FFmpegRun.FFmpegRunListener() {
279 | @Override
280 | public void onStart() {
281 | Log.e("SLog","concatTsVideo start...");
282 | }
283 |
284 | @Override
285 | public void onEnd(int result) {
286 | Log.e("SLog","concatTsVideo end...");
287 | dismissProgress();
288 | startMediaVideo(videoPath);
289 | }
290 | });
291 | }
292 |
293 | /**
294 | * Move on to the next editing page
295 | * @param path
296 | */
297 | private void startMediaVideo(String path){
298 | int timer = (int) mTime.getTag();
299 | Log.d("SLog","video path:"+path);
300 | Intent intent = new Intent(this,MakeVideoActivity.class);
301 | intent.putExtra("path",path);
302 | intent.putExtra("time",timer);
303 | startActivity(intent);
304 | }
305 |
306 | @Override
307 | protected void onResume() {
308 | super.onResume();
309 | if(mPermissionHelper.lacksPermissions(PermissionsActivity.PERMISSIONS)){
310 | PermissionsActivity.startActivityForResult(this,PermissionsActivity.REQUEST_CODE,PermissionsActivity.PERMISSIONS);
311 | }else{
312 | // Start the camera preview
313 | mMediaHelper.setSurfaceView(mSurfaceView);
314 | }
315 | }
316 |
317 | @SuppressLint("HandlerLeak")
318 | Handler handler = new Handler() {
319 | @Override
320 | public void handleMessage(Message msg) {
321 | switch (msg.what) {
322 | case 0:
323 | mProgress.setProgress(mProgressNumber);
324 | mTime.setText("00:"+(mProgressNumber<10?"0"+mProgressNumber:mProgressNumber));
325 | if(mProgress.getProgress() >= mProgress.getMax()){
326 | mMediaHelper.stopRecordSave();
327 | stopView(true);
328 | }else if (mMediaHelper.isRecording()){
329 | mProgressNumber = mProgressNumber + 1;
330 | sendMessageDelayed(handler.obtainMessage(0), 1000);
331 | }
332 | break;
333 | }
334 | }
335 | };
336 |
337 | private void showProgressLoading(){
338 | mWait.setVisibility(View.VISIBLE);
339 | }
340 |
341 | private void dismissProgress(){
342 | mWait.setVisibility(View.GONE);
343 | }
344 |
345 | /**
346 | * Holds the data for each recorded video segment
347 | */
348 | private class Mp4TsVideo{
349 | /**
350 | * Path of the MP4 segment
351 | */
352 | private String mp4Path;
353 | /**
354 | * Path of the converted TS file
355 | */
356 | private String tsPath;
357 | /**
358 | * Whether the frame needs to be flipped (segment recorded with the front camera)
359 | */
360 | private boolean flip;
361 |
362 | public String getMp4Path() {
363 | return mp4Path;
364 | }
365 |
366 | public void setMp4Path(String mp4Path) {
367 | this.mp4Path = mp4Path;
368 | }
369 |
370 | public String getTsPath() {
371 | return tsPath;
372 | }
373 |
374 | public void setTsPath(String tsPath) {
375 | this.tsPath = tsPath;
376 | }
377 |
378 | public boolean isFlip() {
379 | return flip;
380 | }
381 |
382 | public void setFlip(boolean flip) {
383 | this.flip = flip;
384 | }
385 | }
386 | }
387 |
--------------------------------------------------------------------------------
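
For reference, the multi-segment flow above converts each recorded MP4 into an MPEG-TS file (FFmpegCommands.mp4ToTs), then joins the TS paths with "|" before passing them to FFmpegCommands.concatTsVideo, which prefixes the string with ffmpeg's concat: protocol. A small sketch of that hand-off, with placeholder paths (not code from the repository):

// Placeholder TS paths; in MultiRecordActivity these come from Mp4TsVideo.getTsPath().
java.util.List<String> tsPaths = java.util.Arrays.asList("/sdcard/media/1.ts", "/sdcard/media/2.ts");

StringBuilder joined = new StringBuilder();
for (String p : tsPaths) {
    if (joined.length() > 0) joined.append('|');
    joined.append(p);
}
// concatTsVideo turns this into the ffmpeg input "concat:/sdcard/media/1.ts|/sdcard/media/2.ts"
String[] cmd = FFmpegCommands.concatTsVideo(joined.toString(), "/sdcard/media/video_ts.mp4");
FFmpegRun.execute(cmd, null); // a null listener is accepted; see FFmpegRun below
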
/app/src/main/java/com/tangyx/video/MusicActivity.java:
--------------------------------------------------------------------------------
1 | package com.tangyx.video;
2 |
3 | import android.content.Intent;
4 | import android.database.Cursor;
5 | import android.os.AsyncTask;
6 | import android.os.Bundle;
7 | import android.os.Environment;
8 | import android.provider.MediaStore;
9 | import android.support.annotation.Nullable;
10 | import android.support.v7.app.AppCompatActivity;
11 | import android.util.Log;
12 | import android.view.View;
13 | import android.widget.AdapterView;
14 | import android.widget.ListView;
15 |
16 | import com.tangyx.video.adapter.MusicAdapter;
17 | import com.tangyx.video.model.Music;
18 |
19 | import java.io.File;
20 | import java.util.ArrayList;
21 | import java.util.List;
22 |
23 | /**
24 | * Created by tangyx
25 | * Date 2017/8/4
26 | * email tangyx@live.com
27 | */
28 |
29 | public class MusicActivity extends AppCompatActivity {
30 | private ListView mListView;
31 | private MusicAdapter mAdapter;
32 |
33 | @Override
34 | protected void onCreate(@Nullable Bundle savedInstanceState) {
35 | super.onCreate(savedInstanceState);
36 | setContentView(R.layout.activity_music);
37 | mListView = (ListView) findViewById(R.id.list);
38 | findViewById(R.id.back).setOnClickListener(new View.OnClickListener() {
39 | @Override
40 | public void onClick(View view) {
41 | finish();
42 | }
43 | });
44 | new SongTask().execute();
45 | }
46 | private class SongTask extends AsyncTask<Void, Void, List<Music>> implements AdapterView.OnItemClickListener{
47 | @Override
48 | protected void onPreExecute() {
49 | super.onPreExecute();
50 | }
51 |
52 | @Override
53 | protected List<Music> doInBackground(Void... voids) {
54 | List<Music> musics = new ArrayList<>();
55 | Cursor cursor = getApplicationContext().getContentResolver().query(
56 | MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, null,
57 | MediaStore.Audio.Media.DATA + " like ?",
58 | new String[]{Environment.getExternalStorageDirectory() + File.separator + "%"},
59 | MediaStore.Audio.Media.DEFAULT_SORT_ORDER);
60 | if (cursor != null) {
61 | for (cursor.moveToFirst(); !cursor.isAfterLast(); cursor.moveToNext()) {
62 | String isMusic = cursor.getString(cursor.getColumnIndexOrThrow(MediaStore.Audio.Media.IS_MUSIC));
63 | if (isMusic != null && isMusic.equals("")) continue;
64 | int duration = cursor.getInt(cursor.getColumnIndexOrThrow(MediaStore.Audio.Media.DURATION));
65 | String path = cursor.getString(cursor.getColumnIndexOrThrow(MediaStore.Audio.Media.DATA));
66 | if (!path.endsWith(".mp3") || duration<60 * 1000) {
67 | continue;
68 | }
69 | Music music = new Music();
70 | String title = cursor.getString(cursor.getColumnIndexOrThrow(MediaStore.Audio.Media.TITLE));
71 | String artist = cursor.getString(cursor.getColumnIndexOrThrow(MediaStore.Audio.Media.ARTIST));
72 | music.setId(cursor.getString(cursor.getColumnIndexOrThrow(MediaStore.Audio.Media._ID)));
73 | music.setName(title);
74 | music.setSingerName(artist);
75 | music.setSongUrl(path);
76 | musics.add(music);
77 | }
78 | cursor.close();
79 | }
80 | return musics;
81 | }
82 |
83 | @Override
84 | protected void onPostExecute(List<Music> musics) {
85 | super.onPostExecute(musics);
86 | mAdapter = new MusicAdapter(MusicActivity.this,musics);
87 | mListView.setAdapter(mAdapter);
88 | mListView.setOnItemClickListener(this);
89 | }
90 |
91 | @Override
92 | public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
93 | Music music = mAdapter.getItem(i);
94 | Intent intent = new Intent();
95 | intent.putExtra("music",music.getSongUrl());
96 | setResult(10000,intent);
97 | finish();
98 | }
99 | }
100 | }
101 |
--------------------------------------------------------------------------------
/app/src/main/java/com/tangyx/video/PermissionsActivity.java:
--------------------------------------------------------------------------------
1 | package com.tangyx.video;
2 |
3 | import android.Manifest;
4 | import android.app.Activity;
5 | import android.content.DialogInterface;
6 | import android.content.Intent;
7 | import android.content.pm.PackageManager;
8 | import android.net.Uri;
9 | import android.os.Bundle;
10 | import android.provider.Settings;
11 | import android.support.annotation.NonNull;
12 | import android.support.annotation.Nullable;
13 | import android.support.v4.app.ActivityCompat;
14 | import android.support.v7.app.AlertDialog;
15 | import android.support.v7.app.AppCompatActivity;
16 |
17 | import com.tangyx.video.helper.PermissionHelper;
18 |
19 | /**
20 | * Permission request screen
21 | *
22 | */
23 | public class PermissionsActivity extends AppCompatActivity {
24 | // Basic permissions that must be granted
25 | public final static String[] PERMISSIONS = new String[]{
26 | Manifest.permission.WRITE_EXTERNAL_STORAGE,
27 | Manifest.permission.RECORD_AUDIO,
28 | Manifest.permission.CAMERA
29 | };
30 | public static final int PERMISSIONS_GRANTED = 1010; // permissions granted
31 | public static final int PERMISSIONS_DENIED = 1011; // permissions denied
32 |
33 | public static final int REQUEST_CODE = 1012; // request code
34 |
35 | private static final int PERMISSION_REQUEST_CODE = 0; // request code for the system permission dialog
36 | private static final String EXTRA_PERMISSIONS =
37 | "megawave.permission.extra_permission"; // Intent extra carrying the requested permissions
38 | private static final String PACKAGE_URL_SCHEME = "package:"; // URI scheme
39 |
40 | private PermissionHelper mChecker; // permission checker
41 | private boolean isRequireCheck; // whether to run the permission check in onResume; avoids overlapping with the system dialog
42 | private static boolean isShowSetting=true;
43 |
44 | // Public entry point for launching this permission screen
45 | public static void startActivityForResult(Activity activity, int requestCode, String... permissions) {
46 | startActivityForResult(activity,requestCode,true,permissions);
47 | }
48 |
49 | public static void startActivityForResult(Activity activity, int requestCode,boolean showSetting,String... permissions) {
50 | Intent intent = new Intent(activity, PermissionsActivity.class);
51 | intent.putExtra(EXTRA_PERMISSIONS, permissions);
52 | ActivityCompat.startActivityForResult(activity, intent, requestCode, null);
53 | isShowSetting = showSetting;
54 | }
55 |
56 | @Override
57 | protected void onCreate(@Nullable Bundle savedInstanceState) {
58 | super.onCreate(savedInstanceState);
59 | if (getIntent() == null || !getIntent().hasExtra(EXTRA_PERMISSIONS)) {
60 | throw new RuntimeException("PermissionsActivity需要使用静态startActivityForResult方法启动!");
61 | }
62 | setContentView(R.layout.activity_permissions);
63 | mChecker = new PermissionHelper(this);
64 | isRequireCheck = true;
65 | }
66 |
67 | @Override
68 | protected void onResume() {
69 | super.onResume();
70 | if (isRequireCheck) {
71 | String[] permissions = getPermissions();
72 | if (mChecker.lacksPermissions(permissions)) {
73 | requestPermissions(permissions); // request the missing permissions
74 | } else {
75 | allPermissionsGranted(); // all permissions already granted
76 | }
77 | } else {
78 | isRequireCheck = true;
79 | }
80 | }
81 |
82 | // Return the permissions passed in via the Intent
83 | private String[] getPermissions() {
84 | return getIntent().getStringArrayExtra(EXTRA_PERMISSIONS);
85 | }
86 |
87 | // Request permissions in a backward-compatible way
88 | private void requestPermissions(String... permissions) {
89 | ActivityCompat.requestPermissions(this, permissions, PERMISSION_REQUEST_CODE);
90 | }
91 |
92 | // All permissions have been granted
93 | private void allPermissionsGranted() {
94 | setResult(PERMISSIONS_GRANTED);
95 | finish();
96 | }
97 |
98 | /**
99 | * Handle the user's response to the permission request:
100 | * if everything was granted, continue directly;
101 | * if any permission is missing, show a dialog.
102 | *
103 | * @param requestCode request code
104 | * @param permissions requested permissions
105 | * @param grantResults grant results
106 | */
107 | @Override
108 | public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
109 | if (requestCode == PERMISSION_REQUEST_CODE && hasAllPermissionsGranted(grantResults)) {
110 | isRequireCheck = true;
111 | allPermissionsGranted();
112 | } else {
113 | isRequireCheck = false;
114 | if(isShowSetting){
115 | showMissingPermissionDialog();
116 | }
117 | }
118 | }
119 |
120 | // True if every permission was granted
121 | private boolean hasAllPermissionsGranted(@NonNull int[] grantResults) {
122 | for (int grantResult : grantResults) {
123 | if (grantResult == PackageManager.PERMISSION_DENIED) {
124 | return false;
125 | }
126 | }
127 | return true;
128 | }
129 |
130 | // Show the missing-permission dialog
131 | public void showMissingPermissionDialog() {
132 | AlertDialog.Builder builder = new AlertDialog.Builder(PermissionsActivity.this);
133 | builder.setTitle(R.string.label_help);
134 | builder.setMessage(R.string.tips_permissions);
135 |
136 | // Deny: quit the app
137 | builder.setNegativeButton(R.string.label_quit, new DialogInterface.OnClickListener() {
138 | @Override public void onClick(DialogInterface dialog, int which) {
139 | setResult(-100);
140 | finish();
141 | }
142 | });
143 |
144 | builder.setPositiveButton(R.string.label_setting, new DialogInterface.OnClickListener() {
145 | @Override public void onClick(DialogInterface dialog, int which) {
146 | startAppSettings();
147 | }
148 | });
149 |
150 | builder.setCancelable(false);
151 |
152 | builder.show();
153 | }
154 |
155 | // Open the app's system settings page
156 | private void startAppSettings() {
157 | Intent intent = new Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
158 | intent.setData(Uri.parse(PACKAGE_URL_SCHEME + getPackageName()));
159 | startActivity(intent);
160 | }
161 | }
162 |
--------------------------------------------------------------------------------
/app/src/main/java/com/tangyx/video/SelectRecordActivity.java:
--------------------------------------------------------------------------------
1 | package com.tangyx.video;
2 |
3 | import android.content.Intent;
4 | import android.os.Bundle;
5 | import android.support.annotation.Nullable;
6 | import android.support.v7.app.AppCompatActivity;
7 | import android.view.View;
8 |
9 | /**
10 | * Created by tangyx
11 | * Date 2017/12/4
12 | * email tangyx@live.com
13 | */
14 |
15 | public class SelectRecordActivity extends AppCompatActivity implements View.OnClickListener{
16 | @Override
17 | protected void onCreate(@Nullable Bundle savedInstanceState) {
18 | super.onCreate(savedInstanceState);
19 |
20 | setContentView(R.layout.activity_select_record);
21 | findViewById(R.id.single_record).setOnClickListener(this);
22 | findViewById(R.id.multi_record).setOnClickListener(this);
23 | }
24 |
25 | @Override
26 | public void onClick(View view) {
27 | switch (view.getId()){
28 | case R.id.single_record:
29 | startActivity(new Intent(this,MainActivity.class));
30 | break;
31 | case R.id.multi_record:
32 | startActivity(new Intent(this,MultiRecordActivity.class));
33 | break;
34 | }
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/app/src/main/java/com/tangyx/video/adapter/MusicAdapter.java:
--------------------------------------------------------------------------------
1 | package com.tangyx.video.adapter;
2 |
3 | import android.content.Context;
4 | import android.view.LayoutInflater;
5 | import android.view.View;
6 | import android.view.ViewGroup;
7 | import android.widget.BaseAdapter;
8 | import android.widget.TextView;
9 |
10 | import com.tangyx.video.R;
11 | import com.tangyx.video.model.Music;
12 |
13 | import java.util.List;
14 |
15 | /**
16 | * Created by tangyx
17 | * Date 2017/8/4
18 | * email tangyx@live.com
19 | */
20 |
21 | public class MusicAdapter extends BaseAdapter {
22 |
23 | private Context context;
24 | private List<Music> musics;
25 |
26 | public MusicAdapter(Context context, List<Music> musics) {
27 | this.context = context;
28 | this.musics = musics;
29 | }
30 |
31 | @Override
32 | public int getCount() {
33 | return musics.size();
34 | }
35 |
36 | @Override
37 | public Music getItem(int i) {
38 | return musics.get(i);
39 | }
40 |
41 | @Override
42 | public long getItemId(int i) {
43 | return i;
44 | }
45 |
46 | @Override
47 | public View getView(int i, View view, ViewGroup viewGroup) {
48 | final MusicHolder holder;
49 | if(view == null){
50 | holder = new MusicHolder();
51 | view = LayoutInflater.from(context).inflate(R.layout.adapter_music,null);
52 | holder.mName = (TextView) view.findViewById(R.id.name);
53 | holder.mSingerName = (TextView) view.findViewById(R.id.singer_name);
54 | view.setTag(holder);
55 | }else{
56 | holder = (MusicHolder) view.getTag();
57 | }
58 | Music m = getItem(i);
59 | holder.mName.setText(m.getName());
60 | holder.mSingerName.setText(m.getSingerName());
61 | return view;
62 | }
63 | private class MusicHolder{
64 | TextView mName;
65 | TextView mSingerName;
66 | }
67 | }
68 |
--------------------------------------------------------------------------------
/app/src/main/java/com/tangyx/video/ffmpeg/FFmpegCommands.java:
--------------------------------------------------------------------------------
1 | package com.tangyx.video.ffmpeg;
2 |
3 | import android.util.Log;
4 |
5 | import java.util.ArrayList;
6 |
7 | /**
8 | * Created by tangyx
9 | * Date 2017/8/4
10 | * email tangyx@live.com
11 | */
12 |
13 | public class FFmpegCommands {
14 |
15 | /**
16 | * Extract the audio track only
17 | *
18 | * @param videoUrl
19 | * @param outUrl
20 | * @return
21 | */
22 | public static String[] extractAudio(String videoUrl, String outUrl) {
23 | String[] commands = new String[8];
24 | commands[0] = "ffmpeg";
25 | commands[1] = "-i";
26 | commands[2] = videoUrl;
27 | commands[3] = "-acodec";
28 | commands[4] = "copy";
29 | commands[5] = "-vn";
30 | commands[6] = "-y";
31 | commands[7] = outUrl;
32 | return commands;
33 | }
34 | /**
35 | * Extract the video track only, with no audio
36 | *
37 | * @param videoUrl
38 | * @param outUrl
39 | * @return
40 | */
41 | public static String[] extractVideo(String videoUrl, String outUrl) {
42 | String[] commands = new String[8];
43 | commands[0] = "ffmpeg";
44 | commands[1] = "-i";
45 | commands[2] = videoUrl;
46 | commands[3] = "-vcodec";
47 | commands[4] = "copy";
48 | commands[5] = "-an";
49 | commands[6] = "-y";
50 | commands[7] = outUrl;
51 | return commands;
52 | }
53 | /**
54 | * Cut a clip out of a music file
55 | */
56 | public static String[] cutIntoMusic(String musicUrl, long second, String outUrl) {
57 | Log.e("SLog",musicUrl+"---"+second+"---"+outUrl);
58 | String[] commands = new String[10];
59 | commands[0] = "ffmpeg";
60 | commands[1] = "-i";
61 | commands[2] = musicUrl;
62 | commands[3] = "-ss";
63 | commands[4] = "00:00:10";
64 | commands[5] = "-t";
65 | commands[6] = String.valueOf(second);
66 | commands[7] = "-acodec";
67 | commands[8] = "copy";
68 | commands[9] = outUrl;
69 | return commands;
70 | }
71 | /**
72 | * @param audio1
73 | * @param audio2
74 | * @param outputUrl
75 | * @return
76 | */
77 | public static String[] composeAudio(String audio1, String audio2, String outputUrl) {
78 | Log.w("SLog","audio1:" + audio1 + "\naudio2:" + audio2 + "\noutputUrl:" + outputUrl);
79 | String[] commands = new String[10];
80 | commands[0] = "ffmpeg";
81 | // first input
82 | commands[1] = "-i";
83 | commands[2] = audio1;
84 | // second input (music)
85 | commands[3] = "-i";
86 | commands[4] = audio2;
87 | // mix the two inputs
88 | commands[5] = "-filter_complex";
89 | commands[6] = "amix=inputs=2:duration=first:dropout_transition=2";
90 | commands[7] = "-strict";
91 | commands[8] = "-2";
92 | // output file
93 | commands[9] = outputUrl;
94 | return commands;
95 | }
96 |
97 | /**
98 | * Change the volume of an audio file
99 | * @param audioOrMusicUrl
100 | * @param vol
101 | * @param outUrl
102 | * @return
103 | */
104 | public static String[] changeAudioOrMusicVol(String audioOrMusicUrl, int vol, String outUrl) {
105 | Log.w("SLog","audioOrMusicUrl:" + audioOrMusicUrl + "\nvol:" + vol + "\noutUrl:" + outUrl);
106 | String[] commands = new String[8];
107 | commands[0] = "ffmpeg";
108 | commands[1] = "-i";
109 | commands[2] = audioOrMusicUrl;
110 | commands[3] = "-vol";
111 | commands[4] = String.valueOf(vol);
112 | commands[5] = "-acodec";
113 | commands[6] = "copy";
114 | commands[7] = outUrl;
115 | return commands;
116 | }
117 |
118 | /**
119 | * Mux audio and video together
120 | * @param videoUrl
121 | * @param musicOrAudio
122 | * @param outputUrl
123 | * @param second
124 | * @return
125 | */
126 | public static String[] composeVideo(String videoUrl, String musicOrAudio, String outputUrl, long second) {
127 | Log.w("SLog","videoUrl:" + videoUrl + "\nmusicOrAudio:" + musicOrAudio + "\noutputUrl:" + outputUrl + "\nsecond:" + second);
128 | String[] commands = new String[14];
129 | commands[0] = "ffmpeg";
130 | // input video
131 | commands[1] = "-i";
132 | commands[2] = videoUrl;
133 | // input audio/music
134 | commands[3] = "-i";
135 | commands[4] = musicOrAudio;
136 | commands[5] = "-ss";
137 | commands[6] = "00:00:00";
138 | commands[7] = "-t";
139 | commands[8] = String.valueOf(second);
140 | // copy both streams without re-encoding
141 | commands[9] = "-vcodec";
142 | commands[10] = "copy";
143 | commands[11] = "-acodec";
144 | commands[12] = "copy";
145 | // output file
146 | commands[13] = outputUrl;
147 | return commands;
148 | }
149 |
150 | /**
151 | * Convert MP4 to an MPEG-TS stream
152 | * @param videoUrl
153 | * @param outPath
154 | * @param flip
155 | * @return
156 | */
157 | public static String[] mp4ToTs(String videoUrl,String outPath,boolean flip){
158 | Log.w("SLog","videoUrl:" + videoUrl + "\noutPath:" + outPath);
159 | ArrayList<String> _commands = new ArrayList<>();
160 | _commands.add("ffmpeg");
161 | _commands.add("-i");
162 | _commands.add(videoUrl);
163 | if(flip){
164 | _commands.add("-vf");
165 | // hflip mirrors horizontally, vflip mirrors vertically
166 | _commands.add("hflip");
167 | }
168 | _commands.add("-b");
169 | _commands.add(String.valueOf(2 * 1024 * 1024));
170 | _commands.add("-s");
171 | _commands.add("720x1280");
172 | _commands.add("-acodec");
173 | _commands.add("copy");
174 | // _commands.add("-vcodec");
175 | // _commands.add("copy");
176 | _commands.add(outPath);
177 | String[] commands = new String[_commands.size()];
178 | for (int i = 0; i < _commands.size(); i++) {
179 | commands[i] = _commands.get(i);
180 | }
181 | return commands;
182 | }
183 | /**
184 | * Concatenate TS segments into one video
185 | */
186 | public static String[] concatTsVideo(String _filePath, String _outPath) {//-f concat -i list.txt -c copy concat.mp4
187 | Log.w("SLog","_filePath:" + _filePath + "\n_outPath:" + _outPath);
188 | ArrayList<String> _commands = new ArrayList<>();
189 | _commands.add("ffmpeg");
190 | _commands.add("-i");
191 | _commands.add("concat:"+_filePath);
192 | _commands.add("-b");
193 | _commands.add(String.valueOf(2 * 1024 * 1024));
194 | _commands.add("-s");
195 | _commands.add("720x1280");
196 | _commands.add("-acodec");
197 | _commands.add("copy");
198 | _commands.add("-vcodec");
199 | _commands.add("copy");
200 | _commands.add(_outPath);
201 | String[] commands = new String[_commands.size()];
202 | for (int i = 0; i < _commands.size(); i++) {
203 | commands[i] = _commands.get(i);
204 | }
205 | return commands;
206 | }
207 | }
208 |
--------------------------------------------------------------------------------
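
Every builder in FFmpegCommands above returns an argv-style array whose first element is "ffmpeg"; FFmpegRun hands that array to the native run() as-is. When debugging it can help to log the equivalent command line. A small sketch (the helper name is made up and not part of the repository):

// Hypothetical debug helper: joins an argv array from FFmpegCommands into one string.
static String toCommandLine(String[] commands) {
    StringBuilder sb = new StringBuilder();
    for (String c : commands) {
        if (sb.length() > 0) sb.append(' ');
        sb.append(c);
    }
    return sb.toString();
}

// Example: changeAudioOrMusicVol("/sdcard/a.aac", 500, "/sdcard/out.aac") renders as
//   ffmpeg -i /sdcard/a.aac -vol 500 -acodec copy /sdcard/out.aac
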
/app/src/main/java/com/tangyx/video/ffmpeg/FFmpegRun.java:
--------------------------------------------------------------------------------
1 | package com.tangyx.video.ffmpeg;
2 |
3 | import android.os.AsyncTask;
4 |
5 | /**
6 | * Created by tangyx
7 | * Date 2017/8/1
8 | * email tangyx@live.com
9 | */
10 |
11 | public class FFmpegRun {
12 | static {
13 | System.loadLibrary("ffmpeg");
14 | System.loadLibrary("ffmpeginvoke");
15 | }
16 |
17 | public static void execute(String[] commands, final FFmpegRunListener fFmpegRunListener) {
18 | new AsyncTask<String[], Integer, Integer>() {
19 | @Override
20 | protected void onPreExecute() {
21 | if (fFmpegRunListener != null) {
22 | fFmpegRunListener.onStart();
23 | }
24 | }
25 |
26 | @Override
27 | protected Integer doInBackground(String[]... params) {
28 | return run(params[0]);
29 | }
30 |
31 | @Override
32 | protected void onPostExecute(Integer integer) {
33 | if (fFmpegRunListener != null) {
34 | fFmpegRunListener.onEnd(integer);
35 | }
36 | }
37 | }.execute(commands);
38 | }
39 |
40 | public native static int run(String[] commands);
41 |
42 | public interface FFmpegRunListener{
43 | void onStart();
44 | void onEnd(int result);
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/app/src/main/java/com/tangyx/video/helper/MediaHelper.java:
--------------------------------------------------------------------------------
1 | package com.tangyx.video.helper;
2 |
3 | import android.app.Activity;
4 | import android.hardware.Camera;
5 | import android.media.MediaRecorder;
6 | import android.view.GestureDetector;
7 | import android.view.MotionEvent;
8 | import android.view.SurfaceHolder;
9 | import android.view.SurfaceView;
10 | import android.view.View;
11 |
12 | import java.io.File;
13 | import java.io.IOException;
14 | import java.util.List;
15 |
16 | /**
17 | * Created by tangyx on 2017/8/2.
18 | *
19 | */
20 |
21 | public class MediaHelper implements SurfaceHolder.Callback {
22 | private Activity activity;
23 | private MediaRecorder mMediaRecorder;
24 | private Camera mCamera;
25 | private SurfaceView mSurfaceView;
26 | private SurfaceHolder mSurfaceHolder;
27 | private File targetDir;
28 | private String targetName;
29 | private File targetFile;
30 | private boolean isRecording;
31 | private GestureDetector mDetector;
32 | private boolean isZoomIn = false;
33 | private int or = 90;
34 | private int position = Camera.CameraInfo.CAMERA_FACING_BACK;
35 |
36 | public MediaHelper(Activity activity) {
37 | this.activity = activity;
38 | }
39 |
40 | public void setTargetDir(File file) {
41 | this.targetDir = file;
42 | }
43 |
44 | public void setTargetName(String name) {
45 | this.targetName = name;
46 | }
47 |
48 | public String getTargetFilePath() {
49 | return targetFile.getPath();
50 | }
51 |
52 | public boolean deleteTargetFile() {
53 | if (targetFile.exists()) {
54 | return targetFile.delete();
55 | } else {
56 | return false;
57 | }
58 | }
59 |
60 | public void setSurfaceView(SurfaceView view) {
61 | this.mSurfaceView = view;
62 | mSurfaceHolder = mSurfaceView.getHolder();
63 | mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
64 | mSurfaceHolder.addCallback(this);
65 | mDetector = new GestureDetector(activity, new ZoomGestureListener());
66 | mSurfaceView.setOnTouchListener(new View.OnTouchListener() {
67 | @Override
68 | public boolean onTouch(View v, MotionEvent event) {
69 | mDetector.onTouchEvent(event);
70 | return true;
71 | }
72 | });
73 | }
74 |
75 | public boolean isRecording() {
76 | return isRecording;
77 | }
78 |
79 | public void record() {
80 | if (isRecording) {
81 | try {
82 | mMediaRecorder.stop(); // stop the recording
83 | } catch (RuntimeException e) {
84 | e.printStackTrace();
85 | targetFile.delete();
86 | }
87 | releaseMediaRecorder(); // release the MediaRecorder object
88 | mCamera.lock(); // take camera access back from MediaRecorder
89 | isRecording = false;
90 | } else {
91 | startRecordThread();
92 | }
93 | }
94 |
95 | private boolean prepareRecord() {
96 | try {
97 | mMediaRecorder = new MediaRecorder();
98 | mCamera.unlock();
99 | mMediaRecorder.setCamera(mCamera);
100 | mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
101 | mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
102 | // mMediaRecorder.setProfile(profile);
103 | mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
104 | mMediaRecorder.setVideoSize(1280, 720);
105 | // mMediaRecorder.setVideoSize(640, 480);
106 | mMediaRecorder.setVideoEncodingBitRate(2 * 1024 * 1024);
107 | mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
108 | mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
109 | if (position == Camera.CameraInfo.CAMERA_FACING_BACK) {
110 | mMediaRecorder.setOrientationHint(or);
111 | } else {
112 | mMediaRecorder.setOrientationHint(270);
113 | }
114 | targetFile = new File(targetDir, targetName);
115 | mMediaRecorder.setOutputFile(targetFile.getPath());
116 |
117 | } catch (Exception e) {
118 | e.printStackTrace();
119 | releaseMediaRecorder();
120 | return false;
121 | }
122 | try {
123 | mMediaRecorder.prepare();
124 | } catch (IllegalStateException e) {
125 | e.printStackTrace();
126 | releaseMediaRecorder();
127 | return false;
128 | } catch (IOException e) {
129 | e.printStackTrace();
130 | releaseMediaRecorder();
131 | return false;
132 | }
133 | return true;
134 | }
135 |
136 | public void stopRecordSave() {
137 | if (isRecording) {
138 | isRecording = false;
139 | try {
140 | mMediaRecorder.stop();
141 | } catch (RuntimeException r) {
142 | r.printStackTrace();
143 | } finally {
144 | releaseMediaRecorder();
145 | }
146 | }
147 | }
148 |
149 | public void stopRecordUnSave() {
150 | if (isRecording) {
151 | isRecording = false;
152 | try {
153 | mMediaRecorder.stop();
154 | } catch (RuntimeException r) {
155 | if (targetFile.exists()) {
156 | // not saving, delete the file directly
157 | targetFile.delete();
158 | }
159 | } finally {
160 | releaseMediaRecorder();
161 | }
162 | if (targetFile.exists()) {
164 | // not saving, delete the file directly
164 | targetFile.delete();
165 | }
166 | }
167 | }
168 |
169 | private void startPreView(SurfaceHolder holder) {
170 | if (mCamera == null) {
171 | mCamera = Camera.open(position);
172 | }
173 | if (mCamera != null) {
174 | mCamera.setDisplayOrientation(or);
175 | try {
176 | mCamera.setPreviewDisplay(holder);
177 | Camera.Parameters parameters = mCamera.getParameters();
178 | List<Camera.Size> mSupportedPreviewSizes = parameters.getSupportedPreviewSizes();
179 | if (mSupportedPreviewSizes != null) {
180 | int width = mSurfaceView.getWidth();
181 | int height = mSurfaceView.getHeight();
182 | Camera.Size mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes,
183 | Math.max(width, height), Math.min(width, height));
184 | parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
185 | }
186 | List<String> focusModes = parameters.getSupportedFocusModes();
187 | if (focusModes != null) {
188 | for (String mode : focusModes) {
189 | if(mode.contains(Camera.Parameters.FOCUS_MODE_AUTO)){
190 | parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
191 | }
192 | }
193 | }
194 | mCamera.setParameters(parameters);
195 | mCamera.startPreview();
196 | } catch (IOException e) {
197 | e.printStackTrace();
198 | }
199 | }
200 | }
201 |
202 | public Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
203 | final double ASPECT_TOLERANCE = 0.1;
204 | double targetRatio = (double) w / h;
205 | if (sizes == null) {
206 | return null;
207 | }
208 | Camera.Size optimalSize = null;
209 | double minDiff = Double.MAX_VALUE;
210 | int targetHeight = h;
211 | for (Camera.Size size : sizes) {
212 | double ratio = (double) size.width / size.height;
213 | if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE)
214 | continue;
215 | if (Math.abs(size.height - targetHeight) < minDiff) {
216 | optimalSize = size;
217 | minDiff = Math.abs(size.height - targetHeight);
218 | }
219 | }
220 | if (optimalSize == null) {
221 | minDiff = Double.MAX_VALUE;
222 | for (Camera.Size size : sizes) {
223 | if (Math.abs(size.height - targetHeight) < minDiff) {
224 | optimalSize = size;
225 | minDiff = Math.abs(size.height - targetHeight);
226 | }
227 | }
228 | }
229 | return optimalSize;
230 | }
231 |
232 | private void releaseMediaRecorder() {
233 | if (mMediaRecorder != null) {
234 | mMediaRecorder.reset();
235 | mMediaRecorder.release();
236 | mMediaRecorder = null;
237 | }
238 | }
239 |
240 | public void releaseCamera() {
241 | if (mCamera != null) {
242 | mCamera.release();
243 | mCamera = null;
244 | }
245 | }
246 |
247 | @Override
248 | public void surfaceCreated(SurfaceHolder holder) {
249 | mSurfaceHolder = holder;
250 | startPreView(holder);
251 | }
252 |
253 | @Override
254 | public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
255 |
256 | }
257 |
258 | @Override
259 | public void surfaceDestroyed(SurfaceHolder holder) {
260 | if (mCamera != null) {
261 | releaseCamera();
262 | }
263 | if (mMediaRecorder != null) {
264 | releaseMediaRecorder();
265 | }
266 | }
267 |
268 | private void startRecordThread() {
269 | if (prepareRecord()) {
270 | try {
271 | mMediaRecorder.start();
272 | isRecording = true;
273 | } catch (RuntimeException r) {
274 | r.printStackTrace();
275 | releaseMediaRecorder();
276 | }
277 | }
278 | }
279 |
280 | private class ZoomGestureListener extends GestureDetector.SimpleOnGestureListener {
281 | // double-tap gesture: toggles the zoom level
282 | @Override
283 | public boolean onDoubleTap(MotionEvent e) {
284 | super.onDoubleTap(e);
285 | if (!isZoomIn) {
286 | setZoom(20);
287 | isZoomIn = true;
288 | } else {
289 | setZoom(0);
290 | isZoomIn = false;
291 | }
292 | return true;
293 | }
294 | }
295 |
296 | private void setZoom(int zoomValue) {
297 | if (mCamera != null) {
298 | Camera.Parameters parameters = mCamera.getParameters();
299 | if (parameters.isZoomSupported()) {
300 | int maxZoom = parameters.getMaxZoom();
301 | if (maxZoom == 0) {
302 | return;
303 | }
304 | if (zoomValue > maxZoom) {
305 | zoomValue = maxZoom;
306 | }
307 | parameters.setZoom(zoomValue);
308 | mCamera.setParameters(parameters);
309 | }
310 | }
311 | }
312 |
313 | public void autoChangeCamera() {
314 | if (position == Camera.CameraInfo.CAMERA_FACING_BACK) {
315 | position = Camera.CameraInfo.CAMERA_FACING_FRONT;
316 | } else {
317 | position = Camera.CameraInfo.CAMERA_FACING_BACK;
318 | }
319 | releaseCamera();
320 | stopRecordUnSave();
321 | startPreView(mSurfaceHolder);
322 | }
323 |
324 | public int getPosition() {
325 | return position;
326 | }
327 | }
328 |
--------------------------------------------------------------------------------
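Note: a sketch of driving MediaHelper from an Activity that owns a SurfaceView. The layout, view id and file name below are placeholders, not taken from this project:

    import android.app.Activity;
    import android.os.Bundle;
    import android.view.SurfaceView;
    import android.view.View;

    import com.tangyx.video.helper.MediaHelper;

    public class RecordExampleActivity extends Activity {
        private MediaHelper mMediaHelper;

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            setContentView(R.layout.activity_record_example); // hypothetical layout
            mMediaHelper = new MediaHelper(this);
            mMediaHelper.setTargetDir(getExternalFilesDir(null)); // any writable directory works
            mMediaHelper.setTargetName("demo.mp4"); // placeholder file name
            mMediaHelper.setSurfaceView((SurfaceView) findViewById(R.id.surface_view)); // hypothetical id
        }

        // Toggles recording, e.g. wired to a button's android:onClick.
        public void onRecordClick(View view) {
            mMediaHelper.record();
        }

        @Override
        protected void onPause() {
            super.onPause();
            mMediaHelper.stopRecordSave();
            mMediaHelper.releaseCamera();
        }
    }
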
/app/src/main/java/com/tangyx/video/helper/PermissionHelper.java:
--------------------------------------------------------------------------------
1 | package com.tangyx.video.helper;
2 |
3 | import android.content.Context;
4 | import android.content.pm.PackageManager;
5 | import android.support.v4.content.ContextCompat;
6 |
7 | /**
8 | * Utility class for checking runtime permissions
9 | *
10 | * Created by tangyx on 2017/8/2.
11 | */
12 | public class PermissionHelper {
13 | private final Context mContext;
14 |
15 | public PermissionHelper(Context context) {
16 | mContext = context.getApplicationContext();
17 | }
18 |
19 | // Returns true if any permission in the set is missing
20 | public boolean lacksPermissions(String... permissions) {
21 | for (String permission : permissions) {
22 | if (lacksPermission(permission)) {
23 | return true;
24 | }
25 | }
26 | return false;
27 | }
28 |
29 | // Returns true if the given permission has not been granted
30 | private boolean lacksPermission(String permission) {
31 | return ContextCompat.checkSelfPermission(mContext, permission) ==
32 | PackageManager.PERMISSION_DENIED;
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
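Note: a sketch of using lacksPermissions to gate a runtime permission request before recording. The wrapper class and request code are illustrative; the permission set reflects what camera/audio/storage recording typically needs:

    import android.Manifest;
    import android.app.Activity;
    import android.support.v4.app.ActivityCompat;

    import com.tangyx.video.helper.PermissionHelper;

    public class PermissionCheckExample {
        private static final int REQUEST_CODE = 1; // arbitrary request code

        private static final String[] PERMISSIONS = {
                Manifest.permission.CAMERA,
                Manifest.permission.RECORD_AUDIO,
                Manifest.permission.WRITE_EXTERNAL_STORAGE
        };

        // Requests any missing permissions; the result arrives in
        // Activity.onRequestPermissionsResult.
        public static void ensurePermissions(Activity activity) {
            PermissionHelper helper = new PermissionHelper(activity);
            if (helper.lacksPermissions(PERMISSIONS)) {
                ActivityCompat.requestPermissions(activity, PERMISSIONS, REQUEST_CODE);
            }
        }
    }
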
/app/src/main/java/com/tangyx/video/model/Music.java:
--------------------------------------------------------------------------------
1 | package com.tangyx.video.model;
2 |
3 | /**
4 | * Created by tangyx
5 | * Date 2017/8/4
6 | * email tangyx@live.com
7 | */
8 |
9 | public class Music {
10 | private String id;
11 | private String name;
12 | private String singerName;
13 | private String songUrl;
14 |
15 | public String getId() {
16 | return id;
17 | }
18 |
19 | public void setId(String id) {
20 | this.id = id;
21 | }
22 |
23 | public String getName() {
24 | return name;
25 | }
26 |
27 | public void setName(String name) {
28 | this.name = name;
29 | }
30 |
31 | public String getSingerName() {
32 | return singerName;
33 | }
34 |
35 | public void setSingerName(String singerName) {
36 | this.singerName = singerName;
37 | }
38 |
39 | public String getSongUrl() {
40 | return songUrl;
41 | }
42 |
43 | public void setSongUrl(String songUrl) {
44 | this.songUrl = songUrl;
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/app/src/main/java/com/tangyx/video/utils/FileUtils.java:
--------------------------------------------------------------------------------
1 | package com.tangyx.video.utils;
2 |
3 | import android.content.Context;
4 | import android.os.Environment;
5 | import java.io.File;
6 |
7 | public class FileUtils {
8 | /**
9 | * Root directory of the SD card
10 | */
11 | private static String mSdRootPath = Environment.getExternalStorageDirectory().getPath();
12 | /**
13 | * Root of the app cache directory on the device
14 | */
15 | private static String mDataRootPath = null;
16 | /**
17 | * Name of the directory used to save images
18 | */
19 | private final static String FOLDER_NAME = "/ffmpeg";
20 |
21 | public final static String IMAGE_NAME = "/cache";
22 |
23 | public FileUtils(Context context){
24 | mDataRootPath = context.getCacheDir().getPath();
25 | makeAppDir();
26 | }
27 |
28 | public String makeAppDir(){
29 | String path = getStorageDirectory();
30 | File folderFile = new File(path);
31 | if(!folderFile.exists()){
32 | folderFile.mkdir();
33 | }
34 | path = path + IMAGE_NAME;
35 | folderFile = new File(path);
36 | if(!folderFile.exists()){
37 | folderFile.mkdir();
38 | }
39 | return path;
40 | }
41 |
42 | /**
43 | * Get the directory used to store images
44 | * @return the absolute path of the storage directory
45 | */
46 | public String getStorageDirectory(){
47 | String localPath = Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED) ?
48 | mSdRootPath + FOLDER_NAME : mDataRootPath + FOLDER_NAME;
49 | File folderFile = new File(localPath);
50 | if(!folderFile.exists()){
51 | folderFile.mkdir();
52 | }
53 | return localPath;
54 | }
55 |
56 | public String getMediaVideoPath(){
57 | String directory = getStorageDirectory();
58 | directory += "/video";
59 | File file = new File(directory);
60 | if(!file.exists()){
61 | file.mkdir();
62 | }
63 | return directory;
64 | }
65 |
66 | /**
67 | * Recursively delete files under deletePath, keeping the file at videoPath
68 | */
69 | public void deleteFile(String deletePath,String videoPath) {
70 | File file = new File(deletePath);
71 | if (file.exists()) {
72 | File[] files = file.listFiles();
73 | for (File f : files) {
74 | if(f.isDirectory()){
75 | if(f.listFiles().length==0){
76 | f.delete();
77 | }else{
78 | deleteFile(f.getAbsolutePath(),videoPath);
79 | }
80 | }else if(!f.getAbsolutePath().equals(videoPath)){
81 | f.delete();
82 | }
83 | }
84 | }
85 | }
86 |
87 | }
88 |
--------------------------------------------------------------------------------
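Note: a sketch of combining FileUtils with MediaHelper so recordings land in the /ffmpeg/video directory and intermediate files can be cleared afterwards. The class and method names that are not defined in this project (StorageExample, prepareRecorder, cleanUp) are illustrative:

    import java.io.File;

    import android.content.Context;

    import com.tangyx.video.helper.MediaHelper;
    import com.tangyx.video.utils.FileUtils;

    public class StorageExample {
        // Points the recorder at FileUtils' video directory.
        public static void prepareRecorder(Context context, MediaHelper mediaHelper) {
            FileUtils fileUtils = new FileUtils(context);
            mediaHelper.setTargetDir(new File(fileUtils.getMediaVideoPath()));
            mediaHelper.setTargetName(System.currentTimeMillis() + ".mp4"); // placeholder name
        }

        // Removes intermediate files while keeping the finished video.
        public static void cleanUp(Context context, String finalVideoPath) {
            FileUtils fileUtils = new FileUtils(context);
            fileUtils.deleteFile(fileUtils.getStorageDirectory(), finalVideoPath);
        }
    }
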
/app/src/main/jni/Android.mk:
--------------------------------------------------------------------------------
1 | LOCAL_PATH := $(call my-dir)
2 |
3 | include $(CLEAR_VARS)
4 |
5 | LOCAL_MODULE := ffmpeg
6 | LOCAL_SRC_FILES := libffmpeg.so
7 | include $(PREBUILT_SHARED_LIBRARY)
8 |
9 | include $(CLEAR_VARS)
10 | LOCAL_MODULE := ffmpeginvoke
11 | LOCAL_SRC_FILES := com_tangyx_video_ffmpeg_FFmpegRun.c ffmpeg.c ffmpeg_opt.c cmdutils.c ffmpeg_filter.c
12 | LOCAL_C_INCLUDES := /Users/tangyx/Documents/ffmpeg-3.3.2
13 | LOCAL_LDLIBS := -llog -lz -ldl
14 | LOCAL_SHARED_LIBRARIES := ffmpeg
15 |
16 | include $(BUILD_SHARED_LIBRARY)
--------------------------------------------------------------------------------
/app/src/main/jni/Application.mk:
--------------------------------------------------------------------------------
1 | APP_ABI := armeabi armeabi-v7a
2 | APP_BUILD_SCRIPT := Android.mk
3 | APP_PLATFORM := android-15
--------------------------------------------------------------------------------
/app/src/main/jni/cmdutils.h:
--------------------------------------------------------------------------------
1 | /*
2 | * Various utilities for command line tools
3 | * copyright (c) 2003 Fabrice Bellard
4 | *
5 | * This file is part of FFmpeg.
6 | *
7 | * FFmpeg is free software; you can redistribute it and/or
8 | * modify it under the terms of the GNU Lesser General Public
9 | * License as published by the Free Software Foundation; either
10 | * version 2.1 of the License, or (at your option) any later version.
11 | *
12 | * FFmpeg is distributed in the hope that it will be useful,
13 | * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 | * Lesser General Public License for more details.
16 | *
17 | * You should have received a copy of the GNU Lesser General Public
18 | * License along with FFmpeg; if not, write to the Free Software
19 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 | */
21 |
22 | #ifndef CMDUTILS_H
23 | #define CMDUTILS_H
24 |
25 | #include <stdint.h>
26 |
27 | #include "config.h"
28 | #include "libavcodec/avcodec.h"
29 | #include "libavfilter/avfilter.h"
30 | #include "libavformat/avformat.h"
31 | #include "libswscale/swscale.h"
32 |
33 | #ifdef _WIN32
34 | #undef main /* We don't want SDL to override our main() */
35 | #endif
36 |
37 | /**
38 | * program name, defined by the program for show_version().
39 | */
40 | extern const char program_name[];
41 |
42 | /**
43 | * program birth year, defined by the program for show_banner()
44 | */
45 | extern const int program_birth_year;
46 |
47 | extern AVCodecContext *avcodec_opts[AVMEDIA_TYPE_NB];
48 | extern AVFormatContext *avformat_opts;
49 | extern AVDictionary *sws_dict;
50 | extern AVDictionary *swr_opts;
51 | extern AVDictionary *format_opts, *codec_opts, *resample_opts;
52 | extern int hide_banner;
53 |
54 | /**
55 | * Register a program-specific cleanup routine.
56 | */
57 | void register_exit(void (*cb)(int ret));
58 |
59 | /**
60 | * Wraps exit with a program-specific cleanup routine.
61 | */
62 | int exit_program(int ret);
63 |
64 | /**
65 | * Initialize the cmdutils option system, in particular
66 | * allocate the *_opts contexts.
67 | */
68 | void init_opts(void);
69 | /**
70 | * Uninitialize the cmdutils option system, in particular
71 | * free the *_opts contexts and their contents.
72 | */
73 | void uninit_opts(void);
74 |
75 | /**
76 | * Trivial log callback.
77 | * Only suitable for opt_help and similar since it lacks prefix handling.
78 | */
79 | void log_callback_help(void* ptr, int level, const char* fmt, va_list vl);
80 |
81 | /**
82 | * Override the cpuflags.
83 | */
84 | int opt_cpuflags(void *optctx, const char *opt, const char *arg);
85 |
86 | /**
87 | * Fallback for options that are not explicitly handled, these will be
88 | * parsed through AVOptions.
89 | */
90 | int opt_default(void *optctx, const char *opt, const char *arg);
91 |
92 | /**
93 | * Set the libav* libraries log level.
94 | */
95 | int opt_loglevel(void *optctx, const char *opt, const char *arg);
96 |
97 | int opt_report(const char *opt);
98 |
99 | int opt_max_alloc(void *optctx, const char *opt, const char *arg);
100 |
101 | int opt_codec_debug(void *optctx, const char *opt, const char *arg);
102 |
103 | #if CONFIG_OPENCL
104 | int opt_opencl(void *optctx, const char *opt, const char *arg);
105 |
106 | int opt_opencl_bench(void *optctx, const char *opt, const char *arg);
107 | #endif
108 |
109 | /**
110 | * Limit the execution time.
111 | */
112 | int opt_timelimit(void *optctx, const char *opt, const char *arg);
113 |
114 | /**
115 | * Parse a string and return its corresponding value as a double.
116 | * Exit from the application if the string cannot be correctly
117 | * parsed or the corresponding value is invalid.
118 | *
119 | * @param context the context of the value to be set (e.g. the
120 | * corresponding command line option name)
121 | * @param numstr the string to be parsed
122 | * @param type the type (OPT_INT64 or OPT_FLOAT) as which the
123 | * string should be parsed
124 | * @param min the minimum valid accepted value
125 | * @param max the maximum valid accepted value
126 | */
127 | double parse_number_or_die(const char *context, const char *numstr, int type,
128 | double min, double max);
129 |
130 | /**
131 | * Parse a string specifying a time and return its corresponding
132 | * value as a number of microseconds. Exit from the application if
133 | * the string cannot be correctly parsed.
134 | *
135 | * @param context the context of the value to be set (e.g. the
136 | * corresponding command line option name)
137 | * @param timestr the string to be parsed
138 | * @param is_duration a flag which tells how to interpret timestr, if
139 | * not zero timestr is interpreted as a duration, otherwise as a
140 | * date
141 | *
142 | * @see av_parse_time()
143 | */
144 | int64_t parse_time_or_die(const char *context, const char *timestr,
145 | int is_duration);
146 |
147 | typedef struct SpecifierOpt {
148 | char *specifier; /**< stream/chapter/program/... specifier */
149 | union {
150 | uint8_t *str;
151 | int i;
152 | int64_t i64;
153 | float f;
154 | double dbl;
155 | } u;
156 | } SpecifierOpt;
157 |
158 | typedef struct OptionDef {
159 | const char *name;
160 | int flags;
161 | #define HAS_ARG 0x0001
162 | #define OPT_BOOL 0x0002
163 | #define OPT_EXPERT 0x0004
164 | #define OPT_STRING 0x0008
165 | #define OPT_VIDEO 0x0010
166 | #define OPT_AUDIO 0x0020
167 | #define OPT_INT 0x0080
168 | #define OPT_FLOAT 0x0100
169 | #define OPT_SUBTITLE 0x0200
170 | #define OPT_INT64 0x0400
171 | #define OPT_EXIT 0x0800
172 | #define OPT_DATA 0x1000
173 | #define OPT_PERFILE 0x2000 /* the option is per-file (currently ffmpeg-only).
174 | implied by OPT_OFFSET or OPT_SPEC */
175 | #define OPT_OFFSET 0x4000 /* option is specified as an offset in a passed optctx */
176 | #define OPT_SPEC 0x8000 /* option is to be stored in an array of SpecifierOpt.
177 | Implies OPT_OFFSET. Next element after the offset is
178 | an int containing element count in the array. */
179 | #define OPT_TIME 0x10000
180 | #define OPT_DOUBLE 0x20000
181 | #define OPT_INPUT 0x40000
182 | #define OPT_OUTPUT 0x80000
183 | union {
184 | void *dst_ptr;
185 | int (*func_arg)(void *, const char *, const char *);
186 | size_t off;
187 | } u;
188 | const char *help;
189 | const char *argname;
190 | } OptionDef;
191 |
192 | /**
193 | * Print help for all options matching specified flags.
194 | *
195 | * @param options a list of options
196 | * @param msg title of this group. Only printed if at least one option matches.
197 | * @param req_flags print only options which have all those flags set.
198 | * @param rej_flags don't print options which have any of those flags set.
199 | * @param alt_flags print only options that have at least one of those flags set
200 | */
201 | void show_help_options(const OptionDef *options, const char *msg, int req_flags,
202 | int rej_flags, int alt_flags);
203 |
204 | /**
205 | * Show help for all options with given flags in class and all its
206 | * children.
207 | */
208 | void show_help_children(const AVClass *class, int flags);
209 |
210 | /**
211 | * Per-fftool specific help handler. Implemented in each
212 | * fftool, called by show_help().
213 | */
214 | void show_help_default(const char *opt, const char *arg);
215 |
216 | /**
217 | * Generic -h handler common to all fftools.
218 | */
219 | int show_help(void *optctx, const char *opt, const char *arg);
220 |
221 | /**
222 | * Parse the command line arguments.
223 | *
224 | * @param optctx an opaque options context
225 | * @param argc number of command line arguments
226 | * @param argv values of command line arguments
227 | * @param options Array with the definitions required to interpret every
228 | * option of the form: -option_name [argument]
229 | * @param parse_arg_function Name of the function called to process every
230 | * argument without a leading option name flag. NULL if such arguments do
231 | * not have to be processed.
232 | */
233 | void parse_options(void *optctx, int argc, char **argv, const OptionDef *options,
234 | void (* parse_arg_function)(void *optctx, const char*));
235 |
236 | /**
237 | * Parse one given option.
238 | *
239 | * @return on success 1 if arg was consumed, 0 otherwise; negative number on error
240 | */
241 | int parse_option(void *optctx, const char *opt, const char *arg,
242 | const OptionDef *options);
243 |
244 | /**
245 | * An option extracted from the commandline.
246 | * Cannot use AVDictionary because of options like -map which can be
247 | * used multiple times.
248 | */
249 | typedef struct Option {
250 | const OptionDef *opt;
251 | const char *key;
252 | const char *val;
253 | } Option;
254 |
255 | typedef struct OptionGroupDef {
256 | /**< group name */
257 | const char *name;
258 | /**
259 | * Option to be used as group separator. Can be NULL for groups which
260 | * are terminated by a non-option argument (e.g. ffmpeg output files)
261 | */
262 | const char *sep;
263 | /**
264 | * Option flags that must be set on each option that is
265 | * applied to this group
266 | */
267 | int flags;
268 | } OptionGroupDef;
269 |
270 | typedef struct OptionGroup {
271 | const OptionGroupDef *group_def;
272 | const char *arg;
273 |
274 | Option *opts;
275 | int nb_opts;
276 |
277 | AVDictionary *codec_opts;
278 | AVDictionary *format_opts;
279 | AVDictionary *resample_opts;
280 | AVDictionary *sws_dict;
281 | AVDictionary *swr_opts;
282 | } OptionGroup;
283 |
284 | /**
285 | * A list of option groups that all have the same group type
286 | * (e.g. input files or output files)
287 | */
288 | typedef struct OptionGroupList {
289 | const OptionGroupDef *group_def;
290 |
291 | OptionGroup *groups;
292 | int nb_groups;
293 | } OptionGroupList;
294 |
295 | typedef struct OptionParseContext {
296 | OptionGroup global_opts;
297 |
298 | OptionGroupList *groups;
299 | int nb_groups;
300 |
301 | /* parsing state */
302 | OptionGroup cur_group;
303 | } OptionParseContext;
304 |
305 | /**
306 | * Parse an options group and write results into optctx.
307 | *
308 | * @param optctx an app-specific options context. NULL for global options group
309 | */
310 | int parse_optgroup(void *optctx, OptionGroup *g);
311 |
312 | /**
313 | * Split the commandline into an intermediate form convenient for further
314 | * processing.
315 | *
316 | * The commandline is assumed to be composed of options which either belong to a
317 | * group (those with OPT_SPEC, OPT_OFFSET or OPT_PERFILE) or are global
318 | * (everything else).
319 | *
320 | * A group (defined by an OptionGroupDef struct) is a sequence of options
321 | * terminated by either a group separator option (e.g. -i) or a parameter that
322 | * is not an option (doesn't start with -). A group without a separator option
323 | * must always be first in the supplied groups list.
324 | *
325 | * All options within the same group are stored in one OptionGroup struct in an
326 | * OptionGroupList, all groups with the same group definition are stored in one
327 | * OptionGroupList in OptionParseContext.groups. The order of group lists is the
328 | * same as the order of group definitions.
329 | */
330 | int split_commandline(OptionParseContext *octx, int argc, char *argv[],
331 | const OptionDef *options,
332 | const OptionGroupDef *groups, int nb_groups);
333 |
334 | /**
335 | * Free all allocated memory in an OptionParseContext.
336 | */
337 | void uninit_parse_context(OptionParseContext *octx);
338 |
339 | /**
340 | * Find the '-loglevel' option in the command line args and apply it.
341 | */
342 | void parse_loglevel(int argc, char **argv, const OptionDef *options);
343 |
344 | /**
345 | * Return index of option opt in argv or 0 if not found.
346 | */
347 | int locate_option(int argc, char **argv, const OptionDef *options,
348 | const char *optname);
349 |
350 | /**
351 | * Check if the given stream matches a stream specifier.
352 | *
353 | * @param s Corresponding format context.
354 | * @param st Stream from s to be checked.
355 | * @param spec A stream specifier of the [v|a|s|d]:[\<stream spec\>] form.
356 | *
357 | * @return 1 if the stream matches, 0 if it doesn't, <0 on error
358 | */
359 | int check_stream_specifier(AVFormatContext *s, AVStream *st, const char *spec);
360 |
361 | /**
362 | * Filter out options for given codec.
363 | *
364 | * Create a new options dictionary containing only the options from
365 | * opts which apply to the codec with ID codec_id.
366 | *
367 | * @param opts dictionary to place options in
368 | * @param codec_id ID of the codec that should be filtered for
369 | * @param s Corresponding format context.
370 | * @param st A stream from s for which the options should be filtered.
371 | * @param codec The particular codec for which the options should be filtered.
372 | * If null, the default one is looked up according to the codec id.
373 | * @return a pointer to the created dictionary
374 | */
375 | AVDictionary *filter_codec_opts(AVDictionary *opts, enum AVCodecID codec_id,
376 | AVFormatContext *s, AVStream *st, AVCodec *codec);
377 |
378 | /**
379 | * Setup AVCodecContext options for avformat_find_stream_info().
380 | *
381 | * Create an array of dictionaries, one dictionary for each stream
382 | * contained in s.
383 | * Each dictionary will contain the options from codec_opts which can
384 | * be applied to the corresponding stream codec context.
385 | *
386 | * @return pointer to the created array of dictionaries, NULL if it
387 | * cannot be created
388 | */
389 | AVDictionary **setup_find_stream_info_opts(AVFormatContext *s,
390 | AVDictionary *codec_opts);
391 |
392 | /**
393 | * Print an error message to stderr, indicating filename and a human
394 | * readable description of the error code err.
395 | *
396 | * If strerror_r() is not available the use of this function in a
397 | * multithreaded application may be unsafe.
398 | *
399 | * @see av_strerror()
400 | */
401 | void print_error(const char *filename, int err);
402 |
403 | /**
404 | * Print the program banner to stderr. The banner contents depend on the
405 | * current version of the repository and of the libav* libraries used by
406 | * the program.
407 | */
408 | void show_banner(int argc, char **argv, const OptionDef *options);
409 |
410 | /**
411 | * Print the version of the program to stdout. The version message
412 | * depends on the current versions of the repository and of the libav*
413 | * libraries.
414 | * This option processing function does not utilize the arguments.
415 | */
416 | int show_version(void *optctx, const char *opt, const char *arg);
417 |
418 | /**
419 | * Print the build configuration of the program to stdout. The contents
420 | * depend on the definition of FFMPEG_CONFIGURATION.
421 | * This option processing function does not utilize the arguments.
422 | */
423 | int show_buildconf(void *optctx, const char *opt, const char *arg);
424 |
425 | /**
426 | * Print the license of the program to stdout. The license depends on
427 | * the license of the libraries compiled into the program.
428 | * This option processing function does not utilize the arguments.
429 | */
430 | int show_license(void *optctx, const char *opt, const char *arg);
431 |
432 | /**
433 | * Print a listing containing all the formats supported by the
434 | * program (including devices).
435 | * This option processing function does not utilize the arguments.
436 | */
437 | int show_formats(void *optctx, const char *opt, const char *arg);
438 |
439 | /**
440 | * Print a listing containing all the devices supported by the
441 | * program.
442 | * This option processing function does not utilize the arguments.
443 | */
444 | int show_devices(void *optctx, const char *opt, const char *arg);
445 |
446 | #if CONFIG_AVDEVICE
447 | /**
448 | * Print a listing containing autodetected sinks of the output device.
449 | * Device name with options may be passed as an argument to limit results.
450 | */
451 | int show_sinks(void *optctx, const char *opt, const char *arg);
452 |
453 | /**
454 | * Print a listing containing autodetected sources of the input device.
455 | * Device name with options may be passed as an argument to limit results.
456 | */
457 | int show_sources(void *optctx, const char *opt, const char *arg);
458 | #endif
459 |
460 | /**
461 | * Print a listing containing all the codecs supported by the
462 | * program.
463 | * This option processing function does not utilize the arguments.
464 | */
465 | int show_codecs(void *optctx, const char *opt, const char *arg);
466 |
467 | /**
468 | * Print a listing containing all the decoders supported by the
469 | * program.
470 | */
471 | int show_decoders(void *optctx, const char *opt, const char *arg);
472 |
473 | /**
474 | * Print a listing containing all the encoders supported by the
475 | * program.
476 | */
477 | int show_encoders(void *optctx, const char *opt, const char *arg);
478 |
479 | /**
480 | * Print a listing containing all the filters supported by the
481 | * program.
482 | * This option processing function does not utilize the arguments.
483 | */
484 | int show_filters(void *optctx, const char *opt, const char *arg);
485 |
486 | /**
487 | * Print a listing containing all the bit stream filters supported by the
488 | * program.
489 | * This option processing function does not utilize the arguments.
490 | */
491 | int show_bsfs(void *optctx, const char *opt, const char *arg);
492 |
493 | /**
494 | * Print a listing containing all the protocols supported by the
495 | * program.
496 | * This option processing function does not utilize the arguments.
497 | */
498 | int show_protocols(void *optctx, const char *opt, const char *arg);
499 |
500 | /**
501 | * Print a listing containing all the pixel formats supported by the
502 | * program.
503 | * This option processing function does not utilize the arguments.
504 | */
505 | int show_pix_fmts(void *optctx, const char *opt, const char *arg);
506 |
507 | /**
508 | * Print a listing containing all the standard channel layouts supported by
509 | * the program.
510 | * This option processing function does not utilize the arguments.
511 | */
512 | int show_layouts(void *optctx, const char *opt, const char *arg);
513 |
514 | /**
515 | * Print a listing containing all the sample formats supported by the
516 | * program.
517 | */
518 | int show_sample_fmts(void *optctx, const char *opt, const char *arg);
519 |
520 | /**
521 | * Print a listing containing all the color names and values recognized
522 | * by the program.
523 | */
524 | int show_colors(void *optctx, const char *opt, const char *arg);
525 |
526 | /**
527 | * Return a positive value if a line read from standard input
528 | * starts with [yY], otherwise return 0.
529 | */
530 | int read_yesno(void);
531 |
532 | /**
533 | * Get a file corresponding to a preset file.
534 | *
535 | * If is_path is non-zero, look for the file in the path preset_name.
536 | * Otherwise search for a file named arg.ffpreset in the directories
537 | * $FFMPEG_DATADIR (if set), $HOME/.ffmpeg, and in the datadir defined
538 | * at configuration time or in a "ffpresets" folder along the executable
539 | * on win32, in that order. If no such file is found and
540 | * codec_name is defined, then search for a file named
541 | * codec_name-preset_name.avpreset in the above-mentioned directories.
542 | *
543 | * @param filename buffer where the name of the found filename is written
544 | * @param filename_size size in bytes of the filename buffer
545 | * @param preset_name name of the preset to search
546 | * @param is_path tell if preset_name is a filename path
547 | * @param codec_name name of the codec for which to look for the
548 | * preset, may be NULL
549 | */
550 | FILE *get_preset_file(char *filename, size_t filename_size,
551 | const char *preset_name, int is_path, const char *codec_name);
552 |
553 | /**
554 | * Realloc array to hold new_size elements of elem_size.
555 | * Calls exit() on failure.
556 | *
557 | * @param array array to reallocate
558 | * @param elem_size size in bytes of each element
559 | * @param size new element count will be written here
560 | * @param new_size number of elements to place in reallocated array
561 | * @return reallocated array
562 | */
563 | void *grow_array(void *array, int elem_size, int *size, int new_size);
564 |
565 | #define media_type_string av_get_media_type_string
566 |
567 | #define GROW_ARRAY(array, nb_elems)\
568 | array = grow_array(array, sizeof(*array), &nb_elems, nb_elems + 1)
569 |
570 | #define GET_PIX_FMT_NAME(pix_fmt)\
571 | const char *name = av_get_pix_fmt_name(pix_fmt);
572 |
573 | #define GET_SAMPLE_FMT_NAME(sample_fmt)\
574 | const char *name = av_get_sample_fmt_name(sample_fmt)
575 |
576 | #define GET_SAMPLE_RATE_NAME(rate)\
577 | char name[16];\
578 | snprintf(name, sizeof(name), "%d", rate);
579 |
580 | #define GET_CH_LAYOUT_NAME(ch_layout)\
581 | char name[16];\
582 | snprintf(name, sizeof(name), "0x%"PRIx64, ch_layout);
583 |
584 | #define GET_CH_LAYOUT_DESC(ch_layout)\
585 | char name[128];\
586 | av_get_channel_layout_string(name, sizeof(name), 0, ch_layout);
587 |
588 | double get_rotation(AVStream *st);
589 |
590 | #endif /* CMDUTILS_H */
591 |
--------------------------------------------------------------------------------
/app/src/main/jni/cmdutils_common_opts.h:
--------------------------------------------------------------------------------
1 | { "L" , OPT_EXIT, {.func_arg = show_license}, "show license" },
2 | { "h" , OPT_EXIT, {.func_arg = show_help}, "show help", "topic" },
3 | { "?" , OPT_EXIT, {.func_arg = show_help}, "show help", "topic" },
4 | { "help" , OPT_EXIT, {.func_arg = show_help}, "show help", "topic" },
5 | { "-help" , OPT_EXIT, {.func_arg = show_help}, "show help", "topic" },
6 | { "version" , OPT_EXIT, {.func_arg = show_version}, "show version" },
7 | { "buildconf" , OPT_EXIT, {.func_arg = show_buildconf}, "show build configuration" },
8 | { "formats" , OPT_EXIT, {.func_arg = show_formats }, "show available formats" },
9 | { "devices" , OPT_EXIT, {.func_arg = show_devices }, "show available devices" },
10 | { "codecs" , OPT_EXIT, {.func_arg = show_codecs }, "show available codecs" },
11 | { "decoders" , OPT_EXIT, {.func_arg = show_decoders }, "show available decoders" },
12 | { "encoders" , OPT_EXIT, {.func_arg = show_encoders }, "show available encoders" },
13 | { "bsfs" , OPT_EXIT, {.func_arg = show_bsfs }, "show available bit stream filters" },
14 | { "protocols" , OPT_EXIT, {.func_arg = show_protocols}, "show available protocols" },
15 | { "filters" , OPT_EXIT, {.func_arg = show_filters }, "show available filters" },
16 | { "pix_fmts" , OPT_EXIT, {.func_arg = show_pix_fmts }, "show available pixel formats" },
17 | { "layouts" , OPT_EXIT, {.func_arg = show_layouts }, "show standard channel layouts" },
18 | { "sample_fmts", OPT_EXIT, {.func_arg = show_sample_fmts }, "show available audio sample formats" },
19 | { "colors" , OPT_EXIT, {.func_arg = show_colors }, "show available color names" },
20 | { "loglevel" , HAS_ARG, {.func_arg = opt_loglevel}, "set logging level", "loglevel" },
21 | { "v", HAS_ARG, {.func_arg = opt_loglevel}, "set logging level", "loglevel" },
22 | { "report" , 0, {(void*)opt_report}, "generate a report" },
23 | { "max_alloc" , HAS_ARG, {.func_arg = opt_max_alloc}, "set maximum size of a single allocated block", "bytes" },
24 | { "cpuflags" , HAS_ARG | OPT_EXPERT, { .func_arg = opt_cpuflags }, "force specific cpu flags", "flags" },
25 | { "hide_banner", OPT_BOOL | OPT_EXPERT, {&hide_banner}, "do not show program banner", "hide_banner" },
26 | #if CONFIG_OPENCL
27 | { "opencl_bench", OPT_EXIT, {.func_arg = opt_opencl_bench}, "run benchmark on all OpenCL devices and show results" },
28 | { "opencl_options", HAS_ARG, {.func_arg = opt_opencl}, "set OpenCL environment options" },
29 | #endif
30 | #if CONFIG_AVDEVICE
31 | { "sources" , OPT_EXIT | HAS_ARG, { .func_arg = show_sources },
32 | "list sources of the input device", "device" },
33 | { "sinks" , OPT_EXIT | HAS_ARG, { .func_arg = show_sinks },
34 | "list sinks of the output device", "device" },
35 | #endif
36 |
--------------------------------------------------------------------------------
/app/src/main/jni/com_tangyx_video_ffmpeg_FFmpegRun.c:
--------------------------------------------------------------------------------
1 | #include "com_tangyx_video_ffmpeg_FFmpegRun.h"
2 | #include "ffmpeg.h"
3 | #include
4 | /*
5 | * Class: com_tangyx_video_ffmpeg_FFmpegRun
6 | * Method: run
7 | * Signature: ([Ljava/lang/String;)I
8 | */
9 | JNIEXPORT jint JNICALL Java_com_tangyx_video_ffmpeg_FFmpegRun_run(JNIEnv *env,
10 | jclass obj, jobjectArray commands) {
11 |
12 | int argc = (*env)->GetArrayLength(env, commands);
13 | char *argv[argc];
14 |
15 | int i;
16 | for (i = 0; i < argc; i++) {
17 | jstring js = (jstring) (*env)->GetObjectArrayElement(env, commands, i);
18 | argv[i] = (char*) (*env)->GetStringUTFChars(env, js, 0);
19 | }
20 | return run(argc, argv);
21 | }
--------------------------------------------------------------------------------
/app/src/main/jni/com_tangyx_video_ffmpeg_FFmpegRun.h:
--------------------------------------------------------------------------------
1 | /* DO NOT EDIT THIS FILE - it is machine generated */
2 | #include <jni.h>
3 | /* Header for class com_tangyx_video_ffmpeg_FFmpegRun */
4 |
5 | #ifndef _Included_com_tangyx_video_ffmpeg_FFmpegRun
6 | #define _Included_com_tangyx_video_ffmpeg_FFmpegRun
7 | #ifdef __cplusplus
8 | extern "C" {
9 | #endif
10 | /*
11 | * Class: com_tangyx_video_ffmpeg_FFmpegRun
12 | * Method: run
13 | * Signature: ([Ljava/lang/String;)I
14 | */
15 | JNIEXPORT jint JNICALL Java_com_tangyx_video_ffmpeg_FFmpegRun_run
16 | (JNIEnv *, jclass, jobjectArray);
17 |
18 | #ifdef __cplusplus
19 | }
20 | #endif
21 | #endif
22 |
--------------------------------------------------------------------------------
/app/src/main/jni/ffmpeg.h:
--------------------------------------------------------------------------------
1 | /*
2 | * This file is part of FFmpeg.
3 | *
4 | * FFmpeg is free software; you can redistribute it and/or
5 | * modify it under the terms of the GNU Lesser General Public
6 | * License as published by the Free Software Foundation; either
7 | * version 2.1 of the License, or (at your option) any later version.
8 | *
9 | * FFmpeg is distributed in the hope that it will be useful,
10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 | * Lesser General Public License for more details.
13 | *
14 | * You should have received a copy of the GNU Lesser General Public
15 | * License along with FFmpeg; if not, write to the Free Software
16 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 | */
18 |
19 | #ifndef FFMPEG_H
20 | #define FFMPEG_H
21 |
22 | #include "config.h"
23 |
24 | #include <stdint.h>
25 | #include <stdio.h>
26 | #include <signal.h>
27 |
28 | #if HAVE_PTHREADS
29 | #include <pthread.h>
30 | #endif
31 |
32 | #include "cmdutils.h"
33 |
34 | #include "libavformat/avformat.h"
35 | #include "libavformat/avio.h"
36 |
37 | #include "libavcodec/avcodec.h"
38 |
39 | #include "libavfilter/avfilter.h"
40 |
41 | #include "libavutil/avutil.h"
42 | #include "libavutil/dict.h"
43 | #include "libavutil/eval.h"
44 | #include "libavutil/fifo.h"
45 | #include "libavutil/pixfmt.h"
46 | #include "libavutil/rational.h"
47 | #include "libavutil/threadmessage.h"
48 |
49 | #include "libswresample/swresample.h"
50 |
51 | #define VSYNC_AUTO -1
52 | #define VSYNC_PASSTHROUGH 0
53 | #define VSYNC_CFR 1
54 | #define VSYNC_VFR 2
55 | #define VSYNC_VSCFR 0xfe
56 | #define VSYNC_DROP 0xff
57 |
58 | #define MAX_STREAMS 1024 /* arbitrary sanity check value */
59 |
60 | enum HWAccelID {
61 | HWACCEL_NONE = 0,
62 | HWACCEL_AUTO,
63 | HWACCEL_VDPAU,
64 | HWACCEL_DXVA2,
65 | HWACCEL_VDA,
66 | HWACCEL_VIDEOTOOLBOX,
67 | HWACCEL_QSV,
68 | };
69 |
70 | typedef struct HWAccel {
71 | const char *name;
72 | int (*init)(AVCodecContext *s);
73 | enum HWAccelID id;
74 | enum AVPixelFormat pix_fmt;
75 | } HWAccel;
76 |
77 | /* select an input stream for an output stream */
78 | typedef struct StreamMap {
79 | int disabled; /* 1 if this mapping is disabled by a negative map */
80 | int file_index;
81 | int stream_index;
82 | int sync_file_index;
83 | int sync_stream_index;
84 | char *linklabel; /* name of an output link, for mapping lavfi outputs */
85 | } StreamMap;
86 |
87 | typedef struct {
88 | int file_idx, stream_idx, channel_idx; // input
89 | int ofile_idx, ostream_idx; // output
90 | } AudioChannelMap;
91 |
92 | typedef struct OptionsContext {
93 | OptionGroup *g;
94 |
95 | /* input/output options */
96 | int64_t start_time;
97 | int64_t start_time_eof;
98 | int seek_timestamp;
99 | const char *format;
100 |
101 | SpecifierOpt *codec_names;
102 | int nb_codec_names;
103 | SpecifierOpt *audio_channels;
104 | int nb_audio_channels;
105 | SpecifierOpt *audio_sample_rate;
106 | int nb_audio_sample_rate;
107 | SpecifierOpt *frame_rates;
108 | int nb_frame_rates;
109 | SpecifierOpt *frame_sizes;
110 | int nb_frame_sizes;
111 | SpecifierOpt *frame_pix_fmts;
112 | int nb_frame_pix_fmts;
113 |
114 | /* input options */
115 | int64_t input_ts_offset;
116 | int loop;
117 | int rate_emu;
118 | int accurate_seek;
119 | int thread_queue_size;
120 |
121 | SpecifierOpt *ts_scale;
122 | int nb_ts_scale;
123 | SpecifierOpt *dump_attachment;
124 | int nb_dump_attachment;
125 | SpecifierOpt *hwaccels;
126 | int nb_hwaccels;
127 | SpecifierOpt *hwaccel_devices;
128 | int nb_hwaccel_devices;
129 | SpecifierOpt *autorotate;
130 | int nb_autorotate;
131 |
132 | /* output options */
133 | StreamMap *stream_maps;
134 | int nb_stream_maps;
135 | AudioChannelMap *audio_channel_maps; /* one info entry per -map_channel */
136 | int nb_audio_channel_maps; /* number of (valid) -map_channel settings */
137 | int metadata_global_manual;
138 | int metadata_streams_manual;
139 | int metadata_chapters_manual;
140 | const char **attachments;
141 | int nb_attachments;
142 |
143 | int chapters_input_file;
144 |
145 | int64_t recording_time;
146 | int64_t stop_time;
147 | uint64_t limit_filesize;
148 | float mux_preload;
149 | float mux_max_delay;
150 | int shortest;
151 |
152 | int video_disable;
153 | int audio_disable;
154 | int subtitle_disable;
155 | int data_disable;
156 |
157 | /* indexed by output file stream index */
158 | int *streamid_map;
159 | int nb_streamid_map;
160 |
161 | SpecifierOpt *metadata;
162 | int nb_metadata;
163 | SpecifierOpt *max_frames;
164 | int nb_max_frames;
165 | SpecifierOpt *bitstream_filters;
166 | int nb_bitstream_filters;
167 | SpecifierOpt *codec_tags;
168 | int nb_codec_tags;
169 | SpecifierOpt *sample_fmts;
170 | int nb_sample_fmts;
171 | SpecifierOpt *qscale;
172 | int nb_qscale;
173 | SpecifierOpt *forced_key_frames;
174 | int nb_forced_key_frames;
175 | SpecifierOpt *force_fps;
176 | int nb_force_fps;
177 | SpecifierOpt *frame_aspect_ratios;
178 | int nb_frame_aspect_ratios;
179 | SpecifierOpt *rc_overrides;
180 | int nb_rc_overrides;
181 | SpecifierOpt *intra_matrices;
182 | int nb_intra_matrices;
183 | SpecifierOpt *inter_matrices;
184 | int nb_inter_matrices;
185 | SpecifierOpt *chroma_intra_matrices;
186 | int nb_chroma_intra_matrices;
187 | SpecifierOpt *top_field_first;
188 | int nb_top_field_first;
189 | SpecifierOpt *metadata_map;
190 | int nb_metadata_map;
191 | SpecifierOpt *presets;
192 | int nb_presets;
193 | SpecifierOpt *copy_initial_nonkeyframes;
194 | int nb_copy_initial_nonkeyframes;
195 | SpecifierOpt *copy_prior_start;
196 | int nb_copy_prior_start;
197 | SpecifierOpt *filters;
198 | int nb_filters;
199 | SpecifierOpt *filter_scripts;
200 | int nb_filter_scripts;
201 | SpecifierOpt *reinit_filters;
202 | int nb_reinit_filters;
203 | SpecifierOpt *fix_sub_duration;
204 | int nb_fix_sub_duration;
205 | SpecifierOpt *canvas_sizes;
206 | int nb_canvas_sizes;
207 | SpecifierOpt *pass;
208 | int nb_pass;
209 | SpecifierOpt *passlogfiles;
210 | int nb_passlogfiles;
211 | SpecifierOpt *guess_layout_max;
212 | int nb_guess_layout_max;
213 | SpecifierOpt *apad;
214 | int nb_apad;
215 | SpecifierOpt *discard;
216 | int nb_discard;
217 | SpecifierOpt *disposition;
218 | int nb_disposition;
219 | SpecifierOpt *program;
220 | int nb_program;
221 | } OptionsContext;
222 |
223 | typedef struct InputFilter {
224 | AVFilterContext *filter;
225 | struct InputStream *ist;
226 | struct FilterGraph *graph;
227 | uint8_t *name;
228 | } InputFilter;
229 |
230 | typedef struct OutputFilter {
231 | AVFilterContext *filter;
232 | struct OutputStream *ost;
233 | struct FilterGraph *graph;
234 | uint8_t *name;
235 |
236 | /* temporary storage until stream maps are processed */
237 | AVFilterInOut *out_tmp;
238 | enum AVMediaType type;
239 | } OutputFilter;
240 |
241 | typedef struct FilterGraph {
242 | int index;
243 | const char *graph_desc;
244 |
245 | AVFilterGraph *graph;
246 | int reconfiguration;
247 |
248 | InputFilter **inputs;
249 | int nb_inputs;
250 | OutputFilter **outputs;
251 | int nb_outputs;
252 | } FilterGraph;
253 |
254 | typedef struct InputStream {
255 | int file_index;
256 | AVStream *st;
257 | int discard; /* true if stream data should be discarded */
258 | int user_set_discard;
259 | int decoding_needed; /* non zero if the packets must be decoded in 'raw_fifo', see DECODING_FOR_* */
260 | #define DECODING_FOR_OST 1
261 | #define DECODING_FOR_FILTER 2
262 |
263 | AVCodecContext *dec_ctx;
264 | AVCodec *dec;
265 | AVFrame *decoded_frame;
266 | AVFrame *filter_frame; /* a ref of decoded_frame, to be sent to filters */
267 |
268 | int64_t start; /* time when read started */
269 | /* predicted dts of the next packet read for this stream or (when there are
270 | * several frames in a packet) of the next frame in current packet (in AV_TIME_BASE units) */
271 | int64_t next_dts;
272 | int64_t dts; ///< dts of the last packet read for this stream (in AV_TIME_BASE units)
273 |
274 | int64_t next_pts; ///< synthetic pts for the next decode frame (in AV_TIME_BASE units)
275 | int64_t pts; ///< current pts of the decoded frame (in AV_TIME_BASE units)
276 | int wrap_correction_done;
277 |
278 | int64_t filter_in_rescale_delta_last;
279 |
280 | int64_t min_pts; /* pts with the smallest value in a current stream */
281 | int64_t max_pts; /* pts with the higher value in a current stream */
282 | int64_t nb_samples; /* number of samples in the last decoded audio frame before looping */
283 |
284 | double ts_scale;
285 | int saw_first_ts;
286 | int showed_multi_packet_warning;
287 | AVDictionary *decoder_opts;
288 | AVRational framerate; /* framerate forced with -r */
289 | int top_field_first;
290 | int guess_layout_max;
291 |
292 | int autorotate;
293 | int resample_height;
294 | int resample_width;
295 | int resample_pix_fmt;
296 |
297 | int resample_sample_fmt;
298 | int resample_sample_rate;
299 | int resample_channels;
300 | uint64_t resample_channel_layout;
301 |
302 | int fix_sub_duration;
303 | struct { /* previous decoded subtitle and related variables */
304 | int got_output;
305 | int ret;
306 | AVSubtitle subtitle;
307 | } prev_sub;
308 |
309 | struct sub2video {
310 | int64_t last_pts;
311 | int64_t end_pts;
312 | AVFrame *frame;
313 | int w, h;
314 | } sub2video;
315 |
316 | int dr1;
317 |
318 | /* decoded data from this stream goes into all those filters
319 | * currently video and audio only */
320 | InputFilter **filters;
321 | int nb_filters;
322 |
323 | int reinit_filters;
324 |
325 | /* hwaccel options */
326 | enum HWAccelID hwaccel_id;
327 | char *hwaccel_device;
328 |
329 | /* hwaccel context */
330 | enum HWAccelID active_hwaccel_id;
331 | void *hwaccel_ctx;
332 | void (*hwaccel_uninit)(AVCodecContext *s);
333 | int (*hwaccel_get_buffer)(AVCodecContext *s, AVFrame *frame, int flags);
334 | int (*hwaccel_retrieve_data)(AVCodecContext *s, AVFrame *frame);
335 | enum AVPixelFormat hwaccel_pix_fmt;
336 | enum AVPixelFormat hwaccel_retrieved_pix_fmt;
337 |
338 | /* stats */
339 | // combined size of all the packets read
340 | uint64_t data_size;
341 | /* number of packets successfully read for this stream */
342 | uint64_t nb_packets;
343 | // number of frames/samples retrieved from the decoder
344 | uint64_t frames_decoded;
345 | uint64_t samples_decoded;
346 | } InputStream;
347 |
348 | typedef struct InputFile {
349 | AVFormatContext *ctx;
350 | int eof_reached; /* true if eof reached */
351 | int eagain; /* true if last read attempt returned EAGAIN */
352 | int ist_index; /* index of first stream in input_streams */
353 | int loop; /* set number of times input stream should be looped */
354 | int64_t duration; /* actual duration of the longest stream in a file
355 | at the moment when looping happens */
356 | AVRational time_base; /* time base of the duration */
357 | int64_t input_ts_offset;
358 |
359 | int64_t ts_offset;
360 | int64_t last_ts;
361 | int64_t start_time; /* user-specified start time in AV_TIME_BASE or AV_NOPTS_VALUE */
362 | int seek_timestamp;
363 | int64_t recording_time;
364 | int nb_streams; /* number of stream that ffmpeg is aware of; may be different
365 | from ctx.nb_streams if new streams appear during av_read_frame() */
366 | int nb_streams_warn; /* number of streams that the user was warned of */
367 | int rate_emu;
368 | int accurate_seek;
369 |
370 | #if HAVE_PTHREADS
371 | AVThreadMessageQueue *in_thread_queue;
372 | pthread_t thread; /* thread reading from this file */
373 | int non_blocking; /* reading packets from the thread should not block */
374 | int joined; /* the thread has been joined */
375 | int thread_queue_size; /* maximum number of queued packets */
376 | #endif
377 | } InputFile;
378 |
379 | enum forced_keyframes_const {
380 | FKF_N,
381 | FKF_N_FORCED,
382 | FKF_PREV_FORCED_N,
383 | FKF_PREV_FORCED_T,
384 | FKF_T,
385 | FKF_NB
386 | };
387 |
388 | #define ABORT_ON_FLAG_EMPTY_OUTPUT (1 << 0)
389 |
390 | extern const char *const forced_keyframes_const_names[];
391 |
392 | typedef enum {
393 | ENCODER_FINISHED = 1,
394 | MUXER_FINISHED = 2,
395 | } OSTFinished ;
396 |
397 | typedef struct OutputStream {
398 | int file_index; /* file index */
399 | int index; /* stream index in the output file */
400 | int source_index; /* InputStream index */
401 | AVStream *st; /* stream in the output file */
402 | int encoding_needed; /* true if encoding needed for this stream */
403 | int frame_number;
404 | /* input pts and corresponding output pts
405 | for A/V sync */
406 | struct InputStream *sync_ist; /* input stream to sync against */
407 | int64_t sync_opts; /* output frame counter, could be changed to some true timestamp */ // FIXME look at frame_number
408 | /* pts of the first frame encoded for this stream, used for limiting
409 | * recording time */
410 | int64_t first_pts;
411 | /* dts of the last packet sent to the muxer */
412 | int64_t last_mux_dts;
413 | AVBitStreamFilterContext *bitstream_filters;
414 | AVCodecContext *enc_ctx;
415 | AVCodec *enc;
416 | int64_t max_frames;
417 | AVFrame *filtered_frame;
418 | AVFrame *last_frame;
419 | int last_dropped;
420 | int last_nb0_frames[3];
421 |
422 | void *hwaccel_ctx;
423 |
424 | /* video only */
425 | AVRational frame_rate;
426 | int is_cfr;
427 | int force_fps;
428 | int top_field_first;
429 | int rotate_overridden;
430 |
431 | AVRational frame_aspect_ratio;
432 |
433 | /* forced key frames */
434 | int64_t *forced_kf_pts;
435 | int forced_kf_count;
436 | int forced_kf_index;
437 | char *forced_keyframes;
438 | AVExpr *forced_keyframes_pexpr;
439 | double forced_keyframes_expr_const_values[FKF_NB];
440 |
441 | /* audio only */
442 | int *audio_channels_map; /* list of the channels id to pick from the source stream */
443 | int audio_channels_mapped; /* number of channels in audio_channels_map */
444 |
445 | char *logfile_prefix;
446 | FILE *logfile;
447 |
448 | OutputFilter *filter;
449 | char *avfilter;
450 | char *filters; ///< filtergraph associated to the -filter option
451 | char *filters_script; ///< filtergraph script associated to the -filter_script option
452 |
453 | AVDictionary *encoder_opts;
454 | AVDictionary *sws_dict;
455 | AVDictionary *swr_opts;
456 | AVDictionary *resample_opts;
457 | char *apad;
458 | OSTFinished finished; /* no more packets should be written for this stream */
459 | int unavailable; /* true if the stream is unavailable (possibly temporarily) */
460 | int stream_copy;
461 | const char *attachment_filename;
462 | int copy_initial_nonkeyframes;
463 | int copy_prior_start;
464 | char *disposition;
465 |
466 | int keep_pix_fmt;
467 |
468 | AVCodecParserContext *parser;
469 |
470 | /* stats */
471 | // combined size of all the packets written
472 | uint64_t data_size;
473 | // number of packets send to the muxer
474 | uint64_t packets_written;
475 | // number of frames/samples sent to the encoder
476 | uint64_t frames_encoded;
477 | uint64_t samples_encoded;
478 |
479 | /* packet quality factor */
480 | int quality;
481 |
482 | /* packet picture type */
483 | int pict_type;
484 |
485 | /* frame encode sum of squared error values */
486 | int64_t error[4];
487 | } OutputStream;
488 |
489 | typedef struct OutputFile {
490 | AVFormatContext *ctx;
491 | AVDictionary *opts;
492 | int ost_index; /* index of the first stream in output_streams */
493 | int64_t recording_time; ///< desired length of the resulting file in microseconds == AV_TIME_BASE units
494 | int64_t start_time; ///< start time in microseconds == AV_TIME_BASE units
495 | uint64_t limit_filesize; /* filesize limit expressed in bytes */
496 |
497 | int shortest;
498 | } OutputFile;
499 |
500 | extern InputStream **input_streams;
501 | extern int nb_input_streams;
502 | extern InputFile **input_files;
503 | extern int nb_input_files;
504 |
505 | extern OutputStream **output_streams;
506 | extern int nb_output_streams;
507 | extern OutputFile **output_files;
508 | extern int nb_output_files;
509 |
510 | extern FilterGraph **filtergraphs;
511 | extern int nb_filtergraphs;
512 |
513 | extern char *vstats_filename;
514 | extern char *sdp_filename;
515 |
516 | extern float audio_drift_threshold;
517 | extern float dts_delta_threshold;
518 | extern float dts_error_threshold;
519 |
520 | extern int audio_volume;
521 | extern int audio_sync_method;
522 | extern int video_sync_method;
523 | extern float frame_drop_threshold;
524 | extern int do_benchmark;
525 | extern int do_benchmark_all;
526 | extern int do_deinterlace;
527 | extern int do_hex_dump;
528 | extern int do_pkt_dump;
529 | extern int copy_ts;
530 | extern int start_at_zero;
531 | extern int copy_tb;
532 | extern int debug_ts;
533 | extern int exit_on_error;
534 | extern int abort_on_flags;
535 | extern int print_stats;
536 | extern int qp_hist;
537 | extern int stdin_interaction;
538 | extern int frame_bits_per_raw_sample;
539 | extern AVIOContext *progress_avio;
540 | extern float max_error_rate;
541 | extern int vdpau_api_ver;
542 | extern char *videotoolbox_pixfmt;
543 |
544 | extern const AVIOInterruptCB int_cb;
545 |
546 | extern const OptionDef options[];
547 | extern const HWAccel hwaccels[];
548 |
549 |
550 | void term_init(void);
551 | void term_exit(void);
552 |
553 | void reset_options(OptionsContext *o, int is_input);
554 | void show_usage(void);
555 |
556 | void opt_output_file(void *optctx, const char *filename);
557 |
558 | void remove_avoptions(AVDictionary **a, AVDictionary *b);
559 | void assert_avoptions(AVDictionary *m);
560 |
561 | int guess_input_channel_layout(InputStream *ist);
562 |
563 | enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodecContext *avctx, AVCodec *codec, enum AVPixelFormat target);
564 | void choose_sample_fmt(AVStream *st, AVCodec *codec);
565 |
566 | int configure_filtergraph(FilterGraph *fg);
567 | int configure_output_filter(FilterGraph *fg, OutputFilter *ofilter, AVFilterInOut *out);
568 | int ist_in_filtergraph(FilterGraph *fg, InputStream *ist);
569 | FilterGraph *init_simple_filtergraph(InputStream *ist, OutputStream *ost);
570 | int init_complex_filtergraph(FilterGraph *fg);
571 |
572 | int ffmpeg_parse_options(int argc, char **argv);
573 |
574 | int vdpau_init(AVCodecContext *s);
575 | int dxva2_init(AVCodecContext *s);
576 | int vda_init(AVCodecContext *s);
577 | int videotoolbox_init(AVCodecContext *s);
578 | int qsv_init(AVCodecContext *s);
579 | int qsv_transcode_init(OutputStream *ost);
580 |
581 | int run(int argc, char **argv);
582 |
583 | #endif /* FFMPEG_H */
584 |
--------------------------------------------------------------------------------
/app/src/main/jni/ffmpeg_filter.c:
--------------------------------------------------------------------------------
1 | /*
2 | * ffmpeg filter configuration
3 | *
4 | * This file is part of FFmpeg.
5 | *
6 | * FFmpeg is free software; you can redistribute it and/or
7 | * modify it under the terms of the GNU Lesser General Public
8 | * License as published by the Free Software Foundation; either
9 | * version 2.1 of the License, or (at your option) any later version.
10 | *
11 | * FFmpeg is distributed in the hope that it will be useful,
12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 | * Lesser General Public License for more details.
15 | *
16 | * You should have received a copy of the GNU Lesser General Public
17 | * License along with FFmpeg; if not, write to the Free Software
18 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 | */
20 |
21 | #include <stdint.h>
22 |
23 | #include "ffmpeg.h"
24 |
25 | #include "libavfilter/avfilter.h"
26 | #include "libavfilter/buffersink.h"
27 |
28 | #include "libavresample/avresample.h"
29 |
30 | #include "libavutil/avassert.h"
31 | #include "libavutil/avstring.h"
32 | #include "libavutil/bprint.h"
33 | #include "libavutil/channel_layout.h"
34 | #include "libavutil/display.h"
35 | #include "libavutil/opt.h"
36 | #include "libavutil/pixdesc.h"
37 | #include "libavutil/pixfmt.h"
38 | #include "libavutil/imgutils.h"
39 | #include "libavutil/samplefmt.h"
40 |
41 |
42 | static const enum AVPixelFormat *get_compliance_unofficial_pix_fmts(enum AVCodecID codec_id, const enum AVPixelFormat default_formats[])
43 | {
44 | static const enum AVPixelFormat mjpeg_formats[] =
45 | { AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P,
46 | AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P,
47 | AV_PIX_FMT_NONE };
48 | static const enum AVPixelFormat ljpeg_formats[] =
49 | { AV_PIX_FMT_BGR24 , AV_PIX_FMT_BGRA , AV_PIX_FMT_BGR0,
50 | AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P,
51 | AV_PIX_FMT_YUV420P , AV_PIX_FMT_YUV444P , AV_PIX_FMT_YUV422P,
52 | AV_PIX_FMT_NONE};
53 |
54 | if (codec_id == AV_CODEC_ID_MJPEG) {
55 | return mjpeg_formats;
56 | } else if (codec_id == AV_CODEC_ID_LJPEG) {
57 | return ljpeg_formats;
58 | } else {
59 | return default_formats;
60 | }
61 | }
62 |
63 | enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodecContext *enc_ctx, AVCodec *codec, enum AVPixelFormat target)
64 | {
65 | if (codec && codec->pix_fmts) {
66 | const enum AVPixelFormat *p = codec->pix_fmts;
67 | const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(target);
68 | int has_alpha = desc ? desc->nb_components % 2 == 0 : 0;
69 | enum AVPixelFormat best= AV_PIX_FMT_NONE;
70 |
71 | if (enc_ctx->strict_std_compliance <= FF_COMPLIANCE_UNOFFICIAL) {
72 | p = get_compliance_unofficial_pix_fmts(enc_ctx->codec_id, p);
73 | }
74 | for (; *p != AV_PIX_FMT_NONE; p++) {
75 | best= avcodec_find_best_pix_fmt_of_2(best, *p, target, has_alpha, NULL);
76 | if (*p == target)
77 | break;
78 | }
79 | if (*p == AV_PIX_FMT_NONE) {
80 | if (target != AV_PIX_FMT_NONE)
81 | av_log(NULL, AV_LOG_WARNING,
82 | "Incompatible pixel format '%s' for codec '%s', auto-selecting format '%s'\n",
83 | av_get_pix_fmt_name(target),
84 | codec->name,
85 | av_get_pix_fmt_name(best));
86 | return best;
87 | }
88 | }
89 | return target;
90 | }
91 |
92 | void choose_sample_fmt(AVStream *st, AVCodec *codec)
93 | {
94 | if (codec && codec->sample_fmts) {
95 | const enum AVSampleFormat *p = codec->sample_fmts;
96 | for (; *p != -1; p++) {
97 | if (*p == st->codec->sample_fmt)
98 | break;
99 | }
100 | if (*p == -1) {
101 | if((codec->capabilities & AV_CODEC_CAP_LOSSLESS) && av_get_sample_fmt_name(st->codec->sample_fmt) > av_get_sample_fmt_name(codec->sample_fmts[0]))
102 | av_log(NULL, AV_LOG_ERROR, "Conversion will not be lossless.\n");
103 | if(av_get_sample_fmt_name(st->codec->sample_fmt))
104 | av_log(NULL, AV_LOG_WARNING,
105 | "Incompatible sample format '%s' for codec '%s', auto-selecting format '%s'\n",
106 | av_get_sample_fmt_name(st->codec->sample_fmt),
107 | codec->name,
108 | av_get_sample_fmt_name(codec->sample_fmts[0]));
109 | st->codec->sample_fmt = codec->sample_fmts[0];
110 | }
111 | }
112 | }
113 |
114 | static char *choose_pix_fmts(OutputStream *ost)
115 | {
116 | AVDictionaryEntry *strict_dict = av_dict_get(ost->encoder_opts, "strict", NULL, 0);
117 | if (strict_dict)
118 | // used by choose_pixel_fmt() and below
119 | av_opt_set(ost->enc_ctx, "strict", strict_dict->value, 0);
120 |
121 | if (ost->keep_pix_fmt) {
122 | if (ost->filter)
123 | avfilter_graph_set_auto_convert(ost->filter->graph->graph,
124 | AVFILTER_AUTO_CONVERT_NONE);
125 | if (ost->enc_ctx->pix_fmt == AV_PIX_FMT_NONE)
126 | return NULL;
127 | return av_strdup(av_get_pix_fmt_name(ost->enc_ctx->pix_fmt));
128 | }
129 | if (ost->enc_ctx->pix_fmt != AV_PIX_FMT_NONE) {
130 | return av_strdup(av_get_pix_fmt_name(choose_pixel_fmt(ost->st, ost->enc_ctx, ost->enc, ost->enc_ctx->pix_fmt)));
131 | } else if (ost->enc && ost->enc->pix_fmts) {
132 | const enum AVPixelFormat *p;
133 | AVIOContext *s = NULL;
134 | uint8_t *ret;
135 | int len;
136 |
137 | if (avio_open_dyn_buf(&s) < 0)
138 | exit_program(1);
139 |
140 | p = ost->enc->pix_fmts;
141 | if (ost->enc_ctx->strict_std_compliance <= FF_COMPLIANCE_UNOFFICIAL) {
142 | p = get_compliance_unofficial_pix_fmts(ost->enc_ctx->codec_id, p);
143 | }
144 |
145 | for (; *p != AV_PIX_FMT_NONE; p++) {
146 | const char *name = av_get_pix_fmt_name(*p);
147 | avio_printf(s, "%s|", name);
148 | }
149 | len = avio_close_dyn_buf(s, &ret);
150 | ret[len - 1] = 0;
151 | return ret;
152 | } else
153 | return NULL;
154 | }
155 |
156 | /* Define a function for building a string containing a list of
157 | * allowed formats. */
158 | #define DEF_CHOOSE_FORMAT(type, var, supported_list, none, get_name) \
159 | static char *choose_ ## var ## s(OutputStream *ost) \
160 | { \
161 | if (ost->enc_ctx->var != none) { \
162 | get_name(ost->enc_ctx->var); \
163 | return av_strdup(name); \
164 | } else if (ost->enc && ost->enc->supported_list) { \
165 | const type *p; \
166 | AVIOContext *s = NULL; \
167 | uint8_t *ret; \
168 | int len; \
169 | \
170 | if (avio_open_dyn_buf(&s) < 0) \
171 | exit_program(1); \
172 | \
173 | for (p = ost->enc->supported_list; *p != none; p++) { \
174 | get_name(*p); \
175 | avio_printf(s, "%s|", name); \
176 | } \
177 | len = avio_close_dyn_buf(s, &ret); \
178 | ret[len - 1] = 0; \
179 | return ret; \
180 | } else \
181 | return NULL; \
182 | }
183 |
184 | // DEF_CHOOSE_FORMAT(enum AVPixelFormat, pix_fmt, pix_fmts, AV_PIX_FMT_NONE,
185 | // GET_PIX_FMT_NAME)
186 |
187 | DEF_CHOOSE_FORMAT(enum AVSampleFormat, sample_fmt, sample_fmts,
188 | AV_SAMPLE_FMT_NONE, GET_SAMPLE_FMT_NAME)
189 |
190 | DEF_CHOOSE_FORMAT(int, sample_rate, supported_samplerates, 0,
191 | GET_SAMPLE_RATE_NAME)
192 |
193 | DEF_CHOOSE_FORMAT(uint64_t, channel_layout, channel_layouts, 0,
194 | GET_CH_LAYOUT_NAME)
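/* The three instantiations above generate choose_sample_fmts(), choose_sample_rates() and
 * choose_channel_layouts(): each returns the name of the value explicitly set on the encoder
 * context, or a '|'-separated list of everything the encoder supports (NULL if unconstrained).
 * configure_output_audio_filter() passes these strings to the aformat filter below. */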
195 |
196 | FilterGraph *init_simple_filtergraph(InputStream *ist, OutputStream *ost)
197 | {
198 | FilterGraph *fg = av_mallocz(sizeof(*fg));
199 |
200 | if (!fg)
201 | exit_program(1);
202 | fg->index = nb_filtergraphs;
203 |
204 | GROW_ARRAY(fg->outputs, fg->nb_outputs);
205 | if (!(fg->outputs[0] = av_mallocz(sizeof(*fg->outputs[0]))))
206 | exit_program(1);
207 | fg->outputs[0]->ost = ost;
208 | fg->outputs[0]->graph = fg;
209 |
210 | ost->filter = fg->outputs[0];
211 |
212 | GROW_ARRAY(fg->inputs, fg->nb_inputs);
213 | if (!(fg->inputs[0] = av_mallocz(sizeof(*fg->inputs[0]))))
214 | exit_program(1);
215 | fg->inputs[0]->ist = ist;
216 | fg->inputs[0]->graph = fg;
217 |
218 | GROW_ARRAY(ist->filters, ist->nb_filters);
219 | ist->filters[ist->nb_filters - 1] = fg->inputs[0];
220 |
221 | GROW_ARRAY(filtergraphs, nb_filtergraphs);
222 | filtergraphs[nb_filtergraphs - 1] = fg;
223 |
224 | return fg;
225 | }
226 |
227 | static void init_input_filter(FilterGraph *fg, AVFilterInOut *in)
228 | {
229 | InputStream *ist = NULL;
230 | enum AVMediaType type = avfilter_pad_get_type(in->filter_ctx->input_pads, in->pad_idx);
231 | int i;
232 |
233 | // TODO: support other filter types
234 | if (type != AVMEDIA_TYPE_VIDEO && type != AVMEDIA_TYPE_AUDIO) {
235 | av_log(NULL, AV_LOG_FATAL, "Only video and audio filters supported "
236 | "currently.\n");
237 | exit_program(1);
238 | }
239 |
240 | if (in->name) {
241 | AVFormatContext *s;
242 | AVStream *st = NULL;
243 | char *p;
244 | int file_idx = strtol(in->name, &p, 0);
245 |
246 | if (file_idx < 0 || file_idx >= nb_input_files) {
247 | av_log(NULL, AV_LOG_FATAL, "Invalid file index %d in filtergraph description %s.\n",
248 | file_idx, fg->graph_desc);
249 | exit_program(1);
250 | }
251 | s = input_files[file_idx]->ctx;
252 |
253 | for (i = 0; i < s->nb_streams; i++) {
254 | enum AVMediaType stream_type = s->streams[i]->codec->codec_type;
255 | if (stream_type != type &&
256 | !(stream_type == AVMEDIA_TYPE_SUBTITLE &&
257 | type == AVMEDIA_TYPE_VIDEO /* sub2video hack */))
258 | continue;
259 | if (check_stream_specifier(s, s->streams[i], *p == ':' ? p + 1 : p) == 1) {
260 | st = s->streams[i];
261 | break;
262 | }
263 | }
264 | if (!st) {
265 | av_log(NULL, AV_LOG_FATAL, "Stream specifier '%s' in filtergraph description %s "
266 | "matches no streams.\n", p, fg->graph_desc);
267 | exit_program(1);
268 | }
269 | ist = input_streams[input_files[file_idx]->ist_index + st->index];
270 | } else {
271 | /* find the first unused stream of corresponding type */
272 | for (i = 0; i < nb_input_streams; i++) {
273 | ist = input_streams[i];
274 | if (ist->dec_ctx->codec_type == type && ist->discard)
275 | break;
276 | }
277 | if (i == nb_input_streams) {
278 | av_log(NULL, AV_LOG_FATAL, "Cannot find a matching stream for "
279 | "unlabeled input pad %d on filter %s\n", in->pad_idx,
280 | in->filter_ctx->name);
281 | exit_program(1);
282 | }
283 | }
284 | av_assert0(ist);
285 |
286 | ist->discard = 0;
287 | ist->decoding_needed |= DECODING_FOR_FILTER;
288 | ist->st->discard = AVDISCARD_NONE;
289 |
290 | GROW_ARRAY(fg->inputs, fg->nb_inputs);
291 | if (!(fg->inputs[fg->nb_inputs - 1] = av_mallocz(sizeof(*fg->inputs[0]))))
292 | exit_program(1);
293 | fg->inputs[fg->nb_inputs - 1]->ist = ist;
294 | fg->inputs[fg->nb_inputs - 1]->graph = fg;
295 |
296 | GROW_ARRAY(ist->filters, ist->nb_filters);
297 | ist->filters[ist->nb_filters - 1] = fg->inputs[fg->nb_inputs - 1];
298 | }
299 |
300 | int init_complex_filtergraph(FilterGraph *fg)
301 | {
302 | AVFilterInOut *inputs, *outputs, *cur;
303 | AVFilterGraph *graph;
304 | int ret = 0;
305 |
306 | /* this graph is only used for determining the kinds of inputs
307 | * and outputs we have, and is discarded on exit from this function */
308 | graph = avfilter_graph_alloc();
309 | if (!graph)
310 | return AVERROR(ENOMEM);
311 |
312 | ret = avfilter_graph_parse2(graph, fg->graph_desc, &inputs, &outputs);
313 | if (ret < 0)
314 | goto fail;
315 |
316 | for (cur = inputs; cur; cur = cur->next)
317 | init_input_filter(fg, cur);
318 |
319 | for (cur = outputs; cur;) {
320 | GROW_ARRAY(fg->outputs, fg->nb_outputs);
321 | fg->outputs[fg->nb_outputs - 1] = av_mallocz(sizeof(*fg->outputs[0]));
322 | if (!fg->outputs[fg->nb_outputs - 1])
323 | exit_program(1);
324 |
325 | fg->outputs[fg->nb_outputs - 1]->graph = fg;
326 | fg->outputs[fg->nb_outputs - 1]->out_tmp = cur;
327 | fg->outputs[fg->nb_outputs - 1]->type = avfilter_pad_get_type(cur->filter_ctx->output_pads,
328 | cur->pad_idx);
329 | cur = cur->next;
330 | fg->outputs[fg->nb_outputs - 1]->out_tmp->next = NULL;
331 | }
332 |
333 | fail:
334 | avfilter_inout_free(&inputs);
335 | avfilter_graph_free(&graph);
336 | return ret;
337 | }
338 |
339 | static int insert_trim(int64_t start_time, int64_t duration,
340 | AVFilterContext **last_filter, int *pad_idx,
341 | const char *filter_name)
342 | {
343 | AVFilterGraph *graph = (*last_filter)->graph;
344 | AVFilterContext *ctx;
345 | const AVFilter *trim;
346 | enum AVMediaType type = avfilter_pad_get_type((*last_filter)->output_pads, *pad_idx);
347 | const char *name = (type == AVMEDIA_TYPE_VIDEO) ? "trim" : "atrim";
348 | int ret = 0;
349 |
350 | if (duration == INT64_MAX && start_time == AV_NOPTS_VALUE)
351 | return 0;
352 |
353 | trim = avfilter_get_by_name(name);
354 | if (!trim) {
355 | av_log(NULL, AV_LOG_ERROR, "%s filter not present, cannot limit "
356 | "recording time.\n", name);
357 | return AVERROR_FILTER_NOT_FOUND;
358 | }
359 |
360 | ctx = avfilter_graph_alloc_filter(graph, trim, filter_name);
361 | if (!ctx)
362 | return AVERROR(ENOMEM);
363 |
364 | if (duration != INT64_MAX) {
365 | ret = av_opt_set_int(ctx, "durationi", duration,
366 | AV_OPT_SEARCH_CHILDREN);
367 | }
368 | if (ret >= 0 && start_time != AV_NOPTS_VALUE) {
369 | ret = av_opt_set_int(ctx, "starti", start_time,
370 | AV_OPT_SEARCH_CHILDREN);
371 | }
372 | if (ret < 0) {
373 | av_log(ctx, AV_LOG_ERROR, "Error configuring the %s filter", name);
374 | return ret;
375 | }
376 |
377 | ret = avfilter_init_str(ctx, NULL);
378 | if (ret < 0)
379 | return ret;
380 |
381 | ret = avfilter_link(*last_filter, *pad_idx, ctx, 0);
382 | if (ret < 0)
383 | return ret;
384 |
385 | *last_filter = ctx;
386 | *pad_idx = 0;
387 | return 0;
388 | }
389 |
390 | static int insert_filter(AVFilterContext **last_filter, int *pad_idx,
391 | const char *filter_name, const char *args)
392 | {
393 | AVFilterGraph *graph = (*last_filter)->graph;
394 | AVFilterContext *ctx;
395 | int ret;
396 |
397 | ret = avfilter_graph_create_filter(&ctx,
398 | avfilter_get_by_name(filter_name),
399 | filter_name, args, NULL, graph);
400 | if (ret < 0)
401 | return ret;
402 |
403 | ret = avfilter_link(*last_filter, *pad_idx, ctx, 0);
404 | if (ret < 0)
405 | return ret;
406 |
407 | *last_filter = ctx;
408 | *pad_idx = 0;
409 | return 0;
410 | }
411 |
412 | static int configure_output_video_filter(FilterGraph *fg, OutputFilter *ofilter, AVFilterInOut *out)
413 | {
414 | char *pix_fmts;
415 | OutputStream *ost = ofilter->ost;
416 | OutputFile *of = output_files[ost->file_index];
417 | AVCodecContext *codec = ost->enc_ctx;
418 | AVFilterContext *last_filter = out->filter_ctx;
419 | int pad_idx = out->pad_idx;
420 | int ret;
421 | char name[255];
422 |
423 | snprintf(name, sizeof(name), "output stream %d:%d", ost->file_index, ost->index);
424 | ret = avfilter_graph_create_filter(&ofilter->filter,
425 | avfilter_get_by_name("buffersink"),
426 | name, NULL, NULL, fg->graph);
427 |
428 | if (ret < 0)
429 | return ret;
430 |
431 | if (codec->width || codec->height) {
432 | char args[255];
433 | AVFilterContext *filter;
434 | AVDictionaryEntry *e = NULL;
435 |
436 | snprintf(args, sizeof(args), "%d:%d",
437 | codec->width,
438 | codec->height);
439 |
440 | while ((e = av_dict_get(ost->sws_dict, "", e,
441 | AV_DICT_IGNORE_SUFFIX))) {
442 | av_strlcatf(args, sizeof(args), ":%s=%s", e->key, e->value);
443 | }
444 |
445 | snprintf(name, sizeof(name), "scaler for output stream %d:%d",
446 | ost->file_index, ost->index);
447 | if ((ret = avfilter_graph_create_filter(&filter, avfilter_get_by_name("scale"),
448 | name, args, NULL, fg->graph)) < 0)
449 | return ret;
450 | if ((ret = avfilter_link(last_filter, pad_idx, filter, 0)) < 0)
451 | return ret;
452 |
453 | last_filter = filter;
454 | pad_idx = 0;
455 | }
456 |
457 | if ((pix_fmts = choose_pix_fmts(ost))) {
458 | AVFilterContext *filter;
459 | snprintf(name, sizeof(name), "pixel format for output stream %d:%d",
460 | ost->file_index, ost->index);
461 | ret = avfilter_graph_create_filter(&filter,
462 | avfilter_get_by_name("format"),
463 | "format", pix_fmts, NULL, fg->graph);
464 | av_freep(&pix_fmts);
465 | if (ret < 0)
466 | return ret;
467 | if ((ret = avfilter_link(last_filter, pad_idx, filter, 0)) < 0)
468 | return ret;
469 |
470 | last_filter = filter;
471 | pad_idx = 0;
472 | }
473 |
474 | if (ost->frame_rate.num && 0) {
475 | AVFilterContext *fps;
476 | char args[255];
477 |
478 | snprintf(args, sizeof(args), "fps=%d/%d", ost->frame_rate.num,
479 | ost->frame_rate.den);
480 | snprintf(name, sizeof(name), "fps for output stream %d:%d",
481 | ost->file_index, ost->index);
482 | ret = avfilter_graph_create_filter(&fps, avfilter_get_by_name("fps"),
483 | name, args, NULL, fg->graph);
484 | if (ret < 0)
485 | return ret;
486 |
487 | ret = avfilter_link(last_filter, pad_idx, fps, 0);
488 | if (ret < 0)
489 | return ret;
490 | last_filter = fps;
491 | pad_idx = 0;
492 | }
493 |
494 | snprintf(name, sizeof(name), "trim for output stream %d:%d",
495 | ost->file_index, ost->index);
496 | ret = insert_trim(of->start_time, of->recording_time,
497 | &last_filter, &pad_idx, name);
498 | if (ret < 0)
499 | return ret;
500 |
501 |
502 | if ((ret = avfilter_link(last_filter, pad_idx, ofilter->filter, 0)) < 0)
503 | return ret;
504 |
505 | return 0;
506 | }
507 |
508 | static int configure_output_audio_filter(FilterGraph *fg, OutputFilter *ofilter, AVFilterInOut *out)
509 | {
510 | OutputStream *ost = ofilter->ost;
511 | OutputFile *of = output_files[ost->file_index];
512 | AVCodecContext *codec = ost->enc_ctx;
513 | AVFilterContext *last_filter = out->filter_ctx;
514 | int pad_idx = out->pad_idx;
515 | char *sample_fmts, *sample_rates, *channel_layouts;
516 | char name[255];
517 | int ret;
518 |
519 | snprintf(name, sizeof(name), "output stream %d:%d", ost->file_index, ost->index);
520 | ret = avfilter_graph_create_filter(&ofilter->filter,
521 | avfilter_get_by_name("abuffersink"),
522 | name, NULL, NULL, fg->graph);
523 | if (ret < 0)
524 | return ret;
525 | if ((ret = av_opt_set_int(ofilter->filter, "all_channel_counts", 1, AV_OPT_SEARCH_CHILDREN)) < 0)
526 | return ret;
527 |
528 | #define AUTO_INSERT_FILTER(opt_name, filter_name, arg) do { \
529 | AVFilterContext *filt_ctx; \
530 | \
531 | av_log(NULL, AV_LOG_INFO, opt_name " is forwarded to lavfi " \
532 | "similarly to -af " filter_name "=%s.\n", arg); \
533 | \
534 | ret = avfilter_graph_create_filter(&filt_ctx, \
535 | avfilter_get_by_name(filter_name), \
536 | filter_name, arg, NULL, fg->graph); \
537 | if (ret < 0) \
538 | return ret; \
539 | \
540 | ret = avfilter_link(last_filter, pad_idx, filt_ctx, 0); \
541 | if (ret < 0) \
542 | return ret; \
543 | \
544 | last_filter = filt_ctx; \
545 | pad_idx = 0; \
546 | } while (0)
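/* AUTO_INSERT_FILTER splices an automatically generated filter (pan, volume, apad, ...) into the
 * output chain: it instantiates the named filter with the given argument string, links it after
 * last_filter, and makes it the new last_filter so subsequent code keeps appending to the chain. */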
547 | if (ost->audio_channels_mapped) {
548 | int i;
549 | AVBPrint pan_buf;
550 | av_bprint_init(&pan_buf, 256, 8192);
551 | av_bprintf(&pan_buf, "0x%"PRIx64,
552 | av_get_default_channel_layout(ost->audio_channels_mapped));
553 | for (i = 0; i < ost->audio_channels_mapped; i++)
554 | if (ost->audio_channels_map[i] != -1)
555 | av_bprintf(&pan_buf, "|c%d=c%d", i, ost->audio_channels_map[i]);
556 |
557 | AUTO_INSERT_FILTER("-map_channel", "pan", pan_buf.str);
558 | av_bprint_finalize(&pan_buf, NULL);
559 | }
560 |
561 | if (codec->channels && !codec->channel_layout)
562 | codec->channel_layout = av_get_default_channel_layout(codec->channels);
563 |
564 | sample_fmts = choose_sample_fmts(ost);
565 | sample_rates = choose_sample_rates(ost);
566 | channel_layouts = choose_channel_layouts(ost);
567 | if (sample_fmts || sample_rates || channel_layouts) {
568 | AVFilterContext *format;
569 | char args[256];
570 | args[0] = 0;
571 |
572 | if (sample_fmts)
573 | av_strlcatf(args, sizeof(args), "sample_fmts=%s:",
574 | sample_fmts);
575 | if (sample_rates)
576 | av_strlcatf(args, sizeof(args), "sample_rates=%s:",
577 | sample_rates);
578 | if (channel_layouts)
579 | av_strlcatf(args, sizeof(args), "channel_layouts=%s:",
580 | channel_layouts);
581 |
582 | av_freep(&sample_fmts);
583 | av_freep(&sample_rates);
584 | av_freep(&channel_layouts);
585 |
586 | snprintf(name, sizeof(name), "audio format for output stream %d:%d",
587 | ost->file_index, ost->index);
588 | ret = avfilter_graph_create_filter(&format,
589 | avfilter_get_by_name("aformat"),
590 | name, args, NULL, fg->graph);
591 | if (ret < 0)
592 | return ret;
593 |
594 | ret = avfilter_link(last_filter, pad_idx, format, 0);
595 | if (ret < 0)
596 | return ret;
597 |
598 | last_filter = format;
599 | pad_idx = 0;
600 | }
601 |
602 | if (audio_volume != 256 && 0) {
603 | char args[256];
604 |
605 | snprintf(args, sizeof(args), "%f", audio_volume / 256.);
606 | AUTO_INSERT_FILTER("-vol", "volume", args);
607 | }
608 |
609 | if (ost->apad && of->shortest) {
610 | char args[256];
611 | int i;
612 |
613 | for (i=0; i<of->ctx->nb_streams; i++)
614 | if (of->ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
615 | break;
616 |
617 | if (i<of->ctx->nb_streams) {
618 | snprintf(args, sizeof(args), "%s", ost->apad);
619 | AUTO_INSERT_FILTER("-apad", "apad", args);
620 | }
621 | }
622 |
623 | snprintf(name, sizeof(name), "trim for output stream %d:%d",
624 | ost->file_index, ost->index);
625 | ret = insert_trim(of->start_time, of->recording_time,
626 | &last_filter, &pad_idx, name);
627 | if (ret < 0)
628 | return ret;
629 |
630 | if ((ret = avfilter_link(last_filter, pad_idx, ofilter->filter, 0)) < 0)
631 | return ret;
632 |
633 | return 0;
634 | }
635 |
636 | #define DESCRIBE_FILTER_LINK(f, inout, in) \
637 | { \
638 | AVFilterContext *ctx = inout->filter_ctx; \
639 | AVFilterPad *pads = in ? ctx->input_pads : ctx->output_pads; \
640 | int nb_pads = in ? ctx->nb_inputs : ctx->nb_outputs; \
641 | AVIOContext *pb; \
642 | \
643 | if (avio_open_dyn_buf(&pb) < 0) \
644 | exit_program(1); \
645 | \
646 | avio_printf(pb, "%s", ctx->filter->name); \
647 | if (nb_pads > 1) \
648 | avio_printf(pb, ":%s", avfilter_pad_get_name(pads, inout->pad_idx));\
649 | avio_w8(pb, 0); \
650 | avio_close_dyn_buf(pb, &f->name); \
651 | }
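/* DESCRIBE_FILTER_LINK fills f->name with a readable label for the connected pad (for example
 * "buffer" or "split:output0"), built in a dynamic AVIO buffer from the filter name and,
 * when the filter has several pads, the pad name. */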
652 |
653 | int configure_output_filter(FilterGraph *fg, OutputFilter *ofilter, AVFilterInOut *out)
654 | {
655 | av_freep(&ofilter->name);
656 | DESCRIBE_FILTER_LINK(ofilter, out, 0);
657 |
658 | if (!ofilter->ost) {
659 | av_log(NULL, AV_LOG_FATAL, "Filter %s has an unconnected output\n", ofilter->name);
660 | exit_program(1);
661 | }
662 |
663 | switch (avfilter_pad_get_type(out->filter_ctx->output_pads, out->pad_idx)) {
664 | case AVMEDIA_TYPE_VIDEO: return configure_output_video_filter(fg, ofilter, out);
665 | case AVMEDIA_TYPE_AUDIO: return configure_output_audio_filter(fg, ofilter, out);
666 | default: av_assert0(0);
667 | }
668 | }
669 |
670 | static int sub2video_prepare(InputStream *ist)
671 | {
672 | AVFormatContext *avf = input_files[ist->file_index]->ctx;
673 | int i, w, h;
674 |
675 | /* Compute the size of the canvas for the subtitles stream.
676 | If the subtitles codec has set a size, use it. Otherwise use the
677 | maximum dimensions of the video streams in the same file. */
678 | w = ist->dec_ctx->width;
679 | h = ist->dec_ctx->height;
680 | if (!(w && h)) {
681 | for (i = 0; i < avf->nb_streams; i++) {
682 | if (avf->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
683 | w = FFMAX(w, avf->streams[i]->codec->width);
684 | h = FFMAX(h, avf->streams[i]->codec->height);
685 | }
686 | }
687 | if (!(w && h)) {
688 | w = FFMAX(w, 720);
689 | h = FFMAX(h, 576);
690 | }
691 | av_log(avf, AV_LOG_INFO, "sub2video: using %dx%d canvas\n", w, h);
692 | }
693 | ist->sub2video.w = ist->resample_width = w;
694 | ist->sub2video.h = ist->resample_height = h;
695 |
696 | /* rectangles are AV_PIX_FMT_PAL8, but we have no guarantee that the
697 | palettes for all rectangles are identical or compatible */
698 | ist->resample_pix_fmt = ist->dec_ctx->pix_fmt = AV_PIX_FMT_RGB32;
699 |
700 | ist->sub2video.frame = av_frame_alloc();
701 | if (!ist->sub2video.frame)
702 | return AVERROR(ENOMEM);
703 | ist->sub2video.last_pts = INT64_MIN;
704 | return 0;
705 | }
706 |
707 | static int configure_input_video_filter(FilterGraph *fg, InputFilter *ifilter,
708 | AVFilterInOut *in)
709 | {
710 | AVFilterContext *last_filter;
711 | const AVFilter *buffer_filt = avfilter_get_by_name("buffer");
712 | InputStream *ist = ifilter->ist;
713 | InputFile *f = input_files[ist->file_index];
714 | AVRational tb = ist->framerate.num ? av_inv_q(ist->framerate) :
715 | ist->st->time_base;
716 | AVRational fr = ist->framerate;
717 | AVRational sar;
718 | AVBPrint args;
719 | char name[255];
720 | int ret, pad_idx = 0;
721 | int64_t tsoffset = 0;
722 |
723 | if (ist->dec_ctx->codec_type == AVMEDIA_TYPE_AUDIO) {
724 | av_log(NULL, AV_LOG_ERROR, "Cannot connect video filter to audio input\n");
725 | return AVERROR(EINVAL);
726 | }
727 |
728 | if (!fr.num)
729 | fr = av_guess_frame_rate(input_files[ist->file_index]->ctx, ist->st, NULL);
730 |
731 | if (ist->dec_ctx->codec_type == AVMEDIA_TYPE_SUBTITLE) {
732 | ret = sub2video_prepare(ist);
733 | if (ret < 0)
734 | return ret;
735 | }
736 |
737 | sar = ist->st->sample_aspect_ratio.num ?
738 | ist->st->sample_aspect_ratio :
739 | ist->dec_ctx->sample_aspect_ratio;
740 | if(!sar.den)
741 | sar = (AVRational){0,1};
742 | av_bprint_init(&args, 0, 1);
743 | av_bprintf(&args,
744 | "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:"
745 | "pixel_aspect=%d/%d:sws_param=flags=%d", ist->resample_width,
746 | ist->resample_height,
747 | ist->hwaccel_retrieve_data ? ist->hwaccel_retrieved_pix_fmt : ist->resample_pix_fmt,
748 | tb.num, tb.den, sar.num, sar.den,
749 | SWS_BILINEAR + ((ist->dec_ctx->flags&AV_CODEC_FLAG_BITEXACT) ? SWS_BITEXACT:0));
750 | if (fr.num && fr.den)
751 | av_bprintf(&args, ":frame_rate=%d/%d", fr.num, fr.den);
752 | snprintf(name, sizeof(name), "graph %d input from stream %d:%d", fg->index,
753 | ist->file_index, ist->st->index);
754 |
755 | if ((ret = avfilter_graph_create_filter(&ifilter->filter, buffer_filt, name,
756 | args.str, NULL, fg->graph)) < 0)
757 | return ret;
758 | last_filter = ifilter->filter;
759 |
760 | if (ist->autorotate) {
761 | double theta = get_rotation(ist->st);
762 |
763 | if (fabs(theta - 90) < 1.0) {
764 | ret = insert_filter(&last_filter, &pad_idx, "transpose", "clock");
765 | } else if (fabs(theta - 180) < 1.0) {
766 | ret = insert_filter(&last_filter, &pad_idx, "hflip", NULL);
767 | if (ret < 0)
768 | return ret;
769 | ret = insert_filter(&last_filter, &pad_idx, "vflip", NULL);
770 | } else if (fabs(theta - 270) < 1.0) {
771 | ret = insert_filter(&last_filter, &pad_idx, "transpose", "cclock");
772 | } else if (fabs(theta) > 1.0) {
773 | char rotate_buf[64];
774 | snprintf(rotate_buf, sizeof(rotate_buf), "%f*PI/180", theta);
775 | ret = insert_filter(&last_filter, &pad_idx, "rotate", rotate_buf);
776 | }
777 | if (ret < 0)
778 | return ret;
779 | }
780 |
781 | if (ist->framerate.num) {
782 | AVFilterContext *setpts;
783 |
784 | snprintf(name, sizeof(name), "force CFR for input from stream %d:%d",
785 | ist->file_index, ist->st->index);
786 | if ((ret = avfilter_graph_create_filter(&setpts,
787 | avfilter_get_by_name("setpts"),
788 | name, "N", NULL,
789 | fg->graph)) < 0)
790 | return ret;
791 |
792 | if ((ret = avfilter_link(last_filter, 0, setpts, 0)) < 0)
793 | return ret;
794 |
795 | last_filter = setpts;
796 | }
797 |
798 | if (do_deinterlace) {
799 | AVFilterContext *yadif;
800 |
801 | snprintf(name, sizeof(name), "deinterlace input from stream %d:%d",
802 | ist->file_index, ist->st->index);
803 | if ((ret = avfilter_graph_create_filter(&yadif,
804 | avfilter_get_by_name("yadif"),
805 | name, "", NULL,
806 | fg->graph)) < 0)
807 | return ret;
808 |
809 | if ((ret = avfilter_link(last_filter, 0, yadif, 0)) < 0)
810 | return ret;
811 |
812 | last_filter = yadif;
813 | }
814 |
815 | snprintf(name, sizeof(name), "trim for input stream %d:%d",
816 | ist->file_index, ist->st->index);
817 | if (copy_ts) {
818 | tsoffset = f->start_time == AV_NOPTS_VALUE ? 0 : f->start_time;
819 | if (!start_at_zero && f->ctx->start_time != AV_NOPTS_VALUE)
820 | tsoffset += f->ctx->start_time;
821 | }
822 | ret = insert_trim(((f->start_time == AV_NOPTS_VALUE) || !f->accurate_seek) ?
823 | AV_NOPTS_VALUE : tsoffset, f->recording_time,
824 | &last_filter, &pad_idx, name);
825 | if (ret < 0)
826 | return ret;
827 |
828 | if ((ret = avfilter_link(last_filter, 0, in->filter_ctx, in->pad_idx)) < 0)
829 | return ret;
830 | return 0;
831 | }
832 |
833 | static int configure_input_audio_filter(FilterGraph *fg, InputFilter *ifilter,
834 | AVFilterInOut *in)
835 | {
836 | AVFilterContext *last_filter;
837 | const AVFilter *abuffer_filt = avfilter_get_by_name("abuffer");
838 | InputStream *ist = ifilter->ist;
839 | InputFile *f = input_files[ist->file_index];
840 | AVBPrint args;
841 | char name[255];
842 | int ret, pad_idx = 0;
843 | int64_t tsoffset = 0;
844 |
845 | if (ist->dec_ctx->codec_type != AVMEDIA_TYPE_AUDIO) {
846 | av_log(NULL, AV_LOG_ERROR, "Cannot connect audio filter to non audio input\n");
847 | return AVERROR(EINVAL);
848 | }
849 |
850 | av_bprint_init(&args, 0, AV_BPRINT_SIZE_AUTOMATIC);
851 | av_bprintf(&args, "time_base=%d/%d:sample_rate=%d:sample_fmt=%s",
852 | 1, ist->dec_ctx->sample_rate,
853 | ist->dec_ctx->sample_rate,
854 | av_get_sample_fmt_name(ist->dec_ctx->sample_fmt));
855 | if (ist->dec_ctx->channel_layout)
856 | av_bprintf(&args, ":channel_layout=0x%"PRIx64,
857 | ist->dec_ctx->channel_layout);
858 | else
859 | av_bprintf(&args, ":channels=%d", ist->dec_ctx->channels);
860 | snprintf(name, sizeof(name), "graph %d input from stream %d:%d", fg->index,
861 | ist->file_index, ist->st->index);
862 |
863 | if ((ret = avfilter_graph_create_filter(&ifilter->filter, abuffer_filt,
864 | name, args.str, NULL,
865 | fg->graph)) < 0)
866 | return ret;
867 | last_filter = ifilter->filter;
868 |
869 | #define AUTO_INSERT_FILTER_INPUT(opt_name, filter_name, arg) do { \
870 | AVFilterContext *filt_ctx; \
871 | \
872 | av_log(NULL, AV_LOG_INFO, opt_name " is forwarded to lavfi " \
873 | "similarly to -af " filter_name "=%s.\n", arg); \
874 | \
875 | snprintf(name, sizeof(name), "graph %d %s for input stream %d:%d", \
876 | fg->index, filter_name, ist->file_index, ist->st->index); \
877 | ret = avfilter_graph_create_filter(&filt_ctx, \
878 | avfilter_get_by_name(filter_name), \
879 | name, arg, NULL, fg->graph); \
880 | if (ret < 0) \
881 | return ret; \
882 | \
883 | ret = avfilter_link(last_filter, 0, filt_ctx, 0); \
884 | if (ret < 0) \
885 | return ret; \
886 | \
887 | last_filter = filt_ctx; \
888 | } while (0)
889 |
890 | if (audio_sync_method > 0) {
891 | char args[256] = {0};
892 |
893 | av_strlcatf(args, sizeof(args), "async=%d", audio_sync_method);
894 | if (audio_drift_threshold != 0.1)
895 | av_strlcatf(args, sizeof(args), ":min_hard_comp=%f", audio_drift_threshold);
896 | if (!fg->reconfiguration)
897 | av_strlcatf(args, sizeof(args), ":first_pts=0");
898 | AUTO_INSERT_FILTER_INPUT("-async", "aresample", args);
899 | }
900 |
901 | // if (ost->audio_channels_mapped) {
902 | // int i;
903 | // AVBPrint pan_buf;
904 | // av_bprint_init(&pan_buf, 256, 8192);
905 | // av_bprintf(&pan_buf, "0x%"PRIx64,
906 | // av_get_default_channel_layout(ost->audio_channels_mapped));
907 | // for (i = 0; i < ost->audio_channels_mapped; i++)
908 | // if (ost->audio_channels_map[i] != -1)
909 | // av_bprintf(&pan_buf, ":c%d=c%d", i, ost->audio_channels_map[i]);
910 | // AUTO_INSERT_FILTER_INPUT("-map_channel", "pan", pan_buf.str);
911 | // av_bprint_finalize(&pan_buf, NULL);
912 | // }
913 |
914 | if (audio_volume != 256) {
915 | char args[256];
916 |
917 | av_log(NULL, AV_LOG_WARNING, "-vol has been deprecated. Use the volume "
918 | "audio filter instead.\n");
919 |
920 | snprintf(args, sizeof(args), "%f", audio_volume / 256.);
921 | AUTO_INSERT_FILTER_INPUT("-vol", "volume", args);
922 | }
923 |
924 | snprintf(name, sizeof(name), "trim for input stream %d:%d",
925 | ist->file_index, ist->st->index);
926 | if (copy_ts) {
927 | tsoffset = f->start_time == AV_NOPTS_VALUE ? 0 : f->start_time;
928 | if (!start_at_zero && f->ctx->start_time != AV_NOPTS_VALUE)
929 | tsoffset += f->ctx->start_time;
930 | }
931 | ret = insert_trim(((f->start_time == AV_NOPTS_VALUE) || !f->accurate_seek) ?
932 | AV_NOPTS_VALUE : tsoffset, f->recording_time,
933 | &last_filter, &pad_idx, name);
934 | if (ret < 0)
935 | return ret;
936 |
937 | if ((ret = avfilter_link(last_filter, 0, in->filter_ctx, in->pad_idx)) < 0)
938 | return ret;
939 |
940 | return 0;
941 | }
942 |
943 | static int configure_input_filter(FilterGraph *fg, InputFilter *ifilter,
944 | AVFilterInOut *in)
945 | {
946 | av_freep(&ifilter->name);
947 | DESCRIBE_FILTER_LINK(ifilter, in, 1);
948 |
949 | if (!ifilter->ist->dec) {
950 | av_log(NULL, AV_LOG_ERROR,
951 | "No decoder for stream #%d:%d, filtering impossible\n",
952 | ifilter->ist->file_index, ifilter->ist->st->index);
953 | return AVERROR_DECODER_NOT_FOUND;
954 | }
955 | switch (avfilter_pad_get_type(in->filter_ctx->input_pads, in->pad_idx)) {
956 | case AVMEDIA_TYPE_VIDEO: return configure_input_video_filter(fg, ifilter, in);
957 | case AVMEDIA_TYPE_AUDIO: return configure_input_audio_filter(fg, ifilter, in);
958 | default: av_assert0(0);
959 | }
960 | }
961 |
962 | int configure_filtergraph(FilterGraph *fg)
963 | {
964 | AVFilterInOut *inputs, *outputs, *cur;
965 | int ret, i, simple = !fg->graph_desc;
966 | const char *graph_desc = simple ? fg->outputs[0]->ost->avfilter :
967 | fg->graph_desc;
968 |
969 | avfilter_graph_free(&fg->graph);
970 | if (!(fg->graph = avfilter_graph_alloc()))
971 | return AVERROR(ENOMEM);
972 |
973 | if (simple) {
974 | OutputStream *ost = fg->outputs[0]->ost;
975 | char args[512];
976 | AVDictionaryEntry *e = NULL;
977 |
978 | args[0] = 0;
979 | while ((e = av_dict_get(ost->sws_dict, "", e,
980 | AV_DICT_IGNORE_SUFFIX))) {
981 | av_strlcatf(args, sizeof(args), "%s=%s:", e->key, e->value);
982 | }
983 | if (strlen(args))
984 | args[strlen(args)-1] = 0;
985 | fg->graph->scale_sws_opts = av_strdup(args);
986 |
987 | args[0] = 0;
988 | while ((e = av_dict_get(ost->swr_opts, "", e,
989 | AV_DICT_IGNORE_SUFFIX))) {
990 | av_strlcatf(args, sizeof(args), "%s=%s:", e->key, e->value);
991 | }
992 | if (strlen(args))
993 | args[strlen(args)-1] = 0;
994 | av_opt_set(fg->graph, "aresample_swr_opts", args, 0);
995 |
996 | args[0] = '\0';
997 | while ((e = av_dict_get(fg->outputs[0]->ost->resample_opts, "", e,
998 | AV_DICT_IGNORE_SUFFIX))) {
999 | av_strlcatf(args, sizeof(args), "%s=%s:", e->key, e->value);
1000 | }
1001 | if (strlen(args))
1002 | args[strlen(args) - 1] = '\0';
1003 | fg->graph->resample_lavr_opts = av_strdup(args);
1004 |
1005 | e = av_dict_get(ost->encoder_opts, "threads", NULL, 0);
1006 | if (e)
1007 | av_opt_set(fg->graph, "threads", e->value, 0);
1008 | }
1009 |
1010 | if ((ret = avfilter_graph_parse2(fg->graph, graph_desc, &inputs, &outputs)) < 0)
1011 | return ret;
1012 |
1013 | if (simple && (!inputs || inputs->next || !outputs || outputs->next)) {
1014 | const char *num_inputs;
1015 | const char *num_outputs;
1016 | if (!outputs) {
1017 | num_outputs = "0";
1018 | } else if (outputs->next) {
1019 | num_outputs = ">1";
1020 | } else {
1021 | num_outputs = "1";
1022 | }
1023 | if (!inputs) {
1024 | num_inputs = "0";
1025 | } else if (inputs->next) {
1026 | num_inputs = ">1";
1027 | } else {
1028 | num_inputs = "1";
1029 | }
1030 | av_log(NULL, AV_LOG_ERROR, "Simple filtergraph '%s' was expected "
1031 | "to have exactly 1 input and 1 output."
1032 | " However, it had %s input(s) and %s output(s)."
1033 | " Please adjust, or use a complex filtergraph (-filter_complex) instead.\n",
1034 | graph_desc, num_inputs, num_outputs);
1035 | return AVERROR(EINVAL);
1036 | }
1037 |
1038 | for (cur = inputs, i = 0; cur; cur = cur->next, i++)
1039 | if ((ret = configure_input_filter(fg, fg->inputs[i], cur)) < 0) {
1040 | avfilter_inout_free(&inputs);
1041 | avfilter_inout_free(&outputs);
1042 | return ret;
1043 | }
1044 | avfilter_inout_free(&inputs);
1045 |
1046 | for (cur = outputs, i = 0; cur; cur = cur->next, i++)
1047 | configure_output_filter(fg, fg->outputs[i], cur);
1048 | avfilter_inout_free(&outputs);
1049 |
1050 | if ((ret = avfilter_graph_config(fg->graph, NULL)) < 0)
1051 | return ret;
1052 |
1053 | fg->reconfiguration = 1;
1054 |
1055 | for (i = 0; i < fg->nb_outputs; i++) {
1056 | OutputStream *ost = fg->outputs[i]->ost;
1057 | if (!ost->enc) {
1058 | /* identical to the same check in ffmpeg.c, needed because
1059 | complex filter graphs are initialized earlier */
1060 | av_log(NULL, AV_LOG_ERROR, "Encoder (codec %s) not found for output stream #%d:%d\n",
1061 | avcodec_get_name(ost->st->codec->codec_id), ost->file_index, ost->index);
1062 | return AVERROR(EINVAL);
1063 | }
1064 | if (ost->enc->type == AVMEDIA_TYPE_AUDIO &&
1065 | !(ost->enc->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE))
1066 | av_buffersink_set_frame_size(ost->filter->filter,
1067 | ost->enc_ctx->frame_size);
1068 | }
1069 |
1070 | return 0;
1071 | }
1072 |
1073 | int ist_in_filtergraph(FilterGraph *fg, InputStream *ist)
1074 | {
1075 | int i;
1076 | for (i = 0; i < fg->nb_inputs; i++)
1077 | if (fg->inputs[i]->ist == ist)
1078 | return 1;
1079 | return 0;
1080 | }
1081 |
1082 |
--------------------------------------------------------------------------------
/app/src/main/jni/libffmpeg.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/jni/libffmpeg.so
--------------------------------------------------------------------------------
/app/src/main/jni/logjam.h:
--------------------------------------------------------------------------------
1 | #ifndef LOGJAM_H
2 | #define LOGJAM_H
3 |
4 | #include <android/log.h>
5 |
6 | #define LOGTAG "FFmpegLog"
7 |
8 | #define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, LOGTAG, __VA_ARGS__)
9 | #define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG , LOGTAG, __VA_ARGS__)
10 | #define LOGI(...) __android_log_print(ANDROID_LOG_INFO , LOGTAG, __VA_ARGS__)
11 | #define LOGW(...) __android_log_print(ANDROID_LOG_WARN , LOGTAG, __VA_ARGS__)
12 | #define LOGE(...) __android_log_print(ANDROID_LOG_ERROR , LOGTAG, __VA_ARGS__)
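/* Variadic wrappers around __android_log_print(); e.g. LOGE("ffmpeg exited with %d", ret)
 * shows up in logcat under the "FFmpegLog" tag (the `ret` here is only an illustration). */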
13 |
14 | #endif
--------------------------------------------------------------------------------
/app/src/main/res/drawable/progress_color_horizontal.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ripple_circle.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/drawable/video_seekbar.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_make_video.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_music.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_permissions.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_select_record.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/layout/adapter_music.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/bt_start.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-xxhdpi/bt_start.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/icon_back_white.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-xxhdpi/icon_back_white.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/icon_fanzhuan.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-xxhdpi/icon_fanzhuan.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/icon_video_ing.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-xxhdpi/icon_video_ing.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/kaibo_icon_huakuai.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-xxhdpi/kaibo_icon_huakuai.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/live_close_icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-xxhdpi/live_close_icon.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangyxgit/FFmpegVideo/62260bee5a3e9cf24c2415bd03aa612f0fa96294/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 |     <color name="colorPrimary">#3F51B5</color>
4 |     <color name="colorPrimaryDark">#303F9F</color>
5 |     <color name="colorAccent">#FF4081</color>
6 | </resources>
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | FFmpegVideo
3 | 知道了
4 | 确认
5 | 取消
6 | 退出
7 | 设置
8 | 帮助
9 |
10 | 系统繁忙,请稍后尝试。
11 | 请打开您的网络。
12 | 当前应用缺少必要权限。\n\n请点击\"设置\"-\"权限\"-打开所需权限。\n\n最后点击两次后退按钮,即可返回。
13 |
14 |
15 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 |
3 | buildscript {
4 | repositories {
5 | jcenter()
6 | google()
7 | }
8 | dependencies {
9 | classpath 'com.android.tools.build:gradle:3.0.1'
10 |
11 | // NOTE: Do not place your application dependencies here; they belong
12 | // in the individual module build.gradle files
13 | }
14 | }
15 |
16 | allprojects {
17 | repositories {
18 | jcenter()
19 | google()
20 | }
21 | }
22 |
23 | task clean(type: Delete) {
24 | delete rootProject.buildDir
25 | }
26 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 |
3 | # IDE (e.g. Android Studio) users:
4 | # Gradle settings configured through the IDE *will override*
5 | # any settings specified in this file.
6 |
7 | # For more details on how to configure your build environment visit
8 | # http://www.gradle.org/docs/current/userguide/build_environment.html
9 |
10 | # Specifies the JVM arguments used for the daemon process.
11 | # The setting is particularly useful for tweaking memory settings.
12 | org.gradle.jvmargs=-Xmx1536m
13 |
14 | # When configured, Gradle will run in incubating parallel mode.
15 | # This option should only be used with decoupled projects. More details, visit
16 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
17 | # org.gradle.parallel=true
18 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # Attempt to set APP_HOME
46 | # Resolve links: $0 may be a link
47 | PRG="$0"
48 | # Need this for relative symlinks.
49 | while [ -h "$PRG" ] ; do
50 | ls=`ls -ld "$PRG"`
51 | link=`expr "$ls" : '.*-> \(.*\)$'`
52 | if expr "$link" : '/.*' > /dev/null; then
53 | PRG="$link"
54 | else
55 | PRG=`dirname "$PRG"`"/$link"
56 | fi
57 | done
58 | SAVED="`pwd`"
59 | cd "`dirname \"$PRG\"`/" >/dev/null
60 | APP_HOME="`pwd -P`"
61 | cd "$SAVED" >/dev/null
62 |
63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
64 |
65 | # Determine the Java command to use to start the JVM.
66 | if [ -n "$JAVA_HOME" ] ; then
67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
68 | # IBM's JDK on AIX uses strange locations for the executables
69 | JAVACMD="$JAVA_HOME/jre/sh/java"
70 | else
71 | JAVACMD="$JAVA_HOME/bin/java"
72 | fi
73 | if [ ! -x "$JAVACMD" ] ; then
74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
75 |
76 | Please set the JAVA_HOME variable in your environment to match the
77 | location of your Java installation."
78 | fi
79 | else
80 | JAVACMD="java"
81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
82 |
83 | Please set the JAVA_HOME variable in your environment to match the
84 | location of your Java installation."
85 | fi
86 |
87 | # Increase the maximum file descriptors if we can.
88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
89 | MAX_FD_LIMIT=`ulimit -H -n`
90 | if [ $? -eq 0 ] ; then
91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
92 | MAX_FD="$MAX_FD_LIMIT"
93 | fi
94 | ulimit -n $MAX_FD
95 | if [ $? -ne 0 ] ; then
96 | warn "Could not set maximum file descriptor limit: $MAX_FD"
97 | fi
98 | else
99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
100 | fi
101 | fi
102 |
103 | # For Darwin, add options to specify how the application appears in the dock
104 | if $darwin; then
105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
106 | fi
107 |
108 | # For Cygwin, switch paths to Windows format before running java
109 | if $cygwin ; then
110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
112 | JAVACMD=`cygpath --unix "$JAVACMD"`
113 |
114 | # We build the pattern for arguments to be converted via cygpath
115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
116 | SEP=""
117 | for dir in $ROOTDIRSRAW ; do
118 | ROOTDIRS="$ROOTDIRS$SEP$dir"
119 | SEP="|"
120 | done
121 | OURCYGPATTERN="(^($ROOTDIRS))"
122 | # Add a user-defined pattern to the cygpath arguments
123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
125 | fi
126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
127 | i=0
128 | for arg in "$@" ; do
129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
131 |
132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
134 | else
135 | eval `echo args$i`="\"$arg\""
136 | fi
137 | i=$((i+1))
138 | done
139 | case $i in
140 | (0) set -- ;;
141 | (1) set -- "$args0" ;;
142 | (2) set -- "$args0" "$args1" ;;
143 | (3) set -- "$args0" "$args1" "$args2" ;;
144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
150 | esac
151 | fi
152 |
153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
154 | function splitJvmOpts() {
155 | JVM_OPTS=("$@")
156 | }
157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
159 |
160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
161 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app'
2 |
--------------------------------------------------------------------------------