├── .gitignore
├── LICENSE
├── README.md
├── pom.xml
└── src
├── main
└── java
│ └── com
│ └── github
│ └── manevolent
│ └── ffmpeg4j
│ ├── AudioFormat.java
│ ├── AudioFrame.java
│ ├── FFmpeg.java
│ ├── FFmpegError.java
│ ├── FFmpegException.java
│ ├── FFmpegIO.java
│ ├── FFmpegInput.java
│ ├── FFmpegOutput.java
│ ├── FFmpegStreamContext.java
│ ├── Logging.java
│ ├── MediaFrame.java
│ ├── MediaStream.java
│ ├── MediaType.java
│ ├── VideoFormat.java
│ ├── VideoFrame.java
│ ├── filter
│ ├── MediaFilter.java
│ ├── MediaFilterChain.java
│ ├── audio
│ │ ├── AudioFilter.java
│ │ ├── AudioFilterNone.java
│ │ └── FFmpegAudioResampleFilter.java
│ └── video
│ │ ├── FFmpegVideoRescaleFilter.java
│ │ ├── VideoFilter.java
│ │ ├── VideoFilterChain.java
│ │ └── VideoFilterNone.java
│ ├── math
│ └── Rational.java
│ ├── output
│ ├── AudioTargetSubstream.java
│ ├── FFmpegAudioTargetSubstream.java
│ ├── FFmpegEncoderContext.java
│ ├── FFmpegVideoTargetSubstream.java
│ ├── MediaTargetSubstream.java
│ └── VideoTargetSubstream.java
│ ├── source
│ ├── AudioSourceSubstream.java
│ ├── FFmpegAudioSourceSubstream.java
│ ├── FFmpegDecoderContext.java
│ ├── FFmpegVideoSourceSubstream.java
│ ├── MediaSourceSubstream.java
│ └── VideoSourceSubstream.java
│ ├── stream
│ ├── FFmpegFormatContext.java
│ ├── Stream.java
│ ├── event
│ │ ├── EventChannel.java
│ │ ├── EventException.java
│ │ └── EventListener.java
│ ├── output
│ │ ├── FFmpegTargetStream.java
│ │ └── TargetStream.java
│ └── source
│ │ ├── FFmpegSourceStream.java
│ │ └── SourceStream.java
│ └── transcoder
│ └── Transcoder.java
└── test
├── java
├── FFmpegInputTest.java
├── FFmpegTest.java
└── FFmpegTranscodeTest.java
└── resources
├── example.ogg
└── sample-mp4-file-small.mp4
/.gitignore:
--------------------------------------------------------------------------------
1 | syntax: regexp
2 | *.class
3 | *.classpath
4 | .idea/
5 | runtime/
6 | out/
7 | *.eml
8 | *.iml
9 | *.userlibraries
10 | *.log
11 | *.sql
12 | *.sqlite
13 | .DS_Store
14 | .DS_Store?
15 | ._*
16 | .Spotlight-V100
17 | .Trashes
18 | ehthumbs.db
19 | Thumbs.db
20 | target/
21 | pom.xml.tag
22 | pom.xml.releaseBackup
23 | pom.xml.versionsBackup
24 | pom.xml.next
25 | release.properties
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ffmpeg4j
2 |
3 | FFmpeg4j is a Java library that wraps the functionality of the popular open-source multimedia library FFmpeg (https://www.ffmpeg.org/), whose JNI bindings are excellently exposed through JavaCPP (https://github.com/bytedeco/javacpp). This preserves the cross-platform benefits of the JRE, while still delivering the full features and performance benefits of the FFmpeg library in an OOP fashion.
4 |
5 | This library runs FFmpeg native routines within the JRE, via JNI. You do not need a compiled executable (i.e. ffmpeg.exe) to use ffmpeg4j, only a series of static libraries which are part of the package.
6 |
7 | # Maven
8 |
9 | If you want the latest `-SNAPSHOT`:
10 |
11 | ```xml
12 | <repositories>
13 |     <repository>
14 |         <id>jitpack.io</id>
15 |         <url>https://jitpack.io</url>
16 |     </repository>
17 | </repositories>
18 |
19 | <dependency>
20 |     <groupId>com.github.manevolent</groupId>
21 |     <artifactId>ffmpeg4j</artifactId>
22 |     <version>-SNAPSHOT</version>
23 | </dependency>
24 | ```
24 |
25 | # Features
26 |
27 | - A (partly complete) wrapper around the core behaviors of the FFmpeg library: audio, video, and their containing file formats.
28 | - Full coverage of all formats supported by FFmpeg (there are many!)
29 | - Tested for stability and optimized for the least wrapper overhead
30 | - Capable of delivering great apps like music bots, video transcoders, and livestream propagation
31 | - Sensible structure to fit within a Java development environment; don't deal directly with the C-like constructs exposed by JavaCPP.
32 | - Use typical InputStream and OutputStream objects to read and write media.
33 | - Use Channels to read and write media when seeking is needed (i.e. MP4 muxing).
34 | - Write applications that don't touch the disk (filesystem) for better performance and lower resource cost of in-flight data.
35 |
36 | # Examples
37 |
38 | ### Read an audio file
39 | ```java
40 | InputStream inputStream = new FileInputStream("example.ogg");
41 | FFmpegInput input = new FFmpegInput(inputStream);
42 | FFmpegSourceStream stream = input.open(inputFormat);
43 |
44 | // Read the file header, and register substreams in FFmpeg4j
45 | stream.registerStreams();
46 |
47 | AudioSourceSubstream audioSourceSubstream = null;
48 | for (MediaSourceSubstream substream : stream.getSubstreams()) {
49 | if (substream.getMediaType() != MediaType.AUDIO) continue;
50 |
51 | audioSourceSubstream = (AudioSourceSubstream) substream;
52 | }
53 |
54 | if (audioSourceSubstream == null) throw new NullPointerException();
55 |
56 | AudioFrame frame;
57 |
58 | while (true) {
59 | try {
60 | frame = audioSourceSubstream.next();
61 | float[] interleaved_ABABAB_AudioSamples = frame.getSamples();
62 | } catch (EOFException ex) {
63 | break;
64 | }
65 | }
66 | ```
67 |
68 | ### Transcode media
69 | ```java
70 | private void transcode(InputStream inputStream,
71 | String inputFormatName,
72 | SeekableByteChannel outputChannel,
73 | String outputFormatName) throws FFmpegException, IOException {
74 | try (FFmpegSourceStream sourceStream = FFmpegIO.openInputStream(inputStream).open(inputFormatName);
75 | FFmpegTargetStream targetStream = FFmpegIO.openChannel(outputChannel).asOutput().open(outputFormatName)) {
76 | sourceStream.registerStreams();
77 | sourceStream.copyToTargetStream(targetStream);
78 | Transcoder.convert(sourceStream, targetStream, Double.MAX_VALUE);
79 | }
80 | }
81 | ```
82 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <modelVersion>4.0.0</modelVersion>
6 |
7 |     <groupId>com.github.manevolent</groupId>
8 |     <artifactId>ffmpeg4j</artifactId>
9 |     <version>5.1.2-1.5.8-1</version>
10 |
11 |     <properties>
12 |         <maven.compiler.source>1.8</maven.compiler.source>
13 |         <maven.compiler.target>1.8</maven.compiler.target>
14 |     </properties>
15 |
16 |     <build>
17 |         <plugins>
18 |             <plugin>
19 |                 <groupId>org.apache.maven.plugins</groupId>
20 |                 <artifactId>maven-compiler-plugin</artifactId>
21 |                 <configuration>
22 |                     <source>1.8</source>
23 |                     <target>1.8</target>
24 |                 </configuration>
25 |             </plugin>
26 |         </plugins>
27 |     </build>
28 |
29 |     <dependencies>
30 |         <dependency>
31 |             <groupId>org.bytedeco</groupId>
32 |             <artifactId>ffmpeg</artifactId>
33 |             <version>5.1.2-1.5.8</version>
34 |         </dependency>
35 |         <dependency>
36 |             <groupId>org.bytedeco</groupId>
37 |             <artifactId>ffmpeg-platform</artifactId>
38 |             <version>5.1.2-1.5.8</version>
39 |         </dependency>
40 |         <dependency>
41 |             <groupId>junit</groupId>
42 |             <artifactId>junit</artifactId>
43 |             <version>RELEASE</version>
44 |             <scope>test</scope>
45 |         </dependency>
46 |     </dependencies>
47 | </project>
48 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/AudioFormat.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
3 | public class AudioFormat {
4 | private final int sampleRate;
5 | private final int channels;
6 | private final long channel_layout;
7 |
8 | public AudioFormat(int sampleRate, int channels, long channel_layout) {
9 | this.sampleRate = sampleRate;
10 | this.channels = channels;
11 | this.channel_layout = channel_layout;
12 | }
13 |
14 | public int getSampleRate() {
15 | return sampleRate;
16 | }
17 |
18 | public int getChannels() {
19 | return channels;
20 | }
21 |
22 | @Override
23 | public String toString() {
24 | return Integer.toString(sampleRate) + "Hz, " + Integer.toString(channels) + "ch";
25 | }
26 |
27 | @Override
28 | public boolean equals(Object b) {
29 | return b != null && b instanceof AudioFormat && ((AudioFormat) b).channels == channels
30 | && ((AudioFormat) b).sampleRate == sampleRate;
31 | }
32 |
33 | public long getChannelLayout() {
34 | return channel_layout;
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/AudioFrame.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
3 | public class AudioFrame extends MediaFrame {
4 | private final float[] samples;
5 | private final int length;
6 |
7 | private final AudioFormat format;
8 |
9 | public AudioFrame(double timestamp, double position, double time,
10 | float[] samples, AudioFormat format) {
11 | this(timestamp, position, time, samples, samples.length, format);
12 | }
13 |
14 | public AudioFrame(double timestamp, double position, double time,
15 | float[] samples, int length, AudioFormat format) {
16 | super(position, time, timestamp);
17 | this.samples = samples;
18 | this.length = length;
19 | this.format = format;
20 | }
21 |
22 | /**
23 | * Gets the samples in this frame, in PCM interleaved format.
24 | */
25 | public float[] getSamples() {
26 | return samples;
27 | }
28 |
29 | public AudioFormat getFormat() {
30 | return format;
31 | }
32 |
33 | public int getLength() {
34 | return length;
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/FFmpeg.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
3 | import com.github.manevolent.ffmpeg4j.math.*;
4 | import org.bytedeco.ffmpeg.avcodec.*;
5 | import org.bytedeco.ffmpeg.avformat.*;
6 | import org.bytedeco.ffmpeg.avutil.*;
7 | import org.bytedeco.ffmpeg.global.*;
8 | import org.bytedeco.javacpp.*;
9 |
10 | import java.util.*;
11 | import java.util.function.*;
12 |
13 | import static org.bytedeco.ffmpeg.global.avutil.av_q2d;
14 |
15 | public final class FFmpeg {
16 |
17 | public static Collection readPointer(IntPointer pointer) {
18 | List list = new LinkedList<>();
19 |
20 | for (long i = 0;; i ++) {
21 | if (pointer.get(i) < 0)
22 | break;
23 | list.add(pointer.get(i));
24 | }
25 |
26 | return list;
27 | }
28 |
29 | /**
30 | * Registers FFmpeg codecs and formats
31 | * @throws FFmpegException
32 | */
33 | @Deprecated
34 | public static void register() throws FFmpegException {
35 | // Deprecated, this does nothing the version of FFmpeg we use now
36 | }
37 |
38 | /**
39 | * See: https://ffmpeg.org/pipermail/libav-user/2018-May/011160.html
40 | * @return
41 | */
42 | private static Collection iterate(Function iterateFunction) {
43 | Collection outs = new ArrayList<>();
44 | try (Pointer opaque = new Pointer()) {
45 | T out;
46 | while ((out = iterateFunction.apply(opaque)) != null) {
47 | outs.add(out);
48 | }
49 | }
50 | return Collections.unmodifiableCollection(outs);
51 | }
52 |
53 | private static Collection iterateMuxers() {
54 | return iterate(avformat::av_muxer_iterate);
55 | }
56 |
57 | private static Collection iterateDemuxers() {
58 | return iterate(avformat::av_demuxer_iterate);
59 | }
60 |
61 | private static Collection iterateCodecs() {
62 | return iterate(avcodec::av_codec_iterate);
63 | }
64 |
65 | /**
66 | * Gets an FFmpeg codec instance by name.
67 | * @param name Name of the codec to search for.
68 | * @return static AVCodec reference.
69 | * @throws FFmpegException
70 | */
71 | public static AVCodec getCodecByName(String name) throws FFmpegException {
72 | if (name == null) throw new NullPointerException();
73 |
74 | for (AVCodec currentCodec : iterateCodecs()) {
75 | if (currentCodec.name() == null) continue;
76 | if (currentCodec.name().getString().equalsIgnoreCase(name))
77 | return currentCodec;
78 | }
79 |
80 | throw new FFmpegException("Unknown codec name: " + name);
81 | }
82 |
83 | /**
84 | * Finds an output format by extension
85 | * @param extension Output format extension
86 | * @return static output format reference.
87 | * @throws FFmpegException
88 | */
89 | public static AVOutputFormat getOutputFormatByExtension(String extension) throws FFmpegException {
90 | if (extension == null) throw new NullPointerException();
91 |
92 | for (AVOutputFormat currentFormat : iterateMuxers()) {
93 | if (currentFormat.extensions() == null) continue;
94 | String[] extensions = currentFormat.extensions().getString().split(",");
95 | if (Arrays.stream(extensions).anyMatch(x -> x.equalsIgnoreCase(extension)))
96 | return currentFormat;
97 | }
98 |
99 | throw new FFmpegException("Unknown output format extension: " + extension);
100 | }
101 |
102 | /**
103 | * Finds an input format by extension
104 | * @param extension Input format extension
105 | * @return static input format reference.
106 | * @throws FFmpegException
107 | */
108 | public static AVInputFormat getInputFormatByExtension(String extension) throws FFmpegException {
109 | if (extension == null) throw new NullPointerException();
110 |
111 | for (AVInputFormat currentFormat : iterateDemuxers()) {
112 | if (currentFormat.extensions() == null) continue;
113 | String[] extensions = currentFormat.extensions().getString().split(",");
114 | if (Arrays.stream(extensions).anyMatch(x -> x.equalsIgnoreCase(extension)))
115 | return currentFormat;
116 | }
117 |
118 | throw new FFmpegException("Unknown input format extension: " + extension);
119 | }
120 |
121 | /**
122 | * Finds an output format by name
123 | * @param name Output format name
124 | * @return static output format reference.
125 | * @throws FFmpegException
126 | */
127 | public static AVOutputFormat getOutputFormatByName(String name) throws FFmpegException {
128 | if (name == null) throw new NullPointerException();
129 |
130 | for (AVOutputFormat currentFormat : iterateMuxers()) {
131 | if (currentFormat.name() == null) continue;
132 | if (currentFormat.name().getString().equalsIgnoreCase(name))
133 | return currentFormat;
134 | }
135 |
136 | throw new FFmpegException("Unknown output format name: " + name);
137 | }
138 |
139 | /**
140 | * Finds an input format by name
141 | * @param name Input format name
142 | * @return static output format reference.
143 | * @throws FFmpegException
144 | */
145 | public static AVInputFormat getInputFormatByName(String name) throws FFmpegException {
146 | if (name == null) throw new NullPointerException();
147 |
148 | // Find the input format.
149 | AVInputFormat inputFormat = avformat.av_find_input_format(name);
150 | if (inputFormat == null) throw new FFmpegException("Unknown input format name: " + name);
151 | return inputFormat;
152 | }
153 |
154 | /**
155 | * Finds an output format by MIME type
156 | * @param mimeType Output format MIME type
157 | * @return static output format reference.
158 | * @throws FFmpegException
159 | */
160 | public static AVOutputFormat getOutputFormatByMime(String mimeType) throws FFmpegException {
161 | if (mimeType == null) throw new NullPointerException();
162 |
163 | for (AVOutputFormat currentFormat : iterateMuxers()) {
164 | if (currentFormat.mime_type() == null) continue;
165 | String[] mimeTypes = currentFormat.mime_type().getString().split(",");
166 | if (Arrays.stream(mimeTypes).anyMatch(x -> x.equalsIgnoreCase(mimeType)))
167 | return currentFormat;
168 | }
169 |
170 | throw new FFmpegException("Unknown output format MIME type: " + mimeType);
171 | }
172 |
173 |
174 | /**
175 | * Finds an input format by MIME type
176 | * @param mimeType Input format MIME type
177 | * @return static input format reference.
178 | * @throws FFmpegException
179 | */
180 | public static AVInputFormat getInputFormatByMime(String mimeType) throws FFmpegException {
181 | if (mimeType == null) throw new NullPointerException();
182 |
183 | for (AVInputFormat currentFormat : iterateDemuxers()) {
184 | if (currentFormat.mime_type() == null) continue;
185 | String[] mimeTypes = currentFormat.mime_type().getString().split(",");
186 | if (Arrays.stream(mimeTypes).anyMatch(x -> x.equalsIgnoreCase(mimeType)))
187 | return currentFormat;
188 | }
189 |
190 | throw new FFmpegException("Unknown input format MIME type: " + mimeType);
191 | }
192 |
193 | @Deprecated
194 | public static int guessFFMpegChannelLayout(int channels) throws FFmpegException {
195 | switch (channels) {
196 | case 1:
197 | return (int) avutil.AV_CH_LAYOUT_MONO;
198 | case 2:
199 | return (int) avutil.AV_CH_LAYOUT_STEREO;
200 | case 4:
201 | return (int) avutil.AV_CH_LAYOUT_3POINT1;
202 | case 5:
203 | return (int) avutil.AV_CH_LAYOUT_5POINT0;
204 | case 6:
205 | return (int) avutil.AV_CH_LAYOUT_5POINT1;
206 | case 7:
207 | return (int) avutil.AV_CH_LAYOUT_7POINT0;
208 | case 8:
209 | return (int) avutil.AV_CH_LAYOUT_7POINT1;
210 | default:
211 | throw new FFmpegException("Unsupported channel count: " + channels);
212 | }
213 | }
214 |
215 | /**
216 | * Finds a specific pixel format by name
217 | * @param pixelFormat Pixel format name
218 | * @return static pixel format instance
219 | * @throws FFmpegException
220 | */
221 | public static int getPixelFormatByName(String pixelFormat) throws FFmpegException {
222 | int pix_fmt = avutil.av_get_pix_fmt(pixelFormat);
223 |
224 | if (pix_fmt < 0)
225 | throw new FFmpegException("Unknown pixel format: " + pixelFormat);
226 |
227 | return pix_fmt;
228 | }
229 |
230 | public static double timestampToSeconds(AVRational timebase, long timestamp) {
231 | return (double) timestamp * Rational.fromAVRational(timebase).toDouble();
232 | }
233 | }
234 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/FFmpegError.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
3 |
4 | import org.bytedeco.ffmpeg.global.*;
5 |
6 | import java.nio.ByteBuffer;
7 |
8 | public class FFmpegError {
9 | public static int checkError(String function, int returnCode) throws FFmpegException {
10 | if (returnCode < 0) {
11 | ByteBuffer byteBuffer = ByteBuffer.allocate(1024);
12 | avutil.av_strerror(returnCode, byteBuffer, 1024);
13 | throw new FFmpegException("@" + Thread.currentThread().getName() + ": ffmpeg/" +
14 | function + ": " + new String(byteBuffer.array()).trim() + " (code=" + returnCode + ")");
15 | }
16 |
17 | return returnCode;
18 | }
19 |
20 | public static int checkErrorMuted(String function, int returnCode) {
21 | if (returnCode < 0) {
22 | ByteBuffer byteBuffer = ByteBuffer.allocate(1024);
23 | avutil.av_strerror(returnCode, byteBuffer, 1024);
24 | System.err.println("@" + Thread.currentThread().getName() + ": ffmpeg/" +
25 | function + ": " + new String(byteBuffer.array()).trim() + " (code=" + returnCode + ")");
26 | }
27 |
28 | return returnCode;
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/FFmpegException.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
public class FFmpegException extends Exception {
    /**
     * Creates an exception with a descriptive message.
     * @param message description of the failure.
     */
    public FFmpegException(String message) {
        super(message);
    }

    /**
     * Creates an exception wrapping an underlying cause.
     * @param cause underlying exception.
     */
    public FFmpegException(Exception cause) {
        super(cause);
    }

    /**
     * Creates an exception with both a message and an underlying cause,
     * so context can be added without dropping the original stack trace.
     * @param message description of the failure.
     * @param cause underlying cause.
     */
    public FFmpegException(String message, Throwable cause) {
        super(message, cause);
    }
}
11 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/FFmpegIO.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
3 | import com.github.manevolent.ffmpeg4j.stream.output.FFmpegTargetStream;
4 | import org.bytedeco.ffmpeg.avformat.*;
5 | import org.bytedeco.ffmpeg.global.*;
6 | import org.bytedeco.javacpp.*;
7 |
8 | import java.io.*;
9 | import java.nio.channels.ByteChannel;
10 | import java.nio.channels.Channel;
11 | import java.nio.channels.Channels;
12 | import java.nio.channels.SeekableByteChannel;
13 | import java.nio.file.*;
14 | import java.util.*;
15 | import java.util.function.Function;
16 | import java.util.logging.Level;
17 |
18 | // http://www.codeproject.com/Tips/489450/Creating-Custom-FFmpeg-IO-Context
19 | public final class FFmpegIO implements AutoCloseable {
20 | public static final int MAXIMUM_STATES = 128;
21 |
22 | /**
23 | * Default FFmpeg buffer size (used for buffering input from the stream pipes)
24 | */
25 | public static final int DEFAULT_BUFFER_SIZE = 32768;
26 |
27 | /**
28 | * Holds a list of IOStates for the global system.
29 | */
30 | private static int states_in_use = 0;
31 |
32 | public static int getStatesInUse() {
33 | return states_in_use;
34 | }
35 |
36 | private static final IOState[] IO_STATE_REGISTRY = new IOState[MAXIMUM_STATES];
37 |
38 | private static class IOState implements AutoCloseable {
39 | public final AVIOContext context;
40 |
41 | // Handlers for Java-based I/O
42 | public final InputStream inputStream;
43 | public final OutputStream outputStream;
44 |
45 | private final Pointer internalBufferPointer;
46 |
47 | private final int id;
48 |
49 | /**
50 | * While true, this IOState is considered in-use.
51 | */
52 | public boolean open = true;
53 | public Function seek;
54 |
55 | public int num_ops = 0, total = 0;
56 |
57 | public byte[] buffer = null;
58 |
59 | private final Object closeLock = new Object();
60 |
61 | private IOState(int id,
62 | AVIOContext context,
63 | InputStream inputStream,
64 | OutputStream outputStream,
65 | Pointer internalBufferPointer,
66 | Function seek) {
67 | this.id = id;
68 | this.context = context;
69 | this.inputStream = inputStream;
70 | this.outputStream = outputStream;
71 | this.internalBufferPointer = internalBufferPointer;
72 | this.seek = seek;
73 | }
74 |
75 | @Override
76 | public void close() throws Exception {
77 | synchronized (closeLock) {
78 | if (open) {
79 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL,
80 | "closing I/O stream id=" + id
81 | + " num_ops=" + num_ops + " total=" + total
82 | );
83 |
84 | buffer = null;
85 |
86 | if (outputStream != null)
87 | outputStream.close();
88 |
89 | if (inputStream != null)
90 | inputStream.close();
91 |
92 | states_in_use--;
93 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "states in use=" + states_in_use);
94 |
95 | open = false;
96 | }
97 | }
98 | }
99 |
100 | public int getId() {
101 | return id;
102 | }
103 |
104 | public long seek(long position) {
105 | return seek.apply(position);
106 | }
107 | }
108 |
109 | private static final Object ioLock = new Object();
110 |
111 | /**
112 | * AVIOContext holding callbacks
113 | */
114 | private final AVIOContext avioContext;
115 |
116 | /**
117 | * Used to prevent the JVM from destroying the callback objects for this AVIO contxet
118 | */
119 | private final AutoCloseable[] closeables;
120 |
121 | /**
122 | * This native method should never be de-allocated. It finds what IOState the pointer (pointer) means,
123 | * and uses it to read some data. Keep in mind that you can only have one of these native methods per
124 | * callback, so it's up to us to hold a list of states that the native method will use. This method
125 | * cannot have variables outside of it besides static variables.
126 | */
127 | private static final Read_packet_Pointer_BytePointer_int read =
128 | new Read_packet_Pointer_BytePointer_int() {
129 | @Override
130 | public int call(Pointer pointer, BytePointer buffer, int len) {
131 | IntPointer ioStatePtr = new IntPointer(pointer);
132 | int stateId = ioStatePtr.get();
133 |
134 | try {
135 | IOState state = IO_STATE_REGISTRY[stateId];
136 | if (state == null || !state.open) throw new NullPointerException();
137 |
138 | int target = Math.min(len, state.context.buffer_size());
139 |
140 | int pos = 0, read;
141 |
142 | while (pos < target) {
143 | try {
144 | if (state.buffer == null || state.buffer.length < target - pos)
145 | state.buffer = new byte[target - pos];
146 |
147 | read = state.inputStream.read(
148 | state.buffer,
149 | pos,
150 | target - pos
151 | );
152 |
153 | state.num_ops++;
154 | } catch (IOException e) {
155 | Logging.LOGGER.log(Level.WARNING, "Problem in FFmpeg IO read id=" + stateId, e);
156 | read = -1;
157 | }
158 |
159 | if (read < 0) {
160 | if (pos <= 0) {
161 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL,
162 | "EOF in I/O stream id=" +
163 | stateId + ": read=" + read + " pos=" + pos + " target=" + target
164 | );
165 |
166 | return avutil.AVERROR_EOF; // AVERROR_EOF
167 | }
168 | else break; // Still have some data to read
169 | } else {
170 | pos += read;
171 | state.total += read;
172 | }
173 | }
174 |
175 | if (pos > 0)
176 | buffer.position(0).put(state.buffer, 0, pos);
177 |
178 | return pos;
179 | } catch (Throwable e) {
180 | Logging.LOGGER.log(Level.WARNING, "Problem in FFmpeg IO read stream id=" + stateId, e);
181 | return -1;
182 | }
183 | }
184 | };
185 |
186 | private static final Write_packet_Pointer_BytePointer_int write =
187 | new Write_packet_Pointer_BytePointer_int() {
188 | @Override
189 | public int call(org.bytedeco.javacpp.Pointer pointer,
190 | BytePointer buffer,
191 | int len) {
192 | try {
193 | IntPointer ioStatePtr = new IntPointer(pointer);
194 | IOState state = IO_STATE_REGISTRY[ioStatePtr.get()];
195 | if (state == null || !state.open) throw new NullPointerException();
196 |
197 | int to_write = Math.min(len, state.context.buffer_size());
198 |
199 | if (to_write <= 0)
200 | throw new IllegalArgumentException("to_write: " + to_write);
201 |
202 | // Allocate buffer (this was a huge pain in the ass for me, by the way. allocate it...)
203 | // otherwise we'll cause a SIGSEV crash in buffer.get below
204 | if (state.buffer == null || state.buffer.length < to_write)
205 | state.buffer = new byte[to_write];
206 |
207 | buffer.get(state.buffer, 0, to_write);
208 |
209 | state.outputStream.write(state.buffer, 0, to_write);
210 |
211 | state.num_ops ++;
212 | state.total += to_write;
213 |
214 | return to_write;
215 | } catch (Throwable e) {
216 | Logging.LOGGER.log(Level.WARNING, "problem in FFmpeg IO write", e);
217 | return -1;
218 | }
219 | }
220 | };
221 |
222 |
223 | private static final Seek_Pointer_long_int seek =
224 | new Seek_Pointer_long_int() {
225 | @Override
226 | public long call(org.bytedeco.javacpp.Pointer pointer,
227 | long position,
228 | int whence) {
229 | switch (whence) {
230 | case 0:
231 | break;
232 | case avformat.AVSEEK_SIZE:
233 | /**
234 | * Passing this as the "whence" parameter to a seek function causes it to
235 | * return the filesize without seeking anywhere. Supporting this is optional.
236 | * If it is not supported then the seek function will return <0.
237 | */
238 | // Not supported
239 | return -1;
240 | case avformat.AVSEEK_FORCE:
241 | /**
242 | * Oring this flag as into the "whence" parameter to a seek function causes it to
243 | * seek by any means (like reopening and linear reading) or other normally unreasonable
244 | * means that can be extremely slow.
245 | * This may be ignored by the seek code.
246 | */
247 | // Ignore
248 | break;
249 | }
250 |
251 | try {
252 | IntPointer ioStatePtr = new IntPointer(pointer);
253 | IOState state = IO_STATE_REGISTRY[ioStatePtr.get()];
254 | if (state == null || !state.open) throw new NullPointerException();
255 |
256 | if (state.seek == null) {
257 | return -1;
258 | }
259 |
260 | return state.seek(position);
261 | } catch (Throwable e) {
262 | Logging.LOGGER.log(Level.WARNING, "problem in FFmpeg IO write", e);
263 | return -1;
264 | }
265 | }
266 | };
267 |
268 |
269 | public FFmpegIO(AVIOContext avioContext, AutoCloseable... autoCloseables) {
270 | this.avioContext = avioContext;
271 | this.closeables = autoCloseables;
272 | }
273 |
274 | private static void setIOState(int id, IOState state) {
275 | if (state != null && state.open) {
276 | states_in_use++;
277 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "states in use=" + states_in_use);
278 | }
279 |
280 | IO_STATE_REGISTRY[id] = state;
281 | }
282 |
283 | /**
284 | * Finds the next free IOState.
285 | * @return
286 | * @throws FFmpegException
287 | */
288 | private static int allocateIOStateId() throws FFmpegException {
289 | int ioStateId = -1; // -1 if no iostate is available
290 |
291 | if (states_in_use >= MAXIMUM_STATES)
292 | throw new FFmpegException("no I/O states are available " +
293 | "(current=" + states_in_use + " max=" + MAXIMUM_STATES + ").");
294 |
295 | IOState state;
296 | for (int newIOStateId = 0; newIOStateId < IO_STATE_REGISTRY.length; newIOStateId ++) {
297 | state = IO_STATE_REGISTRY[newIOStateId];
298 |
299 | if (state == null) {
300 | ioStateId = newIOStateId;
301 | break;
302 | } else if (!state.open) {
303 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "re-claiming IO state id=" + newIOStateId);
304 | ioStateId = newIOStateId;
305 | break;
306 | }
307 | }
308 |
309 | if (ioStateId < 0)
310 | throw new FFmpegException("failed to allocate I/O state; are none available? " +
311 | "(current=" + states_in_use + " max=" + MAXIMUM_STATES + ").");
312 |
313 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "opened I/O state id=" + ioStateId);
314 | return ioStateId;
315 | }
316 |
317 | public static FFmpegInput openInput(File file) throws IOException, FFmpegException {
318 | return openInputStream(Files.newInputStream(file.toPath()), DEFAULT_BUFFER_SIZE);
319 | }
320 |
321 | public static FFmpegInput openInput(File file, int bufferSize) throws IOException, FFmpegException {
322 | return openInputStream(Files.newInputStream(file.toPath()), bufferSize);
323 | }
324 |
325 | public static FFmpegInput openInputStream(final InputStream _inputStream) throws FFmpegException {
326 | return openInputStream(_inputStream, DEFAULT_BUFFER_SIZE);
327 | }
328 |
329 | /**
330 | * Opens a custom AVIOContext based around the managed InputStream proved.
331 | * @param _inputStream InputStream instance to have FFmpeg read from.
332 | * @param bufferSize buffer size of the input.
333 | * @return FFmpegSource instance which points to the input stream provided.
334 | */
335 | public static FFmpegInput openInputStream(final InputStream _inputStream, final int bufferSize)
336 | throws FFmpegException {
337 | Objects.requireNonNull(_inputStream, "Input stream cannot be null");
338 |
339 | synchronized (ioLock) {
340 | // Lock an IOSTATE
341 | int ioStateId = allocateIOStateId();
342 |
343 | // Open the underlying AVIOContext.
344 | Pointer internalBufferPointer = avutil.av_malloc(bufferSize);
345 |
346 | final AVIOContext context = avformat.avio_alloc_context(
347 | new BytePointer(internalBufferPointer).capacity(bufferSize), bufferSize, // internal Buffer and its size
348 | 0,
349 | null,
350 | read,
351 | null,
352 | seek
353 | );
354 |
355 | //Returns Allocated AVIOContext or NULL on failure.
356 | if (context == null) throw new NullPointerException();
357 |
358 | context.seekable(0);
359 |
360 | IntPointer intPointer = new IntPointer(avutil.av_malloc(4));
361 | intPointer.put(ioStateId);
362 |
363 | context.opaque(intPointer);
364 | context.write_flag(0);
365 |
366 | IOState state = new IOState(ioStateId, context, _inputStream, null, internalBufferPointer, null);
367 | setIOState(ioStateId, state);
368 |
369 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "opened input state id=" + ioStateId);
370 | return new FFmpegInput(new FFmpegIO(context, state));
371 | }
372 | }
373 |
374 | public static FFmpegOutput openOutput(File file, int bufferSize) throws IOException, FFmpegException {
375 | return openOutputStream(Files.newOutputStream(file.toPath()), bufferSize);
376 | }
377 |
378 | public static FFmpegOutput openOutput(File file) throws IOException, FFmpegException {
379 | return openOutputStream(Files.newOutputStream(file.toPath()), FFmpegIO.DEFAULT_BUFFER_SIZE);
380 | }
381 |
382 | public static FFmpegOutput openOutputStream(final OutputStream _outputStream)
383 | throws FFmpegException {
384 | return openOutputStream(_outputStream, DEFAULT_BUFFER_SIZE);
385 | }
386 |
387 | /**
388 | * Opens a custom AVIOContext based around the managed OutputStream provided.
389 | * @param _outputStream OutputStream instance to have FFmpeg read from.
390 | * @param bufferSize buffer size of the input.
391 | * @return FFmpegSource instance which points to the input stream provided.
392 | */
393 | public static FFmpegOutput openOutputStream(final OutputStream _outputStream, final int bufferSize)
394 | throws FFmpegException {
395 | synchronized (ioLock) {
396 | // Lock an IOSTATE
397 | int ioStateId = allocateIOStateId();
398 |
399 | // Open the underlying AVIOContext.
400 | Pointer internalBufferPointer = avutil.av_malloc(bufferSize); // sizeof() == 1 here
401 |
402 | final AVIOContext context = avformat.avio_alloc_context(
403 | new BytePointer(internalBufferPointer), bufferSize, // internal Buffer and its size
404 | 1, // write_flag
405 | null,
406 | null,
407 | write,
408 | seek
409 | );
410 |
411 | //Returns Allocated AVIOContext or NULL on failure.
412 | if (context == null) throw new NullPointerException();
413 |
414 | context.seekable(0);
415 |
416 | IntPointer intPointer = new IntPointer(avutil.av_malloc(4));
417 | intPointer.put(ioStateId);
418 |
419 | context.opaque(intPointer);
420 | context.write_flag(1);
421 |
422 | IOState state = new IOState(ioStateId, context, null, _outputStream, internalBufferPointer, null);
423 | setIOState(ioStateId, state);
424 |
425 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "opened output state id=" + ioStateId);
426 | return new FFmpegOutput(new FFmpegIO(context, state));
427 | }
428 | }
429 |
430 | public static FFmpegIO openChannel(final SeekableByteChannel channel) throws FFmpegException {
431 | return openChannel(channel, DEFAULT_BUFFER_SIZE);
432 | }
433 |
434 | /**
435 | * Opens a custom AVIOContext based around the managed channel provided. This context will be seekable.
436 | * @param channel Channel instance to have FFmpeg read from.
437 | * @param bufferSize buffer size of the input.
438 | * @return FFmpegSource instance which points to the input stream provided.
439 | */
440 | public static FFmpegIO openChannel(final SeekableByteChannel channel, final int bufferSize)
441 | throws FFmpegException {
442 | synchronized (ioLock) {
443 | // Lock an IOSTATE
444 | int ioStateId = allocateIOStateId();
445 |
446 | // Open the underlying AVIOContext.
447 | Pointer internalBufferPointer = avutil.av_malloc(bufferSize); // sizeof() == 1 here
448 |
449 | final AVIOContext context = avformat.avio_alloc_context(
450 | new BytePointer(internalBufferPointer), bufferSize, // internal Buffer and its size
451 | 1, // write_flag
452 | null,
453 | read,
454 | write,
455 | seek
456 | );
457 |
458 | //Returns Allocated AVIOContext or NULL on failure.
459 | if (context == null) throw new NullPointerException();
460 |
461 | context.seekable(1);
462 |
463 | IntPointer intPointer = new IntPointer(avutil.av_malloc(4));
464 | intPointer.put(ioStateId);
465 |
466 | context.opaque(intPointer);
467 | context.write_flag(1);
468 |
469 | IOState state = new IOState(ioStateId, context,
470 | Channels.newInputStream(channel),
471 | Channels.newOutputStream(channel),
472 | internalBufferPointer,
473 | (pos) -> {
474 | try {
475 | channel.position(pos);
476 | return channel.position();
477 | } catch (IOException e) {
478 | return -1L;
479 | }
480 | }
481 | );
482 |
483 | setIOState(ioStateId, state);
484 |
485 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "opened output state id=" + ioStateId);
486 | return new FFmpegIO(context, state);
487 | }
488 | }
489 |
490 | public static FFmpegOutput openNativeUrlOutput(String path) {
491 | AVIOContext context = new AVIOContext();
492 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "open native output stream: " + path + "...");
493 | avformat.avio_open(context, path, avformat.AVIO_FLAG_WRITE);
494 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "opened native output stream: " + path + ".");
495 | return new FFmpegOutput(new FFmpegIO(context, (Closeable) () -> avformat.avio_close(context)));
496 | }
497 |
498 | public static FFmpegInput openNativeUrlInput(String path) {
499 | AVIOContext context = new AVIOContext();
500 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "open native input stream: " + path + "...");
501 | avformat.avio_open(context, path, avformat.AVIO_FLAG_READ);
502 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "opened native input stream: " + path + ".");
503 | return new FFmpegInput(new FFmpegIO(context));
504 | }
505 |
506 | public FFmpegInput asInput() {
507 | return new FFmpegInput(this);
508 | }
509 |
510 | public FFmpegOutput asOutput() {
511 | return new FFmpegOutput(this);
512 | }
513 |
514 |
515 | public AVIOContext getContext() {
516 | return avioContext;
517 | }
518 |
519 | @Override
520 | public void close() throws Exception {
521 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegIO.close() called");
522 |
523 | for (AutoCloseable closeable : closeables)
524 | if (closeable != null) {
525 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "closeable.close()... (" + closeable.toString() + ")");
526 | closeable.close();
527 | }
528 |
529 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegIO.close() completed");
530 | }
531 |
532 | public static FFmpegIO createNullIO() {
533 | AVIOContext context = new AVIOContext();
534 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "creating null I/O...");
535 | return new FFmpegIO(context, null, null);
536 | }
537 | }
538 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/FFmpegInput.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
3 | import com.github.manevolent.ffmpeg4j.stream.FFmpegFormatContext;
4 | import com.github.manevolent.ffmpeg4j.stream.source.FFmpegSourceStream;
5 | import org.bytedeco.ffmpeg.avcodec.*;
6 | import org.bytedeco.ffmpeg.avformat.*;
7 | import org.bytedeco.ffmpeg.avutil.*;
8 | import org.bytedeco.ffmpeg.global.*;
9 |
10 | import java.io.InputStream;
11 | import java.nio.channels.Channel;
12 | import java.nio.channels.SeekableByteChannel;
13 | import java.util.logging.Level;
14 |
15 | /**
16 | * Represents the native input functionality for FFmpeg, at the container level (mp4, flv, etc).
17 | */
public class FFmpegInput implements AutoCloseable, FFmpegFormatContext {
    // avcodec_open2 is not thread-safe; all codec opens across instances share this lock.
    private static final Object openLock = new Object();
    private final AVFormatContext formatContext;
    private final FFmpegIO io;
    private volatile boolean opened = false;

    private final Object closeLock = new Object();
    private boolean closed = false;

    /**
     * Allocates a fresh AVFormatContext and attaches the given custom I/O to it.
     * @param io custom I/O context FFmpeg will read container data through.
     */
    public FFmpegInput(FFmpegIO io) {
        this.formatContext = avformat.avformat_alloc_context();
        if (this.formatContext == null) throw new NullPointerException();

        this.io = io;
        setAVIOContext(io.getContext());
    }

    /**
     * Number of streams (audio/video/etc.) found in the container.
     */
    public int substreamCount() {
        return formatContext.nb_streams();
    }

    public AVFormatContext getContext() {
        return formatContext;
    }

    // Custom I/O must be flagged on the format context before attaching our own AVIOContext.
    private void setAVIOContext(AVIOContext context) {
        enableCustomIO();
        this.formatContext.pb(context);
    }

    private void enableCustomIO() {
        setFlag(FFmpegFormatContext.AVFormatFlag.AVFMT_FLAG_CUSTOM_IO, true);
    }

    /**
     * Opens the input for the format.
     * @param format Container or raw format name ("flv", "mp4", etc.)
     * @throws RuntimeException
     */
    public FFmpegSourceStream open(String format) throws FFmpegException {
        return open(FFmpeg.getInputFormatByName(format));
    }

    /**
     * Opens the input for the format.
     * @param inputFormat Input format context
     * @throws RuntimeException
     */
    public FFmpegSourceStream open(AVInputFormat inputFormat) throws FFmpegException {
        // Open the input format.
        FFmpegError.checkError("avformat_open_input",
                avformat.avformat_open_input(
                        formatContext,
                        (String)null, // no URL: data arrives through the custom AVIOContext
                        inputFormat,
                        null
                )
        );

        opened = true;

        // Important: find & initialize stream information.
        // Contrary to belief, it doesn't seek. It just buffers up packets.
        FFmpegError.checkError(
                "avformat_find_stream_info",
                avformat.avformat_find_stream_info(formatContext, (AVDictionary) null)
        );

        //avformat.av_dump_format(formatContext, 0, "", 0);

        return new FFmpegSourceStream(this);
    }

    /**
     * Registers a stream.
     * @param stream_index stream index to register (0-indexed)
     * @return true if the stream was registered, false otherwise.
     */
    public boolean registerStream(FFmpegSourceStream sourceStream, int stream_index)
            throws FFmpegException {
        if (stream_index < 0)
            return false;

        // Find the stream in the format.
        AVStream stream = formatContext.streams(stream_index);

        // Find the codec ID of the stream.
        int codecId = stream.codecpar().codec_id();
        FFmpegError.checkError("codec_id", codecId);

        // Find the decoder based on the codec ID of the stream.
        AVCodec codec = avcodec.avcodec_find_decoder(codecId);
        if (codec == null) {
            // No decoder available is not fatal; the stream is simply skipped.
            Logging.LOGGER.log(Level.FINE,
                    "registerStream/avcodec_find_decoder: no decoder for codec id=" + codecId + " for stream index="
                            + stream_index + "."
            );

            return false;
        }

        // Open the codec. This was very tricky in the conceptual development phase, because I did not realize that
        // the 'ctx' object needs to have parameters (at least for HVC/264 video). It seems that these parameters are
        // available by retrieving the pointer for the AVStream's AVCodecContext (see ctx = stream.codec(); above).
        synchronized (openLock) { // avcodec_open2 is not thread-safe apparently.
            // NOTE(review): codecContext is local and not retained after this block; presumably the
            // substream re-derives its own context later — confirm it is not leaked here.
            AVCodecContext codecContext = avcodec.avcodec_alloc_context3(codec);

            // https://stackoverflow.com/questions/9652760/how-to-set-decode-pixel-format-in-libavcodec
            // P.S. the place to stick in the overriding callback would be before the
            // avcodec_open. Mind you, it's been a while since I looked at this stuff.
            codecContext.get_format(sourceStream.getGet_format_callback());

            // Copy over codec parameters to the context, this is required in some cases
            avcodec.avcodec_parameters_to_context(codecContext, formatContext.streams(stream_index).codecpar());

            FFmpegError.checkError("avcodec_open2", avcodec.avcodec_open2(codecContext, codec, (AVDictionary) null));
        }

        // Assign the stream to the substream.
        sourceStream.registerSubstream(stream_index, stream);

        return true;
    }

    /**
     * true if the context has opened (see open(String)).
     */
    public boolean isOpened() {
        return opened;
    }

    /**
     * Releases native resources held by this format context.
     * Idempotent: subsequent calls only log.
     * @throws Exception
     */
    public void close() throws Exception {
        Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegInput.close() called");

        synchronized (closeLock) {
            if (!closed) {
                Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "io.close()...");
                io.close();
                closed = true;
            } else {
                Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegInput already closed!");
            }
        }

        Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegInput.close() completed");
    }

    @Override
    public AVFormatContext getFormatContext() {
        return formatContext;
    }

}
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/FFmpegOutput.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
3 | import com.github.manevolent.ffmpeg4j.stream.FFmpegFormatContext;
4 | import com.github.manevolent.ffmpeg4j.stream.output.FFmpegTargetStream;
5 | import com.github.manevolent.ffmpeg4j.stream.source.FFmpegSourceStream;
6 | import org.bytedeco.ffmpeg.avcodec.AVCodec;
7 | import org.bytedeco.ffmpeg.avcodec.AVCodecContext;
8 | import org.bytedeco.ffmpeg.avformat.*;
9 | import org.bytedeco.ffmpeg.avutil.AVDictionary;
10 | import org.bytedeco.ffmpeg.global.avcodec;
11 | import org.bytedeco.ffmpeg.global.avformat;
12 |
13 | import java.io.InputStream;
14 | import java.io.OutputStream;
15 | import java.nio.channels.SeekableByteChannel;
16 | import java.util.logging.Level;
17 |
18 | /**
19 | * Represents the native input functionality for FFmpeg, at the container level (mp4, flv, etc).
20 | */
21 | public class FFmpegOutput implements AutoCloseable, FFmpegFormatContext {
22 | private static final Object openLock = new Object();
23 | private final AVFormatContext formatContext;
24 | private final FFmpegIO io;
25 | private volatile boolean opened = false;
26 |
27 | private final Object closeLock = new Object();
28 | private boolean closed = false;
29 |
30 | public FFmpegOutput(FFmpegIO io) {
31 | this.formatContext = avformat.avformat_alloc_context();
32 | if (this.formatContext == null) throw new NullPointerException();
33 |
34 | this.io = io;
35 | setAVIOContext(io.getContext());
36 | }
37 |
38 | public int substreamCount() {
39 | return formatContext.nb_streams();
40 | }
41 |
42 | public AVFormatContext getContext() {
43 | return formatContext;
44 | }
45 |
46 | private void setAVIOContext(AVIOContext context) {
47 | enableCustomIO();
48 | this.formatContext.pb(context);
49 | }
50 |
51 | private void enableCustomIO() {
52 | setFlag(AVFormatFlag.AVFMT_FLAG_CUSTOM_IO, true);
53 | }
54 |
55 | /**
56 | * Opens the output for the format.
57 | * @param formatName Container or raw format name ("flv", "mp4", etc.)
58 | * @throws RuntimeException
59 | */
60 | public FFmpegTargetStream open(String formatName) throws FFmpegException {
61 | return open(FFmpeg.getOutputFormatByName(formatName));
62 | }
63 |
64 | /**
65 | * Opens the output for the format.
66 | * @param outputFormat Input format context
67 | * @throws RuntimeException
68 | */
69 | public FFmpegTargetStream open(AVOutputFormat outputFormat) throws FFmpegException {
70 | // Open the input format.
71 | FFmpegError.checkError(
72 | "avformat_alloc_output_context2",
73 | avformat.avformat_alloc_output_context2(
74 | formatContext, // is set to the created format context, or to NULL in case of failure
75 | (AVOutputFormat) null, // format to use for allocating the context, if NULL format_name and filename are used instead
76 | outputFormat.name(), // the name of output format to use for allocating the context, if NULL filename is used instead
77 | null // the name of the filename to use for allocating the context, may be NULL
78 | )
79 | );
80 |
81 | opened = true;
82 |
83 | return new FFmpegTargetStream(formatContext, io, new FFmpegTargetStream.FFmpegNativeOutput());
84 | }
85 |
86 | /**
87 | * true if the context has opened (see open(String)).
88 | */
89 | public boolean isOpened() {
90 | return opened;
91 | }
92 |
93 | /**
94 | * Releases native resources held by this format context.
95 | * @throws Exception
96 | */
97 | public void close() throws Exception {
98 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegInput.close() called");
99 |
100 | synchronized (closeLock) {
101 | if (!closed) {
102 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "io.close()...");
103 | io.close();
104 | closed = true;
105 | } else {
106 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegInput already closed!");
107 | }
108 | }
109 |
110 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegInput.close() completed");
111 | }
112 |
113 | @Override
114 | public AVFormatContext getFormatContext() {
115 | return formatContext;
116 | }
117 |
118 | }
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/FFmpegStreamContext.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
3 | import org.bytedeco.ffmpeg.avcodec.AVCodecContext;
4 | import org.bytedeco.ffmpeg.avformat.AVStream;
5 | import org.bytedeco.ffmpeg.global.avcodec;
6 |
public interface FFmpegStreamContext {

    /**
     * Gets the codec context used to encode or decode this stream.
     */
    AVCodecContext getCodecContext();

    /**
     * Gets the underlying container-level stream.
     */
    AVStream getStream();

    /**
     * Copies codec parameters between streams. Note the direction:
     * avcodec_parameters_copy(dst, src) copies src into dst, so this copies the
     * parameters of {@code target}'s stream INTO this context's stream
     * (despite what the parameter name suggests).
     * @param target context whose stream parameters are read.
     * @throws FFmpegException if the native copy reports an error.
     */
    default void copyParameters(FFmpegStreamContext target) throws FFmpegException {
        int ret = avcodec.avcodec_parameters_copy(getStream().codecpar(), target.getStream().codecpar());
        FFmpegError.checkError("avcodec_parameters_copy", ret);
    }

}
19 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/Logging.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
3 | import java.util.logging.Level;
4 | import java.util.logging.Logger;
5 |
/**
 * Central logging facilities shared by the library.
 */
public class Logging {
    /** Level used for verbose internal diagnostics; mutable so embedders can adjust it. */
    public static Level DEBUG_LOG_LEVEL = Level.FINEST;

    /** The shared library logger, named "Media". */
    public static final Logger LOGGER = Logger.getLogger("Media");

    static {
        // Route records through the global logger's handlers.
        LOGGER.setParent(Logger.getGlobal());
        LOGGER.setUseParentHandlers(true);
    }
}
15 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/MediaFrame.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
/**
 * Base class for media frames, carrying the timing metadata common to audio and video.
 */
public abstract class MediaFrame {
    private final double timestamp;
    private final double position;
    private final double time;

    protected MediaFrame(double position, double time, double timestamp) {
        this.position = position;
        this.time = time;
        this.timestamp = timestamp;
    }

    /**
     * Gets the representation of where this frame is positioned in the stream.
     */
    public double getPosition() {
        return this.position;
    }

    /**
     * Gets the representation of how long this frame is.
     */
    public double getTime() {
        return this.time;
    }

    /**
     * Gets an encoding timestamp for this frame.
     * @return Timestamp.
     */
    public double getTimestamp() {
        return this.timestamp;
    }
}
36 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/MediaStream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
/**
 * Base class for media streams that track a decode/playback position.
 */
public abstract class MediaStream implements AutoCloseable {
    // Volatile: written by decoder threads, read by consumers.
    private volatile double position = 0D;

    /**
     * Sets the stream's position in seconds. Called by decoders.
     * @param position Position.
     */
    protected final void setPosition(double position) {
        this.position = position;
    }

    /**
     * Gets the stream's position in seconds, based on the last read packet.
     * @return Stream position.
     */
    public final double getPosition() {
        return position;
    }
}
22 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/MediaType.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
/**
 * The kind of media carried by a stream, frame, or substream.
 */
public enum MediaType {
    /** Audio samples. */
    AUDIO,
    /** Video pictures. */
    VIDEO
}
7 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/VideoFormat.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
3 | import org.bytedeco.ffmpeg.avcodec.*;
4 | import org.bytedeco.ffmpeg.global.*;
5 | import org.bytedeco.javacpp.IntPointer;
6 |
7 | import java.nio.*;
8 | import java.util.Collection;
9 | import java.util.List;
10 | import java.util.stream.Collectors;
11 |
12 | public class VideoFormat {
13 | private final int width, height;
14 | private final double framesPerSecond;
15 |
16 | public VideoFormat(int width, int height, double framesPerSecond) {
17 | this.width = width;
18 | this.height = height;
19 | this.framesPerSecond = framesPerSecond;
20 | }
21 |
22 | public int getWidth() {
23 | return width;
24 | }
25 |
26 | public int getHeight() {
27 | return height;
28 | }
29 |
    /**
     * Gets the nominal frame rate of this format, in frames per second.
     * (The previous comment claimed this returns 1 / FPS; the field returned here
     * is the frames-per-second value passed to the constructor, not its reciprocal.)
     * @return frames per second.
     */
    public double getFramesPerSecond() {
        return framesPerSecond;
    }
37 |
38 | public double getAspectRatio() {
39 | return (double) getWidth() / (double) getHeight();
40 | }
41 |
42 | @Override
43 | public String toString() {
44 | return Integer.toString(width) + "x" + Integer.toString(height) + " @" + (Double.toString(framesPerSecond)) + "FPS";
45 | }
46 |
47 | public static int getBestPixelFormat(String codecName, int pixelFormat) {
48 | AVCodec codec = avcodec.avcodec_find_encoder_by_name(codecName);
49 | if (codec == null) throw new RuntimeException(codecName);
50 | return getBestPixelFormat(codec, pixelFormat);
51 | }
52 |
53 | public static int getBestPixelFormat(AVCodec codec, int pixelFormat) {
54 | Collection formats = FFmpeg.readPointer(codec.pix_fmts());
55 |
56 | /*
57 | [in] dst_pix_fmt1 One of the two destination pixel formats to choose from
58 | [in] dst_pix_fmt2 The other of the two destination pixel formats to choose from
59 | [in] src_pix_fmt Source pixel format
60 | [in] has_alpha Whether the source pixel format alpha channel is used.
61 | [in,out] loss_ptr Combination of loss flags. In: selects which of the losses to ignore, i.e.
62 | NULL or value of zero means we care about all losses.
63 | Out: the loss that occurs when converting from src to selected dst pixel format.
64 | */
65 |
66 | int best_format = org.bytedeco.ffmpeg.global.avcodec.avcodec_find_best_pix_fmt_of_list(
67 | codec.pix_fmts(),
68 | pixelFormat,
69 | 0,
70 | (IntPointer) null
71 | );
72 |
73 | return best_format;
74 | }
75 |
76 | public static int getBestPixelFormat(AVCodec a, AVCodec b, int pixelFormat) {
77 | Collection a_formats = FFmpeg.readPointer(a.pix_fmts());
78 | Collection b_formats = FFmpeg.readPointer(b.pix_fmts());
79 |
80 | IntBuffer buffer = IntBuffer.allocate(Math.min(a_formats.size(), b_formats.size()));
81 | a_formats.stream().filter(x -> b_formats.stream().anyMatch(y -> y.intValue() == x.intValue())).forEach(buffer::put);
82 |
83 | /*
84 | [in] dst_pix_fmt1 One of the two destination pixel formats to choose from
85 | [in] dst_pix_fmt2 The other of the two destination pixel formats to choose from
86 | [in] src_pix_fmt Source pixel format
87 | [in] has_alpha Whether the source pixel format alpha channel is used.
88 | [in,out] loss_ptr Combination of loss flags. In: selects which of the losses to ignore, i.e.
89 | NULL or value of zero means we care about all losses.
90 | Out: the loss that occurs when converting from src to selected dst pixel format.
91 | */
92 |
93 | int best_format = org.bytedeco.ffmpeg.global.avcodec.avcodec_find_best_pix_fmt_of_list(
94 | buffer,
95 | pixelFormat,
96 | 0,
97 | null
98 | );
99 |
100 | return best_format;
101 | }
102 |
103 | public static int getBestPixelFormat(String encoderName, String decoderName, int pixelFormat) {
104 | AVCodec encoder = avcodec.avcodec_find_encoder_by_name(encoderName);
105 | if (encoder == null) throw new RuntimeException("unrecognized video encoder: " + encoderName);
106 |
107 | AVCodec decoder = avcodec.avcodec_find_decoder_by_name(decoderName);
108 | if (decoder == null) throw new RuntimeException("unrecognized video decoder: " + decoderName);
109 |
110 | return getBestPixelFormat(encoder, decoder, pixelFormat);
111 | }
112 | }
113 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/VideoFrame.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j;
2 |
3 | public class VideoFrame extends MediaFrame {
4 | //private static final int FIELDS = 3;
5 | private final int format;
6 | private final int width, height;
7 | private final byte[] data;
8 |
9 | public VideoFrame(double timestamp, double position, double time,
10 | int format, int width, int height, byte[] frameData) {
11 | super(position, time, timestamp);
12 |
13 | this.format = format;
14 |
15 | this.width = width;
16 | this.height = height;
17 | this.data = frameData;
18 | }
19 |
20 | /**private int addr(int x, int y, int offs) {
21 | return (y * getWidth() * FIELDS) + (x * FIELDS) + offs;
22 | }
23 |
24 | public byte getR(int x, int y) {
25 | return data[addr(x, y, 0)];
26 | }
27 |
28 | public byte getG(int x, int y) {
29 | return data[addr(x, y, 1)];
30 | }
31 |
32 | public byte getB(int x, int y) {
33 | return data[addr(x, y, 2)];
34 | }
35 |
36 | public void setR(int x, int y, byte r) {
37 | data[addr(x, y, 0)] = r;
38 | }
39 |
40 | public void setG(int x, int y, byte g) {
41 | data[addr(x, y, 1)] = g;
42 | }
43 |
44 | public void setB(int x, int y, byte b) {
45 | data[addr(x, y, 2)] = b;
46 | }
47 |
48 | public int getRGB(int x, int y) {
49 | int basePosition = (y * getWidth() * FIELDS) + (x * FIELDS);
50 | int argb = 0xFF;
51 | argb = (argb << 0) | (data[basePosition] & 0xFF);
52 | argb = (argb << 8) | (data[basePosition+1] & 0xFF);
53 | argb = (argb << 16) | (data[basePosition+2] & 0xFF);
54 |
55 | return argb;
56 | }
57 |
58 | public int setRGB(int x, int y, int rgb) {
59 | int basePosition = (y * getWidth() * FIELDS) + (x * FIELDS);
60 | data[basePosition] = (byte) (0xFF & ( rgb >> 16));
61 | data[basePosition+1] = (byte) (0xFF & (rgb >> 8 ));
62 | data[basePosition+2] = (byte) (0xFF & (rgb >> 0 ));
63 | return rgb;
64 | }**/
65 |
66 | public byte[] getData() {
67 | return data;
68 | }
69 |
70 | public int getWidth() {
71 | return width;
72 | }
73 |
74 | public int getHeight() {
75 | return height;
76 | }
77 |
78 | /**
79 | * Gets the FFmpeg native pixel format for this frame.
80 | * @return pixel format.
81 | */
82 | public int getFormat() {
83 | return format;
84 | }
85 | }
86 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/filter/MediaFilter.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.filter;
2 |
3 | import com.github.manevolent.ffmpeg4j.MediaFrame;
4 |
5 | import java.util.Collection;
6 | import java.util.Collections;
7 |
8 | public abstract class MediaFilter implements AutoCloseable {
9 | public abstract Collection apply(T source);
10 |
11 | public Collection flush() {
12 | return Collections.emptyList();
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/filter/MediaFilterChain.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.filter;
2 |
3 | import com.github.manevolent.ffmpeg4j.MediaFrame;
4 |
5 | import java.util.Collection;
6 | import java.util.Collections;
7 | import java.util.LinkedList;
8 | import java.util.List;
9 |
10 | public abstract class MediaFilterChain extends MediaFilter {
11 | private final List> filters;
12 |
13 | protected MediaFilterChain(Collection> filters) {
14 | this.filters = new LinkedList<>(filters);
15 | }
16 |
17 | @Override
18 | public Collection apply(T source) {
19 | Collection frames = Collections.singleton(source);
20 | for (MediaFilter filter : filters) {
21 | Collection newCollection = new LinkedList<>();
22 | for (T frame : frames) newCollection.addAll(filter.apply(frame));
23 | frames = newCollection;
24 | }
25 | return frames;
26 | }
27 |
28 | @Override
29 | public Collection flush() {
30 | Collection frames = Collections.emptyList();
31 | for (MediaFilter filter : filters) {
32 | Collection newCollection = new LinkedList<>();
33 | for (T frame : frames) newCollection.addAll(filter.apply(frame));
34 | newCollection.addAll(filter.flush());
35 | frames = newCollection;
36 | }
37 | return frames;
38 | }
39 |
40 | @Override
41 | public void close() throws Exception {
42 | // Do nothing
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/filter/audio/AudioFilter.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.filter.audio;
2 |
3 | import com.github.manevolent.ffmpeg4j.AudioFrame;
4 | import com.github.manevolent.ffmpeg4j.filter.MediaFilter;
5 |
6 | public abstract class AudioFilter extends MediaFilter {
7 |
8 | }
9 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/filter/audio/AudioFilterNone.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.filter.audio;
2 |
3 | import com.github.manevolent.ffmpeg4j.AudioFrame;
4 |
5 | import java.util.Collection;
6 | import java.util.Collections;
7 |
8 | public class AudioFilterNone extends AudioFilter {
9 | @Override
10 | public Collection apply(AudioFrame source) {
11 | return Collections.singletonList(source);
12 | }
13 |
14 | @Override
15 | public void close() throws Exception {
16 |
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/filter/audio/FFmpegAudioResampleFilter.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.filter.audio;
2 |
3 | import com.github.manevolent.ffmpeg4j.*;
4 | import org.bytedeco.ffmpeg.global.*;
5 | import org.bytedeco.ffmpeg.swresample.*;
6 | import org.bytedeco.javacpp.*;
7 |
8 | import java.nio.ByteBuffer;
9 | import java.nio.ByteOrder;
10 | import java.nio.FloatBuffer;
11 | import java.util.Collection;
12 | import java.util.Collections;
13 |
/**
 * Resamples audio frames from one {@link AudioFormat} to another using FFmpeg's
 * libswresample (channel layout, sample rate). Both the filter's input and its
 * output use packed float samples (AV_SAMPLE_FMT_FLT).
 *
 * Owns native buffers allocated with av_malloc; callers must invoke
 * {@link #close()} to release them.
 */
public class FFmpegAudioResampleFilter extends AudioFilter {
    // Sample format used on both sides of the resampler: packed (non-planar) float.
    private static final int SAMPLE_FORMAT = avutil.AV_SAMPLE_FMT_FLT;
    public static final int DEFAULT_BUFFER_SIZE = 16 * 1024;
    private volatile SwrContext swrContext;

    private final AudioFormat input, output;

    // Native sample buffers, one BytePointer per plane (packed float => 1 plane).
    private final BytePointer[] samples_in;
    private final BytePointer[] samples_out;
    // Pointer arrays handed to swr_convert, referencing the buffers above.
    private final PointerPointer samples_in_ptr;
    private final PointerPointer samples_out_ptr;

    private final int outputBytesPerSample, inputBytesPerSample;
    private final int inputPlanes, outputPlanes;

    // Scratch buffer used to convert the incoming float[] to native-order bytes;
    // grown on demand in apply().
    private ByteBuffer presampleOutputBuffer = ByteBuffer.allocate(0);
    // Managed copy of the native output buffer; grown on demand in apply().
    private byte[] recvBuffer;

    /**
     * Creates a resample filter.
     * @param input format of the frames passed to {@link #apply(AudioFrame)}.
     * @param output format the returned frames are resampled to.
     * @param bufferSize per-plane sample capacity used to size the native buffers.
     * @throws FFmpegException if either channel count is invalid or swr_init fails.
     */
    public FFmpegAudioResampleFilter(AudioFormat input, AudioFormat output, int bufferSize) throws FFmpegException {
        this.input = input;
        this.output = output;

        if (input.getChannels() <= 0)
            throw new FFmpegException("invalid input channel count: " + input.getChannels());

        if (output.getChannels() <= 0)
            throw new FFmpegException("invalid output channel count: " + output.getChannels());

        try {
            // Configure input parameters
            int ffmpegInputFormat = SAMPLE_FORMAT;
            int inputChannels = input.getChannels();
            // For planar formats there is one plane per channel; packed formats use a single plane.
            inputPlanes = avutil.av_sample_fmt_is_planar(ffmpegInputFormat) != 0 ? inputChannels : 1;
            int inputSampleRate = (int)input.getSampleRate();
            inputBytesPerSample = avutil.av_get_bytes_per_sample(ffmpegInputFormat);
            int inputFrameSize = bufferSize * (input.getChannels() / inputPlanes) * inputBytesPerSample; // x4 for float datatype

            // Configure output parameters
            int ffmpegOutputFormat = SAMPLE_FORMAT;
            int outputChannels = output.getChannels();
            outputPlanes = avutil.av_sample_fmt_is_planar(ffmpegOutputFormat) != 0 ? outputChannels : 1;
            int outputSampleRate = (int)output.getSampleRate();
            outputBytesPerSample = avutil.av_get_bytes_per_sample(ffmpegOutputFormat);
            // Size the output buffer from the input frame size so a full input
            // frame can always be converted in one call.
            int outputFrameSize = avutil.av_samples_get_buffer_size(
                    (IntPointer) null,
                    outputChannels,
                    inputFrameSize, // Input frame size neccessary
                    ffmpegOutputFormat,
                    1
            ) / outputPlanes;

            swrContext = swresample.swr_alloc_set_opts(
                    null,

                    // Output configuration
                    output.getChannelLayout(),
                    ffmpegOutputFormat,
                    outputSampleRate,

                    // Input configuration
                    input.getChannelLayout(),
                    ffmpegInputFormat,
                    inputSampleRate,

                    0, null
            );

            // Force resampler to always resample regardless of the sample rates.
            // This forces the output to always be floats.
            avutil.av_opt_set_int(swrContext, "swr_flags", 1, 0);

            FFmpegError.checkError("swr_init", swresample.swr_init(swrContext));

            // Create input buffers
            samples_in = new BytePointer[inputPlanes];
            for (int i = 0; i < inputPlanes; i++) {
                samples_in[i] = new BytePointer(avutil.av_malloc(inputFrameSize)).capacity(inputFrameSize);
            }

            // Create output buffers
            samples_out = new BytePointer[outputPlanes];
            for (int i = 0; i < outputPlanes; i++) {
                samples_out[i] = new BytePointer(avutil.av_malloc(outputFrameSize)).capacity(outputFrameSize);
            }

            // Initialize input and output sample buffers;
            samples_in_ptr = new PointerPointer(inputPlanes);
            samples_out_ptr = new PointerPointer(outputPlanes);

            for (int i = 0; i < samples_out.length; i++)
                samples_out_ptr.put(i, samples_out[i]);

            for (int i = 0; i < samples_in.length; i++)
                samples_in_ptr.put(i, samples_in[i]);
        } catch (Throwable e) {
            // Construction failed part-way: release the resample context before rethrowing.
            // NOTE(review): native buffers av_malloc'd before the failure point are
            // not freed here — confirm whether this leak path matters in practice.
            if (swrContext != null) {
                swresample.swr_free(swrContext);
                swrContext = null;
            }

            throw new RuntimeException(e);
        }
    }

    /**
     * Resamples one input frame.
     * @param source frame of packed float samples in the input format.
     * @return zero frames (when the resampler buffered everything) or one frame
     *         of packed float samples in the output format.
     */
    @Override
    public Collection apply(AudioFrame source) {
        // Maximum number of output samples (per channel) the native output buffer can hold.
        int outputCount =
                (int)Math.min(
                        (samples_out[0].limit() - samples_out[0].position()) / (output.getChannels() * outputBytesPerSample),
                        Integer.MAX_VALUE
                );

        // Copy the managed float[] into a native-order byte buffer, then into
        // the native input plane.
        int ffmpegNativeLength = source.getSamples().length * inputBytesPerSample;
        if (presampleOutputBuffer.capacity() < ffmpegNativeLength) {
            presampleOutputBuffer = ByteBuffer.allocate(ffmpegNativeLength);
            presampleOutputBuffer.order(ByteOrder.nativeOrder());
        }

        presampleOutputBuffer.position(0);
        presampleOutputBuffer.asFloatBuffer().put(source.getSamples());

        samples_in[0].position(0).put(presampleOutputBuffer.array(), 0, ffmpegNativeLength);

        // Returns number of samples output per channel, negative value on error
        int ret = swresample.swr_convert(
                swrContext,
                samples_out_ptr, outputCount,
                samples_in_ptr, source.getSamples().length / input.getChannels()
        );

        // Check return values
        try {
            FFmpegError.checkError("swr_convert", ret);
        } catch (FFmpegException e) {
            throw new RuntimeException(e);
        }

        if (ret == 0) return Collections.emptyList();

        // Read native sample buffer(s) into managed raw byte array
        // WARNING: This only works if the output format is non-planar (doesn't end with "P")

        int returnedSamples = ret * output.getChannels();
        int len = returnedSamples * 4; // 4 bytes per float sample
        if (recvBuffer == null || recvBuffer.length < len)
            recvBuffer = new byte[len];

        samples_out[0].position(0).get(recvBuffer);

        // Convert raw data to bytes.
        // This is done by converting the raw samples to floats right out of ffmpeg to preserve the
        // original quality post-resample.

        FloatBuffer buffer = ByteBuffer.wrap(recvBuffer).order(ByteOrder.nativeOrder()).asFloatBuffer();
        buffer.position(0).limit(returnedSamples);

        float[] newBuffer = new float[returnedSamples];
        for (int i = 0; i < returnedSamples; i ++) newBuffer[i] = buffer.get();

        // Return total re-sampled bytes to the higher-level audio system.
        // Timing metadata is carried over unchanged from the source frame.
        return Collections.singletonList(new AudioFrame(
                source.getTimestamp(),
                source.getPosition(),
                source.getTime(),
                newBuffer,
                output
        ));
    }

    public AudioFormat getInputFormat() {
        return input;
    }

    public AudioFormat getOutputFormat() {
        return output;
    }

    /**
     * Drains any samples buffered inside the resampler by converting an empty frame.
     * NOTE(review): the empty frame is built with a null format and zeroed timing;
     * confirm downstream consumers tolerate the resulting frame metadata.
     */
    @Override
    public Collection flush() {
        return apply(new AudioFrame(0D, 0D, 0D, new float[0], null));
    }

    /**
     * Releases the native buffers and the swr context.
     * NOTE(review): each buffer is passed to av_free() and then deallocate() is
     * called on the same BytePointer — verify this does not double-free.
     */
    @Override
    public void close() throws Exception {
        Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegAudioResampleFilter.close() called");

        // see: https://ffmpeg.org/doxygen/2.1/doc_2examples_2resampling_audio_8c-example.html
        for (int i = 0; i < samples_in.length; i++) {
            Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "deallocating samples_in[" + i + "])...");
            avutil.av_free(samples_in[i]);
            samples_in[i].deallocate();
            samples_in[i] = null;
        }
        Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "deallocating samples_in_ptr...");
        samples_in_ptr.deallocate();

        // see: https://ffmpeg.org/doxygen/2.1/doc_2examples_2resampling_audio_8c-example.html
        for (int i = 0; i < samples_out.length; i++) {
            Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "deallocating samples_out[" + i + "])...");
            avutil.av_free(samples_out[i]);
            samples_out[i].deallocate();
            samples_out[i] = null;
        }
        Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "deallocating samples_out_ptr...");
        samples_out_ptr.deallocate();

        Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "swr_free.close(swrContext)...");
        swresample.swr_free(swrContext);

        Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegAudioResampleFilter.close() completed");
    }
}
226 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/filter/video/FFmpegVideoRescaleFilter.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.filter.video;
2 |
3 | import com.github.manevolent.ffmpeg4j.*;
4 | import org.bytedeco.ffmpeg.avutil.*;
5 | import org.bytedeco.ffmpeg.global.*;
6 | import org.bytedeco.ffmpeg.swscale.*;
7 | import org.bytedeco.javacpp.*;
8 |
9 | import java.util.ArrayList;
10 | import java.util.Collection;
11 | import java.util.List;
12 |
13 | public class FFmpegVideoRescaleFilter extends VideoFilter {
14 | private final VideoFormat inputFormat, outputFormat;
15 |
16 | // FFmpeg native stuff (for video conversion)
17 | private final BytePointer inputBuffer;
18 | private final BytePointer outputBuffer;
19 | private final AVFrame inputFrame;
20 | private final AVFrame outputFrame;
21 | private final SwsContext sws;
22 |
23 | private final double outputFrameDuration;
24 | private final double inputFrameDuration;
25 |
26 | private final int pixelFormat;
27 |
28 | private VideoFrame lastFrame;
29 | private long count = 0L;
30 |
31 | public FFmpegVideoRescaleFilter(VideoFormat input, VideoFormat output, int pixelFormat) throws FFmpegException {
32 | /*
33 | http://stackoverflow.com/questions/29743648/which-flag-to-use-for-better-quality-with-sws-scale
34 |
35 | The RGB24 to YUV420 conversation itself is lossy. The scaling algorithm is probably used in downscaling
36 | the color information. I'd say the quality is: point << bilinear < bicubic < lanczos/sinc/spline I don't
37 | really know the others. Under rare circumstances sinc is the ideal scaler and lossless, but those
38 | conditions are usually not met. Are you also scaling the video? Otherwise I'd go for bicubic.
39 | */
40 |
41 | this(input, output, pixelFormat, swscale.SWS_BILINEAR);
42 | }
43 |
44 | public FFmpegVideoRescaleFilter(VideoFormat input, VideoFormat output, int pixelFormat, int scaleFilter)
45 | throws FFmpegException {
46 | this.pixelFormat = pixelFormat;
47 |
48 | this.inputFormat = input;
49 | this.outputFormat = output;
50 |
51 | this.inputFrameDuration = 1D / input.getFramesPerSecond();
52 | this.outputFrameDuration = 1D / output.getFramesPerSecond();
53 |
54 | if (input.getHeight() != output.getHeight() || input.getWidth() != output.getWidth()) {
55 | outputFrame = avutil.av_frame_alloc();
56 | if (outputFrame == null) throw new RuntimeException("failed to allocate output frame");
57 |
58 | int numBytesInput = avutil.av_image_get_buffer_size(
59 | pixelFormat,
60 | input.getWidth(),
61 | input.getHeight(),
62 | 1 // used by some other methods in ffmpeg
63 | );
64 |
65 | int numBytesOutput = avutil.av_image_get_buffer_size(
66 | pixelFormat,
67 | output.getWidth(),
68 | output.getHeight(),
69 | 1 // used by some other methods in ffmpeg
70 | );
71 |
72 | inputBuffer = new BytePointer(avutil.av_malloc(numBytesInput));
73 | outputBuffer = new BytePointer(avutil.av_malloc(numBytesOutput));
74 |
75 | sws = swscale.sws_getContext(
76 | input.getWidth(), input.getHeight(), pixelFormat, // source
77 | output.getWidth(), output.getHeight(), pixelFormat, // destination
78 | scaleFilter, // flags (see above)
79 | null, null, (DoublePointer) null // filters, params
80 | );
81 |
82 | // Assign appropriate parts of buffer to image planes in pFrameRGB
83 | // Note that pFrameRGB is an AVFrame, but AVFrame is a superset
84 | // of AVPicture
85 | this.inputFrame = new AVFrame();
86 |
87 | // Assign appropriate parts of buffer to image planes in pFrameRGB
88 | // See: https://mail.gnome.org/archives/commits-list/2016-February/msg05531.html
89 | FFmpegError.checkError("av_image_fill_arrays", avutil.av_image_fill_arrays(
90 | inputFrame.data(),
91 | inputFrame.linesize(),
92 | inputBuffer,
93 | pixelFormat,
94 | input.getWidth(),
95 | input.getHeight(),
96 | 1
97 | ));
98 |
99 | FFmpegError.checkError("av_image_fill_arrays", avutil.av_image_fill_arrays(
100 | outputFrame.data(),
101 | outputFrame.linesize(),
102 | outputBuffer,
103 | pixelFormat,
104 | input.getWidth(),
105 | input.getHeight(),
106 | 1
107 | ));
108 | } else {
109 | inputBuffer = null;
110 | outputBuffer = null;
111 | inputFrame = null;
112 | outputFrame = null;
113 | sws = null;
114 | }
115 | }
116 |
117 | @Override
118 | public Collection apply(VideoFrame source) {
119 | List videoFrames = new ArrayList<>();
120 | if (source == null || source.getTime() <= 0D) return videoFrames;
121 |
122 | // Decide to drop or keep the frame
123 | double outputPositionInSeconds = (double)count * outputFrameDuration;
124 |
125 | double newInputPositionInSeconds = source.getPosition() + inputFrameDuration;
126 | double newOutputPositionInSeconds = outputPositionInSeconds + outputFrameDuration;
127 |
128 | // FPS conversions
129 | if (inputFrameDuration != outputFrameDuration) {
130 | if (newOutputPositionInSeconds > newInputPositionInSeconds) // 60FPS -> 30FPS
131 | return videoFrames; // drop the frame
132 | else if (lastFrame != null) { // 30FPS -> 60FPS
133 | while (newOutputPositionInSeconds + outputFrameDuration < newInputPositionInSeconds) {
134 | videoFrames.add(lastFrame);
135 | newOutputPositionInSeconds += outputFrameDuration;
136 | }
137 | }
138 | }
139 |
140 | VideoFrame frame;
141 |
142 | if (inputFormat.getHeight() != outputFormat.getHeight() || inputFormat.getWidth() != outputFormat.getWidth()) {
143 | inputFrame.data(0).put(source.getData());
144 |
145 | int ret = swscale.sws_scale(
146 | sws, // the scaling context previously created with sws_getContext()
147 | inputFrame.data(), // the array containing the pointers to the planes of the source slice
148 | inputFrame.linesize(), // the array containing the strides for each plane of the source image
149 | 0, // the position in the source image of the slice to process, that is the number (counted starting from zero) in the image of the first row of the slice
150 | source.getHeight(), // the height of the source slice, that is the number of rows in the slice
151 | outputFrame.data(), // the array containing the pointers to the planes of the destination image
152 | outputFrame.linesize() // the array containing the strides for each plane of the destination image
153 | );
154 |
155 | try {
156 | FFmpegError.checkError("sws_scale", ret);
157 | } catch (FFmpegException e) {
158 | throw new RuntimeException(e);
159 | }
160 |
161 | BytePointer data = outputFrame.data(0);
162 | int l = outputFormat.getWidth();
163 |
164 | // Allocate pixel data buffer:
165 | byte[] pixelData = new byte[l * ret * 3];
166 | data.position(0).get(pixelData, 0, l * ret * 3);
167 |
168 | frame = new VideoFrame(
169 | source.getTimestamp() - source.getPosition() + newOutputPositionInSeconds, // adjust for realworld ts
170 | newOutputPositionInSeconds,
171 | outputFrameDuration,
172 | pixelFormat,
173 | l,
174 | ret,
175 | pixelData
176 | );
177 | } else {
178 | frame = source;
179 | }
180 |
181 | lastFrame = frame;
182 | videoFrames.add(frame);
183 | count += videoFrames.size();
184 |
185 | return videoFrames;
186 | }
187 |
188 | public VideoFormat getInputFormat() {
189 | return inputFormat;
190 | }
191 |
192 | public VideoFormat getOutputFormat() {
193 | return outputFormat;
194 | }
195 |
196 | @Override
197 | public void close() {
198 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegVideoRescaleFilter.close() called");
199 |
200 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "sws_freeContext(sws)...");
201 | swscale.sws_freeContext(sws);
202 |
203 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "av_free(inputPicture)...");
204 | avutil.av_frame_free(inputFrame);
205 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "av_frame_free(outputFrame)...");
206 | avutil.av_frame_free(outputFrame);
207 |
208 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegVideoRescaleFilter.close() completed");
209 | }
210 | }
211 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/filter/video/VideoFilter.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.filter.video;
2 |
3 | import com.github.manevolent.ffmpeg4j.VideoFrame;
4 | import com.github.manevolent.ffmpeg4j.filter.MediaFilter;
5 |
6 | public abstract class VideoFilter extends MediaFilter {
7 |
8 | }
9 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/filter/video/VideoFilterChain.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.filter.video;
2 |
3 | import com.github.manevolent.ffmpeg4j.VideoFrame;
4 | import com.github.manevolent.ffmpeg4j.filter.MediaFilter;
5 | import com.github.manevolent.ffmpeg4j.filter.MediaFilterChain;
6 |
7 | import java.util.Collection;
8 |
9 | public class VideoFilterChain extends MediaFilterChain {
10 | public VideoFilterChain(Collection> mediaFilters) {
11 | super(mediaFilters);
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/filter/video/VideoFilterNone.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.filter.video;
2 |
3 | import com.github.manevolent.ffmpeg4j.VideoFrame;
4 |
5 | import java.util.Collection;
6 | import java.util.Collections;
7 |
8 | public class VideoFilterNone extends VideoFilter {
9 | @Override
10 | public Collection apply(VideoFrame source) {
11 | return Collections.singleton(source);
12 | }
13 |
14 | @Override
15 | public void close() throws Exception {
16 |
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/math/Rational.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.math;
2 |
3 | import org.bytedeco.ffmpeg.avutil.*;
4 |
5 | public final class Rational {
6 |
7 | private long num, denom;
8 |
9 | public Rational(long num, long denom) {
10 | this.num = num; this.denom = denom;
11 | }
12 |
13 | public static Rational fromAVRational(AVRational avRational) {
14 | return new Rational(
15 | (long)avRational.num() & 0x00000000ffffffffL,
16 | (long)avRational.den() & 0x00000000ffffffffL
17 | );
18 | }
19 |
20 | public double toDouble() {
21 | return (double)num / (double)denom;
22 | }
23 |
24 | public long getNumerator() {
25 | return num;
26 | }
27 |
28 | public long getDenominator() {
29 | return denom;
30 | }
31 |
32 | public String toString() {
33 | return String.valueOf(num) + "/" + String.valueOf(denom);
34 | }
35 |
36 | public static Rational toRational(double number){
37 | return toRational(number, 8);
38 | }
39 |
40 | public static Rational toRational(double number, int largestRightOfDecimal) {
41 |
42 | long sign = 1;
43 | if (number < 0) {
44 | number = -number;
45 | sign = -1;
46 | }
47 |
48 | final long SECOND_MULTIPLIER_MAX = (long) Math.pow(10, largestRightOfDecimal - 1);
49 | final long FIRST_MULTIPLIER_MAX = SECOND_MULTIPLIER_MAX * 10L;
50 | final double ERROR = Math.pow(10, -largestRightOfDecimal - 1);
51 | long firstMultiplier = 1;
52 | long secondMultiplier = 1;
53 | boolean notIntOrIrrational = false;
54 | long truncatedNumber = (long) number;
55 | Rational rationalNumber = new Rational((long) (sign * number * FIRST_MULTIPLIER_MAX), FIRST_MULTIPLIER_MAX);
56 |
57 | double error = number - truncatedNumber;
58 | while ((error >= ERROR) && (firstMultiplier <= FIRST_MULTIPLIER_MAX)) {
59 | secondMultiplier = 1;
60 | firstMultiplier *= 10;
61 | while ((secondMultiplier <= SECOND_MULTIPLIER_MAX) && (secondMultiplier < firstMultiplier)) {
62 | double difference = (number * firstMultiplier) - (number * secondMultiplier);
63 | truncatedNumber = (long) difference;
64 | error = difference - truncatedNumber;
65 | if (error < ERROR) {
66 | notIntOrIrrational = true;
67 | break;
68 | }
69 | secondMultiplier *= 10;
70 | }
71 | }
72 | if (notIntOrIrrational) {
73 | rationalNumber = new Rational(sign * truncatedNumber, firstMultiplier - secondMultiplier);
74 | }
75 | return rationalNumber;
76 | }
77 | }
78 |
79 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/output/AudioTargetSubstream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.output;
2 |
3 | import com.github.manevolent.ffmpeg4j.AudioFrame;
4 |
5 | public abstract class AudioTargetSubstream extends MediaTargetSubstream {
6 | }
7 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/output/FFmpegAudioTargetSubstream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.output;
2 |
3 | import com.github.manevolent.ffmpeg4j.*;
4 | import com.github.manevolent.ffmpeg4j.stream.output.FFmpegTargetStream;
5 | import org.bytedeco.ffmpeg.avcodec.*;
6 | import org.bytedeco.ffmpeg.avformat.*;
7 | import org.bytedeco.ffmpeg.avutil.*;
8 | import org.bytedeco.ffmpeg.global.*;
9 | import org.bytedeco.ffmpeg.swresample.*;
10 | import org.bytedeco.javacpp.*;
11 |
12 | import java.io.EOFException;
13 | import java.io.IOException;
14 | import java.nio.ByteBuffer;
15 | import java.nio.ByteOrder;
16 |
17 | public class FFmpegAudioTargetSubstream
18 | extends AudioTargetSubstream
19 | implements FFmpegEncoderContext
20 | {
21 | private static final int SAMPLE_FORMAT = avutil.AV_SAMPLE_FMT_FLT;
22 |
23 | private final FFmpegTargetStream targetStream;
24 | private final AVStream stream;
25 | private final AVCodecContext codecContext;
26 | private final AVPacket packet;
27 |
28 | //swresample
29 | private volatile SwrContext swrContext;
30 | private final int outputBytesPerSample, inputBytesPerSample, inputPlanes, outputPlanes;
31 | private ByteBuffer presampleOutputBuffer = ByteBuffer.allocate(0);
32 | private final BytePointer[] samples_in;
33 | private final BytePointer[] samples_out;
34 | private final PointerPointer samples_in_ptr;
35 | private final PointerPointer samples_out_ptr;
36 |
37 | private int sampleBufferPosition = 0;
38 | private final float[] sampleBuffer;
39 | private final AVRational nativeTimeBase;
40 | private volatile long writtenSamples = 0L;
41 |
42 | public FFmpegAudioTargetSubstream(FFmpegTargetStream targetStream, AVStream stream, AVCodecContext codecContext) throws FFmpegException {
43 | this.packet = new AVPacket();
44 | this.targetStream = targetStream;
45 | this.stream = stream;
46 | this.codecContext = codecContext;
47 |
48 | // Configure input parameters
49 | int ffmpegInputFormat = SAMPLE_FORMAT;
50 | int inputChannels = stream.codecpar().channels();
51 | inputPlanes = avutil.av_sample_fmt_is_planar(ffmpegInputFormat) != 0 ? inputChannels : 1;
52 | int inputSampleRate = stream.codecpar().sample_rate();
53 | inputBytesPerSample = avutil.av_get_bytes_per_sample(ffmpegInputFormat);
54 | int inputFrameSize = (16*1024) * inputPlanes * inputBytesPerSample;
55 |
56 | // Configure output parameters
57 | int ffmpegOutputFormat = stream.codecpar().format();
58 | int outputChannels = stream.codecpar().channels();
59 | outputPlanes = avutil.av_sample_fmt_is_planar(ffmpegOutputFormat) != 0 ? outputChannels : 1;
60 | int outputSampleRate = stream.codecpar().sample_rate();
61 | outputBytesPerSample = avutil.av_get_bytes_per_sample(ffmpegOutputFormat);
62 | int outputFrameSize = avutil.av_samples_get_buffer_size(
63 | (IntPointer) null,
64 | outputChannels,
65 | inputFrameSize, // Input frame size neccessary
66 | ffmpegOutputFormat,
67 | 1
68 | ) / outputPlanes;
69 |
70 | swrContext = swresample.swr_alloc_set_opts(
71 | null,
72 |
73 | // Output configuration
74 | stream.codecpar().channel_layout(),
75 | ffmpegOutputFormat,
76 | stream.codecpar().sample_rate(),
77 |
78 | // Input configuration
79 | stream.codecpar().channel_layout(),
80 | ffmpegInputFormat,
81 | stream.codecpar().sample_rate(),
82 |
83 | 0, null
84 | );
85 |
86 | // Force resampler to always resample regardless of the sample rates.
87 | // This forces the output to always be floats.
88 | avutil.av_opt_set_int(swrContext, "swr_flags", 1, 0);
89 |
90 | FFmpegError.checkError("swr_init", swresample.swr_init(swrContext));
91 |
92 | // Create input buffers
93 | samples_in = new BytePointer[inputPlanes];
94 | for (int i = 0; i < inputPlanes; i++) {
95 | samples_in[i] = new BytePointer(avutil.av_malloc(inputFrameSize)).capacity(inputFrameSize);
96 | }
97 |
98 | // Create output buffers
99 | samples_out = new BytePointer[outputPlanes];
100 | for (int i = 0; i < outputPlanes; i++) {
101 | samples_out[i] = new BytePointer(avutil.av_malloc(outputFrameSize)).capacity(outputFrameSize);
102 | }
103 |
104 | // Initialize input and output sample buffers;
105 | samples_in_ptr = new PointerPointer(inputPlanes);
106 | samples_out_ptr = new PointerPointer(outputPlanes);
107 |
108 | for (int i = 0; i < samples_out.length; i++)
109 | samples_out_ptr.put(i, samples_out[i]);
110 |
111 | for (int i = 0; i < samples_in.length; i++)
112 | samples_in_ptr.put(i, samples_in[i]);
113 |
114 | this.nativeTimeBase = new AVRational();
115 | nativeTimeBase.num(1);
116 | nativeTimeBase.den(outputSampleRate);
117 |
118 | // Smp buffer is 2 frames long, always
119 | this.sampleBuffer = new float[(stream.codecpar().frame_size() * outputChannels) * 2];
120 | }
121 |
122 | @Override
123 | public MediaType getType() {
124 | return MediaType.AUDIO;
125 | }
126 |
127 | /**
128 | * Writes a single (or partial) frame to the stream. Cannot ingest more than one frame.
129 | * @param samples Buffer of samples to write, can be any length >= len
130 | * @param len Samples per channel (may need to be equal to frame_size). Sending 0 will flush the stream. Cannot be
131 | * > frame_size(). Can be 0. Must be positive.
132 | * @throws FFmpegException
133 | */
134 | private int writeFrame(float[] samples, int len)
135 | throws FFmpegException, EOFException {
136 | if (len < 0)
137 | throw new FFmpegException(new ArrayIndexOutOfBoundsException(len));
138 |
139 | if (len > getCodecContext().frame_size())
140 | throw new FFmpegException("invalid frame size: " + len + " > " + getCodecContext().frame_size());
141 |
142 | int ffmpegNativeLength = len * getCodecContext().channels() * inputBytesPerSample;
143 | if (presampleOutputBuffer.capacity() < ffmpegNativeLength) {
144 | presampleOutputBuffer = ByteBuffer.allocate(ffmpegNativeLength);
145 | presampleOutputBuffer.order(ByteOrder.nativeOrder());
146 | }
147 |
148 | presampleOutputBuffer.position(0);
149 |
150 | // Clip audio between -1F,1F
151 | for (int i = 0; i < len; i ++)
152 | samples[i] = Math.min(1F, Math.max(-1F, samples[i]));
153 |
154 | // Obtain a 'samplesToRead' worth (chunk) of frames from the sample buffer
155 | presampleOutputBuffer.asFloatBuffer().put(samples, 0, len * stream.codecpar().channels());
156 |
157 | samples_in[0].position(0).put(presampleOutputBuffer.array(), 0, ffmpegNativeLength);
158 |
159 | int outputCount =
160 | (int) Math.min(
161 | (samples_out[0].limit() - samples_out[0].position()) /
162 | (stream.codecpar().channels() * outputBytesPerSample),
163 | Integer.MAX_VALUE
164 | );
165 |
166 | //Returns number of samples output per channel, negative value on error
167 | int ret = swresample.swr_convert(
168 | swrContext,
169 | samples_out_ptr, outputCount,
170 | samples_in_ptr, len
171 | );
172 |
173 | // Check return values
174 | FFmpegError.checkError("swr_convert", ret);
175 |
176 | if (ret == 0) return 0;
177 |
178 | AVFrame frame = avutil.av_frame_alloc();
179 | if (frame == null) throw new NullPointerException("av_frame_alloc");
180 |
181 | try {
182 | frame.nb_samples(ret);
183 | frame.format(stream.codecpar().format());
184 | frame.channels(stream.codecpar().channels());
185 | frame.channel_layout(stream.codecpar().channel_layout());
186 | frame.pts(avutil.av_rescale_q(writtenSamples, nativeTimeBase, getCodecContext().time_base()));
187 |
188 | for (int plane = 0; plane < outputPlanes; plane++)
189 | frame.data(plane, samples_out[plane]);
190 |
191 | encodeFrame(frame);
192 | } finally {
193 | avutil.av_frame_free(frame);
194 | }
195 |
196 | writtenSamples += ret;
197 | setPosition((double) writtenSamples / (double) stream.codecpar().sample_rate());
198 |
199 | return ret;
200 | }
201 |
202 | /**
203 | * Drains the internal buffer of samples. This method typically drains full frames instead of partial frames,
204 | * as some (most) codecs do not support "variable" frame sizes, such as OPUS, which supports 960 samples per frame
205 | * typically. In this case, we want to flush/drain down in chunks.
206 | * @param flush Overrides the chunking code -- this will flush down ANY variable frame size, it set to true by
207 | * the flush() function when bookending the stream. Unwise to do this elsewhere.
208 | * @return
209 | * @throws FFmpegException
210 | * @throws EOFException
211 | */
212 | private int drainInternalBuffer(boolean flush) throws FFmpegException, EOFException {
213 | int totalFrameSize = stream.codecpar().frame_size() * stream.codecpar().channels();
214 | int minimumFrameSize = flush ? 1 : totalFrameSize;
215 | int written = 0, encoded = 0, toWrite;
216 |
217 | int channels = stream.codecpar().channels();
218 | if (channels <= 0) throw new IllegalArgumentException("channels <= 0: " + channels);
219 |
220 | while (sampleBufferPosition >= minimumFrameSize) {
221 | toWrite = flush ?
222 | Math.min(totalFrameSize, sampleBufferPosition) :
223 | totalFrameSize;
224 |
225 | encoded += writeFrame(sampleBuffer, toWrite / channels);
226 |
227 | System.arraycopy(
228 | sampleBuffer, toWrite,
229 | sampleBuffer, 0, sampleBufferPosition - toWrite
230 | );
231 |
232 | sampleBufferPosition -= toWrite;
233 | written += toWrite;
234 | }
235 |
236 | return written;
237 | }
238 |
239 | /**
240 | * Flushes all available internal samples from the buffer, effectively emptying any waiting data.
241 | * @throws IOException
242 | */
243 | public void flush() throws IOException {
244 | try {
245 | drainInternalBuffer(true);
246 | writeFrame(new float[0], 0);
247 | } catch (FFmpegException e) {
248 | throw new IOException(e);
249 | }
250 | }
251 |
252 | /**
253 | * Frontend encoder. This accepts any arbitrarily sized audio frame (0-n samples) and will automatically drain
254 | * it down the stream correctly for you.
255 | * @param o Audio Frame object to encode into sub-frames and, subsequently, packets.
256 | * @throws IOException
257 | */
258 | @Override
259 | public void write(AudioFrame o) throws IOException {
260 | int size = o.getLength();
261 |
262 | if (o == null || size <= 0) {
263 | try {
264 | drainInternalBuffer(false);
265 | } catch (FFmpegException e) {
266 | throw new IOException(e);
267 | }
268 | return;
269 | }
270 |
271 | int position = 0;
272 | int read;
273 |
274 | while (position < size) {
275 | // Fill sample buffer with remaining samples
276 | read = Math.min(size - position, sampleBuffer.length - sampleBufferPosition);
277 | if (read <= 0) throw new ArrayIndexOutOfBoundsException(read);
278 |
279 | System.arraycopy(o.getSamples(), position, sampleBuffer, sampleBufferPosition, read);
280 | sampleBufferPosition += read;
281 | position += read;
282 |
283 | // Drain the internal buffer
284 | try {
285 | drainInternalBuffer(false);
286 | } catch (FFmpegException e) {
287 | throw new IOException(e);
288 | }
289 |
290 | }
291 | }
292 |
293 | @Override
294 | public void close() throws Exception {
295 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegAudioTargetSubstream.close() called");
296 |
297 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "avcodec_close(stream.codec())...");
298 | avcodec.avcodec_close(getCodecContext());
299 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "av_free(stream.codec())...");
300 | avutil.av_free(getCodecContext());
301 |
302 | // see: https://ffmpeg.org/doxygen/2.1/doc_2examples_2resampling_audio_8c-example.html
303 | for (int i = 0; i < samples_in.length; i++) {
304 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "deallocating samples_in[" + i + "])...");
305 | avutil.av_free(samples_in[i]);
306 | samples_in[i].deallocate();
307 | samples_in[i] = null;
308 | }
309 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "deallocating samples_in_ptr...");
310 | samples_in_ptr.deallocate();
311 |
312 | // see: https://ffmpeg.org/doxygen/2.1/doc_2examples_2resampling_audio_8c-example.html
313 | for (int i = 0; i < samples_out.length; i++) {
314 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "deallocating samples_out[" + i + "])...");
315 | avutil.av_free(samples_out[i]);
316 | samples_out[i].deallocate();
317 | samples_out[i] = null;
318 | }
319 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "deallocating samples_out_ptr...");
320 | samples_out_ptr.deallocate();
321 |
322 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "nativeTimeBase.deallocate()...");
323 | nativeTimeBase.deallocate();
324 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "swr_free(swrContext)...");
325 | swresample.swr_free(swrContext);
326 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "av_free_packet(packet)...");
327 | avcodec.av_packet_free(packet);
328 |
329 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegAudioTargetSubstream.close() completed");
330 | }
331 |
332 | /**
333 | * Called by the encoder context -- writes an audio packet out to the parent target stream.
334 | * @param packet Encoded packet to write
335 | * @throws FFmpegException
336 | * @throws EOFException
337 | */
338 | @Override
339 | public void writePacket(AVPacket packet) throws FFmpegException, EOFException {
340 | packet.pts(writtenSamples);
341 | avcodec.av_packet_rescale_ts(packet, getCodecContext().time_base(), stream.time_base());
342 | packet.stream_index(stream.index());
343 | getTargetStream().writePacket(packet);
344 | }
345 |
346 | @Override
347 | public AVCodecContext getCodecContext() {
348 | return codecContext;
349 | }
350 |
351 | @Override
352 | public AVStream getStream() {
353 | return getStream();
354 | }
355 |
356 | @Override
357 | public FFmpegTargetStream getTargetStream() {
358 | return targetStream;
359 | }
360 | }
361 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/output/FFmpegEncoderContext.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.output;
2 |
3 | import com.github.manevolent.ffmpeg4j.FFmpegStreamContext;
4 | import com.github.manevolent.ffmpeg4j.FFmpegError;
5 | import com.github.manevolent.ffmpeg4j.FFmpegException;
6 | import com.github.manevolent.ffmpeg4j.stream.output.FFmpegTargetStream;
7 | import org.bytedeco.ffmpeg.avcodec.*;
8 | import org.bytedeco.ffmpeg.avutil.*;
9 | import org.bytedeco.ffmpeg.global.*;
10 |
11 | import java.io.EOFException;
12 |
13 | public interface FFmpegEncoderContext extends FFmpegStreamContext {
14 |
15 | void writePacket(AVPacket packet) throws FFmpegException, EOFException;
16 |
17 | FFmpegTargetStream getTargetStream();
18 |
19 | /**
20 | * Processes encoded frames made available by encodeFrame() in their packetized state and flushes them on to
21 | * writePacket(), which is typically fulfilled by a superclass
22 | * @return Number of packets written.
23 | * @throws FFmpegException
24 | * @throws EOFException
25 | */
26 | default int processAvailablePackets() throws FFmpegException, EOFException {
27 | int ret = 0;
28 |
29 | int packets_finished = 0;
30 |
31 | while (ret >= 0) {
32 | AVPacket packet = avcodec.av_packet_alloc();
33 | if (packet == null) throw new NullPointerException("av_packet_alloc()");
34 |
35 | try {
36 | ret = avcodec.avcodec_receive_packet(getCodecContext(), packet);
37 | if (ret == avutil.AVERROR_EAGAIN()) break; // output is not available right now - user must try to send new input
38 |
39 | // Check for misc. errors:
40 | FFmpegError.checkError("avcodec_receive_packet", ret);
41 |
42 | writePacket(packet);
43 |
44 | // If we made it this far:
45 | packets_finished++;
46 | } finally {
47 | avcodec.av_packet_free(packet);
48 | }
49 | }
50 |
51 | return packets_finished;
52 | }
53 |
54 | /**
55 | * Encodes a raw frame into a series of packets.
56 | * @param frame Frame to encode.
57 | * @return Number of packets made available.
58 | * @throws FFmpegException
59 | * @throws EOFException
60 | */
61 | default int encodeFrame(AVFrame frame) throws FFmpegException, EOFException {
62 | int ret = -11, packet_finished = 0;
63 |
64 | while (ret == -11) {
65 | ret = avcodec.avcodec_send_frame(getCodecContext(), frame);
66 |
67 | if (ret != -11)
68 | FFmpegError.checkError("avcodec_send_frame", ret);
69 |
70 | packet_finished += processAvailablePackets();
71 | }
72 |
73 | return packet_finished;
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/output/FFmpegVideoTargetSubstream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.output;
2 |
3 | import com.github.manevolent.ffmpeg4j.FFmpegError;
4 | import com.github.manevolent.ffmpeg4j.FFmpegException;
5 | import com.github.manevolent.ffmpeg4j.Logging;
6 | import com.github.manevolent.ffmpeg4j.VideoFrame;
7 | import com.github.manevolent.ffmpeg4j.math.Rational;
8 | import com.github.manevolent.ffmpeg4j.stream.output.FFmpegTargetStream;
9 |
10 | import org.bytedeco.ffmpeg.global.*;
11 | import org.bytedeco.javacpp.*;
12 | import org.bytedeco.ffmpeg.avcodec.*;
13 | import org.bytedeco.ffmpeg.avformat.*;
14 | import org.bytedeco.ffmpeg.avutil.*;
15 | import org.bytedeco.ffmpeg.swscale.*;
16 |
17 | import java.io.EOFException;
18 | import java.io.IOException;
19 |
20 | public class FFmpegVideoTargetSubstream
21 | extends VideoTargetSubstream
22 | implements FFmpegEncoderContext {
23 | private final FFmpegTargetStream targetStream;
24 | private final AVStream stream;
25 | private final AVCodecContext codecContext;
26 | private final Rational timeBase;
27 |
28 | // sws stuff
29 | private final BytePointer inputBuffer;
30 | private final BytePointer outputBuffer;
31 | private final AVFrame inputFrame;
32 | private final AVFrame outputFrame;
33 | private final SwsContext sws;
34 |
35 | private final int pixelFormat; // input pixel format
36 |
37 | private final double frameRate;
38 |
39 | private volatile long writtenFrames = 0L;
40 |
41 | public FFmpegVideoTargetSubstream(FFmpegTargetStream targetStream, AVStream stream, AVCodecContext codecContext, double fps)
42 | throws FFmpegException {
43 | this.targetStream = targetStream;
44 | this.stream = stream;
45 | this.codecContext = codecContext;
46 |
47 | this.timeBase = Rational.fromAVRational(stream.time_base());
48 | this.frameRate = fps;
49 |
50 | this.pixelFormat = targetStream.getPixelFormat();
51 |
52 | // SWScale
53 | outputFrame = avutil.av_frame_alloc();
54 | if (outputFrame == null) throw new RuntimeException("failed to allocate output frame");
55 | inputFrame = avutil.av_frame_alloc();
56 | if (inputFrame == null) throw new RuntimeException("failed to allocate input frame");
57 |
58 | int numBytesInput = avutil.av_image_get_buffer_size(
59 | pixelFormat,
60 | stream.codecpar().width(),
61 | stream.codecpar().height(),
62 | 1 // used by some other methods in ffmpeg
63 | );
64 | inputBuffer = new BytePointer(avutil.av_malloc(numBytesInput));
65 |
66 | int numBytesOutput = avutil.av_image_get_buffer_size(
67 | stream.codecpar().format(),
68 | stream.codecpar().width(),
69 | stream.codecpar().height(),
70 | 1
71 | );
72 | outputBuffer = new BytePointer(avutil.av_malloc(numBytesOutput));
73 |
74 | /*
75 | http://stackoverflow.com/questions/29743648/which-flag-to-use-for-better-quality-with-sws-scale
76 |
77 | The RGB24 to YUV420 conversation itself is lossy. The scaling algorithm is probably used in downscaling
78 | the color information. I'd say the quality is: point << bilinear < bicubic < lanczos/sinc/spline I don't
79 | really know the others. Under rare circumstances sinc is the ideal scaler and lossless, but those
80 | conditions are usually not met. Are you also scaling the video? Otherwise I'd go for bicubic.
81 | */
82 |
83 | sws = swscale.sws_getContext(
84 | stream.codecpar().width(), stream.codecpar().height(), pixelFormat, // source
85 | stream.codecpar().width(), stream.codecpar().height(), stream.codecpar().format(), // destination
86 | swscale.SWS_BILINEAR, // flags (see above)
87 | null, null, (DoublePointer) null // filters, params
88 | );
89 |
90 | // Assign appropriate parts of buffer to image planes in pFrameRGB
91 | // See: https://mail.gnome.org/archives/commits-list/2016-February/msg05531.html
92 | FFmpegError.checkError("av_image_fill_arrays", avutil.av_image_fill_arrays(
93 | inputFrame.data(),
94 | inputFrame.linesize(),
95 | inputBuffer,
96 | pixelFormat,
97 | stream.codecpar().width(),
98 | stream.codecpar().height(),
99 | 1
100 | ));
101 |
102 | FFmpegError.checkError("av_image_fill_arrays", avutil.av_image_fill_arrays(
103 | outputFrame.data(),
104 | outputFrame.linesize(),
105 | outputBuffer,
106 | stream.codecpar().format(),
107 | stream.codecpar().width(),
108 | stream.codecpar().height(),
109 | 1
110 | ));
111 | }
112 |
113 | @Override
114 | public void write(VideoFrame o) throws IOException {
115 | if (o.getFormat() != pixelFormat)
116 | throw new IOException(
117 | new FFmpegException("frame has mismatched pixel format: " +
118 | "expected " + o.getFormat() + " != " + pixelFormat)
119 | );
120 |
121 | inputFrame.data(0).put(o.getData());
122 | //inputPicture.linesize(0, o.getWidth());
123 |
124 | int ret = swscale.sws_scale(
125 | sws, // the scaling context previously created with sws_getContext()
126 | inputFrame.data(), // the array containing the pointers to the planes of the source slice
127 | inputFrame.linesize(), // the array containing the strides for each plane of the source image
128 | 0, // the position in the source image of the slice to process, that is the number (counted starting from zero) in the image of the first row of the slice
129 | stream.codecpar().height(), // the height of the source slice, that is the number of rows in the slice
130 | outputFrame.data(), // the array containing the pointers to the planes of the destination image
131 | outputFrame.linesize() // the array containing the strides for each plane of the destination image
132 | );
133 |
134 | try {
135 | FFmpegError.checkError("sws_scale", ret);
136 | } catch (FFmpegException e) {
137 | throw new RuntimeException(e);
138 | }
139 |
140 | //outputFrame.pts(avutil.av_rescale_q(writtenFrames, nativeTimeBase, stream.codec().time_base()));
141 |
142 | //outputFrame.linesize(0, o.getWidth());
143 | outputFrame.width(o.getWidth());
144 | outputFrame.height(o.getHeight());
145 | outputFrame.format(stream.codecpar().format());
146 |
147 | try {
148 | outputFrame.pts(writtenFrames);
149 | encodeFrame(outputFrame);
150 |
151 | writtenFrames ++;
152 | setPosition((double) writtenFrames / (double) frameRate);
153 | } catch (FFmpegException e) {
154 | throw new IOException(e);
155 | }
156 | }
157 |
158 | // I don't think we have anything to flush here
159 | @Override
160 | public void flush() throws IOException {
161 | // Do nothing?
162 | }
163 |
164 | @Override
165 | public void close() throws Exception {
166 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegVideoTargetSubstream.close() called");
167 |
168 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "sws_freeContext(sws)...");
169 | swscale.sws_freeContext(sws);
170 |
171 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "av_frame_free(inputFrame)...");
172 | avutil.av_frame_free(inputFrame);
173 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "av_frame_free(outputFrame)...");
174 | avutil.av_frame_free(outputFrame);
175 |
176 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "avcodec_close(codecContext))...");
177 | avcodec.avcodec_close(codecContext);
178 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "av_free(codecContext)...");
179 | avutil.av_free(codecContext);
180 |
181 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegVideoTargetSubstream.close() completed");
182 | }
183 |
184 | @Override
185 | public void writePacket(AVPacket packet) throws FFmpegException, EOFException {
186 | if (packet.pts() != avutil.AV_NOPTS_VALUE)
187 | avcodec.av_packet_rescale_ts(packet, codecContext.time_base(), stream.time_base());
188 |
189 | packet.stream_index(stream.index());
190 |
191 | getTargetStream().writePacket(packet);
192 | }
193 |
194 | @Override
195 | public AVCodecContext getCodecContext() {
196 | return codecContext;
197 | }
198 |
199 | @Override
200 | public AVStream getStream() {
201 | return stream;
202 | }
203 |
204 | @Override
205 | public FFmpegTargetStream getTargetStream() {
206 | return targetStream;
207 | }
208 | }
209 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/output/MediaTargetSubstream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.output;
2 |
3 | import com.github.manevolent.ffmpeg4j.MediaStream;
4 | import com.github.manevolent.ffmpeg4j.MediaType;
5 |
6 | import java.io.IOException;
7 |
8 | public abstract class MediaTargetSubstream extends MediaStream {
9 | public abstract void write(T o) throws IOException;
10 | public abstract void flush() throws IOException;
11 | public abstract MediaType getType();
12 | }
13 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/output/VideoTargetSubstream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.output;
2 |
3 | import com.github.manevolent.ffmpeg4j.MediaType;
4 | import com.github.manevolent.ffmpeg4j.VideoFrame;
5 |
6 | public abstract class VideoTargetSubstream extends MediaTargetSubstream {
7 | @Override
8 | public MediaType getType() {
9 | return MediaType.VIDEO;
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/source/AudioSourceSubstream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.source;
2 |
3 | import com.github.manevolent.ffmpeg4j.AudioFormat;
4 | import com.github.manevolent.ffmpeg4j.AudioFrame;
5 | import com.github.manevolent.ffmpeg4j.MediaType;
6 | import com.github.manevolent.ffmpeg4j.stream.source.SourceStream;
7 |
8 | public abstract class AudioSourceSubstream extends MediaSourceSubstream {
9 | public AudioSourceSubstream(SourceStream parent) {
10 | super(parent, MediaType.AUDIO);
11 | }
12 |
13 | public abstract AudioFormat getFormat();
14 |
15 | @Override
16 | public String toString() {
17 | return getFormat().toString() + " " + (getBitRate() / 1000) + "Kbps";
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/source/FFmpegAudioSourceSubstream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.source;
2 |
3 | import com.github.manevolent.ffmpeg4j.*;
4 | import com.github.manevolent.ffmpeg4j.math.Rational;
5 | import com.github.manevolent.ffmpeg4j.stream.source.FFmpegSourceStream;
6 | import org.bytedeco.ffmpeg.avcodec.*;
7 | import org.bytedeco.ffmpeg.avformat.*;
8 | import org.bytedeco.ffmpeg.avutil.*;
9 | import org.bytedeco.ffmpeg.global.*;
10 | import org.bytedeco.ffmpeg.swresample.*;
11 | import org.bytedeco.javacpp.*;
12 |
13 | import java.io.ByteArrayInputStream;
14 | import java.io.IOException;
15 | import java.nio.ByteBuffer;
16 | import java.nio.ByteOrder;
17 | import java.nio.FloatBuffer;
18 |
public class FFmpegAudioSourceSubstream
        extends AudioSourceSubstream
        implements FFmpegDecoderContext {
    // All decoded audio is resampled to interleaved 32-bit floats before being
    // handed to consumers as AudioFrame objects.
    private static final int OUTPUT_FORMAT = avutil.AV_SAMPLE_FMT_FLT;

    private final FFmpegSourceStream parentStream;
    private final AudioFormat audioFormat;

    // FFmpeg native stuff
    private final AVCodecContext codecContext;
    private final SwrContext swrContext;
    private final AVStream stream;

    private final int outputSampleRate;
    private final int outputChannels;
    private final int outputBytesPerSample;
    // Capacity (in samples per channel) of the resampler output buffers.
    private final int audio_input_frame_size;

    // Native output plane buffers for swr_convert, and the pointer array wrapping them.
    private final BytePointer[] samples_out;
    private final PointerPointer samples_out_ptr;

    // Guards against double-close; see close().
    private boolean closed = false;

    // Reused managed-heap copy of the resampled bytes (grown on demand).
    private volatile byte[] recvBuffer;
    // Running count of samples (per channel) decoded so far.
    private volatile long totalDecoded = 0L;

    /**
     * Creates an audio source substream which decodes packets from the parent stream
     * and resamples every frame to interleaved floats.
     *
     * @param parentStream Parent source stream supplying packets.
     * @param stream FFmpeg stream being decoded.
     * @param codecContext Opened decoder context for the stream.
     * @throws FFmpegException if the resampler cannot be configured or initialized.
     */
    public FFmpegAudioSourceSubstream(FFmpegSourceStream parentStream, AVStream stream, AVCodecContext codecContext)
            throws FFmpegException {
        super(parentStream);

        this.stream = stream;
        this.parentStream = parentStream;
        this.codecContext = codecContext;

        int channels = stream.codecpar().channels();

        // Some containers report 0 channels; fall back to deriving the count
        // from the channel layout mask.
        if (channels <= 0)
            channels = avutil.av_get_channel_layout_nb_channels(stream.codecpar().channel_layout());

        if (channels <= 0)
            throw new IllegalArgumentException("channel count not discernible");

        this.outputChannels = channels;

        this.outputBytesPerSample = avutil.av_get_bytes_per_sample(OUTPUT_FORMAT);
        this.outputSampleRate = stream.codecpar().sample_rate();
        // Fixed overall budget of 256 KiB worth of samples, split across channels.
        this.audio_input_frame_size = 256 * 1024 / outputChannels;

        // Note: only the sample format changes between input and output; the
        // layout and rate are passed through unchanged.
        swrContext = swresample.swr_alloc_set_opts(
                null,

                // Output configuration
                stream.codecpar().channel_layout(),
                OUTPUT_FORMAT,
                stream.codecpar().sample_rate(),

                // Input configuration
                stream.codecpar().channel_layout(),
                stream.codecpar().format(),
                stream.codecpar().sample_rate(),

                0, null
        );

        // Force resampling even when rates match, so output is always float.
        FFmpegError.checkError("av_opt_set_int", avutil.av_opt_set_int(swrContext, "swr_flags", 1, 0));
        FFmpegError.checkError("swr_init", swresample.swr_init(swrContext));

        int data_size = avutil.av_samples_get_buffer_size(
                (IntPointer) null,
                outputChannels,
                audio_input_frame_size,
                OUTPUT_FORMAT,
                1 // buffer size alignment (0 = default, 1 = no alignment)
        );

        // AV_SAMPLE_FMT_FLT is interleaved, so in practice this is one plane.
        samples_out = new BytePointer[avutil.av_sample_fmt_is_planar(OUTPUT_FORMAT) == 1 ? outputChannels : 1];
        for (int i = 0; i < samples_out.length; i++)
            samples_out[i] = new BytePointer(avutil.av_malloc(data_size)).capacity(data_size);
        samples_out_ptr = new PointerPointer(AVFrame.AV_NUM_DATA_POINTERS);

        for (int i = 0; i < samples_out.length; i++)
            samples_out_ptr.put(i, samples_out[i]);

        this.audioFormat = new AudioFormat(outputSampleRate, outputChannels, stream.codecpar().channel_layout());
    }

    @Override
    public int getBitRate() {
        return (int) stream.codecpar().bit_rate();
    }

    /**
     * Pulls one packet from the parent stream (which dispatches decoded frames
     * back into this substream via decode()).
     * @return false when the parent stream is exhausted.
     */
    @Override
    public boolean read() throws IOException {
        return parentStream.readPacket() != null;
    }

    @Override
    public AudioFormat getFormat() {
        return audioFormat;
    }

    @Override
    public AVCodecContext getCodecContext() {
        return codecContext;
    }

    @Override
    public AVStream getStream() {
        return stream;
    }

    /**
     * Decode callback: resamples a decoded frame to interleaved floats and
     * enqueues it as an AudioFrame with position/timestamp metadata.
     * @param frame Decoded native frame from the codec.
     * @throws FFmpegException on resample error.
     */
    @Override
    public void decode(AVFrame frame) throws FFmpegException {
        // Remaining capacity of the output plane, in samples per channel.
        int outputCount =
                (int)Math.min(
                        (samples_out[0].limit() - samples_out[0].position()) / (outputChannels * outputBytesPerSample),
                        Integer.MAX_VALUE
                );

        // Returns the number of samples converted per channel.
        int ret = FFmpegError.checkError("swr_convert", swresample.swr_convert(
                swrContext,
                samples_out_ptr, outputCount,
                frame.extended_data(), frame.nb_samples()
        ));

        if (ret == 0)
            return; // Do nothing.

        // Read unpacked sample buffers
        int bytesResampled = ret * outputBytesPerSample * outputChannels;
        if (recvBuffer == null || recvBuffer.length < bytesResampled)
            recvBuffer = new byte[bytesResampled];

        samples_out[0].position(0).get(recvBuffer);

        FloatBuffer buffer = ByteBuffer
                .wrap(recvBuffer)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();

        float[] floats = new float[ret * outputChannels];
        for (int i = 0; i < floats.length; i ++)
            floats[i] = buffer.get();

        // Add packet to queue
        totalDecoded += ret;
        // NOTE(review): pkt_dts/pkt_duration may be AV_NOPTS_VALUE for some
        // containers, which would produce a nonsense position here -- confirm
        // against the formats this library targets.
        double time = FFmpeg.timestampToSeconds(stream.time_base(), frame.pkt_duration());
        double position = FFmpeg.timestampToSeconds(stream.time_base(), frame.pkt_dts());
        setPosition(position);
        double timestamp = parentStream.getCreatedTime() + position;
        parentStream.updatePacketTimestamp(timestamp);
        put(new AudioFrame(
                timestamp,
                position,
                time,
                floats,
                getFormat()
        ));
    }

    /**
     * Releases the resample buffers, resampler, and decoder context.
     * Throws IllegalStateException if called more than once.
     */
    @Override
    public void close() throws Exception {
        synchronized (this) {
            if (closed) {
                throw new IllegalStateException("already closed");
            }

            Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegAudioSourceSubstream.close() called");

            // see: https://ffmpeg.org/doxygen/2.1/doc_2examples_2resampling_audio_8c-example.html
            for (int i = 0; i < samples_out.length; i++) {
                Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "deallocating samples_out[" + i + "])...");
                // av_free releases the native buffer; deallocate() then drops the
                // JavaCPP wrapper. NOTE(review): verify this pairing does not
                // double-free under JavaCPP's deallocator registration.
                avutil.av_free(samples_out[i]);
                samples_out[i].deallocate();
                samples_out[i] = null;
            }
            Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "deallocating samples_out_ptr...");
            samples_out_ptr.deallocate();

            Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "swr_free(swrContext)...");
            swresample.swr_free(swrContext);

            // Close a given AVCodecContext and free all the data associated with it (but not the AVCodecContext itself).
            // So free it afterwards.

            Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "avcodec_close(codecContext)...");
            avcodec.avcodec_close(codecContext);
            Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "av_free(codecContext)...");
            avutil.av_free(codecContext);

            closed = true;
            Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegAudioSourceSubstream.close() complete");
        }
    }

    // Ad-hoc native memory leak harness (debug leftover): repeatedly opens and
    // closes an empty input stream and prints the physical-memory delta.
    public static void main(String[] args) throws Exception {
        avutil.av_malloc(1);

        long phy_before_z = Pointer.physicalBytes();

        for (int z = 0; z < 5000; z ++) {
            FFmpegIO.openInputStream(new ByteArrayInputStream(new byte[0]), FFmpegIO.DEFAULT_BUFFER_SIZE).close();
        }

        System.err.println(Pointer.physicalBytes() - phy_before_z);

        try {
            Thread.sleep(100000L );
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}
232 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/source/FFmpegDecoderContext.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.source;
2 | import com.github.manevolent.ffmpeg4j.FFmpegError;
3 | import com.github.manevolent.ffmpeg4j.FFmpegException;
4 | import com.github.manevolent.ffmpeg4j.FFmpegStreamContext;
5 | import com.github.manevolent.ffmpeg4j.output.FFmpegEncoderContext;
6 | import org.bytedeco.ffmpeg.avcodec.*;
7 | import org.bytedeco.ffmpeg.avformat.AVStream;
8 | import org.bytedeco.ffmpeg.avutil.*;
9 | import org.bytedeco.ffmpeg.global.*;
10 |
11 | public interface FFmpegDecoderContext extends FFmpegStreamContext {
12 |
13 | /**
14 | * Finds if the decoder is currently decoding, or if it is instead discarding packets.
15 | * @return true if decoding is taking place, false otherwise.
16 | */
17 | boolean isDecoding();
18 |
19 | /**
20 | * Decode callback for the decoder.
21 | * @param frame Frame to decode.
22 | * @throws FFmpegException
23 | */
24 | void decode(AVFrame frame) throws FFmpegException;
25 |
26 | /**
27 | * Processes frames made available by decodePacket(). This calls decode(), which is typically fulfilled in a
28 | * superclass.
29 | * @return number of frames made available.
30 | * @throws FFmpegException
31 | */
32 | default int processAvailableFrames() throws FFmpegException {
33 | int ret = 0;
34 |
35 | int frames_finished = 0;
36 |
37 | AVFrame frame;
38 | while (ret >= 0) {
39 | frame = avutil.av_frame_alloc();
40 | if (frame == null)
41 | throw new NullPointerException("av_frame_alloc");
42 |
43 | try {
44 | ret = avcodec.avcodec_receive_frame(getCodecContext(), frame);
45 | if (ret == avutil.AVERROR_EAGAIN())
46 | break; // output is not available right now - user must try to send new input
47 |
48 | // Check for misc. errors:
49 | FFmpegError.checkError("avcodec_receive_frame", ret);
50 |
51 | try {
52 | decode(frame);
53 |
54 | // If we made it this far:
55 | frames_finished++;
56 | } finally {
57 | // Do nothing here
58 | }
59 | } finally {
60 | // VLC does this
61 | avutil.av_frame_free(frame);
62 |
63 | // Encourage JVM to de-allocate the object
64 | frame = null;
65 | }
66 | }
67 |
68 | return frames_finished;
69 | }
70 |
71 | /**
72 | * Decodes a given received packet. Typically the packet is received from the format stream (e.g. webm)
73 | * @param packet Packet to decode frames from.
74 | * @return Number of raw frames decoded.
75 | * @throws FFmpegException
76 | */
77 | default int decodePacket(AVPacket packet) throws FFmpegException {
78 | int ret = avutil.AVERROR_EAGAIN(), frames_finished = 0;
79 |
80 | while (ret == avutil.AVERROR_EAGAIN()) {
81 | ret = avcodec.avcodec_send_packet(getCodecContext(), packet);
82 | if (ret != avutil.AVERROR_EAGAIN())
83 | FFmpegError.checkError("avcodec_send_packet", ret);
84 |
85 | frames_finished += processAvailableFrames();
86 | }
87 |
88 | return frames_finished;
89 | }
90 | }
91 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/source/FFmpegVideoSourceSubstream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.source;
2 |
3 | import com.github.manevolent.ffmpeg4j.*;
4 | import com.github.manevolent.ffmpeg4j.math.Rational;
5 | import com.github.manevolent.ffmpeg4j.stream.source.FFmpegSourceStream;
6 | import org.bytedeco.ffmpeg.avcodec.*;
7 | import org.bytedeco.ffmpeg.avformat.*;
8 | import org.bytedeco.ffmpeg.avutil.*;
9 | import org.bytedeco.ffmpeg.global.*;
10 | import org.bytedeco.ffmpeg.swscale.*;
11 | import org.bytedeco.javacpp.*;
12 |
13 | import java.io.IOException;
14 |
15 | public class FFmpegVideoSourceSubstream
16 | extends VideoSourceSubstream
17 | implements FFmpegDecoderContext {
18 | // FFmpeg native stuff (for video conversion)
19 | private final AVCodecContext codecContext;
20 | private final BytePointer buffer;
21 | private final AVFrame pFrameOut;
22 | private final SwsContext sws;
23 |
24 | // Managed stuff
25 | private final AVStream stream;
26 | private final FFmpegSourceStream parentStream;
27 | private final VideoFormat videoFormat;
28 |
29 | private volatile long totalDecoded;
30 |
31 | private final int frameSizeBytes;
32 |
33 | private int pixelFormat;
34 |
35 | private boolean closed = false;
36 |
37 | public FFmpegVideoSourceSubstream(FFmpegSourceStream parentStream,
38 | AVStream stream,
39 | AVCodecContext codecContext,
40 | int pixelFormat) throws FFmpegException {
41 | super(parentStream);
42 |
43 | this.pixelFormat = pixelFormat;
44 | this.stream = stream;
45 |
46 | this.parentStream = parentStream;
47 | this.codecContext = codecContext;
48 |
49 | pFrameOut = avutil.av_frame_alloc();
50 | if (pFrameOut == null) throw new RuntimeException("failed to allocate destination frame");
51 |
52 | this.frameSizeBytes = avutil.av_image_get_buffer_size(
53 | pixelFormat,
54 | stream.codecpar().width(),
55 | stream.codecpar().height(),
56 | 1 // used by some other methods in ffmpeg
57 | );
58 |
59 | buffer = new BytePointer(avutil.av_malloc(frameSizeBytes));
60 |
61 | /*
62 | http://stackoverflow.com/questions/29743648/which-flag-to-use-for-better-quality-with-sws-scale
63 |
64 | The RGB24 to YUV420 conversation itself is lossy. The scaling algorithm is probably used in downscaling
65 | the color information. I'd say the quality is: point << bilinear < bicubic < lanczos/sinc/spline I don't
66 | really know the others. Under rare circumstances sinc is the ideal scaler and lossless, but those
67 | conditions are usually not met. Are you also scaling the video? Otherwise I'd go for bicubic.
68 | */
69 |
70 | sws = swscale.sws_getContext(
71 | stream.codecpar().width(), stream.codecpar().height(), stream.codecpar().format(), // source
72 | stream.codecpar().width(), stream.codecpar().height(), pixelFormat, // destination
73 | swscale.SWS_BILINEAR, // flags (see above)
74 | null, null, (DoublePointer) null // filters, params
75 | );
76 |
77 | // Assign appropriate parts of buffer to image planes in pFrameRGB
78 | // See: https://mail.gnome.org/archives/commits-list/2016-February/msg05531.html
79 | FFmpegError.checkError("av_image_fill_arrays", avutil.av_image_fill_arrays(
80 | pFrameOut.data(),
81 | pFrameOut.linesize(),
82 | buffer,
83 | pixelFormat,
84 | stream.codecpar().width(),
85 | stream.codecpar().height(),
86 | 1
87 | ));
88 |
89 | Rational rational = Rational.fromAVRational(stream.r_frame_rate());
90 |
91 | this.videoFormat = new VideoFormat(
92 | stream.codecpar().width(),
93 | stream.codecpar().height(),
94 | rational.toDouble()
95 | );
96 | }
97 |
98 | @Override
99 | public int getBitRate() {
100 | return (int) stream.codecpar().bit_rate();
101 | }
102 |
103 | @Override
104 | public boolean read() throws IOException {
105 | return parentStream.readPacket() != null;
106 | }
107 |
108 | @Override
109 | public VideoFormat getFormat() {
110 | return videoFormat;
111 | }
112 |
113 | @Override
114 | public AVCodecContext getCodecContext() {
115 | return codecContext;
116 | }
117 |
118 | @Override
119 | public AVStream getStream() {
120 | return stream;
121 | }
122 |
123 | @Override
124 | public void decode(AVFrame frame) throws FFmpegException {
125 | int ret = swscale.sws_scale(
126 | sws, // the scaling context previously created with sws_getContext()
127 | frame.data(), // the array containing the pointers to the planes of the source slice
128 | frame.linesize(), // the array containing the strides for each plane of the source image
129 | 0, // the position in the source image of the slice to process, that is the number (counted starting from zero) in the image of the first row of the slice
130 | stream.codecpar().height(), // the height of the source slice, that is the number of rows in the slice
131 | pFrameOut.data(), // the array containing the pointers to the planes of the destination image
132 | pFrameOut.linesize() // the array containing the strides for each plane of the destination image
133 | );
134 |
135 | FFmpegError.checkError("sws_scale", ret);
136 |
137 | BytePointer data = pFrameOut.data(0);
138 | int l = pFrameOut.linesize(0);
139 |
140 | // Allocate pixel data buffer:
141 | byte[] pixelData = new byte[frameSizeBytes];
142 | data.position(0).get(pixelData, 0, l * frame.height());
143 |
144 | double position = FFmpeg.timestampToSeconds(stream.time_base(), frame.pkt_dts());
145 | setPosition(position);
146 | double time = 1D / videoFormat.getFramesPerSecond();
147 | double timestamp = parentStream.getCreatedTime() + position;
148 | parentStream.updatePacketTimestamp(timestamp);
149 |
150 | put(new VideoFrame(
151 | timestamp,
152 | position,
153 | time,
154 | pixelFormat,
155 | stream.codecpar().width(),
156 | stream.codecpar().height(),
157 | pixelData
158 | ));
159 |
160 | totalDecoded ++;
161 |
162 | //(double) frame.pts() * (double) Rational.fromAVRational(stream.time_base()).toDouble()
163 | //setPosition((double)totalDecoded / videoFormat.getFramesPerSecond());
164 | }
165 |
166 | @Override
167 | public void close() throws Exception {
168 | synchronized (this) {
169 | if (closed) {
170 | throw new IllegalStateException("already closed");
171 | }
172 |
173 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegVideoSourceSubstream.close() called");
174 |
175 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "sws_freeContext(sws)...");
176 | swscale.sws_freeContext(sws);
177 |
178 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "av_free(buffer)...");
179 | avutil.av_free(buffer);
180 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "av_free(pFrameOut)...");
181 | avutil.av_free(pFrameOut);
182 |
183 | // Close a given AVCodecContext and free all the data associated with it (but not the AVCodecContext itself).
184 | // So free it afterwards.
185 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "avcodec_close(codecContext)...");
186 | avcodec.avcodec_close(codecContext);
187 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "av_free(codecContext)...");
188 | avutil.av_free(codecContext);
189 |
190 | closed = true;
191 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegVideoSourceSubstream.close() completed");
192 | }
193 | }
194 | }
195 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/source/MediaSourceSubstream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.source;
2 |
3 | import com.github.manevolent.ffmpeg4j.MediaStream;
4 | import com.github.manevolent.ffmpeg4j.MediaType;
5 | import com.github.manevolent.ffmpeg4j.stream.source.SourceStream;
6 |
7 | import java.io.IOException;
8 | import java.util.ArrayList;
9 | import java.util.Collection;
10 | import java.util.List;
11 | import java.util.Queue;
12 | import java.util.concurrent.LinkedBlockingQueue;
13 |
14 | public abstract class MediaSourceSubstream extends MediaStream {
15 | private final Queue frameQueue = new LinkedBlockingQueue<>();
16 | private final MediaType mediaType;
17 | private volatile double lost;
18 | private final SourceStream parent;
19 | private boolean decoding = true;
20 |
21 | protected MediaSourceSubstream(SourceStream parent, MediaType mediaType) {
22 | this.parent = parent;
23 | this.mediaType = mediaType;
24 | }
25 |
26 | public final SourceStream getParent() {
27 | return parent;
28 | }
29 |
30 | protected boolean put(T frame) {
31 | return frameQueue.add(frame);
32 | }
33 |
34 | public abstract int getBitRate();
35 |
36 | public final MediaType getMediaType() {
37 | return mediaType;
38 | }
39 |
40 | /**
41 | * Flushes the source stream, emptying all buffered data.
42 | */
43 | public final void flush() {
44 | frameQueue.clear();
45 | }
46 |
47 | /**
48 | * Requests that the stream read a packet and put it onto the buffer.
49 | * @return true if the read was successful, false if the stream is otherwise broken.
50 | * @throws IOException
51 | */
52 | public abstract boolean read() throws IOException;
53 |
54 | /**
55 | * Gets the next available object from the source stream.
56 | * @return Object. Null, if the substream is otherwise empty or ended.
57 | * @throws IOException
58 | */
59 | public T next() throws IOException {
60 | while (frameQueue.size() <= 0)
61 | {
62 | if (!isDecoding()) throw new IOException(new IllegalStateException("not decoding"));
63 | read();
64 | }
65 |
66 | return tryNext();
67 | }
68 |
69 | public T peek() throws IOException {
70 | while (frameQueue.size() <= 0)
71 | read();
72 |
73 | return tryPeek();
74 | }
75 |
76 | public T tryNext() {
77 | return frameQueue.poll();
78 | }
79 |
80 | public T tryPeek() {
81 | return frameQueue.peek();
82 | }
83 |
84 | public Collection drain() {
85 | List newList = new ArrayList(frameQueue.size());
86 | T o;
87 | while ((o = tryNext()) != null) newList.add(o);
88 | return newList;
89 | }
90 |
91 | public double getLost() {
92 | return lost;
93 | }
94 |
95 | public void setLost(double lost) {
96 | this.lost = lost;
97 | }
98 |
99 | public boolean isDecoding() {
100 | return decoding;
101 | }
102 |
103 | public void setDecoding(boolean decoding) {
104 | this.decoding = decoding;
105 | }
106 | }
107 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/source/VideoSourceSubstream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.source;
2 |
3 | import com.github.manevolent.ffmpeg4j.MediaType;
4 | import com.github.manevolent.ffmpeg4j.VideoFormat;
5 | import com.github.manevolent.ffmpeg4j.VideoFrame;
6 | import com.github.manevolent.ffmpeg4j.stream.source.SourceStream;
7 |
/**
 * Base class for video substreams of a source.
 * NOTE(review): raw use of MediaSourceSubstream — presumably parameterized with
 * VideoFrame in the original source; confirm and restore the type argument.
 */
public abstract class VideoSourceSubstream extends MediaSourceSubstream {
    public VideoSourceSubstream(SourceStream parent) {
        super(parent, MediaType.VIDEO);
    }

    // Dimensions and frame rate of the decoded video this substream produces.
    public abstract VideoFormat getFormat();

    @Override
    public String toString() {
        return getFormat().toString() + " " + (getBitRate() / 1000) + "Kbps";
    }
}
20 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/stream/FFmpegFormatContext.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.stream;
2 |
3 | import org.bytedeco.ffmpeg.avformat.*;
4 |
5 | import java.util.HashMap;
6 | import java.util.Map;
7 |
8 | public interface FFmpegFormatContext {
9 | AVFormatContext getFormatContext();
10 |
11 | default void setFlag(int flag, boolean value) {
12 | if (!value)
13 | getFormatContext().flags(getFormatContext().flags() & ~flag);
14 | else
15 | getFormatContext().flags(getFormatContext().flags() | flag);
16 | }
17 |
18 |
19 | default void setFlag(AVFormatFlag flag, boolean value) {
20 | setFlag(flag.flag, value);
21 | }
22 |
23 | default boolean isFlagSet(AVFormatFlag flag) {
24 | return isFlagSet(flag.flag);
25 | }
26 |
27 | default boolean isFlagSet(int flag) {
28 | return (getFormatContext().flags() & flag) == flag;
29 | }
30 |
31 | default Map getFlags() {
32 | Map flags = new HashMap<>();
33 |
34 | for (AVFormatFlag flag : AVFormatFlag.values())
35 | flags.put(flag, isFlagSet(flag));
36 |
37 | return flags;
38 | }
39 |
40 | enum AVFormatFlag {
41 | AVFMT_FLAG_GENPTS(0x0001),
42 | AVFMT_FLAG_IGNIDX(0x0002),
43 | AVFMT_FLAG_NONBLOCK(0x0004),
44 | AVFMT_FLAG_IGNDTS(0x0008),
45 | AVFMT_FLAG_NOFILLIN(0x0010),
46 | AVFMT_FLAG_NOPARSE (0x0020),
47 | AVFMT_FLAG_NOBUFFER(0x0040),
48 | AVFMT_FLAG_CUSTOM_IO(0x0080),
49 | AVFMT_FLAG_DISCARD_CORRUPT(0x0100),
50 | AVFMT_FLAG_FLUSH_PACKETS(0x0200),
51 | AVFMT_FLAG_BITEXACT(0x0400),
52 | AVFMT_FLAG_MP4A_LATM(0x0800),
53 | AVFMT_FLAG_SORT_DTS(0x1000),
54 | AVFMT_FLAG_PRIV_OPT(0x2000),
55 | AVFMT_FLAG_KEEP_SIDE_DATA(0x4000)
56 |
57 |
58 | ;
59 |
60 | private final int flag;
61 |
62 | AVFormatFlag(int flag) {
63 | this.flag = flag;
64 | }
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/stream/Stream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.stream;
2 |
3 | import com.github.manevolent.ffmpeg4j.MediaStream;
4 |
5 | import java.util.List;
6 | import java.util.stream.*;
7 |
8 | public abstract class Stream implements AutoCloseable {
9 |
10 | public abstract List getSubstreams();
11 |
12 | public List getSubstreams(Class extends T> type) {
13 | return getSubstreams().stream().filter(ss -> type.isAssignableFrom(ss.getClass()) ||
14 | ss.getClass().equals(type)).collect(Collectors.toList());
15 | }
16 |
17 | public double getPosition() {
18 | return getSubstreams().stream().mapToDouble(MediaStream::getPosition)
19 | .min().orElse(0D);
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/stream/event/EventChannel.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.stream.event;
2 |
3 | import java.util.LinkedList;
4 | import java.util.List;
5 |
6 | public class EventChannel implements EventListener {
7 | private final List> list = new LinkedList<>();
8 | public boolean addListener(EventListener listener) {
9 | return list.add(listener);
10 | }
11 | public boolean removeListener(EventListener listener) {
12 | return list.remove(listener);
13 | }
14 | public void accept(T o) throws EventException {
15 | for (EventListener listener : list) listener.accept(o);
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/stream/event/EventException.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.stream.event;
2 |
/**
 * Exception raised by event listeners when event delivery fails.
 */
public class EventException extends Exception {
    /** Wraps an underlying cause with no additional message. */
    public EventException(Exception cause) {
        super(cause);
    }

    /** Creates an exception carrying only a message. */
    public EventException(String message) {
        super(message);
    }

    /** Creates an exception carrying both a contextual message and its cause. */
    public EventException(String message, Exception cause) {
        super(message, cause);
    }
}
11 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/stream/event/EventListener.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.stream.event;
2 |
3 | /**
4 | * Represents the streaming api's event listener
5 | * @param Event object
6 | */
7 | public interface EventListener {
8 | void accept(T o) throws EventException;
9 | }
10 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/stream/output/FFmpegTargetStream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.stream.output;
2 |
3 | import com.github.manevolent.ffmpeg4j.*;
4 | import com.github.manevolent.ffmpeg4j.math.Rational;
5 | import com.github.manevolent.ffmpeg4j.output.FFmpegAudioTargetSubstream;
6 | import com.github.manevolent.ffmpeg4j.output.FFmpegVideoTargetSubstream;
7 | import com.github.manevolent.ffmpeg4j.output.MediaTargetSubstream;
8 | import com.github.manevolent.ffmpeg4j.stream.FFmpegFormatContext;
9 | import org.bytedeco.ffmpeg.avcodec.*;
10 | import org.bytedeco.ffmpeg.avformat.*;
11 | import org.bytedeco.ffmpeg.avutil.*;
12 | import org.bytedeco.ffmpeg.global.*;
13 | import org.bytedeco.javacpp.*;
14 |
15 | import java.io.EOFException;
16 | import java.util.ArrayList;
17 | import java.util.Collection;
18 | import java.util.List;
19 | import java.util.Map;
20 |
21 | public class FFmpegTargetStream extends TargetStream implements FFmpegFormatContext {
// Native muxer context for the output container.
private final AVFormatContext formatContext;
// Managed custom I/O; null when ffmpeg owns the I/O (URL-based constructor).
private final FFmpegIO io;
// Substreams registered via registerVideoSubstream/registerAudioSubstream.
private final List substreams = new ArrayList<>();
// Strategy used to emit encoded packets.
private final FFmpegPacketOutput packetOutput;

// Guards close() so teardown runs at most once.
private final Object closeLock = new Object();

// Pixel format requested for video substreams; encoders may substitute a close match.
private int pixelFormat = org.bytedeco.ffmpeg.global.avutil.AV_PIX_FMT_RGB24;

private boolean closed;
32 |
/**
 * Wraps an existing, already-configured format context with managed custom I/O.
 * @param formatContext pre-allocated native format context.
 * @param io managed I/O whose AVIO context will be installed on the format context.
 * @param packetOutput strategy for writing encoded packets.
 */
public FFmpegTargetStream(AVFormatContext formatContext, FFmpegIO io, FFmpegPacketOutput packetOutput) {
    this.io = io;
    this.formatContext = formatContext;
    // Must run after formatContext is assigned: installs the custom AVIO context
    // and sets AVFMT_FLAG_CUSTOM_IO on it.
    setAVIOContext(io.getContext());
    this.packetOutput = packetOutput;
}
39 |
/**
 * Creates a target stream muxing into the named container format with managed
 * (custom) I/O.
 * @param formatName ffmpeg short name of the output format (e.g. "matroska").
 * @param io managed I/O to write container bytes through.
 * @param packetOutput strategy for writing encoded packets.
 * @throws FFmpegException if the native output context cannot be set up.
 * @throws IllegalArgumentException if the format name is unknown.
 */
public FFmpegTargetStream(String formatName, FFmpegIO io, FFmpegPacketOutput packetOutput) throws FFmpegException {
    this.io = io;

    AVOutputFormat outputFormat = avformat.av_guess_format(formatName, (String)null, (String)null);
    if (outputFormat == null) throw new IllegalArgumentException("unknown output format");

    this.formatContext = avformat.avformat_alloc_context();
    if (formatContext == null) throw new NullPointerException();

    // NOTE(review): avformat_alloc_output_context2 normally allocates a context itself;
    // here it receives the context allocated above — presumably the JavaCPP
    // pointer-pointer binding re-initializes it in place. Confirm this does not leak
    // the first allocation.
    FFmpegError.checkError(
            "avformat_alloc_output_context2",
            avformat.avformat_alloc_output_context2(formatContext, outputFormat, (String)null, (String)null)
    );

    // Installs custom AVIO and sets AVFMT_FLAG_CUSTOM_IO.
    setAVIOContext(io.getContext());

    this.packetOutput = packetOutput;
}
58 |
/**
 * Creates a target stream writing to a URL/file path using ffmpeg-native I/O
 * (no managed FFmpegIO; ffmpeg opens and owns the output).
 * @param formatName ffmpeg short name of the output format; may select by url when null.
 * @param url output target (file path or protocol URL).
 * @param packetOutput strategy for writing encoded packets.
 * @throws FFmpegException if the native output context or I/O cannot be opened.
 */
public FFmpegTargetStream(String formatName, String url, FFmpegPacketOutput packetOutput) throws FFmpegException {
    this.io = null;

    this.formatContext = avformat.avformat_alloc_context();
    if (formatContext == null) throw new NullPointerException();

    FFmpegError.checkError(
            "avformat_alloc_output_context2",
            avformat.avformat_alloc_output_context2(
                    formatContext, // is set to the created format context, or to NULL in case of failure
                    (AVOutputFormat) null, // format to use for allocating the context, if NULL format_name and filename are used instead
                    formatName, // the name of output format to use for allocating the context, if NULL filename is used instead
                    url // the name of the filename to use for allocating the context, may be NULL
            )
    );

    if (formatContext.isNull()) throw new NullPointerException();
    if (formatContext.oformat().isNull()) throw new NullPointerException();

    formatContext.pb(new AVIOContext());

    // Formats flagged AVFMT_NOFILE manage their own I/O; otherwise open the target.
    if (!isFlagSet(avformat.AVFMT_NOFILE)) {
        // NOTE(review): avio_open normally receives &formatContext->pb; wrapping the
        // format context itself in a PointerPointer looks suspicious — verify this
        // writes the opened AVIOContext to the right field.
        FFmpegError.checkError(
                "avio_open",
                avformat.avio_open(
                        new PointerPointer(formatContext),
                        new BytePointer(url),
                        avformat.AVIO_FLAG_WRITE
                )
        );
    }

    this.packetOutput = packetOutput;
}
93 |
/** Convenience: URL output using ffmpeg-native packet writing. */
public FFmpegTargetStream(String formatName, String url) throws FFmpegException {
    this(formatName, url, new FFmpegNativeOutput());
}
97 |
/**
 * Gets the number of streams registered on the underlying format context.
 * @return native AVStream count.
 */
public int substreamCount() {
    return formatContext.nb_streams();
}
101 |
/** Gets the AVIO context currently installed on the format context. */
private AVIOContext getAVIOContext() {
    return this.formatContext.pb();
}
105 |
/** Installs a custom AVIO context, flagging the format context as custom-I/O first. */
private void setAVIOContext(AVIOContext context) {
    // Flag must be set so ffmpeg does not try to open/close the I/O itself.
    enableCustomIO();
    this.formatContext.pb(context);
}
110 |
/** Sets AVFMT_FLAG_CUSTOM_IO on the format context. */
private void enableCustomIO() {
    setFlag(FFmpegFormatContext.AVFormatFlag.AVFMT_FLAG_CUSTOM_IO, true);
}
114 |
/**
 * Writes the container header via avformat_write_header, checking the native
 * return code.
 * @throws FFmpegException if the native call fails.
 */
public void writeFFmpegHeader() throws FFmpegException {
    //avformat.av_dump_format(formatContext, 0, (String) null, 1);

    FFmpegError.checkError(
            "avformat_write_header",
            avformat.avformat_write_header(formatContext, (AVDictionary) null)
    );
}
123 |
/**
 * Writes the container header and fires the onReady event. Checked failures are
 * wrapped as unchecked since the TargetStream contract does not declare them.
 */
@Override
public void writeHeader() {
    try {
        writeFFmpegHeader();

        getOnReady().accept(this);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
134 |
135 | public FFmpegVideoTargetSubstream registerVideoSubstream(String codecName,
136 | VideoFormat format,
137 | Map options) throws FFmpegException {
138 | return registerVideoSubstream(
139 | codecName,
140 | format.getWidth(), format.getHeight(), format.getFramesPerSecond(),
141 | options
142 | );
143 | }
144 |
145 | public FFmpegVideoTargetSubstream registerVideoSubstream(String codecName,
146 | int width, int height, double fps,
147 | Map options) throws FFmpegException {
148 | AVCodec codec = org.bytedeco.ffmpeg.global.avcodec.avcodec_find_encoder_by_name(codecName);
149 | if (codec == null) throw new FFmpegException("unrecognized video codec: " + codecName);
150 |
151 | return registerVideoSubstream(codec, width, height, fps, options);
152 | }
153 |
154 | public FFmpegVideoTargetSubstream registerVideoSubstream(AVCodec codec,
155 | int width, int height, double fps,
156 | Map options) throws FFmpegException {
157 | if (codec.type() != avutil.AVMEDIA_TYPE_VIDEO)
158 | throw new FFmpegException("codec is not video: " + codec.name().getString());
159 |
160 | codec = avcodec.avcodec_find_encoder(codec.id());
161 | if (codec == null) throw new FFmpegException("video codec does not have encoder");
162 |
163 | AVStream stream = avformat.avformat_new_stream(formatContext, codec);
164 | if (stream == null) throw new FFmpegException("could not create video substream");
165 |
166 | // Assign a stream ID to this encoder.
167 | stream.id(formatContext.nb_streams() - 1);
168 |
169 | AVCodecContext codecContext = avcodec.avcodec_alloc_context3(codec);
170 |
171 | // Set up appropriate pixel format target
172 | Collection supported_formats = FFmpeg.readPointer(codec.pix_fmts());
173 | boolean is_pixel_format_supported = supported_formats.contains(getPixelFormat());
174 | int best_pix_fmt = is_pixel_format_supported ?
175 | getPixelFormat() :
176 | VideoFormat.getBestPixelFormat(codec, getPixelFormat());
177 | if (best_pix_fmt < 0) throw new FFmpegException("couldn't find comparable pixel format for encoder");
178 | codecContext.pix_fmt(best_pix_fmt);
179 |
180 | codecContext.width(width);
181 | codecContext.height(height);
182 |
183 | stream.codecpar().codec_id(codec.id());
184 | stream.codecpar().width(width);
185 | stream.codecpar().height(height);
186 | stream.codecpar().format(best_pix_fmt);
187 |
188 | Rational timeBase = Rational.toRational(1D/fps);
189 | codecContext.time_base(avutil.av_make_q((int) timeBase.getNumerator(), (int) timeBase.getDenominator()));
190 | Rational framerate = Rational.toRational(fps);
191 | codecContext.framerate(avutil.av_make_q((int) framerate.getNumerator(), (int) framerate.getDenominator()));
192 |
193 | // some formats want stream headers to be separate
194 | if ((formatContext.oformat().flags() & avformat.AVFMT_GLOBALHEADER) == avformat.AVFMT_GLOBALHEADER)
195 | codecContext.flags(codecContext.flags() | avcodec.AV_CODEC_FLAG_GLOBAL_HEADER);
196 |
197 | // pull in options
198 | AVDictionary optionDictionary = new AVDictionary();
199 | for (Map.Entry option : options.entrySet()) {
200 | FFmpegError.checkError(
201 | "av_dict_set/" + option.getKey(),
202 | avutil.av_dict_set(optionDictionary, option.getKey(), option.getValue(), 0)
203 | );
204 | }
205 |
206 | FFmpegError.checkError(
207 | "avcodec_open2",
208 | avcodec.avcodec_open2(codecContext, codec, optionDictionary)
209 | );
210 |
211 | FFmpegVideoTargetSubstream videoTargetSubstream = new FFmpegVideoTargetSubstream(
212 | this,
213 | stream,
214 | codecContext,
215 | fps
216 | );
217 |
218 | substreams.add(videoTargetSubstream);
219 |
220 | return videoTargetSubstream;
221 | }
222 |
223 | public FFmpegAudioTargetSubstream registerAudioSubstream(String codecName,
224 | AudioFormat audioFormat,
225 | Map options) throws FFmpegException {
226 | return registerAudioSubstream(
227 | codecName,
228 | audioFormat.getSampleRate(), audioFormat.getChannels(), audioFormat.getChannelLayout(),
229 | options
230 | );
231 | }
232 |
233 |
234 | public FFmpegAudioTargetSubstream registerAudioSubstream(String codecName,
235 | int sample_rate, int channels, long channel_layout,
236 | Map options) throws FFmpegException {
237 | AVCodec codec = avcodec.avcodec_find_encoder_by_name(codecName);
238 | if (codec == null) throw new FFmpegException("unrecognized audio codec: " + codecName);
239 |
240 | return registerAudioSubstream(
241 | codec,
242 | sample_rate, channels, channel_layout,
243 | options
244 | );
245 | }
246 |
247 | public FFmpegAudioTargetSubstream registerAudioSubstream(AVCodec codec,
248 | int sample_rate, int channels, long channel_layout,
249 | Map options) throws FFmpegException {
250 | if (codec.type() != avutil.AVMEDIA_TYPE_AUDIO)
251 | throw new FFmpegException("codec is not audio: " + codec.name().getString());
252 |
253 | codec = avcodec.avcodec_find_encoder(codec.id());
254 | if (codec == null) throw new FFmpegException("audio codec does not have encoder");
255 |
256 | AVStream stream = avformat.avformat_new_stream(formatContext, codec);
257 | if (stream == null) throw new RuntimeException("could not create audio substream");
258 |
259 | stream.id(formatContext.nb_streams() - 1);
260 |
261 | AVCodecContext codecContext = avcodec.avcodec_alloc_context3(codec);
262 |
263 | int sampleFormat = -1;
264 | for (int i = 0; ; i ++) {
265 | int newSampleFormatId = codec.sample_fmts().get(i);
266 | if (newSampleFormatId < 0) break;
267 | sampleFormat = newSampleFormatId;
268 | }
269 | if (sampleFormat < 0) throw new FFmpegException("could not pick audio sample format for codec");
270 |
271 | if (codecContext.codec().supported_samplerates() != null &&
272 | !codecContext.codec().supported_samplerates().isNull()) {
273 | boolean sampleRateSupported = false;
274 | for (int i = 0; !sampleRateSupported; i++) {
275 | int sampleRate = codecContext.codec().supported_samplerates().get(i);
276 | if (sampleRate == sample_rate)
277 | sampleRateSupported = true;
278 | else if (sampleRate <= 0)
279 | break;
280 | }
281 | if (!sampleRateSupported)
282 | throw new FFmpegException("codec does not support sample rate: " + sample_rate);
283 | }
284 |
285 | if (codecContext.codec().channel_layouts() != null && !codecContext.codec().channel_layouts().isNull()) {
286 | boolean channelLayoutSupported = false;
287 | for (int i = 0; !channelLayoutSupported; i++) {
288 | long channelLayout = codecContext.codec().channel_layouts().get(i);
289 | if (channelLayout == channel_layout)
290 | channelLayoutSupported = true;
291 | else if (channelLayout <= 0)
292 | break;
293 | }
294 | if (!channelLayoutSupported)
295 | throw new FFmpegException("codec does not support channel layout: " + channel_layout);
296 | }
297 |
298 | codecContext.sample_fmt(sampleFormat);
299 | codecContext.sample_rate(sample_rate);
300 | codecContext.channels(channels);
301 | codecContext.codec_type(codec.type());
302 | codecContext.channel_layout(channel_layout);
303 | codecContext.frame_size();
304 |
305 | // some formats want stream headers to be separate
306 | if ((formatContext.oformat().flags() & avformat.AVFMT_GLOBALHEADER) == avformat.AVFMT_GLOBALHEADER)
307 | codecContext.flags(codecContext.flags() | avcodec.AV_CODEC_FLAG_GLOBAL_HEADER);
308 |
309 |
310 | // pull in options
311 | AVDictionary optionDictionary = new AVDictionary();
312 | for (Map.Entry option : options.entrySet()) {
313 | FFmpegError.checkError(
314 | "av_dict_set/" + option.getKey(),
315 | avutil.av_dict_set(optionDictionary, option.getKey(), option.getValue(), 0)
316 | );
317 | }
318 |
319 | FFmpegError.checkError(
320 | "avcodec_open2",
321 | avcodec.avcodec_open2(codecContext, codec, optionDictionary)
322 | );
323 |
324 | stream.codecpar().codec_id(codec.id());
325 | stream.codecpar().format(sampleFormat);
326 | stream.codecpar().sample_rate(sample_rate);
327 | stream.codecpar().channels(channels);
328 | stream.codecpar().channel_layout(channel_layout);
329 | stream.codecpar().codec_type(codec.type());
330 | stream.codecpar().frame_size(codecContext.frame_size());
331 |
332 | FFmpegAudioTargetSubstream audioTargetSubstream = new FFmpegAudioTargetSubstream(
333 | this,
334 | stream,
335 | codecContext
336 | );
337 |
338 | substreams.add(audioTargetSubstream);
339 |
340 | return audioTargetSubstream;
341 | }
342 |
@Override
public void close() throws Exception {
    // Flushes all substreams, finalizes the container (writes the trailer),
    // and releases native resources exactly once. Guarded by closeLock so
    // concurrent or repeated close() calls cannot double-free native memory.
    Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegTargetStream.close() called");

    synchronized (closeLock) {
        if (!closed) {
            try {
                // Flush underlying streams
                for (MediaTargetSubstream targetSubstream : substreams) {
                    Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "flushing MediaTargetSubstream: " +
                            targetSubstream.toString() + "...");
                    targetSubstream.flush();
                }

                Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "packetOutput.flush(formatContext)...");
                // Flush packet buffer (I/O done at this point)
                packetOutput.flush(formatContext);

                // Write trailer to file
                Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "av_write_trailer...");
                avformat.av_write_trailer(getFormatContext());

                // Close output connection/file (may do nothing)
                Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "packetOutput.close()...");
                packetOutput.close();
            } finally { // Make sure native stuff is freed properly
                // Close all substreams (free memory)
                for (MediaTargetSubstream substream : substreams) {
                    Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "closing MediaTargetSubstream:" + substream.toString() + "...");
                    substream.close();
                }

                // Close native I/O handles
                if (io != null) { // managed IO (we close a lot of stuff on our own here)
                    Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "io.close()...");
                    io.close();
                } else { // native IO, let ffmpeg handle it
                    Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "avformat_free_context(formatContext)...");
                    avformat.avformat_free_context(formatContext);
                }

                // Marked closed even when the flush path above threw, since the
                // native resources were released by this finally-block.
                // NOTE(review): if substream.close() itself throws, closed stays
                // false and a later close() retries -- confirm substream.close()
                // is idempotent/double-free safe.
                closed = true;
            }
        } else {
            Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegTargetStream already closed!");
        }
    }

    Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegTargetStream.close() completed");
}
393 |
394 | public AVFormatContext getFormatContext() {
395 | return formatContext;
396 | }
397 |
398 | @Override
399 | public List getSubstreams() {
400 | return substreams;
401 | }
402 |
403 | public void writePacket(AVPacket packet) throws FFmpegException, EOFException {
404 | if (packet == null || packet.isNull())
405 | {
406 | return; // Null packet -- ignore!
407 | }
408 | else if (packet.size() < 0)
409 | throw new FFmpegException("failed to write packet: size < 0: " + packet.size());
410 | else if ((packet.flags() & avcodec.AV_PKT_FLAG_CORRUPT) == avcodec.AV_PKT_FLAG_CORRUPT)
411 | throw new FFmpegException("failed to write packet: corrupt flag is set");
412 |
413 | if (!packetOutput.writePacket(formatContext, packet))
414 | return; // Packet wasn't written.
415 |
416 | // Calculate duration and stream position, etc
417 | getFormatContext().duration((int) Math.max(getFormatContext().duration(), packet.pts() + packet.duration()));
418 | }
419 |
420 | public void flush() throws FFmpegException {
421 | packetOutput.flush(formatContext);
422 | }
423 |
424 | public int getPixelFormat() {
425 | return pixelFormat;
426 | }
427 |
428 | public void setPixelFormat(int pixelFormat) {
429 | this.pixelFormat = pixelFormat;
430 | }
431 |
/**
 * Strategy interface describing where encoded packets are ultimately written
 * (e.g. the native ffmpeg interleaving muxer, or a custom sink).
 */
public interface FFmpegPacketOutput extends AutoCloseable {
    /**
     * Writes a single packet to the output.
     *
     * @param formatContext owning format (muxer) context.
     * @param packet packet to write.
     * @return true if the packet was written, false if it was skipped.
     * @throws FFmpegException on a muxer error.
     * @throws EOFException if the downstream output has ended (e.g. broken pipe).
     */
    boolean writePacket(AVFormatContext formatContext, AVPacket packet) throws FFmpegException, EOFException;

    /**
     * Flushes any buffered packets; the default implementation is a no-op.
     */
    default void flush(AVFormatContext formatContext) throws FFmpegException {
        // Do nothing
    }

    /**
     * Releases any resources held by this output; the default implementation
     * is a no-op.
     */
    default void close() throws Exception {

    }
}
443 |
444 | public static class FFmpegNativeOutput implements FFmpegPacketOutput {
445 | @Override
446 | public boolean writePacket(AVFormatContext formatContext, AVPacket packet)
447 | throws FFmpegException, EOFException {
448 | if (packet.size() == 0) return false; // Skip packet.
449 |
450 | if (packet.dts() < 0) packet.dts(0);
451 |
452 | int ret = avformat.av_interleaved_write_frame(formatContext, packet);
453 | if (ret == -31) // Broken pipe
454 | throw new EOFException();
455 |
456 | FFmpegError.checkError("av_interleaved_write_frame(formatContext, packet)", ret);
457 |
458 | return ret >= 0;
459 | }
460 |
461 | @Override
462 | public void flush(AVFormatContext formatContext) throws FFmpegException {
463 | FFmpegError.checkError(
464 | "av_interleaved_write_frame(formatContext, null)",
465 | avformat.av_interleaved_write_frame(formatContext, null)
466 | );
467 | }
468 | }
469 | }
470 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/stream/output/TargetStream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.stream.output;
2 |
3 | import com.github.manevolent.ffmpeg4j.MediaType;
4 | import com.github.manevolent.ffmpeg4j.output.AudioTargetSubstream;
5 | import com.github.manevolent.ffmpeg4j.output.MediaTargetSubstream;
6 | import com.github.manevolent.ffmpeg4j.output.VideoTargetSubstream;
7 | import com.github.manevolent.ffmpeg4j.stream.Stream;
8 | import com.github.manevolent.ffmpeg4j.stream.event.EventChannel;
9 |
10 | import java.util.List;
11 |
12 | public abstract class TargetStream extends Stream {
13 | private final EventChannel onReady = new EventChannel<>();
14 |
15 | public final EventChannel getOnReady() {
16 | return onReady;
17 | }
18 |
19 | public abstract void writeHeader();
20 |
21 | @Override
22 | public abstract List getSubstreams();
23 |
24 | public AudioTargetSubstream getAudioTargetStream() {
25 | return (AudioTargetSubstream)
26 | getSubstreams().stream()
27 | .filter(x -> x.getType() == MediaType.AUDIO)
28 | .findFirst().orElse(null);
29 | }
30 |
31 | public VideoTargetSubstream getVideoTargetStream() {
32 | return (VideoTargetSubstream)
33 | getSubstreams().stream()
34 | .filter(x -> x.getType() == MediaType.VIDEO)
35 | .findFirst().orElse(null);
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/stream/source/FFmpegSourceStream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.stream.source;
2 |
3 | import com.github.manevolent.ffmpeg4j.*;
4 | import com.github.manevolent.ffmpeg4j.output.MediaTargetSubstream;
5 | import com.github.manevolent.ffmpeg4j.source.FFmpegAudioSourceSubstream;
6 | import com.github.manevolent.ffmpeg4j.source.FFmpegDecoderContext;
7 | import com.github.manevolent.ffmpeg4j.source.FFmpegVideoSourceSubstream;
8 | import com.github.manevolent.ffmpeg4j.source.MediaSourceSubstream;
9 | import com.github.manevolent.ffmpeg4j.stream.FFmpegFormatContext;
10 | import com.github.manevolent.ffmpeg4j.stream.output.FFmpegTargetStream;
11 | import com.github.manevolent.ffmpeg4j.stream.output.TargetStream;
12 | import org.bytedeco.ffmpeg.avcodec.*;
13 | import org.bytedeco.ffmpeg.avformat.*;
14 | import org.bytedeco.ffmpeg.avutil.*;
15 | import org.bytedeco.ffmpeg.global.*;
16 | import org.bytedeco.javacpp.IntPointer;
17 | import org.bytedeco.javacpp.annotation.Cast;
18 |
19 | import java.io.EOFException;
20 | import java.io.IOException;
21 | import java.util.*;
22 | import java.util.function.BiConsumer;
23 | import java.util.function.Function;
24 |
25 | public class FFmpegSourceStream extends SourceStream implements FFmpegFormatContext {
26 | private final FFmpegInput input;
27 | private final List substreamList = new LinkedList<>();
28 | private final FFmpegDecoderContext[] substreams;
29 | private final Object readLock = new Object();
30 |
31 | private boolean registered = false;
32 | private volatile boolean closed = false;
33 | private final Object closeLock = new Object();
34 |
35 | private double position = -1D;
36 |
37 | private int pixelFormat = avutil.AV_PIX_FMT_RGB24;
38 |
39 | private final AVCodecContext.Get_format_AVCodecContext_IntPointer get_format_callback =
40 | new AVCodecContext.Get_format_AVCodecContext_IntPointer() {
41 | @Override
42 | public int call(AVCodecContext var1, @Cast({"const AVPixelFormat*"}) IntPointer pix_fmt_list) {
43 | Logging.LOGGER.log(
44 | Logging.DEBUG_LOG_LEVEL,
45 | "finding best pix_fmt match for decoder for " +
46 | avutil.av_get_pix_fmt_name(getPixelFormat()).getString()
47 | );
48 |
49 | int pix_fmt = avcodec.avcodec_find_best_pix_fmt_of_list(pix_fmt_list, getPixelFormat(), 0, null);
50 |
51 | if (pix_fmt >= 0)
52 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "offering pix_fmt " +
53 | avutil.av_get_pix_fmt_name(pix_fmt).getString() + " to decoder");
54 | else
55 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "couldn't find pix_fmt for decoder");
56 |
57 | return pix_fmt;
58 | }
59 | };
60 |
61 | public FFmpegSourceStream(FFmpegInput input) {
62 | this.substreams = new FFmpegDecoderContext[input.getContext().nb_streams()];
63 | this.input = input;
64 | }
65 |
66 | public FFmpegSourceStream(FFmpegInput input, long startTimeMicroseconds) {
67 | this(input);
68 |
69 | input.getContext().start_time_realtime(startTimeMicroseconds);
70 | }
71 |
72 | public final AVCodecContext.Get_format_AVCodecContext_IntPointer getGet_format_callback() {
73 | return get_format_callback;
74 | }
75 |
76 | public Collection registerStreams() throws FFmpegException {
77 | synchronized (readLock) {
78 | if (!registered) {
79 | // Register low-level streams.
80 | int stream_count = input.getFormatContext().nb_streams();
81 | for (int stream_index = 0; stream_index < stream_count; stream_index++)
82 | input.registerStream(this, stream_index);
83 |
84 | registered = true;
85 | }
86 |
87 | return Collections.unmodifiableCollection(substreamList);
88 | }
89 | }
90 |
91 | public FFmpegTargetStream createTarget(String formatName, FFmpegOutput output) throws FFmpegException {
92 | return createTarget(FFmpeg.getOutputFormatByName(formatName), output);
93 | }
94 |
95 | public FFmpegTargetStream createTarget(AVOutputFormat format, FFmpegOutput output) throws FFmpegException {
96 | FFmpegTargetStream targetStream = null;
97 | try {
98 | targetStream = output.open(format);
99 | copyToTargetStream(targetStream);
100 | return targetStream;
101 | } catch (Throwable e) {
102 | if (targetStream != null) {
103 | try {
104 | targetStream.close();
105 | } catch (Exception closeException) {
106 | e.addSuppressed(closeException);
107 | }
108 | }
109 |
110 | throw e;
111 | }
112 | }
113 |
114 | public Collection> copyToTargetStream(FFmpegTargetStream targetStream) throws FFmpegException {
115 | return copyToTargetStream(targetStream, (source, target) -> { /* Do nothing */ });
116 | }
117 |
118 | public Collection> copyToTargetStream(FFmpegTargetStream targetStream,
119 | BiConsumer, MediaTargetSubstream>>
120 | modifier) throws FFmpegException {
121 | return copyToTargetStream(targetStream, (source, targetStream_) -> {
122 | if (source instanceof FFmpegAudioSourceSubstream) {
123 | return targetStream_.registerAudioSubstream(
124 | ((FFmpegAudioSourceSubstream) source).getCodecContext().codec(),
125 | ((FFmpegAudioSourceSubstream) source).getFormat().getSampleRate(),
126 | ((FFmpegAudioSourceSubstream) source).getFormat().getChannels(),
127 | ((FFmpegAudioSourceSubstream) source).getFormat().getChannelLayout(),
128 | new HashMap<>());
129 | } else if (source instanceof FFmpegVideoSourceSubstream) {
130 | return targetStream_.registerVideoSubstream(
131 | ((FFmpegVideoSourceSubstream) source).getCodecContext().codec(),
132 | ((FFmpegVideoSourceSubstream) source).getFormat().getWidth(),
133 | ((FFmpegVideoSourceSubstream) source).getFormat().getHeight(),
134 | ((FFmpegVideoSourceSubstream) source).getFormat().getFramesPerSecond(),
135 | new HashMap<>());
136 | } else {
137 | // We don't know what to do with this
138 | return null;
139 | }
140 | }, modifier);
141 | }
142 |
143 | public Collection> copyToTargetStream(FFmpegTargetStream targetStream,
144 | SubstreamConverter converter,
145 | BiConsumer, MediaTargetSubstream>>
146 | modifier) throws FFmpegException {
147 | List> targetSubstreams = new ArrayList<>();
148 |
149 | for (MediaSourceSubstream> substream : getSubstreams()) {
150 | MediaTargetSubstream> targetSubstream = converter.convert(substream, targetStream);
151 | if (targetSubstream == null) {
152 | continue;
153 | }
154 |
155 | modifier.accept(substream, targetSubstream);
156 | targetSubstreams.add(targetSubstream);
157 | }
158 |
159 | return targetSubstreams;
160 | }
161 |
162 | @Override
163 | public List getSubstreams() {
164 | if (!registered) {
165 | throw new IllegalStateException("Substreams are not yet registered with registerStreams()");
166 | }
167 |
168 | return substreamList;
169 | }
170 |
171 | @Override
172 | public double getCreatedTime() {
173 | long start_time_realtime = input.getContext().start_time_realtime();
174 | if (start_time_realtime == avutil.AV_NOPTS_VALUE) return 0D;
175 | return (double)start_time_realtime / 1000000D;
176 | }
177 |
178 | public double getPosition() {
179 | return position;
180 | }
181 |
182 | @Override
183 | public double seek(double position) throws IOException {
184 | if (getPosition() > position) {
185 | throw new IllegalStateException("Cannot rewind");
186 | } else if (getPosition() == position) {
187 | return position;
188 | }
189 |
190 | Packet packet;
191 | while ((packet = readPacket()) != null) {
192 | if (packet.getPosition() + packet.getDuration() >= position) {
193 | return packet.getPosition();
194 | }
195 | }
196 |
197 | throw new EOFException();
198 | }
199 |
200 | @Override
201 | public void setCreatedTime(double createdTimeInSeconds) {
202 | input.getContext().start_time_realtime((long) (createdTimeInSeconds * 1000000D));
203 | }
204 |
205 | public int getPixelFormat() {
206 | return pixelFormat;
207 | }
208 |
209 | public void setPixelFormat(int pixelFormat) {
210 | if (registered) throw new IllegalStateException("already registered substreams");
211 |
212 | this.pixelFormat = pixelFormat;
213 | }
214 |
215 | @Override
216 | public Packet readPacket() throws IOException {
217 | try {
218 | while (true) {
219 | int result;
220 | AVPacket packet = avcodec.av_packet_alloc();
221 |
222 | // av_read_frame may not be thread safe
223 | synchronized (readLock) {
224 | if (!registered) registerStreams();
225 |
226 | for (; ; ) {
227 | result = avformat.av_read_frame(input.getContext(), packet);
228 | if (result != avutil.AVERROR_EAGAIN()) {
229 | break;
230 | }
231 | }
232 | }
233 |
234 | try {
235 | // Manual EOF checking here because an EOF is very important to the upper layers.
236 | if (result == avutil.AVERROR_EOF) throw new EOFException("pos: " + getPosition() + "s");
237 | else if (result == avutil.AVERROR_ENOMEM()) throw new OutOfMemoryError();
238 |
239 | FFmpegError.checkError("av_read_frame", result);
240 |
241 | // NOT USED: In case createdTime doesn't get set.
242 | if ((packet.flags() & avcodec.AV_PKT_FLAG_KEY) == avcodec.AV_PKT_FLAG_KEY &&
243 | getCreatedTime() <= 0D)
244 | setCreatedTime(System.currentTimeMillis() / 1000D);
245 |
246 | if ((packet.flags() & avcodec.AV_PKT_FLAG_CORRUPT) == avcodec.AV_PKT_FLAG_CORRUPT)
247 | throw new IOException("read corrupt packet");
248 |
249 | // Find the substream and its native context associated with this packet:
250 | FFmpegDecoderContext substream = getSubstream(packet.stream_index());
251 |
252 | // Handle any null contexts:
253 | if (substream == null)
254 | continue;
255 |
256 | int size = packet.size();
257 | if (size <= 0)
258 | continue;
259 |
260 | int finished;
261 | if (substream.isDecoding()) {
262 | finished = substream.decodePacket(packet);
263 | } else {
264 | finished = 0;
265 | }
266 |
267 | AVRational timebase = getFormatContext().streams(packet.stream_index()).time_base();
268 | double position = FFmpeg.timestampToSeconds(timebase, packet.pts());
269 | this.position = position;
270 | double duration = FFmpeg.timestampToSeconds(timebase, packet.duration());
271 | return new Packet((MediaSourceSubstream) substream, size, finished, position, duration);
272 | } finally {
273 | // VLC media player does this
274 | avcodec.av_packet_unref(packet);
275 | }
276 | }
277 | } catch (FFmpegException ex) {
278 | throw new IOException(ex);
279 | }
280 | }
281 |
282 | private static AVCodecContext newCodecContext(AVCodec codec, AVCodecParameters parameters) throws FFmpegException {
283 | AVCodecContext context = avcodec.avcodec_alloc_context3(codec);
284 | if (context == null) {
285 | throw new FFmpegException("Failed to allocate AVCodecContext");
286 | }
287 | avcodec.avcodec_parameters_to_context(context, parameters);
288 | avcodec.avcodec_open2(context, codec, (AVDictionary) null);
289 | return context;
290 | }
291 |
292 | public void registerSubstream(int stream_index,
293 | AVStream stream) throws FFmpegException {
294 | if (stream_index < 0 || stream_index >= substreams.length)
295 | throw new FFmpegException("substream ID invalid: " + stream_index);
296 |
297 | FFmpegDecoderContext decoderContext = substreams[stream_index];
298 | if (decoderContext != null)
299 | throw new FFmpegException("substream already registered: " + stream_index);
300 |
301 | AVCodec codec = avcodec.avcodec_find_decoder(stream.codecpar().codec_id());
302 |
303 | switch (stream.codecpar().codec_type()) {
304 | case avutil.AVMEDIA_TYPE_VIDEO:
305 | FFmpegVideoSourceSubstream videoSourceStream = new FFmpegVideoSourceSubstream(
306 | this,
307 | stream,
308 | newCodecContext(codec, stream.codecpar()),
309 | getPixelFormat()
310 | );
311 |
312 | substreamList.add(videoSourceStream);
313 | decoderContext = videoSourceStream;
314 | break;
315 | case avutil.AVMEDIA_TYPE_AUDIO:
316 | FFmpegAudioSourceSubstream audioSourceStream = new FFmpegAudioSourceSubstream(
317 | this,
318 | stream,
319 | newCodecContext(codec, stream.codecpar())
320 | );
321 |
322 | substreamList.add(audioSourceStream);
323 | decoderContext = audioSourceStream;
324 | break;
325 | }
326 |
327 | if (decoderContext == null)
328 | throw new FFmpegException("unsupported codec type: " + stream.codecpar().codec_type());
329 |
330 | substreams[stream_index] = decoderContext;
331 | }
332 |
333 | public FFmpegDecoderContext getSubstream(int stream_index) {
334 | return substreams[stream_index];
335 | }
336 |
337 | @Override
338 | public void close() throws Exception {
339 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegSourceStream.close() called");
340 |
341 | synchronized (closeLock) {
342 | if (!closed) {
343 | for (MediaSourceSubstream substream : substreamList) {
344 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "closing MediaSourceSubstream: " + substream.toString() + "...");
345 | substream.close();
346 | }
347 |
348 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "substreamList.clear()...");
349 | substreamList.clear();
350 |
351 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "input.close()...");
352 | input.close();
353 |
354 | closed = true;
355 |
356 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "getOnClosed().accept(this)...");
357 | getOnClosed().accept(this);
358 | }
359 | }
360 |
361 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "FFmpegSourceStream.close() completed");
362 | }
363 |
364 | @Override
365 | public AVFormatContext getFormatContext() {
366 | return input.getContext();
367 | }
368 |
369 | public interface SubstreamConverter {
370 | MediaTargetSubstream> convert(MediaSourceSubstream> source, FFmpegTargetStream targetStream)
371 | throws FFmpegException;
372 | }
373 | }
374 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/stream/source/SourceStream.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.stream.source;
2 |
3 | import com.github.manevolent.ffmpeg4j.source.MediaSourceSubstream;
4 | import com.github.manevolent.ffmpeg4j.stream.Stream;
5 | import com.github.manevolent.ffmpeg4j.stream.event.EventChannel;
6 |
7 | import java.io.IOException;
8 | import java.util.List;
9 |
10 | public abstract class SourceStream extends Stream {
11 | // Events
12 | private final EventChannel onReady = new EventChannel<>();
13 | private final EventChannel onClosed = new EventChannel<>();
14 | private double lastPacketTimestamp = 0D;
15 |
16 | /**
17 | * Gets the time the stream was created or started.
18 | * @return created time.
19 | */
20 | public abstract double getCreatedTime();
21 |
22 | public double getLastPacketTimestamp() {
23 | return lastPacketTimestamp;
24 | }
25 |
26 | public void updatePacketTimestamp(double newTimestamp) {
27 | lastPacketTimestamp = Math.max(newTimestamp, lastPacketTimestamp);
28 | }
29 |
30 | /**
31 | * Seeks to a specified point in the stream. The result of the seek operation may not be exact.
32 | * @param position position to seek to, in seconds.
33 | * @return final position of the seek operation, in seconds. Keep in mind due to the quantized nature of media streams this may not be equal to the
34 | * requested position, but is guaranteed to be as close as possible.
35 | */
36 | public abstract double seek(double position) throws IOException;
37 |
38 | public abstract Packet readPacket() throws IOException;
39 |
40 | @Override
41 | public abstract List getSubstreams();
42 |
43 | public EventChannel getOnReady() {
44 | return onReady;
45 | }
46 | public EventChannel getOnClosed() {
47 | return onClosed;
48 | }
49 |
50 | public abstract void setCreatedTime(double createdTimeInSeconds);
51 |
52 | public class Packet {
53 | private final MediaSourceSubstream sourceStream;
54 | private final int finished;
55 | private final long bytesProcessed;
56 | private final double position;
57 | private final double duration;
58 |
59 | public Packet(MediaSourceSubstream sourceSteram, long bytesProcessed, int finished, double position, double duration) {
60 | this.bytesProcessed = bytesProcessed;
61 | this.sourceStream = sourceSteram;
62 | this.finished = finished;
63 | this.position = position;
64 | this.duration = duration;
65 | }
66 |
67 | public MediaSourceSubstream getSourceStream() {
68 | return sourceStream;
69 | }
70 |
71 | public long getBytesProcessed() {
72 | return bytesProcessed;
73 | }
74 |
75 | public int getFinishedFrames() {
76 | return finished;
77 | }
78 |
79 | public double getPosition() {
80 | return position;
81 | }
82 |
83 | public double getDuration() {
84 | return duration;
85 | }
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
/src/main/java/com/github/manevolent/ffmpeg4j/transcoder/Transcoder.java:
--------------------------------------------------------------------------------
1 | package com.github.manevolent.ffmpeg4j.transcoder;
2 |
3 | import com.github.manevolent.ffmpeg4j.AudioFrame;
4 | import com.github.manevolent.ffmpeg4j.Logging;
5 | import com.github.manevolent.ffmpeg4j.VideoFrame;
6 | import com.github.manevolent.ffmpeg4j.filter.audio.AudioFilter;
7 | import com.github.manevolent.ffmpeg4j.filter.audio.AudioFilterNone;
8 | import com.github.manevolent.ffmpeg4j.filter.video.VideoFilter;
9 | import com.github.manevolent.ffmpeg4j.filter.video.VideoFilterNone;
10 | import com.github.manevolent.ffmpeg4j.source.AudioSourceSubstream;
11 | import com.github.manevolent.ffmpeg4j.source.MediaSourceSubstream;
12 | import com.github.manevolent.ffmpeg4j.source.VideoSourceSubstream;
13 | import com.github.manevolent.ffmpeg4j.stream.output.TargetStream;
14 | import com.github.manevolent.ffmpeg4j.stream.source.SourceStream;
15 |
16 | import java.io.EOFException;
17 | import java.io.IOException;
18 | import java.util.Collection;
19 | import java.util.logging.Level;
20 |
21 | public class Transcoder {
22 | private final SourceStream sourceStream;
23 | private final TargetStream targetStream;
24 |
25 | private final AudioFilter audioFilter;
26 | private final VideoFilter videoFilter;
27 |
28 | private final double speed;
29 |
30 | public Transcoder(SourceStream sourceStream,
31 | TargetStream targetStream,
32 | AudioFilter audioFilter,
33 | VideoFilter videoFilter, double speed) {
34 | this.sourceStream = sourceStream;
35 | this.targetStream = targetStream;
36 |
37 | this.audioFilter = audioFilter;
38 | this.videoFilter = videoFilter;
39 |
40 | this.speed = speed;
41 | }
42 |
43 | public void transcode() throws Exception {
44 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "Transcoder.transcode() called");
45 |
46 | try {
47 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "targetStream.writeHeader()...");
48 | // Write header to file
49 | targetStream.writeHeader();
50 |
51 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "Transcoder: converting...");
52 | // Actual conversion
53 | MediaSourceSubstream substream;
54 | long start = System.nanoTime();
55 |
56 | double maximumPosition;
57 |
58 | while (true) {
59 | try {
60 | substream = handlePacket(sourceStream.readPacket());
61 | } catch (EOFException ex) {
62 | break;
63 | }
64 |
65 | if (substream == null) continue;
66 |
67 | while (true) {
68 | maximumPosition = (((double) System.nanoTime() - (double) start) / 1_000_000_000D) * speed;
69 |
70 | if (substream.getPosition() > maximumPosition)
71 | Thread.sleep((long) Math.ceil((substream.getPosition() - maximumPosition) * 1000D));
72 | else
73 | break;
74 | }
75 | }
76 |
77 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "Transcoder: flushing audio filters...");
78 | for (AudioFrame audioFrame : audioFilter.flush())
79 | targetStream.getAudioTargetStream().write(audioFrame);
80 |
81 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "Transcoder: flushing video filters...");
82 | for (VideoFrame videoFrame : videoFilter.flush())
83 | targetStream.getVideoTargetStream().write(videoFrame);
84 | } finally {
85 | // Close files
86 | try {
87 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "Transcoder: closing source stream...");
88 | sourceStream.close();
89 | } catch (Exception ex) {
90 | Logging.LOGGER.log(Level.WARNING, "Problem closing sourceStream", ex);
91 | }
92 |
93 | try {
94 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "Transcoder: closing target stream...");
95 | targetStream.close();
96 | } catch (Exception ex) {
97 | Logging.LOGGER.log(Level.WARNING, "Problem closing targetStream", ex);
98 | }
99 |
100 | // Close filters
101 | try {
102 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "Transcoder: closing audio filter...");
103 | audioFilter.close();
104 | } catch (Exception ex) {
105 | Logging.LOGGER.log(Level.WARNING, "Problem closing audioFilter", ex);
106 | }
107 |
108 | try {
109 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "Transcoder: closing video filter...");
110 | videoFilter.close();
111 | } catch (Exception ex) {
112 | Logging.LOGGER.log(Level.WARNING, "Problem closing videoFilter", ex);
113 | }
114 | }
115 |
116 | Logging.LOGGER.log(Logging.DEBUG_LOG_LEVEL, "Transcoder.transcode() completed");
117 | }
118 |
119 | private MediaSourceSubstream handlePacket(SourceStream.Packet packet) throws IOException {
120 | if (packet == null) return null;
121 |
122 | MediaSourceSubstream substream = packet.getSourceStream();
123 |
124 | switch (packet.getSourceStream().getMediaType()) {
125 | case AUDIO:
126 | handleSubstream((AudioSourceSubstream) substream);
127 | break;
128 | case VIDEO:
129 | handleSubstream((VideoSourceSubstream) substream);
130 | break;
131 | }
132 |
133 | return substream;
134 | }
135 |
136 | private void handleSubstream(AudioSourceSubstream substream) throws IOException {
137 | Collection audioFrames = substream.drain();
138 | if (targetStream.getAudioTargetStream() != null)
139 | for (AudioFrame frame : audioFrames)
140 | for (AudioFrame filteredFrame : audioFilter.apply(frame))
141 | targetStream.getAudioTargetStream().write(filteredFrame);
142 | }
143 |
144 | private void handleSubstream(VideoSourceSubstream substream) throws IOException {
145 | Collection videoFrames = substream.drain();
146 | if (targetStream.getVideoTargetStream() != null)
147 | for (VideoFrame frame : videoFrames)
148 | for (VideoFrame filteredFrame : videoFilter.apply(frame))
149 | targetStream.getVideoTargetStream().write(filteredFrame);
150 | }
151 |
152 | public static void convert(SourceStream sourceStream, TargetStream targetStream,
153 | double speed) throws Exception {
154 | new Transcoder(sourceStream, targetStream, new AudioFilterNone(), new VideoFilterNone(), speed).transcode();
155 | }
156 |
157 | public static void convert(SourceStream sourceStream, TargetStream targetStream,
158 | AudioFilter audioFilter, VideoFilter videoFilter,
159 | double speed) throws Exception {
160 | new Transcoder(sourceStream, targetStream, audioFilter, videoFilter, speed).transcode();
161 | }
162 | }
163 |
--------------------------------------------------------------------------------
/src/test/java/FFmpegInputTest.java:
--------------------------------------------------------------------------------
1 | import com.github.manevolent.ffmpeg4j.*;
2 | import com.github.manevolent.ffmpeg4j.source.*;
3 | import com.github.manevolent.ffmpeg4j.stream.source.*;
4 | import org.junit.*;
5 |
6 | import java.io.*;
7 | import java.util.*;
8 | import java.util.logging.*;
9 |
10 | import static org.junit.Assert.*;
11 |
12 | public class FFmpegInputTest {
13 |
// Single-method callback receiving the object under test; may throw to fail
// the enclosing test. NOTE(review): the type parameter appears to have been
// lost in formatting -- presumably this was TestRunner<T>; confirm upstream.
interface TestRunner {
    void runTest(T object) throws Exception;
}
17 |
@BeforeClass
public static void setupLogLevel() {
    // Raise ffmpeg4j's debug log level to INFO once for the whole test class
    // so runs surface the library's internal progress messages.
    Logging.DEBUG_LOG_LEVEL = Level.INFO;
}
22 |
23 | private static void withSampleFile(TestRunner test) throws Exception {
24 | InputStream resource = FFmpegInputTest.class.getResourceAsStream("/sample-mp4-file-small.mp4");
25 | try (FFmpegInput input = FFmpegIO.openInputStream(resource, FFmpegIO.DEFAULT_BUFFER_SIZE)) {
26 | try (FFmpegSourceStream sourceStream = input.open("mp4")) {
27 | test.runTest(sourceStream);
28 | }
29 | }
30 | }
31 |
32 | /**
33 | * Double free can easily cause native crashes bringing the whole JVM down.
34 | * This test exists just to be sure there isn't a huge gap in the close logic.
35 | * Other gaps can still exist, however.
36 | */
@Test(expected = IllegalStateException.class)
public void testDoubleFree() throws Exception {
    withSampleFile(sourceStream -> {
        // Close every substream by hand first; withSampleFile() then closes
        // the source stream itself, whose second close of those substreams
        // must be detected and surfaced as the expected IllegalStateException
        // rather than double-freeing native memory.
        sourceStream.registerStreams().forEach(ss -> {
            try {
                ss.close();
            } catch (Exception e) {
                throw new AssertionError("Should not have failed to close", e);
            }
        });
    });
}
49 |
50 | @Test
51 | public void testOpen() throws Exception {
52 | withSampleFile(sourceStream -> {
53 | Collection> substreams = sourceStream.registerStreams();
54 |
55 | assertEquals(2, substreams.size());
56 |
57 | VideoSourceSubstream vss = (VideoSourceSubstream) sourceStream.getSubstreams(VideoSourceSubstream.class).stream().findFirst()
58 | .orElseThrow(() -> new AssertionError("No video substream, but was expected"));
59 |
60 | AudioSourceSubstream audioStream = (AudioSourceSubstream) sourceStream.getSubstreams(AudioSourceSubstream.class).stream().findFirst()
61 | .orElseThrow(() -> new AssertionError("No audio substream, but was expected"));
62 |
63 | VideoFormat videoFormat = vss.getFormat();
64 | assertEquals("Unexpected video width", 320, videoFormat.getWidth());
65 | assertEquals("Unexpected video width", 240, videoFormat.getHeight());
66 | assertEquals("Unexpected video frame rate (FPS)", 15.0D, videoFormat.getFramesPerSecond(), 0d);
67 |
68 | AudioFormat audioFormat = audioStream.getFormat();
69 | assertEquals("Unexpected audio sample rate", 48_000, audioFormat.getSampleRate());
70 | assertEquals("Unexpected audio sample rate", 6, audioFormat.getChannels());
71 | });
72 | }
73 |
74 | @Test
75 | public void testVideoFrame() throws Exception {
76 | withSampleFile(sourceStream -> {
77 | sourceStream.registerStreams();
78 |
79 | VideoSourceSubstream vss = (VideoSourceSubstream) sourceStream.getSubstreams(VideoSourceSubstream.class).stream().findFirst()
80 | .orElseThrow(() -> new AssertionError("No video substream, but was expected"));
81 |
82 | VideoFrame frame = vss.next();
83 |
84 | assertNotNull(frame);
85 |
86 | // The frame size should match the expected format
87 | assertEquals(vss.getFormat().getWidth(), frame.getWidth());
88 | assertEquals(vss.getFormat().getHeight(), frame.getHeight());
89 |
90 | // The first frame should be at 0:00, and should have a duration of 1/15th a second
91 | assertEquals(0D, frame.getPosition(), 0D);
92 | assertEquals(1D / vss.getFormat().getFramesPerSecond(), frame.getTime(), 0D);
93 |
94 | // The frame should have some data in it
95 | assertTrue(frame.getData().length > 0);
96 |
97 | // Advancing the frame should result in a realistic position on the next frame
98 | VideoFrame frame2 = vss.next();
99 | assertEquals(1D / vss.getFormat().getFramesPerSecond(), frame2.getPosition(), 0D);
100 | });
101 | }
102 |
103 |
104 | @Test
105 | public void testAudioFrame() throws Exception {
106 | withSampleFile(sourceStream -> {
107 | sourceStream.registerStreams();
108 |
109 | AudioSourceSubstream audioStream = (AudioSourceSubstream) sourceStream.getSubstreams(AudioSourceSubstream.class).stream().findFirst()
110 | .orElseThrow(() -> new AssertionError("No audio substream, but was expected"));
111 |
112 | AudioFrame frame = audioStream.next();
113 |
114 | assertNotNull(frame);
115 |
116 | // The frame size should match the expected format
117 | assertEquals(audioStream.getFormat().getSampleRate(), frame.getFormat().getSampleRate());
118 | assertEquals(audioStream.getFormat().getChannels(), frame.getFormat().getChannels());
119 |
120 | // The first frame should be at 0:00, and should have an appropriate duration
121 | assertEquals(0D, frame.getPosition(), 0D);
122 | assertEquals((double)frame.getSamples().length
123 | / (double)frame.getFormat().getSampleRate()
124 | / (double)frame.getFormat().getChannels(),
125 | frame.getTime(),
126 | 0.001D); // 1ms accuracy seems reasonable
127 |
128 | // The frame should have some data in it
129 | assertTrue(frame.getSamples().length > 0);
130 |
131 | // Advancing the frame should result in a realistic position on the next frame
132 | AudioFrame frame2 = audioStream.next();
133 | assertEquals(frame.getTime(), frame2.getPosition(), 0D);
134 | });
135 | }
136 |
137 | @Test
138 | public void testSeek_NearEnd() throws Exception {
139 | double seekPosition = 29.9D; // 29 seconds, near the end of the file
140 | double spf = 1/15D;
141 | withSampleFile(sourceStream -> {
142 | // Switch off decoding frame images to speed up the test
143 | // You should do this in your app, too, if you don't care about data while you seek.
144 | sourceStream.registerStreams().forEach(ss -> ss.setDecoding(false));
145 | double realSeek = sourceStream.seek(seekPosition);
146 | assertEquals("Seek was not accurate enough", seekPosition, realSeek, spf);
147 | });
148 | }
149 |
150 | @Test(expected = EOFException.class)
151 | public void testSeek_PastEnd() throws Exception {
152 | double seekPosition = 40D; // 40 seconds, past the end of the file
153 | double fps = 1/15D;
154 | withSampleFile(sourceStream -> {
155 | // Switch off decoding frame images to speed up the test
156 | // You should do this in your app, too, if you don't care about data while you seek.
157 | sourceStream.registerStreams().forEach(ss -> ss.setDecoding(false));
158 | double realSeek = sourceStream.seek(seekPosition);
159 | assertEquals("Seek was not accurate enough", seekPosition, realSeek, fps);
160 | });
161 | }
162 |
163 | @Test
164 | public void testSeek_Rewind() throws Exception {
165 | double seekPosition = 10D;
166 | withSampleFile(sourceStream -> {
167 | sourceStream.registerStreams().forEach(ss -> ss.setDecoding(false));
168 | sourceStream.seek(seekPosition);
169 |
170 | // Rewinding is not supported since we don't have seekable data sources;
171 | // we only support InputStreams right now. It's definitely possible to
172 | // support seeking backwards with appropriate changes to FFmpegIO to support
173 | // a SeekableByteChannel or something like that.
174 | assertThrows(IllegalStateException.class, () -> {
175 | sourceStream.seek(0D);
176 | });
177 | });
178 | }
179 |
180 | }
181 |
--------------------------------------------------------------------------------
/src/test/java/FFmpegTest.java:
--------------------------------------------------------------------------------
1 |
2 | import com.github.manevolent.ffmpeg4j.*;
3 | import org.bytedeco.ffmpeg.avcodec.*;
4 | import org.bytedeco.ffmpeg.avformat.*;
5 | import org.junit.*;
6 |
7 | import java.util.logging.*;
8 |
9 | import static org.junit.Assert.assertEquals;
10 |
11 | public class FFmpegTest {
12 |
13 | @BeforeClass
14 | public static void setupLogLevel() {
15 | Logging.DEBUG_LOG_LEVEL = Level.INFO;
16 | }
17 |
18 | @Test(expected = FFmpegException.class)
19 | public void testGetInputFormat_Invalid() throws FFmpegException {
20 | FFmpeg.getInputFormatByName("does_not_exist");
21 | }
22 |
23 | @Test
24 | public void testGetInputFormat() throws FFmpegException {
25 | AVInputFormat mp3Format = FFmpeg.getInputFormatByName("mp3");
26 | assertEquals("mp3", mp3Format.name().getString());
27 |
28 | AVInputFormat mp4Format = FFmpeg.getInputFormatByName("mp4");
29 | assertEquals("QuickTime / MOV", mp4Format.long_name().getString());
30 |
31 | AVInputFormat movFormat = FFmpeg.getInputFormatByName("mov");
32 | assertEquals(mp4Format, movFormat);
33 |
34 | AVInputFormat webmFormat = FFmpeg.getInputFormatByName("webm");
35 | assertEquals("Matroska / WebM", webmFormat.long_name().getString());
36 | }
37 |
38 |
39 | @Test(expected = FFmpegException.class)
40 | public void testGetOutputFormat_Invalid() throws FFmpegException {
41 | AVOutputFormat mp3Format = FFmpeg.getOutputFormatByName("does_not_exist");
42 | }
43 |
44 | @Test
45 | public void testGetOutputFormat() throws FFmpegException {
46 | AVOutputFormat mp3Format = FFmpeg.getOutputFormatByName("mp3");
47 | assertEquals("mp3", mp3Format.name().getString());
48 |
49 | AVOutputFormat mp4Format = FFmpeg.getOutputFormatByName("mp4");
50 | assertEquals("MP4 (MPEG-4 Part 14)", mp4Format.long_name().getString());
51 |
52 | AVOutputFormat webmFormat = FFmpeg.getOutputFormatByName("webm");
53 | assertEquals("WebM", webmFormat.long_name().getString());
54 | }
55 |
56 | @Test
57 | public void testGetCodecByName() throws FFmpegException {
58 | AVCodec mp3Codec = FFmpeg.getCodecByName("mp3");
59 | assertEquals("mp3", mp3Codec.name().getString());
60 |
61 | AVCodec h264Codec = FFmpeg.getCodecByName("h264");
62 | assertEquals("h264", h264Codec.name().getString());
63 | }
64 |
65 | @Test
66 | public void testGetOutputFormatByMime() throws FFmpegException {
67 | AVOutputFormat mp4Format = FFmpeg.getOutputFormatByMime("audio/mpeg");
68 | assertEquals("mp2", mp4Format.name().getString());
69 |
70 | AVOutputFormat mp4VideoFormat = FFmpeg.getOutputFormatByMime("video/mp4");
71 | assertEquals("ipod", mp4VideoFormat.name().getString());
72 | }
73 |
74 | @Test
75 | public void testGetOutputFormatByExtension() throws FFmpegException {
76 | AVOutputFormat mp4Format = FFmpeg.getOutputFormatByExtension("mp4");
77 | assertEquals("mp4", mp4Format.name().getString());
78 | }
79 |
80 | @Test
81 | public void testGetInputFormatByExtension() throws FFmpegException {
82 | AVInputFormat mp4VideoFormat = FFmpeg.getInputFormatByExtension("mp4");
83 | assertEquals("mov,mp4,m4a,3gp,3g2,mj2", mp4VideoFormat.name().getString());
84 | }
85 |
86 |
87 | @Test(expected = FFmpegException.class)
88 | public void testGetOutputFormatByMime_Invalid() throws FFmpegException {
89 | FFmpeg.getOutputFormatByMime("bad/mime_type");
90 | }
91 |
92 | }
93 |
--------------------------------------------------------------------------------
/src/test/java/FFmpegTranscodeTest.java:
--------------------------------------------------------------------------------
1 | import com.github.manevolent.ffmpeg4j.AudioFormat;
2 | import com.github.manevolent.ffmpeg4j.FFmpegIO;
3 | import com.github.manevolent.ffmpeg4j.source.AudioSourceSubstream;
4 | import com.github.manevolent.ffmpeg4j.stream.output.FFmpegTargetStream;
5 | import com.github.manevolent.ffmpeg4j.stream.source.FFmpegSourceStream;
6 | import com.github.manevolent.ffmpeg4j.transcoder.Transcoder;
7 | import org.junit.Test;
8 |
9 | import java.nio.file.Files;
10 | import java.nio.file.Path;
11 | import java.nio.file.StandardOpenOption;
12 | import java.util.HashMap;
13 | import java.util.Map;
14 |
15 | public class FFmpegTranscodeTest {
16 | @Test
17 | public void testTranscode() throws Exception {
18 | Map options = new HashMap<>();
19 | options.put("strict", "experimental");
20 | Path tempFile = Files.createTempFile("temp-audio", null);
21 | FFmpegTargetStream targetStream = FFmpegIO.openChannel(Files.newByteChannel(tempFile, StandardOpenOption.WRITE)).asOutput().open("mp3");
22 | try (FFmpegSourceStream sourceStream = FFmpegIO.openInputStream(FFmpegTranscodeTest.class.getResourceAsStream("/example.ogg")).open("ogg")) {
23 | sourceStream.registerStreams();
24 |
25 | AudioSourceSubstream mediaSourceSubstream = (AudioSourceSubstream) sourceStream.getSubstreams().get(0);
26 | AudioFormat audioFormat = mediaSourceSubstream.getFormat();
27 |
28 | targetStream.registerAudioSubstream("libmp3lame", audioFormat, options);
29 |
30 | Transcoder.convert(sourceStream, targetStream, Double.MAX_VALUE);
31 | }
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/src/test/resources/example.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Manevolent/ffmpeg4j/4f7db5710c8e0b762e23365fcf5705c26092b23b/src/test/resources/example.ogg
--------------------------------------------------------------------------------
/src/test/resources/sample-mp4-file-small.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Manevolent/ffmpeg4j/4f7db5710c8e0b762e23365fcf5705c26092b23b/src/test/resources/sample-mp4-file-small.mp4
--------------------------------------------------------------------------------