├── src
│   └── main
│       └── java
│           ├── util
│           │   └── Canvas.java
│           ├── WebcamCapture.java
│           ├── GrabScreen.java
│           ├── DemuxAndDecodeH264.java
│           └── EncodeAndMuxH264.java
├── readme.md
└── pom.xml
/src/main/java/util/Canvas.java:
--------------------------------------------------------------------------------
1 | package util;
2 |
3 | import javax.swing.*;
4 | import java.awt.*;
5 | import java.awt.image.BufferedImage;
6 |
/**
 * Swing panel that displays the most recently submitted video frame.
 *
 * <p>Thread-safety: {@link #setImage(BufferedImage)} may be called from a
 * capture/decoder thread; it only stores the reference and schedules a
 * repaint, the actual drawing happens on the EDT.
 */
public class Canvas extends JPanel {
    /** Most recent frame to display; null until the first frame arrives. */
    private BufferedImage bufferedImage;

    /**
     * Paints the current frame at the panel's upper-left corner.
     *
     * <p>Fix: override {@code paintComponent} (not {@code paint}) and call
     * {@code super.paintComponent} so the background is cleared each cycle;
     * the original override of {@code paint} without a super call left stale
     * pixels wherever the frame did not cover the panel.
     */
    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);
        if (bufferedImage != null) {
            ((Graphics2D) g).drawImage(bufferedImage, 0, 0, null);
        }
    }

    /**
     * Replaces the displayed frame and schedules an asynchronous repaint.
     *
     * @param bufferedImage new frame to show; may be null to clear the panel
     */
    public void setImage(BufferedImage bufferedImage) {
        this.bufferedImage = bufferedImage;
        repaint();
    }
}
23 |
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 | # ffmpeg java samples
2 |
3 |
4 | ### Demux and decode h264
5 |
6 | * demux MKV file
7 | * decode h264 video stream
8 | * convert yuv420p `AVFrame` to RGB `AVFrame`
9 | * convert `AVFrame` to java `BufferedImage`
10 |
11 | see [DemuxAndDecodeH264.java](https://github.com/vzhn/ffmpeg-java-samples/blob/master/src/main/java/DemuxAndDecodeH264.java)
12 |
13 | ### Encode and mux h264
14 | * draw pictures on java `BufferedImage`
15 | * convert `BufferedImage` to RGB `AVFrame`
16 | * convert RGB `AVFrame` to yuv420p `AVFrame`
17 | * encode `AVFrame` and get sequence of `AVPacket`'s
18 | * mux `AVPackets` to Matroska media container
19 |
20 | see [EncodeAndMuxH264.java](https://github.com/vzhn/ffmpeg-java-samples/blob/master/src/main/java/EncodeAndMuxH264.java)
21 |
22 |
23 | ### Grab screen
24 | * get picture data from `x11grab` device
25 | * convert picture data to RGB format
26 | * convert RGB data to java `BufferedImage`
27 |
28 | see [GrabScreen.java](https://github.com/vzhn/ffmpeg-java-samples/blob/master/src/main/java/GrabScreen.java)
29 |
30 | ### USB Webcam capture (Linux)
31 | Works well with my Logitech webcam
32 |
33 | * get picture data from `v4l2` device in `mjpeg` format
34 | * convert mjpeg to BufferedImage
35 | * show BufferedImage on JFrame
36 |
37 | see [WebcamCapture.java](https://github.com/vzhn/ffmpeg-java-samples/blob/master/src/main/java/WebcamCapture.java)
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>me.vzhilin</groupId>
    <artifactId>h264-decoder</artifactId>
    <version>0.0.1-SNAPSHOT</version>

    <properties>
        <ffmpeg-preset>4.1-1.4.4</ffmpeg-preset>
        <maven.compiler.source>9</maven.compiler.source>
        <maven.compiler.target>9</maven.compiler.target>
    </properties>

    <dependencies>
        <dependency>
            <groupId>commons-cli</groupId>
            <artifactId>commons-cli</artifactId>
            <version>1.4</version>
        </dependency>
        <dependency>
            <groupId>org.bytedeco.javacpp-presets</groupId>
            <artifactId>ffmpeg</artifactId>
            <version>${ffmpeg-preset}</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.5.1</version>
                <configuration>
                    <source>${maven.compiler.source}</source>
                    <target>${maven.compiler.target}</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <profiles>
        <profile>
            <id>linux-x86_64</id>
            <activation>
                <os>
                    <family>unix</family>
                </os>
            </activation>
            <dependencies>
                <dependency>
                    <groupId>org.bytedeco.javacpp-presets</groupId>
                    <artifactId>ffmpeg</artifactId>
                    <version>${ffmpeg-preset}</version>
                    <classifier>linux-x86_64</classifier>
                </dependency>
            </dependencies>
        </profile>
    </profiles>
</project>
--------------------------------------------------------------------------------
/src/main/java/WebcamCapture.java:
--------------------------------------------------------------------------------
1 | import org.bytedeco.javacpp.avcodec;
2 | import util.Canvas;
3 |
4 | import javax.imageio.ImageIO;
5 | import javax.swing.*;
6 | import java.awt.*;
7 | import java.io.ByteArrayInputStream;
8 | import java.io.IOException;
9 |
10 | import static org.bytedeco.javacpp.avcodec.av_packet_unref;
11 | import static org.bytedeco.javacpp.avdevice.avdevice_register_all;
12 | import static org.bytedeco.javacpp.avformat.*;
13 | import static org.bytedeco.javacpp.avutil.*;
14 |
15 | public class WebcamCapture {
16 | public static final int WIDTH = 800;
17 | public static final int HEIGHT = 600;
18 | private Canvas canvas;
19 |
20 | public static void main(String... argv) throws IOException {
21 | new WebcamCapture().start();
22 | }
23 |
24 | private void start() throws IOException {
25 | JFrame frame = new JFrame();
26 | frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
27 | frame.setLayout(new BorderLayout());
28 | canvas = new Canvas();
29 | frame.add(canvas, BorderLayout.CENTER);
30 | frame.setVisible(true);
31 | frame.setSize(WIDTH, HEIGHT);
32 | startCapture();
33 | }
34 |
35 | private void startCapture() throws IOException {
36 | av_log_set_level(AV_LOG_VERBOSE);
37 | avdevice_register_all();
38 | AVInputFormat v4l2 = av_find_input_format("v4l2");
39 | if (v4l2 == null) {
40 | throw new RuntimeException("v4l2 not found");
41 | }
42 | AVFormatContext v4l2Device = avformat_alloc_context();
43 | if (v4l2Device == null) {
44 | throw new RuntimeException("failed to alloc AVFormatContext");
45 | }
46 |
47 | AVDictionary options = new AVDictionary();
48 | av_dict_set(options, "input_format", "mjpeg", 0);
49 |
50 | if(avformat_open_input(v4l2Device, "/dev/video0", v4l2, options) != 0) {
51 | throw new RuntimeException("Couldn't open input stream.\n");
52 | }
53 | av_dict_free(options);
54 |
55 | av_dump_format(v4l2Device, 0, "", 0);
56 | if (v4l2Device.nb_streams() == 0) {
57 | throw new RuntimeException("Stream not found!");
58 | }
59 |
60 | avcodec.AVPacket pkt = new avcodec.AVPacket();
61 | while (true) {
62 | av_read_frame(v4l2Device, pkt);
63 | byte[] data = new byte[pkt.size()];
64 | pkt.data().get(data);
65 | av_packet_unref(pkt);
66 |
67 | canvas.setImage(ImageIO.read(new ByteArrayInputStream(data)));
68 | }
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
/src/main/java/GrabScreen.java:
--------------------------------------------------------------------------------
1 | import org.apache.commons.cli.*;
2 | import org.bytedeco.javacpp.*;
3 |
4 | import javax.swing.*;
5 | import java.awt.*;
6 | import java.awt.image.BufferedImage;
7 | import java.awt.image.DataBufferByte;
8 |
9 | import static java.lang.String.format;
10 | import static org.bytedeco.javacpp.avcodec.av_packet_unref;
11 | import static org.bytedeco.javacpp.avdevice.avdevice_register_all;
12 | import static org.bytedeco.javacpp.avformat.*;
13 | import static org.bytedeco.javacpp.avutil.*;
14 | import static org.bytedeco.javacpp.swscale.sws_freeContext;
15 | import static org.bytedeco.javacpp.swscale.sws_getContext;
16 |
17 | public final class GrabScreen {
18 | /** upper left corner coordinates */
19 | private static final String DEFAULT_X = "0";
20 | private static final String DEFAULT_Y = "0";
21 |
22 | /** screen fragment dimensions */
23 | private static final String DEFAULT_WIDTH = "640";
24 | private static final String DEFAULT_HEIGHT = "480";
25 |
26 | private int width;
27 | private int height;
28 | private int x;
29 | private int y;
30 | private String display;
31 |
32 | private AVInputFormat x11grab;
33 | private AVFormatContext x11GrabDevice;
34 | private avcodec.AVPacket pkt;
35 | private BufferedImage bufferedImage;
36 | private AVFrame rgbFrame;
37 | private swscale.SwsContext swsContext;
38 | private IntPointer bgr0Linesize;
39 |
40 | private GrabScreen() {}
41 |
42 | public static void main(String... argv) throws ParseException {
43 | av_log_set_level(AV_LOG_VERBOSE);
44 |
45 | Options options = new Options();
46 | options.addOption("help", false, "show help and exit");
47 | options.addOption("width", true, "width");
48 | options.addOption("height", true, "height");
49 | options.addOption("x", true, "x");
50 | options.addOption("y", true, "y");
51 | options.addOption("display", true, "display");
52 |
53 | CommandLine cmd = new DefaultParser().parse(options, argv);
54 | if (cmd.hasOption("help")) {
55 | HelpFormatter helpFormatter = new HelpFormatter();
56 | helpFormatter.printHelp("EncodeAndMuxH264 [options]", options);
57 | } else {
58 | System.out.println("options:");
59 | GrabScreen instance = new GrabScreen();
60 | instance.width = Integer.parseInt(getOption(cmd,"width", DEFAULT_WIDTH));
61 | instance.height = Integer.parseInt(getOption(cmd,"height", DEFAULT_HEIGHT));
62 | instance.x = Integer.parseInt(getOption(cmd,"x", DEFAULT_X));
63 | instance.y = Integer.parseInt(getOption(cmd,"y", DEFAULT_Y));
64 | instance.display = getOption(cmd, "display", System.getenv("DISPLAY"));
65 |
66 | instance.start();
67 | }
68 |
69 | GrabScreen instance = new GrabScreen();
70 | instance.start();
71 | }
72 |
73 | private static String getOption(CommandLine cmd, String key, String defaultValue) {
74 | String v = cmd.getOptionValue(key, defaultValue);
75 | System.out.println("\t" + key + " = \"" + v + "\"");
76 | return v;
77 | }
78 |
79 | private void setupX11GrabDevice() {
80 | avdevice_register_all();
81 | x11grab = av_find_input_format("x11grab");
82 | if (x11grab == null) {
83 | throw new RuntimeException("x11grab not found");
84 | }
85 | x11GrabDevice = avformat_alloc_context();
86 | if (x11GrabDevice == null) {
87 | throw new RuntimeException("x11grab device not found");
88 | }
89 |
90 | String url = format("%s.0+%d,%d", display, x, y);
91 | AVDictionary options = new AVDictionary();
92 | av_dict_set(options, "video_size", format("%dx%d", width, height), 0);
93 | if(avformat_open_input(x11GrabDevice, url, x11grab, options) != 0) {
94 | throw new RuntimeException("Couldn't open input stream.\n");
95 | }
96 | av_dict_free(options);
97 |
98 | av_dump_format(x11GrabDevice, 0, url, 0);
99 | if (x11GrabDevice.nb_streams() == 0) {
100 | throw new RuntimeException("Stream not found!");
101 | }
102 | int pixFormat = x11GrabDevice.streams(0).codecpar().format();
103 | if (pixFormat != AV_PIX_FMT_BGR0) {
104 | throw new RuntimeException("unsupported pixel format: " + pixFormat);
105 | }
106 | pkt = new avcodec.AVPacket();
107 | }
108 |
109 | private void start() {
110 | setupX11GrabDevice();
111 | allocRGB24Frame();
112 | allocSWSContext();
113 |
114 | JFrame frame = setupJFrame();
115 | PointerPointer pktDataPointer = new PointerPointer<>(1);
116 | while (frame.isShowing()) {
117 | av_read_frame(x11GrabDevice, pkt);
118 | pktDataPointer.put(pkt.data());
119 |
120 | swscale.sws_scale(
121 | swsContext, pktDataPointer, bgr0Linesize, 0,
122 | rgbFrame.height(), rgbFrame.data(), rgbFrame.linesize()
123 | );
124 |
125 | DataBufferByte buffer = (DataBufferByte) bufferedImage.getRaster().getDataBuffer();
126 | rgbFrame.data(0).get(buffer.getData());
127 | av_packet_unref(pkt);
128 |
129 | frame.repaint();
130 | }
131 | pktDataPointer.deallocate();
132 |
133 | av_frame_free(rgbFrame);
134 | avformat_close_input(x11GrabDevice);
135 | sws_freeContext(swsContext);
136 |
137 | frame.dispose();
138 | System.exit(0);
139 | }
140 |
141 | private JFrame setupJFrame() {
142 | this.bufferedImage = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
143 | JFrame frame = new JFrame() {
144 | @Override
145 | public void paint(Graphics g) {
146 | g.drawImage(bufferedImage, 0, 0, null);
147 | }
148 | };
149 | frame.setTitle("grab screen");
150 | frame.setSize(width, height);
151 | frame.setVisible(true);
152 | return frame;
153 | }
154 |
155 | private void allocRGB24Frame() {
156 | rgbFrame = av_frame_alloc();
157 | rgbFrame.format(AV_PIX_FMT_BGR24);
158 | rgbFrame.width(width);
159 | rgbFrame.height(height);
160 | int ret = av_frame_get_buffer(rgbFrame, 32);
161 | if (ret < 0) {
162 | throw new RuntimeException("Could not allocate the video frame data");
163 | }
164 | }
165 |
166 | private void allocSWSContext() {
167 | bgr0Linesize = new IntPointer(1);
168 | bgr0Linesize.put(4 * width);
169 | swsContext =
170 | sws_getContext(width, height, AV_PIX_FMT_BGR0,
171 | width, height, rgbFrame.format(), 0,
172 | null, null, (DoublePointer) null);
173 | }
174 | }
175 |
--------------------------------------------------------------------------------
/src/main/java/DemuxAndDecodeH264.java:
--------------------------------------------------------------------------------
1 | import org.bytedeco.javacpp.*;
2 |
3 | import javax.imageio.ImageIO;
4 | import java.awt.image.BufferedImage;
5 | import java.awt.image.DataBufferByte;
6 | import java.io.File;
7 | import java.io.IOException;
8 | import java.time.Duration;
9 | import java.time.temporal.ChronoUnit;
10 |
11 | import static org.bytedeco.javacpp.avcodec.*;
12 | import static org.bytedeco.javacpp.avformat.*;
13 | import static org.bytedeco.javacpp.avutil.*;
14 | import static org.bytedeco.javacpp.presets.avutil.AVERROR_EAGAIN;
15 |
16 | /**
17 | * Read and decode h264 video from matroska (MKV) container
18 | */
19 | public final class DemuxAndDecodeH264 {
20 | /** Matroska format context */
21 | private AVFormatContext avfmtCtx;
22 |
23 | /** Matroska video stream information */
24 | private AVStream videoStream;
25 |
26 | /** matroska packet */
27 | private AVPacket avpacket;
28 |
29 | /** H264 Decoder ID */
30 | private AVCodec codec;
31 |
32 | /** H264 Decoder context */
33 | private AVCodecContext codecContext;
34 |
35 | /** yuv420 frame */
36 | private AVFrame yuv420Frame;
37 |
38 | /** RGB frame */
39 | private AVFrame rgbFrame;
40 |
41 | /** java RGB frame */
42 | private BufferedImage img;
43 |
44 | /** yuv420 to rgb converter */
45 | private swscale.SwsContext sws_ctx;
46 |
47 | /** number of frame */
48 | private int nframe;
49 |
50 | /* 1/1000 of second */
51 | private AVRational tb1000;
52 |
53 | private DemuxAndDecodeH264() {
54 | tb1000 = new AVRational();
55 | tb1000.num(1);
56 | tb1000.den(1000);
57 | }
58 |
59 | public static void main(String... argv) throws IOException {
60 | new DemuxAndDecodeH264().start(argv);
61 | }
62 |
63 | private void start(String[] argv) throws IOException {
64 | av_log_set_level(AV_LOG_VERBOSE);
65 |
66 | openInput(argv[0]);
67 | findVideoStream();
68 | initDecoder();
69 | initRgbFrame();
70 | initYuv420Frame();
71 | getSwsContext();
72 |
73 | avpacket = new avcodec.AVPacket();
74 | while ((av_read_frame(avfmtCtx, avpacket)) >= 0) {
75 | if (avpacket.stream_index() == videoStream.index()) {
76 | processAVPacket(avpacket);
77 | }
78 | av_packet_unref(avpacket);
79 | }
80 | // now process delayed frames
81 | processAVPacket(null);
82 | free();
83 | }
84 |
85 | private AVFormatContext openInput(String file) throws IOException {
86 | avfmtCtx = new AVFormatContext(null);
87 | BytePointer filePointer = new BytePointer(file);
88 | int r = avformat.avformat_open_input(avfmtCtx, filePointer, null, null);
89 | filePointer.deallocate();
90 | if (r < 0) {
91 | avfmtCtx.close();
92 | throw new IOException("avformat_open_input error: " + r);
93 | }
94 | return avfmtCtx;
95 | }
96 |
97 | private void findVideoStream() throws IOException {
98 | int r = avformat_find_stream_info(avfmtCtx, (PointerPointer) null);
99 | if (r < 0) {
100 | avformat_close_input(avfmtCtx);
101 | avfmtCtx.close();
102 | throw new IOException("error: " + r);
103 | }
104 |
105 | PointerPointer decoderRet = new PointerPointer<>(1);
106 | int videoStreamNumber = av_find_best_stream(avfmtCtx, AVMEDIA_TYPE_VIDEO, -1, -1, decoderRet, 0);
107 | if (videoStreamNumber < 0) {
108 | throw new IOException("failed to find video stream");
109 | }
110 |
111 | if (decoderRet.get(AVCodec.class).id() != AV_CODEC_ID_H264) {
112 | throw new IOException("failed to find h264 stream");
113 | }
114 | decoderRet.deallocate();
115 | videoStream = avfmtCtx.streams(videoStreamNumber);
116 | }
117 |
118 | private void initDecoder() {
119 | codec = avcodec_find_decoder(AV_CODEC_ID_H264);
120 | codecContext = avcodec_alloc_context3(codec);
121 | if((codec.capabilities() & avcodec.AV_CODEC_CAP_TRUNCATED) != 0) {
122 | codecContext.flags(codecContext.flags() | avcodec.AV_CODEC_CAP_TRUNCATED);
123 | }
124 | avcodec_parameters_to_context(codecContext, videoStream.codecpar());
125 | if(avcodec_open2(codecContext, codec, (PointerPointer) null) < 0) {
126 | throw new RuntimeException("Error: could not open codec.\n");
127 | }
128 | }
129 |
130 | private void initYuv420Frame() {
131 | yuv420Frame = av_frame_alloc();
132 | if (yuv420Frame == null) {
133 | throw new RuntimeException("Could not allocate video frame\n");
134 | }
135 | }
136 |
137 | private void initRgbFrame() {
138 | rgbFrame = av_frame_alloc();
139 | rgbFrame.format(AV_PIX_FMT_BGR24);
140 | rgbFrame.width(codecContext.width());
141 | rgbFrame.height(codecContext.height());
142 | int ret = av_image_alloc(rgbFrame.data(),
143 | rgbFrame.linesize(),
144 | rgbFrame.width(),
145 | rgbFrame.height(),
146 | rgbFrame.format(),
147 | 1);
148 | if (ret < 0) {
149 | throw new RuntimeException("could not allocate buffer!");
150 | }
151 | img = new BufferedImage(rgbFrame.width(), rgbFrame.height(), BufferedImage.TYPE_3BYTE_BGR);
152 | }
153 |
154 | private void getSwsContext() {
155 | sws_ctx = swscale.sws_getContext(
156 | codecContext.width(), codecContext.height(), codecContext.pix_fmt(),
157 | rgbFrame.width(), rgbFrame.height(), rgbFrame.format(),
158 | 0, null, null, (DoublePointer) null);
159 | }
160 |
161 | private void processAVPacket(AVPacket avpacket) throws IOException {
162 | int ret = avcodec.avcodec_send_packet(codecContext, avpacket);
163 | if (ret < 0) {
164 | throw new RuntimeException("Error sending a packet for decoding\n");
165 | }
166 | receiveFrames();
167 | }
168 |
169 | private void receiveFrames() throws IOException {
170 | int ret = 0;
171 | while (ret >= 0) {
172 | ret = avcodec.avcodec_receive_frame(codecContext, yuv420Frame);
173 | if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) {
174 | continue;
175 | } else
176 | if (ret < 0) {
177 | throw new RuntimeException("error during decoding");
178 | }
179 | swscale.sws_scale(sws_ctx, yuv420Frame.data(), yuv420Frame.linesize(), 0,
180 | yuv420Frame.height(), rgbFrame.data(), rgbFrame.linesize());
181 |
182 | rgbFrame.best_effort_timestamp(yuv420Frame.best_effort_timestamp());
183 | processFrame(rgbFrame);
184 | }
185 | }
186 |
187 | private void processFrame(AVFrame rgbFrame) throws IOException {
188 | DataBufferByte buffer = (DataBufferByte) img.getRaster().getDataBuffer();
189 | rgbFrame.data(0).get(buffer.getData());
190 |
191 | long ptsMillis = av_rescale_q(rgbFrame.best_effort_timestamp(), videoStream.time_base(), tb1000);
192 | Duration d = Duration.of(ptsMillis, ChronoUnit.MILLIS);
193 |
194 | String name = String.format("img_%05d_%02d-%02d-%02d-%03d.png", ++nframe,
195 | d.toHoursPart(),
196 | d.toMinutesPart(),
197 | d.toSecondsPart(),
198 | d.toMillisPart());
199 | ImageIO.write(img, "png", new File(name));
200 | }
201 |
202 | private void free() {
203 | av_packet_unref(avpacket);
204 | avcodec.avcodec_close(codecContext);
205 | avcodec.avcodec_free_context(codecContext);
206 |
207 | swscale.sws_freeContext(sws_ctx);
208 | av_frame_free(rgbFrame);
209 | av_frame_free(yuv420Frame);
210 | avformat.avformat_close_input(avfmtCtx);
211 | avformat.avformat_free_context(avfmtCtx);
212 | }
213 | }
214 |
--------------------------------------------------------------------------------
/src/main/java/EncodeAndMuxH264.java:
--------------------------------------------------------------------------------
1 | import org.apache.commons.cli.*;
2 | import org.bytedeco.javacpp.*;
3 |
4 | import java.awt.*;
5 | import java.awt.image.BufferedImage;
6 | import java.awt.image.DataBufferByte;
7 |
8 | import static org.bytedeco.javacpp.avcodec.*;
9 | import static org.bytedeco.javacpp.avformat.*;
10 | import static org.bytedeco.javacpp.avutil.*;
11 | import static org.bytedeco.javacpp.swscale.SWS_BICUBIC;
12 | import static org.bytedeco.javacpp.swscale.sws_getContext;
13 |
14 | public final class EncodeAndMuxH264 {
15 | private final static String DEFAULT_FPS = "30";
16 | private static final String DEFAULT_BITRATE = "400000";
17 | private static final String DEFAULT_WIDTH = "640";
18 | private static final String DEFAULT_HEIGHT = "320";
19 | private static final String DEFAULT_GOP = "60";
20 | private static final String DEFAULT_MAX_B_FRAMES = "12";
21 | private static final String DEFAULT_N_FRAMES = "300";
22 | private static final String DEFAULT_PROFILE = "baseline";
23 | private static final String DEFAULT_FILE = "out.mkv";
24 |
25 | private AVFrame frame;
26 | private AVFrame rgbFrame;
27 | private swscale.SwsContext swsContext;
28 | private BufferedImage image;
29 | private AVCodecContext cc;
30 | private int fps;
31 | private int bitrate;
32 | private int width;
33 | private int height;
34 | private int gopSize;
35 | private int maxBFrames;
36 | private int nFrames;
37 | private String profile;
38 | private AVCodec codec;
39 | private AVFormatContext oc;
40 | private AVOutputFormat fmt;
41 | private String ofile;
42 | private AVRational streamTimebase;
43 | private AVRational codecTimebase;
44 | private AVPacket pkt;
45 |
46 | private EncodeAndMuxH264() {}
47 |
48 | public static void main(String... argv) throws ParseException {
49 | Options options = new Options();
50 | options.addOption("help", false, "show help and exit");
51 | options.addOption("fps", true, "fps");
52 | options.addOption("bitrate", true, "bitrate");
53 | options.addOption("width", true, "width");
54 | options.addOption("height", true, "height");
55 | options.addOption("gop", true, "gop");
56 | options.addOption("max_b_frames", true, "max_b_frames");
57 | options.addOption("n_frames", true, "number of frames");
58 | options.addOption("profile", true, "h264 profile");
59 | options.addOption("file", true, "output file name");
60 | CommandLine cmd = new DefaultParser().parse(options, argv);
61 | if (cmd.hasOption("help")) {
62 | HelpFormatter helpFormatter = new HelpFormatter();
63 | helpFormatter.printHelp("EncodeAndMuxH264 [options]", options);
64 | } else {
65 | System.out.println("options:");
66 | EncodeAndMuxH264 instance = new EncodeAndMuxH264();
67 | instance.fps = Integer.parseInt(getOption(cmd, "fps", DEFAULT_FPS));
68 | instance.bitrate = Integer.parseInt(getOption(cmd, "bitrate", DEFAULT_BITRATE));
69 | instance.width = Integer.parseInt(getOption(cmd,"width", DEFAULT_WIDTH));
70 | instance.height = Integer.parseInt(getOption(cmd,"height", DEFAULT_HEIGHT));
71 | instance.gopSize = Integer.parseInt(getOption(cmd,"gop", DEFAULT_GOP));
72 | instance.maxBFrames = Integer.parseInt(getOption(cmd,"max_b_frames", DEFAULT_MAX_B_FRAMES));
73 | instance.nFrames = Integer.parseInt(getOption(cmd,"n_frames", DEFAULT_N_FRAMES));
74 | instance.profile = getOption(cmd,"profile", DEFAULT_PROFILE);
75 | instance.ofile = getOption(cmd,"file", DEFAULT_FILE);
76 |
77 | instance.start();
78 | }
79 | }
80 |
81 | private static String getOption(CommandLine cmd, String key, String defaultValue) {
82 | String v = cmd.getOptionValue(key, defaultValue);
83 | System.out.println("\t" + key + " = \"" + v + "\"");
84 | return v;
85 | }
86 |
87 | private void start() {
88 | allocCodecContext();
89 |
90 | AVPacket pkt = av_packet_alloc();
91 |
92 | allocFrame(cc);
93 | allocRgbFrame(cc);
94 | allocSwsContext();
95 | allocOutputContext();
96 |
97 | encodeVideo(pkt);
98 | writeDelayedFrames();
99 |
100 | av_write_trailer(oc);
101 | free(cc, oc);
102 | }
103 |
104 | private void writeDelayedFrames() {
105 | sendFrame(null);
106 | }
107 |
108 | private void encodeVideo(AVPacket pkt) {
109 | for (int i = 0; i < nFrames; i++) {
110 | frame.pts(avutil.av_rescale_q(i, codecTimebase, streamTimebase));
111 |
112 | drawFrame(i);
113 | sendFrame(frame);
114 | }
115 | }
116 |
117 | private void sendFrame(AVFrame o) {
118 | int r = avcodec.avcodec_send_frame(cc, o);
119 | if (r == 0) {
120 | receivePacket();
121 | } else {
122 | throw new RuntimeException("error: " + r);
123 | }
124 | }
125 |
126 | private void drawFrame(int n) {
127 | Graphics gc = image.getGraphics();
128 | gc.clearRect(0, 0, image.getWidth(), image.getHeight());
129 | gc.setFont(gc.getFont().deriveFont(50f));
130 | gc.drawString(String.format("pts: %d", n), 200, 200);
131 | gc.dispose();
132 |
133 | DataBufferByte dataBufferByte = (DataBufferByte) image.getRaster().getDataBuffer();
134 | rgbFrame.data(0).put(dataBufferByte.getData());
135 |
136 | swscale.sws_scale(
137 | swsContext, rgbFrame.data(), rgbFrame.linesize(), 0,
138 | frame.height(), frame.data(), frame.linesize()
139 | );
140 | }
141 |
142 | private void allocOutputContext() {
143 | oc = new AVFormatContext();
144 | pkt = new AVPacket();
145 | int r = avformat_alloc_output_context2(oc, null, null, ofile);
146 | if (r < 0) {
147 | throw new RuntimeException("could not allocate output context");
148 | }
149 | fmt = oc.oformat();
150 | AVStream st = avformat_new_stream(oc, codec);
151 | avcodec_parameters_from_context(st.codecpar(), cc);
152 | st.time_base(cc.time_base());
153 |
154 | av_dump_format(oc, 0, ofile, 1);
155 |
156 | /* open the output file, if needed */
157 | PointerPointer pp = new PointerPointer(1);
158 | try {
159 | if (avio_open(pp, new BytePointer(ofile), AVIO_FLAG_WRITE) <0){
160 | throw new RuntimeException("Could not open " + fmt);
161 | }
162 | oc.pb(new AVIOContext(pp.get()));
163 | } finally {
164 | pp.deallocate();
165 | }
166 |
167 | /* Write the stream header, if any. */
168 | if (avformat_write_header(oc, (AVDictionary) null) < 0) {
169 | throw new RuntimeException("Error occurred when opening output file\n");
170 | }
171 |
172 | streamTimebase = st.time_base();
173 | }
174 |
175 | private void allocCodecContext() {
176 | codecTimebase = new avutil.AVRational();
177 | codecTimebase.num(1);
178 | codecTimebase.den(fps);
179 | codec = avcodec_find_encoder(AV_CODEC_ID_H264);
180 | cc = avcodec_alloc_context3(codec);
181 |
182 | cc.bit_rate(bitrate);
183 | cc.width(width);
184 | cc.height(height);
185 | cc.time_base(codecTimebase);
186 | cc.gop_size(gopSize);
187 | cc.max_b_frames(maxBFrames);
188 | if (profile != null && !"".equals(profile)) {
189 | av_opt_set(cc.priv_data(), "profile", profile, 0);
190 | }
191 |
192 | cc.pix_fmt(avutil.AV_PIX_FMT_YUV420P);
193 | cc.flags(cc.flags() | AV_CODEC_FLAG_GLOBAL_HEADER);
194 | if (avcodec_open2(cc, codec, (AVDictionary) null) < 0) {
195 | throw new RuntimeException("could not open codec");
196 | }
197 | }
198 |
199 | private void free(AVCodecContext cc, AVFormatContext oc) {
200 | avcodec_close(cc);
201 | avcodec_free_context(cc);
202 | av_free(rgbFrame.data(0));
203 | av_free(frame.data(0));
204 | av_free(rgbFrame);
205 | av_free(frame);
206 |
207 | avio_close(oc.pb());
208 | av_free(oc);
209 | }
210 |
211 | private void allocSwsContext() {
212 | swsContext = sws_getContext(rgbFrame.width(), rgbFrame.height(), rgbFrame.format(),
213 | frame.width(), frame.height(), frame.format(), SWS_BICUBIC,
214 | null, null, (DoublePointer) null);
215 |
216 | if (swsContext.isNull()) {
217 | throw new RuntimeException("Could not init sws context!");
218 | }
219 | }
220 |
221 | private void allocRgbFrame(AVCodecContext cc) {
222 | image = new BufferedImage(cc.width(), cc.height(), BufferedImage.TYPE_3BYTE_BGR);
223 |
224 | rgbFrame = av_frame_alloc();
225 | rgbFrame.format(AV_PIX_FMT_BGR24);
226 | rgbFrame.width(cc.width());
227 | rgbFrame.height(cc.height());
228 | int ret = av_frame_get_buffer(rgbFrame, 32);
229 | if (ret < 0) {
230 | throw new RuntimeException("Could not allocate the video frame data");
231 | }
232 | }
233 |
234 | private void allocFrame(AVCodecContext cc) {
235 | frame = av_frame_alloc();
236 | frame.format(cc.pix_fmt());
237 | frame.width(cc.width());
238 | frame.height(cc.height());
239 | int ret = av_frame_get_buffer(frame, 32);
240 | if (ret < 0) {
241 | throw new RuntimeException("Could not allocate the video frame data");
242 | }
243 | }
244 |
245 | private void receivePacket() {
246 | int r;
247 | while ((r = avcodec.avcodec_receive_packet(cc, pkt)) == 0) {
248 | r = av_interleaved_write_frame(oc, pkt);
249 | av_packet_unref(pkt);
250 | if (r != 0) {
251 | throw new RuntimeException("Error while writing video frame\n");
252 | }
253 | }
254 |
255 | if (r != AVERROR_EAGAIN() && r != AVERROR_EOF()) {
256 | throw new RuntimeException("error");
257 | }
258 | }
259 | }
--------------------------------------------------------------------------------