├── 10.10.3.video_decode_by_cuda_display_by_qml
│   ├── qml.qrc
│   ├── main.cpp
│   ├── nv12render.h
│   ├── 10.10.3.video_decode_by_cuda_display_by_qml.pro
│   ├── videoitem.h
│   ├── ffmpegdecoder.h
│   ├── main.qml
│   ├── videoitem.cpp
│   ├── nv12render.cpp
│   └── ffmpegdecoder.cpp
├── 10.09.video_decode_by_cpu_display_by_qml
│   ├── qml.qrc
│   ├── main.cpp
│   ├── main.qml
│   ├── i420render.h
│   ├── videoitem.h
│   ├── 10.09.video_decode_by_cpu_display_by_qml.pro
│   ├── ffmpegdecoder.h
│   ├── videoitem.cpp
│   ├── ffmpegdecoder.cpp
│   └── i420render.cpp
├── H264Parser
│   ├── main.cpp
│   ├── mainwindow.cpp
│   ├── mainwindow.h
│   ├── mainwindow.ui
│   └── H264Parser.pro
├── RTSParser
│   ├── main.cpp
│   ├── mainwindow.h
│   ├── mainwindow.cpp
│   ├── RTSParser.pro
│   ├── mainwindow.ui
│   └── rtspdata.h
├── 10.08.1.video_decode_by_cpu_display_by_qopengl
│   ├── main.cpp
│   ├── mainwindow.cpp
│   ├── mainwindow.h
│   ├── i420render2.h
│   ├── i420render.h
│   ├── 10.08.1.video_decode_by_cpu_display_by_qopengl.pro
│   ├── ffmpegdecoder.h
│   ├── mainwindow.ui
│   ├── ffmpegdecoder.cpp
│   ├── i420render2.cpp
│   └── i420render.cpp
├── 10.08.2.video_decode_by_cpu_display_by_qopengl
│   ├── main.cpp
│   ├── mainwindow.cpp
│   ├── mainwindow.h
│   ├── i420render2.h
│   ├── i420render.h
│   ├── 10.08.2.video_decode_by_cpu_display_by_qopengl.pro
│   ├── ffmpegdecoder.h
│   ├── mainwindow.ui
│   ├── ffmpegdecoder.cpp
│   ├── i420render2.cpp
│   └── i420render.cpp
├── 10.10.1.video_decode_by_cuda_display_by_qwidget
│   ├── main.cpp
│   ├── mainwindow.h
│   ├── mainwindow.cpp
│   ├── 10.10.1.video_decode_by_cuda_display_by_qwidget.pro
│   ├── mainwindow.ui
│   └── ffmpegvideo.h
├── 10.10.2.video_decode_by_cuda_display_by_qopengl
│   ├── main.cpp
│   ├── mainwindow.h
│   ├── mainwindow.cpp
│   ├── 10.10.2.video_decode_by_cuda_display_by_qopengl.pro
│   ├── mainwindow.ui
│   └── ffmpegvideo.h
├── 10.19.audio_video_sync
│   ├── main.cpp
│   ├── mainwindow.h
│   ├── mainwindow.cpp
│   ├── 10.19.audio_video_sync.pro
│   ├── mainwindow.ui
│   └── ffmpegwidget.h
├── 10.07.video_decode_by_cpu_display_by_qwidget
│   ├── main.cpp
│   ├── mainwindow.h
│   ├── mainwindow.cpp
│   ├── 10.07.video_decode_by_cpu_display_by_qwidget.pro
│   ├── ffmpegwidget.h
│   ├── mainwindow.ui
│   └── ffmpegwidget.cpp
├── 10.20.video_decode_add_filter_display_by_qwidget
│   ├── main.cpp
│   ├── mainwindow.h
│   ├── 10.20.video_decode_add_filter_display_by_qwidget.pro
│   ├── mainwindow.cpp
│   ├── ffmpegwidget.h
│   └── mainwindow.ui
├── 10.02.get_lib_version
│   ├── 10.02.get_lib_version.pro
│   └── main.c
├── 10.03.get_stream_info
│   ├── 10.03.get_stream_info.pro
│   └── main.c
├── 10.04.video_decode_flow
│   ├── 10.04.video_decode_flow.pro
│   └── main.c
├── 10.11.video_encode_yuv2h264
│   └── 10.11.video_encode_yuv2h264.pro
├── 10.12.video_encode_h2642mp4
│   ├── 10.12.video_encode_h2642mp4.pro
│   └── main.c
├── 10.14.audio_decode_mp32pcm
│   ├── 10.14.audio_decode_mp32pcm.pro
│   └── main.c
├── 10.18.audio_encode_pcm2mp3
│   ├── 10.18.audio_encode_pcm2mp3.pro
│   └── main.c
├── 10.05.video_decode_frame_save
│   ├── 10.05.video_decode_frame_save.pro
│   └── main.c
├── 10.15.audio_decode_swr_mp32pcm
│   ├── 10.15.audio_decode_swr_mp32pcm.pro
│   └── main.c
├── 10.23.video_muxer_mp3h2642mp4
│   └── 10.23.video_muxer_mp3h2642mp4.pro
├── 10.21.video_demuxer_mp42h264mp3
│   └── 10.21.video_demuxer_mp42h264mp3.pro
├── 10.22.video_demuxer_mp42yuvpcm
│   └── 10.22.video_demuxer_mp42yuvpcm.pro
├── 10.06.1.video_decode_mp42yuv420p
│   ├── 10.06.1.video_decode_mp42yuv420p.pro
│   └── main.c
├── 10.06.2.video_decode_mp42yuv420sp
│   ├── 10.06.2.video_decode_mp42yuv420sp.pro
│   └── main.c
├── 10.13.video_encode_camera2h264
│   └── 10.13.video_encode_camera2h264.pro
├── 10.16.audio_player_decode_by_ffmpeg_play_by_qt
│   ├── 10.16.audio_player_decode_by_ffmpeg_play_by_qt.pro
│   └── main.cpp
├── 10.17.audio_player_decode_from_mem_play_by_qt
│   └── 10.17.audio_player_decode_from_mem_play_by_qt.pro
├── .gitignore
├── LICENSE
├── ffmpeg.pro
├── ffmpeg.pri
└── README.md
/10.10.3.video_decode_by_cuda_display_by_qml/qml.qrc:
--------------------------------------------------------------------------------
1 | <RCC>
2 |     <qresource prefix="/">
3 |         <file>main.qml</file>
4 |     </qresource>
5 | </RCC>
6 | 
--------------------------------------------------------------------------------
/10.09.video_decode_by_cpu_display_by_qml/qml.qrc:
--------------------------------------------------------------------------------
1 | <RCC>
2 |     <qresource prefix="/">
3 |         <file>main.qml</file>
4 |     </qresource>
5 | </RCC>
6 | 
--------------------------------------------------------------------------------
/H264Parser/main.cpp:
--------------------------------------------------------------------------------
1 | #include "mainwindow.h"
2 | 
3 | #include <QApplication>
4 | 
5 | int main(int argc, char *argv[])
6 | {
7 |     QApplication a(argc, argv);
8 |     MainWindow w;
9 |     w.show();
10 |     return a.exec();
11 | }
12 | 
--------------------------------------------------------------------------------
/RTSParser/main.cpp:
--------------------------------------------------------------------------------
1 | #include "mainwindow.h"
2 | 
3 | #include <QApplication>
4 | 
5 | int main(int argc, char *argv[])
6 | {
7 |     QApplication a(argc, argv);
8 |     MainWindow w;
9 |     w.show();
10 |     return a.exec();
11 | }
12 | 
--------------------------------------------------------------------------------
/10.08.1.video_decode_by_cpu_display_by_qopengl/main.cpp:
--------------------------------------------------------------------------------
1 | #include "mainwindow.h"
2 | #include <QApplication>
3 | 
4 | int main(int argc, char *argv[])
5 | {
6 |     QApplication a(argc, argv);
7 |     MainWindow w;
8 |     w.show();
9 | 
10 |     return a.exec();
11 | }
12 | 
--------------------------------------------------------------------------------
/10.08.2.video_decode_by_cpu_display_by_qopengl/main.cpp:
--------------------------------------------------------------------------------
1 | #include "mainwindow.h"
2 | #include <QApplication>
3 | 
4 | int main(int argc, char *argv[])
5 | {
6 |     QApplication a(argc, argv);
7 |     MainWindow w;
8 |     w.show();
9 | 
10 |     return a.exec();
11 | }
12 | 
--------------------------------------------------------------------------------
/10.10.1.video_decode_by_cuda_display_by_qwidget/main.cpp:
--------------------------------------------------------------------------------
1 | #include "mainwindow.h"
2 | 
3 | #include <QApplication>
4 | 
5 | int main(int argc, char *argv[])
6 | {
7 |     QApplication a(argc, argv);
8 | 
9 |     MainWindow w;
10 |     w.show();
11 | 
12 |     return a.exec();
13 | }
14 | 
--------------------------------------------------------------------------------
/10.10.2.video_decode_by_cuda_display_by_qopengl/main.cpp:
--------------------------------------------------------------------------------
1 | #include "mainwindow.h"
2 | 
3 | #include <QApplication>
4 | 
5 | int main(int argc, char *argv[])
6 | {
7 |     QApplication a(argc, argv);
8 | 
9 |     MainWindow w;
10 |     w.show();
11 | 
12 |     return a.exec();
13 | }
14 | 
--------------------------------------------------------------------------------
/10.19.audio_video_sync/main.cpp:
--------------------------------------------------------------------------------
1 | #include "mainwindow.h"
2 | 
3 | #include <QApplication>
4 | 
5 | int main(int argc, char *argv[])
6 | {
7 |     QApplication a(argc, argv);
8 |     MainWindow w;
9 |     w.setWindowTitle(QObject::tr("Player"));
10 |     w.show();
11 | 
12 |     return a.exec();
13 | }
14 | 
--------------------------------------------------------------------------------
/H264Parser/mainwindow.cpp:
-------------------------------------------------------------------------------- 1 | #include "mainwindow.h" 2 | #include "ui_mainwindow.h" 3 | 4 | MainWindow::MainWindow(QWidget *parent) 5 | : QMainWindow(parent) 6 | , ui(new Ui::MainWindow) 7 | { 8 | ui->setupUi(this); 9 | } 10 | 11 | MainWindow::~MainWindow() 12 | { 13 | delete ui; 14 | } 15 | 16 | -------------------------------------------------------------------------------- /10.07.video_decode_by_cpu_display_by_qwidget/main.cpp: -------------------------------------------------------------------------------- 1 | #include "mainwindow.h" 2 | 3 | #include 4 | 5 | int main(int argc, char *argv[]) 6 | { 7 | QApplication a(argc, argv); 8 | MainWindow w; 9 | w.setWindowTitle(QObject::tr("Player")); 10 | w.show(); 11 | 12 | return a.exec(); 13 | } 14 | -------------------------------------------------------------------------------- /10.20.video_decode_add_filter_display_by_qwidget/main.cpp: -------------------------------------------------------------------------------- 1 | #include "mainwindow.h" 2 | 3 | #include 4 | 5 | int main(int argc, char *argv[]) 6 | { 7 | QApplication a(argc, argv); 8 | MainWindow w; 9 | w.setWindowTitle(QObject::tr("Player")); 10 | w.show(); 11 | 12 | return a.exec(); 13 | } 14 | -------------------------------------------------------------------------------- /10.02.get_lib_version/10.02.get_lib_version.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG += console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 3_version 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | SOURCES += \ 16 | main.c 17 | -------------------------------------------------------------------------------- /10.03.get_stream_info/10.03.get_stream_info.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG += console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 4_stream_info 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | SOURCES += \ 16 | main.c 17 | -------------------------------------------------------------------------------- /10.04.video_decode_flow/10.04.video_decode_flow.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG += console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 5_decode_flow 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | SOURCES += \ 16 | main.c 17 | -------------------------------------------------------------------------------- /10.11.video_encode_yuv2h264/10.11.video_encode_yuv2h264.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG += console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 12_encode_yuv2h264 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | SOURCES += \ 16 | main.c 17 | -------------------------------------------------------------------------------- /10.12.video_encode_h2642mp4/10.12.video_encode_h2642mp4.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG 
+= console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 13_encode_h2642mp4 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | SOURCES += \ 16 | main.c 17 | -------------------------------------------------------------------------------- /10.14.audio_decode_mp32pcm/10.14.audio_decode_mp32pcm.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG += console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 15_decode_mp32pcm 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | SOURCES += \ 16 | main.c 17 | -------------------------------------------------------------------------------- /10.18.audio_encode_pcm2mp3/10.18.audio_encode_pcm2mp3.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG += console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 19_encode_pcm2mp3 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | SOURCES += \ 16 | main.c 17 | -------------------------------------------------------------------------------- /10.05.video_decode_frame_save/10.05.video_decode_frame_save.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG += console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 6_save_decoded_frame 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | SOURCES += \ 16 | main.c 17 | -------------------------------------------------------------------------------- /10.15.audio_decode_swr_mp32pcm/10.15.audio_decode_swr_mp32pcm.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG += console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 16_decode_mp42pcm 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | SOURCES += \ 16 | main.c 17 | -------------------------------------------------------------------------------- /10.23.video_muxer_mp3h2642mp4/10.23.video_muxer_mp3h2642mp4.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG += console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 24_muxer_mp3h2642mp4 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | SOURCES += \ 16 | main.c 17 | -------------------------------------------------------------------------------- /10.21.video_demuxer_mp42h264mp3/10.21.video_demuxer_mp42h264mp3.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG += console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 22_demuxer_mp42h264mp3 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | SOURCES += \ 16 | main.cpp 17 | -------------------------------------------------------------------------------- /10.22.video_demuxer_mp42yuvpcm/10.22.video_demuxer_mp42yuvpcm.pro: 
-------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG += console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 23_demuxer_mp42yuvpcm 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | SOURCES += \ 16 | main.cpp 17 | -------------------------------------------------------------------------------- /10.06.1.video_decode_mp42yuv420p/10.06.1.video_decode_mp42yuv420p.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG += console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 7_1_decode_mp42yuv420p 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | SOURCES += \ 16 | main.c 17 | -------------------------------------------------------------------------------- /10.06.2.video_decode_mp42yuv420sp/10.06.2.video_decode_mp42yuv420sp.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG += console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 7_2_decode_mp42yuv420sp 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | SOURCES += \ 16 | main.c 17 | -------------------------------------------------------------------------------- /10.13.video_encode_camera2h264/10.13.video_encode_camera2h264.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = app 2 | CONFIG += console c++17 3 | CONFIG -= app_bundle 4 | CONFIG -= qt 5 | 6 | include(../ffmpeg.pri) 7 | 8 | DESTDIR = ../bin 9 | TARGET = 14_camera2h264 10 | OBJECTS_DIR = obj 11 | MOC_DIR = moc 12 | RCC_DIR = rcc 13 | UI_DIR = ui 14 | 15 | 16 | SOURCES += \ 17 | main.c 18 | -------------------------------------------------------------------------------- /10.16.audio_player_decode_by_ffmpeg_play_by_qt/10.16.audio_player_decode_by_ffmpeg_play_by_qt.pro: -------------------------------------------------------------------------------- 1 | QT += core multimedia testlib 2 | 3 | TEMPLATE = app 4 | CONFIG += console c++17 5 | CONFIG -= app_bundle 6 | 7 | include(../ffmpeg.pri) 8 | 9 | DESTDIR = ../bin 10 | TARGET = 17_audio_decode 11 | OBJECTS_DIR = obj 12 | MOC_DIR = moc 13 | RCC_DIR = rcc 14 | UI_DIR = ui 15 | 16 | SOURCES += \ 17 | main.cpp 18 | -------------------------------------------------------------------------------- /H264Parser/mainwindow.h: -------------------------------------------------------------------------------- 1 | #ifndef MAINWINDOW_H 2 | #define MAINWINDOW_H 3 | 4 | #include 5 | 6 | QT_BEGIN_NAMESPACE 7 | namespace Ui { class MainWindow; } 8 | QT_END_NAMESPACE 9 | 10 | class MainWindow : public QMainWindow 11 | { 12 | Q_OBJECT 13 | 14 | public: 15 | MainWindow(QWidget *parent = nullptr); 16 | ~MainWindow(); 17 | 18 | private: 19 | Ui::MainWindow *ui; 20 | }; 21 | #endif // MAINWINDOW_H 22 | -------------------------------------------------------------------------------- /10.08.1.video_decode_by_cpu_display_by_qopengl/mainwindow.cpp: -------------------------------------------------------------------------------- 1 | #include "mainwindow.h" 2 | #include "ui_mainwindow.h" 3 | 4 | MainWindow::MainWindow(QWidget *parent) : 5 | QMainWindow(parent), 6 | ui(new Ui::MainWindow) 7 | { 8 | ui->setupUi(this); 9 | } 
10 | 11 | MainWindow::~MainWindow() 12 | { 13 | delete ui; 14 | } 15 | 16 | void MainWindow::on_btnPlay_clicked() 17 | { 18 | ui->openGLWidget->setUrl(ui->lineUrl->text()); 19 | ui->openGLWidget->startVideo(); 20 | } 21 | -------------------------------------------------------------------------------- /10.08.2.video_decode_by_cpu_display_by_qopengl/mainwindow.cpp: -------------------------------------------------------------------------------- 1 | #include "mainwindow.h" 2 | #include "ui_mainwindow.h" 3 | 4 | MainWindow::MainWindow(QWidget *parent) : 5 | QMainWindow(parent), 6 | ui(new Ui::MainWindow) 7 | { 8 | ui->setupUi(this); 9 | } 10 | 11 | MainWindow::~MainWindow() 12 | { 13 | delete ui; 14 | } 15 | 16 | void MainWindow::on_btnPlay_clicked() 17 | { 18 | ui->openGLWidget->setUrl(ui->lineUrl->text()); 19 | ui->openGLWidget->startVideo(); 20 | } 21 | -------------------------------------------------------------------------------- /10.08.1.video_decode_by_cpu_display_by_qopengl/mainwindow.h: -------------------------------------------------------------------------------- 1 | #ifndef MAINWINDOW_H 2 | #define MAINWINDOW_H 3 | 4 | #include 5 | 6 | namespace Ui { 7 | class MainWindow; 8 | } 9 | 10 | class MainWindow : public QMainWindow 11 | { 12 | Q_OBJECT 13 | 14 | public: 15 | explicit MainWindow(QWidget *parent = 0); 16 | ~MainWindow(); 17 | 18 | private slots: 19 | void on_btnPlay_clicked(); 20 | 21 | private: 22 | Ui::MainWindow *ui; 23 | }; 24 | 25 | #endif // MAINWINDOW_H 26 | -------------------------------------------------------------------------------- /10.08.2.video_decode_by_cpu_display_by_qopengl/mainwindow.h: -------------------------------------------------------------------------------- 1 | #ifndef MAINWINDOW_H 2 | #define MAINWINDOW_H 3 | 4 | #include 5 | 6 | namespace Ui { 7 | class MainWindow; 8 | } 9 | 10 | class MainWindow : public QMainWindow 11 | { 12 | Q_OBJECT 13 | 14 | public: 15 | explicit MainWindow(QWidget *parent = 0); 16 | ~MainWindow(); 17 | 18 | private slots: 19 | void on_btnPlay_clicked(); 20 | 21 | private: 22 | Ui::MainWindow *ui; 23 | }; 24 | 25 | #endif // MAINWINDOW_H 26 | -------------------------------------------------------------------------------- /10.19.audio_video_sync/mainwindow.h: -------------------------------------------------------------------------------- 1 | #ifndef MAINWINDOW_H 2 | #define MAINWINDOW_H 3 | 4 | #include 5 | 6 | QT_BEGIN_NAMESPACE 7 | namespace Ui { class MainWindow; } 8 | QT_END_NAMESPACE 9 | 10 | class MainWindow : public QMainWindow 11 | { 12 | Q_OBJECT 13 | 14 | public: 15 | MainWindow(QWidget *parent = nullptr); 16 | ~MainWindow(); 17 | 18 | private slots: 19 | void on_btnPlay_clicked(); 20 | 21 | void on_btnStop_clicked(); 22 | 23 | private: 24 | Ui::MainWindow *ui; 25 | }; 26 | #endif // MAINWINDOW_H 27 | -------------------------------------------------------------------------------- /10.19.audio_video_sync/mainwindow.cpp: -------------------------------------------------------------------------------- 1 | #include "mainwindow.h" 2 | #include "ui_mainwindow.h" 3 | 4 | MainWindow::MainWindow(QWidget *parent) 5 | : QMainWindow(parent) 6 | , ui(new Ui::MainWindow) 7 | { 8 | ui->setupUi(this); 9 | } 10 | 11 | MainWindow::~MainWindow() 12 | { 13 | delete ui; 14 | } 15 | 16 | 17 | void MainWindow::on_btnPlay_clicked() 18 | { 19 | ui->wgtPlayer->setUrl(ui->lineUrl->text()); 20 | ui->wgtPlayer->play(); 21 | } 22 | 23 | void MainWindow::on_btnStop_clicked() 24 | { 25 | ui->wgtPlayer->stop(); 26 | } 27 | 
-------------------------------------------------------------------------------- /RTSParser/mainwindow.h: -------------------------------------------------------------------------------- 1 | #ifndef MAINWINDOW_H 2 | #define MAINWINDOW_H 3 | 4 | #include 5 | 6 | #include "rtspdata.h" 7 | 8 | QT_BEGIN_NAMESPACE 9 | namespace Ui { class MainWindow; } 10 | QT_END_NAMESPACE 11 | 12 | class MainWindow : public QMainWindow 13 | { 14 | Q_OBJECT 15 | 16 | public: 17 | MainWindow(QWidget *parent = nullptr); 18 | ~MainWindow(); 19 | 20 | private slots: 21 | void on_btnRun_clicked(); 22 | 23 | private: 24 | Ui::MainWindow *ui; 25 | 26 | RTSPData *rtsp; 27 | }; 28 | #endif // MAINWINDOW_H 29 | -------------------------------------------------------------------------------- /10.07.video_decode_by_cpu_display_by_qwidget/mainwindow.h: -------------------------------------------------------------------------------- 1 | #ifndef MAINWINDOW_H 2 | #define MAINWINDOW_H 3 | 4 | #include 5 | 6 | QT_BEGIN_NAMESPACE 7 | namespace Ui { class MainWindow; } 8 | QT_END_NAMESPACE 9 | 10 | class MainWindow : public QMainWindow 11 | { 12 | Q_OBJECT 13 | 14 | public: 15 | MainWindow(QWidget *parent = nullptr); 16 | ~MainWindow(); 17 | 18 | private slots: 19 | void on_btnPlay_clicked(); 20 | 21 | void on_btnStop_clicked(); 22 | 23 | private: 24 | Ui::MainWindow *ui; 25 | }; 26 | #endif // MAINWINDOW_H 27 | -------------------------------------------------------------------------------- /10.07.video_decode_by_cpu_display_by_qwidget/mainwindow.cpp: -------------------------------------------------------------------------------- 1 | #include "mainwindow.h" 2 | #include "ui_mainwindow.h" 3 | 4 | MainWindow::MainWindow(QWidget *parent) 5 | : QMainWindow(parent) 6 | , ui(new Ui::MainWindow) 7 | { 8 | ui->setupUi(this); 9 | } 10 | 11 | MainWindow::~MainWindow() 12 | { 13 | delete ui; 14 | } 15 | 16 | 17 | void MainWindow::on_btnPlay_clicked() 18 | { 19 | ui->wgtPlayer->setUrl(ui->lineUrl->text()); 20 | ui->wgtPlayer->play(); 21 | } 22 | 23 | void MainWindow::on_btnStop_clicked() 24 | { 25 | ui->wgtPlayer->stop(); 26 | } 27 | -------------------------------------------------------------------------------- /10.10.3.video_decode_by_cuda_display_by_qml/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | #include "videoitem.h" 5 | 6 | int main(int argc, char *argv[]) 7 | { 8 | QCoreApplication::setAttribute(Qt::AA_EnableHighDpiScaling); 9 | 10 | QGuiApplication app(argc, argv); 11 | 12 | QQmlApplicationEngine engine; 13 | 14 | qmlRegisterType("VideoItem",1,0,"VideoItem"); 15 | 16 | engine.load(QUrl(QStringLiteral("qrc:/main.qml"))); 17 | if (engine.rootObjects().isEmpty()) 18 | return -1; 19 | 20 | return app.exec(); 21 | } 22 | -------------------------------------------------------------------------------- /10.09.video_decode_by_cpu_display_by_qml/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | #include "videoitem.h" 5 | 6 | int main(int argc, char *argv[]) 7 | { 8 | QCoreApplication::setAttribute(Qt::AA_EnableHighDpiScaling); 9 | 10 | QGuiApplication app(argc, argv); 11 | 12 | qmlRegisterType("VideoItem",1,0,"VideoItem"); 13 | 14 | QQmlApplicationEngine engine; 15 | engine.load(QUrl(QStringLiteral("qrc:/main.qml"))); 16 | if (engine.rootObjects().isEmpty()) 17 | return -1; 18 | 19 | return app.exec(); 20 | } 21 | 
-------------------------------------------------------------------------------- /10.10.1.video_decode_by_cuda_display_by_qwidget/mainwindow.h: -------------------------------------------------------------------------------- 1 | #ifndef MAINWINDOW_H 2 | #define MAINWINDOW_H 3 | 4 | #include 5 | #include 6 | 7 | QT_BEGIN_NAMESPACE 8 | namespace Ui { class MainWindow; } 9 | QT_END_NAMESPACE 10 | 11 | class MainWindow : public QMainWindow 12 | { 13 | Q_OBJECT 14 | 15 | public: 16 | MainWindow(QWidget *parent = nullptr); 17 | ~MainWindow(); 18 | 19 | private slots: 20 | void on_btnPS_clicked(); 21 | 22 | private: 23 | Ui::MainWindow *ui; 24 | 25 | bool isPlay=false; 26 | }; 27 | #endif // MAINWINDOW_H 28 | -------------------------------------------------------------------------------- /10.10.2.video_decode_by_cuda_display_by_qopengl/mainwindow.h: -------------------------------------------------------------------------------- 1 | #ifndef MAINWINDOW_H 2 | #define MAINWINDOW_H 3 | 4 | #include 5 | #include 6 | 7 | QT_BEGIN_NAMESPACE 8 | namespace Ui { class MainWindow; } 9 | QT_END_NAMESPACE 10 | 11 | class MainWindow : public QMainWindow 12 | { 13 | Q_OBJECT 14 | 15 | public: 16 | MainWindow(QWidget *parent = nullptr); 17 | ~MainWindow(); 18 | 19 | private slots: 20 | void on_btnPS_clicked(); 21 | 22 | private: 23 | Ui::MainWindow *ui; 24 | 25 | bool isPlay=false; 26 | }; 27 | #endif // MAINWINDOW_H 28 | -------------------------------------------------------------------------------- /RTSParser/mainwindow.cpp: -------------------------------------------------------------------------------- 1 | #include "mainwindow.h" 2 | #include "ui_mainwindow.h" 3 | 4 | MainWindow::MainWindow(QWidget *parent) 5 | : QMainWindow(parent) 6 | , ui(new Ui::MainWindow) 7 | , rtsp(new RTSPData) 8 | { 9 | ui->setupUi(this); 10 | } 11 | 12 | MainWindow::~MainWindow() 13 | { 14 | if(rtsp->isRunning()){ 15 | rtsp->requestInterruption(); 16 | rtsp->quit(); 17 | rtsp->deleteLater(); 18 | } 19 | delete ui; 20 | } 21 | 22 | 23 | void MainWindow::on_btnRun_clicked() 24 | { 25 | rtsp->rtspInit("rtsp://192.168.1.31/test"); 26 | rtsp->start(); 27 | } 28 | 29 | -------------------------------------------------------------------------------- /H264Parser/mainwindow.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | MainWindow 4 | 5 | 6 | 7 | 0 8 | 0 9 | 800 10 | 600 11 | 12 | 13 | 14 | MainWindow 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /10.10.1.video_decode_by_cuda_display_by_qwidget/mainwindow.cpp: -------------------------------------------------------------------------------- 1 | #include "mainwindow.h" 2 | #include "ui_mainwindow.h" 3 | 4 | MainWindow::MainWindow(QWidget *parent) 5 | : QMainWindow(parent) 6 | , ui(new Ui::MainWindow) 7 | { 8 | ui->setupUi(this); 9 | } 10 | 11 | MainWindow::~MainWindow() 12 | { 13 | delete ui; 14 | } 15 | 16 | void MainWindow::on_btnPS_clicked() 17 | { 18 | QString url = ui->lineUrl->text().trimmed(); 19 | if(url.isEmpty()){ 20 | QMessageBox::information(this,tr("Warning"),"Please input url",QMessageBox::Ok); 21 | return; 22 | } 23 | ui->widget->play(url); 24 | } 25 | -------------------------------------------------------------------------------- /10.10.2.video_decode_by_cuda_display_by_qopengl/mainwindow.cpp: -------------------------------------------------------------------------------- 1 | #include "mainwindow.h" 2 | #include "ui_mainwindow.h" 3 | 4 | 
MainWindow::MainWindow(QWidget *parent) 5 | : QMainWindow(parent) 6 | , ui(new Ui::MainWindow) 7 | { 8 | ui->setupUi(this); 9 | } 10 | 11 | MainWindow::~MainWindow() 12 | { 13 | delete ui; 14 | } 15 | 16 | void MainWindow::on_btnPS_clicked() 17 | { 18 | QString url = ui->lineUrl->text().trimmed(); 19 | if(url.isEmpty()){ 20 | QMessageBox::information(this,tr("Warning"),"Please input url",QMessageBox::Ok); 21 | return; 22 | } 23 | ui->widget->play(url); 24 | } 25 | -------------------------------------------------------------------------------- /10.17.audio_player_decode_from_mem_play_by_qt/10.17.audio_player_decode_from_mem_play_by_qt.pro: -------------------------------------------------------------------------------- 1 | QT -= gui 2 | QT += sql multimedia 3 | 4 | CONFIG += c++17 console 5 | CONFIG -= app_bundle 6 | 7 | # You can make your code fail to compile if it uses deprecated APIs. 8 | # In order to do so, uncomment the following line. 9 | #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0 10 | 11 | include(../ffmpeg.pri) 12 | 13 | DESTDIR = ../bin 14 | TARGET = 18_decode_from_mem 15 | OBJECTS_DIR = obj 16 | MOC_DIR = moc 17 | RCC_DIR = rcc 18 | UI_DIR = ui 19 | 20 | SOURCES += \ 21 | main.cpp 22 | -------------------------------------------------------------------------------- /10.09.video_decode_by_cpu_display_by_qml/main.qml: -------------------------------------------------------------------------------- 1 | import QtQuick 2.9 2 | import QtQuick.Window 2.2 3 | import QtQuick.Controls 2.3 4 | 5 | import VideoItem 1.0 6 | 7 | Window { 8 | visible: true 9 | width: 1280 10 | height: 720 11 | title: qsTr("Hello World") 12 | 13 | VideoItem{ 14 | id:videoitem 15 | anchors.fill: parent 16 | } 17 | 18 | Button { 19 | id: button 20 | x: 29 21 | y: 27 22 | text: qsTr("Play") 23 | 24 | onClicked: { 25 | videoitem.setUrl("C:\\Users\\hyper\\Videos\\Sample.wmv") 26 | videoitem.start() 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /H264Parser/H264Parser.pro: -------------------------------------------------------------------------------- 1 | QT += core gui 2 | 3 | greaterThan(QT_MAJOR_VERSION, 4): QT += widgets 4 | 5 | CONFIG += c++17 6 | 7 | # You can make your code fail to compile if it uses deprecated APIs. 8 | # In order to do so, uncomment the following line. 9 | #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0 10 | 11 | SOURCES += \ 12 | main.cpp \ 13 | mainwindow.cpp 14 | 15 | HEADERS += \ 16 | mainwindow.h 17 | 18 | FORMS += \ 19 | mainwindow.ui 20 | 21 | # Default rules for deployment. 
22 | qnx: target.path = /tmp/$${TARGET}/bin 23 | else: unix:!android: target.path = /opt/$${TARGET}/bin 24 | !isEmpty(target.path): INSTALLS += target 25 | -------------------------------------------------------------------------------- /10.20.video_decode_add_filter_display_by_qwidget/mainwindow.h: -------------------------------------------------------------------------------- 1 | #ifndef MAINWINDOW_H 2 | #define MAINWINDOW_H 3 | 4 | #include 5 | #include 6 | 7 | QT_BEGIN_NAMESPACE 8 | namespace Ui { class MainWindow; } 9 | QT_END_NAMESPACE 10 | 11 | class MainWindow : public QMainWindow 12 | { 13 | Q_OBJECT 14 | 15 | public: 16 | MainWindow(QWidget *parent = nullptr); 17 | ~MainWindow(); 18 | 19 | private slots: 20 | void on_btnPlay_clicked(); 21 | 22 | void on_btnStop_clicked(); 23 | 24 | void on_spinBoxContrast_valueChanged(int c); 25 | 26 | void on_spinBoxLightness_valueChanged(int b); 27 | 28 | private: 29 | Ui::MainWindow *ui; 30 | 31 | int contrast=5,brightness=5; 32 | }; 33 | #endif // MAINWINDOW_H 34 | -------------------------------------------------------------------------------- /10.02.get_lib_version/main.c: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include "libavcodec/avcodec.h" 4 | #include "libavfilter/avfilter.h" 5 | #include "libavformat/avformat.h" 6 | #include "libavutil/avutil.h" 7 | #include "libavutil/ffversion.h" 8 | #include "libswresample/swresample.h" 9 | #include "libswscale/swscale.h" 10 | #include "libpostproc/postprocess.h" 11 | 12 | int main() 13 | { 14 | unsigned codecVer = avcodec_version(); 15 | int ver_major,ver_minor,ver_micro; 16 | ver_major = (codecVer>>16)&0xff; 17 | ver_minor = (codecVer>>8)&0xff; 18 | ver_micro = (codecVer)&0xff; 19 | printf("FFmpeg version is: %s .\navcodec version is: %d=%d.%d.%d.\n", 20 | FFMPEG_VERSION, 21 | codecVer,ver_major,ver_minor,ver_micro); 22 | 23 | return 0; 24 | } 25 | -------------------------------------------------------------------------------- /10.20.video_decode_add_filter_display_by_qwidget/10.20.video_decode_add_filter_display_by_qwidget.pro: -------------------------------------------------------------------------------- 1 | QT += core gui 2 | 3 | greaterThan(QT_MAJOR_VERSION, 4): QT += widgets 4 | 5 | CONFIG += c++17 6 | 7 | # You can make your code fail to compile if it uses deprecated APIs. 8 | # In order to do so, uncomment the following line. 
9 | #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0 10 | 11 | include(../ffmpeg.pri) 12 | 13 | DESTDIR = ../bin 14 | TARGET = 21_filter 15 | OBJECTS_DIR = obj 16 | MOC_DIR = moc 17 | RCC_DIR = rcc 18 | UI_DIR = ui 19 | 20 | SOURCES += \ 21 | ffmpegwidget.cpp \ 22 | main.cpp \ 23 | mainwindow.cpp 24 | 25 | HEADERS += \ 26 | ffmpegwidget.h \ 27 | mainwindow.h 28 | 29 | FORMS += \ 30 | mainwindow.ui 31 | -------------------------------------------------------------------------------- /10.10.3.video_decode_by_cuda_display_by_qml/nv12render.h: -------------------------------------------------------------------------------- 1 | #ifndef NV12RENDER_H 2 | #define NV12RENDER_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | class NV12Render : public QOpenGLFunctions 12 | { 13 | public: 14 | /// 15 | /// \brief NV12Render 构造函数 16 | /// 17 | NV12Render(); 18 | 19 | /// 20 | /// \brief render 渲染绘制纹理 21 | /// \param p 纹理数据指针 22 | /// \param width 纹理图片的宽度 23 | /// \param height 纹理图片的高度 24 | /// 25 | void render(uchar *p, int width, int height); 26 | 27 | private: 28 | QOpenGLShaderProgram program;//着色程序对象 29 | GLuint idY,idUV;//纹理分量ID 30 | QOpenGLBuffer vbo;//纹理buffer 31 | }; 32 | 33 | #endif // NV12RENDER_H 34 | -------------------------------------------------------------------------------- /RTSParser/RTSParser.pro: -------------------------------------------------------------------------------- 1 | QT += core gui 2 | 3 | greaterThan(QT_MAJOR_VERSION, 4): QT += widgets 4 | 5 | CONFIG += c++17 6 | 7 | # You can make your code fail to compile if it uses deprecated APIs. 8 | # In order to do so, uncomment the following line. 9 | #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0 10 | 11 | TARGET=rtsparser 12 | OBJECTS_DIR=obj 13 | MOC_DIR=moc 14 | RCC_DIR=rcc 15 | UI_DIR=ui 16 | 17 | SOURCES += \ 18 | main.cpp \ 19 | mainwindow.cpp \ 20 | rtspdata.cpp 21 | 22 | HEADERS += \ 23 | mainwindow.h \ 24 | rtspdata.h 25 | 26 | FORMS += \ 27 | mainwindow.ui 28 | 29 | # Default rules for deployment. 30 | qnx: target.path = /tmp/$${TARGET}/bin 31 | else: unix:!android: target.path = /opt/$${TARGET}/bin 32 | !isEmpty(target.path): INSTALLS += target 33 | -------------------------------------------------------------------------------- /10.10.1.video_decode_by_cuda_display_by_qwidget/10.10.1.video_decode_by_cuda_display_by_qwidget.pro: -------------------------------------------------------------------------------- 1 | QT += core gui 2 | 3 | greaterThan(QT_MAJOR_VERSION, 4): QT += widgets 4 | 5 | CONFIG += c++17 6 | 7 | # You can make your code fail to compile if it uses deprecated APIs. 8 | # In order to do so, uncomment the following line. 
9 | #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0 10 | 11 | include(../ffmpeg.pri) 12 | 13 | DESTDIR = ../bin 14 | TARGET = 11_1_decode_by_cuda_qwidget 15 | OBJECTS_DIR = obj 16 | MOC_DIR = moc 17 | RCC_DIR = rcc 18 | UI_DIR = ui 19 | 20 | SOURCES += \ 21 | ffmpegvideo.cpp \ 22 | main.cpp \ 23 | mainwindow.cpp 24 | 25 | HEADERS += \ 26 | ffmpegvideo.h \ 27 | mainwindow.h 28 | 29 | FORMS += \ 30 | mainwindow.ui 31 | -------------------------------------------------------------------------------- /10.10.2.video_decode_by_cuda_display_by_qopengl/10.10.2.video_decode_by_cuda_display_by_qopengl.pro: -------------------------------------------------------------------------------- 1 | QT += core gui 2 | 3 | greaterThan(QT_MAJOR_VERSION, 4): QT += widgets 4 | 5 | CONFIG += c++17 6 | 7 | # You can make your code fail to compile if it uses deprecated APIs. 8 | # In order to do so, uncomment the following line. 9 | #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0 10 | 11 | include(../ffmpeg.pri) 12 | 13 | DESTDIR = ../bin 14 | TARGET = 11_2_decode_by_cuda_qopengl 15 | OBJECTS_DIR = obj 16 | MOC_DIR = moc 17 | RCC_DIR = rcc 18 | UI_DIR = ui 19 | 20 | SOURCES += \ 21 | ffmpegvideo.cpp \ 22 | main.cpp \ 23 | mainwindow.cpp 24 | 25 | HEADERS += \ 26 | ffmpegvideo.h \ 27 | mainwindow.h 28 | 29 | FORMS += \ 30 | mainwindow.ui 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # C++ objects and libs 2 | *.slo 3 | *.lo 4 | *.o 5 | *.a 6 | *.la 7 | *.lai 8 | *.so 9 | *.so.* 10 | *.dll 11 | *.dylib 12 | 13 | # Qt-es 14 | object_script.*.Release 15 | object_script.*.Debug 16 | *_plugin_import.cpp 17 | /.qmake.cache 18 | /.qmake.stash 19 | *.pro.user 20 | *.pro.user.* 21 | *.qbs.user 22 | *.qbs.user.* 23 | *.moc 24 | moc_*.cpp 25 | moc_*.h 26 | qrc_*.cpp 27 | ui_*.h 28 | *.qmlc 29 | *.jsc 30 | Makefile* 31 | *build-* 32 | *.qm 33 | *.prl 34 | 35 | # Qt unit tests 36 | target_wrapper.* 37 | 38 | # QtCreator 39 | *.autosave 40 | 41 | # QtCreator Qml 42 | *.qmlproject.user 43 | *.qmlproject.user.* 44 | 45 | # QtCreator CMake 46 | CMakeLists.txt.user* 47 | 48 | # QtCreator 4.8< compilation database 49 | compile_commands.json 50 | 51 | # QtCreator local machine specific files for imported projects 52 | *creator.user* 53 | 54 | 55 | temp 56 | .qmake.stash 57 | lib 58 | -------------------------------------------------------------------------------- /10.08.1.video_decode_by_cpu_display_by_qopengl/i420render2.h: -------------------------------------------------------------------------------- 1 | #ifndef I420RENDER2_H 2 | #define I420RENDER2_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include "ffmpegdecoder.h" 12 | 13 | class I420Render2 : public QOpenGLWidget,public QOpenGLFunctions 14 | { 15 | Q_OBJECT 16 | public: 17 | I420Render2(QWidget *parent =nullptr); 18 | ~I420Render2(); 19 | 20 | void setUrl(QString url); 21 | 22 | void startVideo(); 23 | 24 | void initializeGL(); 25 | void resizeGL(int w,int h); 26 | void paintGL(); 27 | 28 | private: 29 | //shader程序 30 | QOpenGLShaderProgram m_program; 31 | QOpenGLBuffer vbo; 32 | 33 | int idY,idU,idV; 34 | 35 | int width,height; 36 | 37 | FFmpegDecoder *decoder; 38 | 39 | uchar* ptr; 40 | }; 41 | 42 | #endif // I420RENDER2_H 43 | 
-------------------------------------------------------------------------------- /10.08.2.video_decode_by_cpu_display_by_qopengl/i420render2.h: -------------------------------------------------------------------------------- 1 | #ifndef I420RENDER2_H 2 | #define I420RENDER2_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include "ffmpegdecoder.h" 12 | 13 | class I420Render2 : public QOpenGLWidget,public QOpenGLFunctions 14 | { 15 | Q_OBJECT 16 | public: 17 | I420Render2(QWidget *parent =nullptr); 18 | ~I420Render2(); 19 | 20 | void setUrl(QString url); 21 | 22 | void startVideo(); 23 | 24 | void initializeGL(); 25 | void resizeGL(int w,int h); 26 | void paintGL(); 27 | 28 | private: 29 | //shader程序 30 | QOpenGLShaderProgram m_program; 31 | QOpenGLBuffer vbo; 32 | 33 | int idY,idU,idV; 34 | 35 | int width,height; 36 | 37 | FFmpegDecoder *decoder; 38 | 39 | uchar* ptr; 40 | }; 41 | 42 | #endif // I420RENDER2_H 43 | -------------------------------------------------------------------------------- /10.09.video_decode_by_cpu_display_by_qml/i420render.h: -------------------------------------------------------------------------------- 1 | #ifndef I420RENDER2_H 2 | #define I420RENDER2_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include 12 | 13 | #include "ffmpegdecoder.h" 14 | 15 | class I420Render : public QOpenGLFunctions 16 | { 17 | public: 18 | I420Render(); 19 | ~I420Render(); 20 | 21 | void init(); 22 | void updateTextureInfo(int w, int h); 23 | void updateTextureData(const YUVData &data); 24 | void paint(); 25 | void resize(int w, int h); 26 | 27 | private: 28 | //shader程序 29 | QOpenGLShaderProgram m_program; 30 | QOpenGLTexture *mTexY = nullptr,*mTexU=nullptr,*mTexV=nullptr; 31 | 32 | bool mTextureAlloced = false; 33 | 34 | QVector vertices; 35 | QVector textures; 36 | }; 37 | 38 | #endif // I420RENDER2_H 39 | -------------------------------------------------------------------------------- /10.20.video_decode_add_filter_display_by_qwidget/mainwindow.cpp: -------------------------------------------------------------------------------- 1 | #include "mainwindow.h" 2 | #include "ui_mainwindow.h" 3 | 4 | MainWindow::MainWindow(QWidget *parent) 5 | : QMainWindow(parent) 6 | , ui(new Ui::MainWindow) 7 | { 8 | ui->setupUi(this); 9 | } 10 | 11 | MainWindow::~MainWindow() 12 | { 13 | delete ui; 14 | } 15 | 16 | 17 | void MainWindow::on_btnPlay_clicked() 18 | { 19 | QString url = ui->lineUrl->text(); 20 | if(url.isEmpty()){ 21 | QMessageBox::information(this,tr("Warning"),"Please input url first.",QMessageBox::Ok); 22 | return; 23 | } 24 | ui->wgtPlayer->play(url); 25 | } 26 | 27 | void MainWindow::on_btnStop_clicked() 28 | { 29 | ui->wgtPlayer->stop(); 30 | } 31 | 32 | void MainWindow::on_spinBoxContrast_valueChanged(int c) 33 | { 34 | contrast=c; 35 | ui->wgtPlayer->setFilterDescr(c,brightness); 36 | } 37 | 38 | void MainWindow::on_spinBoxLightness_valueChanged(int b) 39 | { 40 | brightness=b; 41 | ui->wgtPlayer->setFilterDescr(contrast,b); 42 | } 43 | -------------------------------------------------------------------------------- /10.03.get_stream_info/main.c: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include "libavcodec/avcodec.h" 4 | #include "libavfilter/avfilter.h" 5 | #include "libavformat/avformat.h" 6 | #include "libavutil/avutil.h" 7 | #include "libavutil/ffversion.h" 8 | #include "libswresample/swresample.h" 9 | #include 
"libswscale/swscale.h" 10 | #include "libpostproc/postprocess.h" 11 | 12 | int main() 13 | { 14 | AVFormatContext *fmt_ctx = avformat_alloc_context();//创建对象并初始化 15 | int ret=0; 16 | char* fileName="C:\\Users\\hyper\\Videos\\Sample.wmv";//文件地址 17 | 18 | do{ 19 | //打开文件 20 | if ((ret = avformat_open_input(&fmt_ctx, fileName, NULL, NULL))<0) 21 | break;//Cannot open video file 22 | 23 | //查找流信息(音频流和视频流) 24 | if ((ret = avformat_find_stream_info(fmt_ctx, NULL)) < 0) { 25 | printf("Cannot find stream information\n"); 26 | break; 27 | } 28 | 29 | av_dump_format(fmt_ctx,0,fileName,0);//输出视频信息 30 | }while(0); 31 | 32 | avformat_close_input(&fmt_ctx);//关闭文件 33 | 34 | return ret; 35 | } 36 | -------------------------------------------------------------------------------- /10.08.1.video_decode_by_cpu_display_by_qopengl/i420render.h: -------------------------------------------------------------------------------- 1 | #ifndef I420RENDER_H 2 | #define I420RENDER_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include "ffmpegdecoder.h" 12 | 13 | class I420Render : public QOpenGLWidget,public QOpenGLFunctions 14 | { 15 | Q_OBJECT 16 | public: 17 | I420Render(QWidget *parent =nullptr); 18 | ~I420Render(); 19 | 20 | void setUrl(QString url); 21 | 22 | void startVideo(); 23 | 24 | void initializeGL(); 25 | void resizeGL(int w,int h); 26 | void paintGL(); 27 | 28 | private: 29 | //shader程序 30 | QOpenGLShaderProgram m_program; 31 | //shader中yuv变量地址 32 | GLuint m_textureUniformY, m_textureUniformU , m_textureUniformV; 33 | //创建纹理 34 | GLuint m_idy , m_idu , m_idv; 35 | 36 | int width,height; 37 | 38 | FFmpegDecoder *decoder; 39 | 40 | uchar* ptr; 41 | }; 42 | 43 | #endif // I420RENDER_H 44 | -------------------------------------------------------------------------------- /10.08.2.video_decode_by_cpu_display_by_qopengl/i420render.h: -------------------------------------------------------------------------------- 1 | #ifndef I420RENDER_H 2 | #define I420RENDER_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include "ffmpegdecoder.h" 12 | 13 | class I420Render : public QOpenGLWidget,public QOpenGLFunctions 14 | { 15 | Q_OBJECT 16 | public: 17 | I420Render(QWidget *parent =nullptr); 18 | ~I420Render(); 19 | 20 | void setUrl(QString url); 21 | 22 | void startVideo(); 23 | 24 | void initializeGL(); 25 | void resizeGL(int w,int h); 26 | void paintGL(); 27 | 28 | private: 29 | //shader程序 30 | QOpenGLShaderProgram m_program; 31 | //shader中yuv变量地址 32 | GLuint m_textureUniformY, m_textureUniformU , m_textureUniformV; 33 | //创建纹理 34 | GLuint m_idy , m_idu , m_idv; 35 | 36 | int width,height; 37 | 38 | FFmpegDecoder *decoder; 39 | 40 | uchar* ptr; 41 | }; 42 | 43 | #endif // I420RENDER_H 44 | -------------------------------------------------------------------------------- /10.10.3.video_decode_by_cuda_display_by_qml/10.10.3.video_decode_by_cuda_display_by_qml.pro: -------------------------------------------------------------------------------- 1 | QT += quick qml quickwidgets multimedia gui opengl 2 | 3 | CONFIG += c++17 4 | 5 | # You can make your code fail to compile if it uses deprecated APIs. 6 | # In order to do so, uncomment the following line. 
7 | #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0 8 | 9 | include(../ffmpeg.pri) 10 | 11 | DESTDIR = ../bin 12 | TARGET = 11_3_decode_by_cuda_qml 13 | OBJECTS_DIR = obj 14 | MOC_DIR = moc 15 | RCC_DIR = rcc 16 | UI_DIR = ui 17 | 18 | HEADERS += \ 19 | ffmpegdecoder.h \ 20 | nv12render.h \ 21 | videoitem.h 22 | 23 | 24 | SOURCES += \ 25 | ffmpegdecoder.cpp \ 26 | main.cpp \ 27 | nv12render.cpp \ 28 | videoitem.cpp 29 | 30 | RESOURCES += qml.qrc 31 | 32 | # Additional import path used to resolve QML modules in Qt Creator's code model 33 | QML_IMPORT_PATH = 34 | 35 | # Additional import path used to resolve QML modules just for Qt Quick Designer 36 | QML_DESIGNER_IMPORT_PATH = 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 JackeyLea 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /RTSParser/mainwindow.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | MainWindow 4 | 5 | 6 | 7 | 0 8 | 0 9 | 800 10 | 600 11 | 12 | 13 | 14 | MainWindow 15 | 16 | 17 | 18 | 19 | 20 | 100 21 | 130 22 | 84 23 | 27 24 | 25 | 26 | 27 | run 28 | 29 | 30 | 31 | 32 | 33 | 34 | 0 35 | 0 36 | 800 37 | 24 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | -------------------------------------------------------------------------------- /10.19.audio_video_sync/10.19.audio_video_sync.pro: -------------------------------------------------------------------------------- 1 | QT += core gui multimedia testlib 2 | 3 | greaterThan(QT_MAJOR_VERSION, 4): QT += widgets 4 | 5 | CONFIG += c++17 6 | 7 | # The following define makes your compiler emit warnings if you use 8 | # any Qt feature that has been marked deprecated (the exact warnings 9 | # depend on your compiler). Please consult the documentation of the 10 | # deprecated API in order to know how to port your code away from it. 11 | DEFINES += QT_DEPRECATED_WARNINGS 12 | 13 | # You can also make your code fail to compile if it uses deprecated APIs. 14 | # In order to do so, uncomment the following line. 15 | # You can also select to disable deprecated APIs only up to a certain version of Qt. 
16 | #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0 17 | 18 | include(../ffmpeg.pri) 19 | 20 | DESTDIR = ../bin 21 | TARGET = 20_sync 22 | OBJECTS_DIR = obj 23 | MOC_DIR = moc 24 | RCC_DIR = rcc 25 | UI_DIR = ui 26 | 27 | SOURCES += \ 28 | ffmpegwidget.cpp \ 29 | main.cpp \ 30 | mainwindow.cpp 31 | 32 | HEADERS += \ 33 | ffmpegwidget.h \ 34 | mainwindow.h 35 | 36 | FORMS += \ 37 | mainwindow.ui 38 | -------------------------------------------------------------------------------- /10.09.video_decode_by_cpu_display_by_qml/videoitem.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include "ffmpegdecoder.h" 7 | 8 | class VideoItem : public QQuickFramebufferObject 9 | { 10 | Q_OBJECT 11 | public: 12 | VideoItem(QQuickItem *parent = nullptr); 13 | void timerEvent(QTimerEvent *) override; 14 | 15 | YUVData getFrame(); 16 | bool infoDirty() const 17 | { 18 | return m_infoChanged; 19 | } 20 | void makeInfoDirty(bool dirty) 21 | { 22 | m_infoChanged = dirty; 23 | } 24 | int videoWidth() const 25 | { 26 | return m_videoWidth; 27 | } 28 | int videoHeght() const 29 | { 30 | return m_videoHeight; 31 | } 32 | public slots: 33 | void setUrl(const QString &url); 34 | void start(); 35 | void stop(); 36 | 37 | protected slots: 38 | void onVideoInfoReady(int width, int height); 39 | public: 40 | Renderer *createRenderer() const override; 41 | 42 | FFmpegDecoder *m_decoder = nullptr; 43 | 44 | int m_videoWidth; 45 | int m_videoHeight; 46 | bool m_infoChanged = false; 47 | }; 48 | 49 | -------------------------------------------------------------------------------- /10.07.video_decode_by_cpu_display_by_qwidget/10.07.video_decode_by_cpu_display_by_qwidget.pro: -------------------------------------------------------------------------------- 1 | QT += core gui 2 | 3 | greaterThan(QT_MAJOR_VERSION, 4): QT += widgets 4 | 5 | CONFIG += c++17 6 | 7 | include(../ffmpeg.pri) 8 | 9 | DESTDIR = ../bin 10 | TEMPLATE = app 11 | TARGET = 8_decode_qwidget 12 | OBJECTS_DIR = obj 13 | MOC_DIR = moc 14 | RCC_DIR = rcc 15 | UI_DIR = ui 16 | 17 | # The following define makes your compiler emit warnings if you use 18 | # any Qt feature that has been marked deprecated (the exact warnings 19 | # depend on your compiler). Please consult the documentation of the 20 | # deprecated API in order to know how to port your code away from it. 21 | DEFINES += QT_DEPRECATED_WARNINGS 22 | 23 | # You can also make your code fail to compile if it uses deprecated APIs. 24 | # In order to do so, uncomment the following line. 25 | # You can also select to disable deprecated APIs only up to a certain version of Qt. 
26 | #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0 27 | 28 | SOURCES += \ 29 | ffmpegwidget.cpp \ 30 | main.cpp \ 31 | mainwindow.cpp 32 | 33 | HEADERS += \ 34 | ffmpegwidget.h \ 35 | mainwindow.h 36 | 37 | FORMS += \ 38 | mainwindow.ui 39 | -------------------------------------------------------------------------------- /ffmpeg.pro: -------------------------------------------------------------------------------- 1 | TEMPLATE = subdirs 2 | 3 | SUBDIRS += 10.02.get_lib_version \ 4 | 10.03.get_stream_info \ 5 | 10.04.video_decode_flow \ 6 | 10.05.video_decode_frame_save \ 7 | 10.06.1.video_decode_mp42yuv420p \ 8 | 10.06.2.video_decode_mp42yuv420sp \ 9 | 10.07.video_decode_by_cpu_display_by_qwidget \ 10 | 10.08.1.video_decode_by_cpu_display_by_qopengl \ 11 | 10.08.2.video_decode_by_cpu_display_by_qopengl \ 12 | 10.09.video_decode_by_cpu_display_by_qml \ 13 | 10.10.1.video_decode_by_cuda_display_by_qwidget \ 14 | 10.10.2.video_decode_by_cuda_display_by_qopengl \ 15 | 10.10.3.video_decode_by_cuda_display_by_qml \ 16 | 10.11.video_encode_yuv2h264 \ 17 | 10.12.video_encode_h2642mp4 \ 18 | 10.13.video_encode_camera2h264 \ 19 | 10.14.audio_decode_mp32pcm \ 20 | 10.15.audio_decode_swr_mp32pcm \ 21 | 10.16.audio_player_decode_by_ffmpeg_play_by_qt \ 22 | 10.17.audio_player_decode_from_mem_play_by_qt \ 23 | 10.18.audio_encode_pcm2mp3 \ 24 | 10.19.audio_video_sync \ 25 | 10.20.video_decode_add_filter_display_by_qwidget \ 26 | 10.21.video_demuxer_mp42h264mp3 \ 27 | 10.22.video_demuxer_mp42yuvpcm \ 28 | 10.23.video_muxer_mp3h2642mp4 29 | -------------------------------------------------------------------------------- /10.10.3.video_decode_by_cuda_display_by_qml/videoitem.h: -------------------------------------------------------------------------------- 1 | #ifndef VIDEOITEM_H 2 | #define VIDEOITEM_H 3 | 4 | #include 5 | #include 6 | #include 7 | 8 | #include "ffmpegdecoder.h" 9 | 10 | class VideoItem : public QQuickFramebufferObject 11 | { 12 | Q_OBJECT 13 | public: 14 | /// 15 | /// \brief VideoItem qml界面对象构造函数 16 | /// \param parent qml界面父指针 17 | /// 18 | explicit VideoItem(QQuickItem* parent = nullptr); 19 | 20 | /// 21 | /// \brief ~VideoItem 释放所有资源 22 | ~VideoItem() override; 23 | 24 | /// 25 | /// \brief createRenderer 创建一个渲染器对象 26 | /// \return 创建的对象指针 27 | /// 28 | Renderer *createRenderer() const override; 29 | 30 | /// 31 | /// \brief getFrame 从缓冲区里面获取一帧数据 32 | /// \param ptr 数据指针 33 | /// \param w 图像宽度 34 | /// \param h 图像高度 35 | /// 36 | void getFrame(uchar **ptr, int *w, int *h); 37 | 38 | /// 39 | /// \brief setUrl 设置解码视频流地址,可在qml中调用 40 | /// \param url 新的视频流地址 41 | /// 42 | Q_INVOKABLE void setUrl(QString url); 43 | 44 | /// 45 | /// \brief start 启动视频解码线程,开始显示视频 46 | /// 47 | Q_INVOKABLE void start(); 48 | /// 49 | /// \brief start 停止视频解码线程,不显示视频 50 | /// 51 | Q_INVOKABLE void stop(); 52 | 53 | private: 54 | FFmpegDecoder *m_decoder;//解码对象 55 | 56 | QTimer *m_timer=nullptr;//界面刷新定时器 57 | 58 | QString m_url;//视频流地址变量 59 | }; 60 | 61 | #endif // VIDEORENDER_H 62 | -------------------------------------------------------------------------------- /10.09.video_decode_by_cpu_display_by_qml/10.09.video_decode_by_cpu_display_by_qml.pro: -------------------------------------------------------------------------------- 1 | QT += quick multimedia opengl 2 | CONFIG += c++17 3 | 4 | # The following define makes your compiler emit warnings if you use 5 | # any feature of Qt which as been marked deprecated (the exact warnings 6 | # depend on your 
compiler). Please consult the documentation of the 7 | # deprecated API in order to know how to port your code away from it. 8 | DEFINES += QT_DEPRECATED_WARNINGS 9 | 10 | # You can also make your code fail to compile if you use deprecated APIs. 11 | # In order to do so, uncomment the following line. 12 | # You can also select to disable deprecated APIs only up to a certain version of Qt. 13 | #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0 14 | 15 | include(../ffmpeg.pri) 16 | 17 | DESTDIR = ../bin 18 | TARGET = 10_decode_qml 19 | OBJECTS_DIR = obj 20 | MOC_DIR = moc 21 | RCC_DIR = rcc 22 | UI_DIR = uis 23 | 24 | HEADERS += \ 25 | ffmpegdecoder.h \ 26 | videoitem.h \ 27 | i420render.h 28 | 29 | SOURCES += main.cpp \ 30 | ffmpegdecoder.cpp \ 31 | videoitem.cpp \ 32 | i420render.cpp 33 | 34 | RESOURCES += qml.qrc 35 | 36 | # Additional import path used to resolve QML modules in Qt Creator's code model 37 | QML_IMPORT_PATH = 38 | 39 | # Additional import path used to resolve QML modules just for Qt Quick Designer 40 | QML_DESIGNER_IMPORT_PATH = 41 | 42 | -------------------------------------------------------------------------------- /10.08.1.video_decode_by_cpu_display_by_qopengl/10.08.1.video_decode_by_cpu_display_by_qopengl.pro: -------------------------------------------------------------------------------- 1 | #------------------------------------------------- 2 | # 3 | # Project created by QtCreator 2021-04-02T17:31:10 4 | # 5 | #------------------------------------------------- 6 | 7 | QT += core gui 8 | 9 | greaterThan(QT_MAJOR_VERSION, 4): QT += widgets 10 | 11 | TEMPLATE = app 12 | 13 | # The following define makes your compiler emit warnings if you use 14 | # any feature of Qt which has been marked as deprecated (the exact warnings 15 | # depend on your compiler). Please consult the documentation of the 16 | # deprecated API in order to know how to port your code away from it. 17 | DEFINES += QT_DEPRECATED_WARNINGS 18 | 19 | # You can also make your code fail to compile if you use deprecated APIs. 20 | # In order to do so, uncomment the following line. 21 | # You can also select to disable deprecated APIs only up to a certain version of Qt. 22 | #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0 23 | 24 | include(../ffmpeg.pri) 25 | 26 | DESTDIR = ../bin 27 | TARGET = 9_1_decode_qopengl 28 | OBJECTS_DIR = obj 29 | MOC_DIR = moc 30 | RCC_DIR = rcc 31 | UI_DIR = ui 32 | 33 | SOURCES += \ 34 | main.cpp \ 35 | mainwindow.cpp \ 36 | ffmpegdecoder.cpp \ 37 | i420render.cpp 38 | 39 | HEADERS += \ 40 | mainwindow.h \ 41 | ffmpegdecoder.h \ 42 | i420render.h 43 | 44 | FORMS += \ 45 | mainwindow.ui 46 | -------------------------------------------------------------------------------- /10.08.2.video_decode_by_cpu_display_by_qopengl/10.08.2.video_decode_by_cpu_display_by_qopengl.pro: -------------------------------------------------------------------------------- 1 | #------------------------------------------------- 2 | # 3 | # Project created by QtCreator 2021-04-02T17:31:10 4 | # 5 | #------------------------------------------------- 6 | 7 | QT += core gui 8 | 9 | greaterThan(QT_MAJOR_VERSION, 4): QT += widgets 10 | 11 | TEMPLATE = app 12 | 13 | CONFIG += c++17 14 | 15 | # The following define makes your compiler emit warnings if you use 16 | # any feature of Qt which has been marked as deprecated (the exact warnings 17 | # depend on your compiler). 
Please consult the documentation of the 18 | # deprecated API in order to know how to port your code away from it. 19 | DEFINES += QT_DEPRECATED_WARNINGS 20 | 21 | # You can also make your code fail to compile if you use deprecated APIs. 22 | # In order to do so, uncomment the following line. 23 | # You can also select to disable deprecated APIs only up to a certain version of Qt. 24 | #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0 25 | 26 | include(../ffmpeg.pri) 27 | 28 | DESTDIR = ../bin 29 | TARGET = 9_2_decode_qopengl2 30 | OBJECTS_DIR = obj 31 | MOC_DIR = moc 32 | RCC_DIR = rcc 33 | UI_DIR = ui 34 | 35 | SOURCES += \ 36 | i420render2.cpp \ 37 | main.cpp \ 38 | mainwindow.cpp \ 39 | ffmpegdecoder.cpp 40 | 41 | HEADERS += \ 42 | i420render2.h \ 43 | mainwindow.h \ 44 | ffmpegdecoder.h 45 | 46 | FORMS += \ 47 | mainwindow.ui 48 | -------------------------------------------------------------------------------- /10.10.3.video_decode_by_cuda_display_by_qml/ffmpegdecoder.h: -------------------------------------------------------------------------------- 1 | #ifndef FFMPEGDECODER_H 2 | #define FFMPEGDECODER_H 3 | 4 | #include 5 | #include 6 | 7 | extern "C"{ 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | } 24 | 25 | class FFmpegDecoder : public QThread 26 | { 27 | Q_OBJECT 28 | public: 29 | FFmpegDecoder(); 30 | ~FFmpegDecoder(); 31 | 32 | void setUrl(QString url); 33 | 34 | static enum AVPixelFormat get_hw_format(AVCodecContext *ctx, 35 | const enum AVPixelFormat *pix_fmts); 36 | static int hw_decoder_init(AVCodecContext *ctx, const enum AVHWDeviceType type); 37 | 38 | int width(){return videoWidth;} 39 | int height(){return videoHeight;} 40 | 41 | uchar* getFrame(){ 42 | return out_buffer; 43 | } 44 | 45 | protected: 46 | void run(); 47 | 48 | signals: 49 | void sigNewFrame(); 50 | 51 | private: 52 | QString _filePath; 53 | uchar* out_buffer; 54 | 55 | int ret=0; 56 | 57 | int videoWidth=0,videoHeight=0; 58 | }; 59 | 60 | #endif // FFMPEGDECODER_H 61 | -------------------------------------------------------------------------------- /10.08.1.video_decode_by_cpu_display_by_qopengl/ffmpegdecoder.h: -------------------------------------------------------------------------------- 1 | #ifndef FFMPEGDECODER_H 2 | #define FFMPEGDECODER_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | 10 | extern "C"{ 11 | #include 12 | 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | 22 | #include 23 | 24 | #include 25 | } 26 | 27 | 28 | class FFmpegDecoder : public QThread 29 | { 30 | Q_OBJECT 31 | public: 32 | FFmpegDecoder(); 33 | ~FFmpegDecoder(); 34 | 35 | void setUrl(QString const url); 36 | 37 | int width(); 38 | int height(); 39 | 40 | /// 41 | /// \brief getFrame 从解码结果缓存队列中取第一帧显示 42 | /// \return 第一帧数据指针 43 | /// 44 | uchar* getFrame(){ 45 | return out_buffer; 46 | } 47 | 48 | protected: 49 | void run(); 50 | 51 | signals: 52 | void sigFirst(uchar* p,int w,int h); 53 | void newFrame(); 54 | 55 | private: 56 | AVFormatContext *fmtCtx =NULL; 57 | const AVCodec *videoCodec =NULL; 58 | AVCodecContext *videoCodecCtx=NULL; 59 | AVPacket *pkt = NULL; 60 | AVFrame *yuvFrame = NULL; 61 | AVFrame *rgbFrame = NULL; 62 | 63 | struct SwsContext *img_ctx=NULL; 64 | 65 | uchar *out_buffer= nullptr; 66 | 67 | int 
videoStreamIndex =-1; 68 | int numBytes = -1; 69 | 70 | QString _url; 71 | 72 | bool isFirst = true; 73 | 74 | int w,h; 75 | }; 76 | 77 | #endif // FFMPEGDECODER_H 78 | -------------------------------------------------------------------------------- /10.08.2.video_decode_by_cpu_display_by_qopengl/ffmpegdecoder.h: -------------------------------------------------------------------------------- 1 | #ifndef FFMPEGDECODER_H 2 | #define FFMPEGDECODER_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | 10 | extern "C"{ 11 | #include 12 | 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | 22 | #include 23 | 24 | #include 25 | } 26 | 27 | 28 | class FFmpegDecoder : public QThread 29 | { 30 | Q_OBJECT 31 | public: 32 | FFmpegDecoder(); 33 | ~FFmpegDecoder(); 34 | 35 | void setUrl(QString const url); 36 | 37 | int width(); 38 | int height(); 39 | 40 | /// 41 | /// \brief getFrame 从解码结果缓存队列中取第一帧显示 42 | /// \return 第一帧数据指针 43 | /// 44 | uchar* getFrame(){ 45 | return out_buffer; 46 | } 47 | 48 | protected: 49 | void run(); 50 | 51 | signals: 52 | void sigFirst(uchar* p,int w,int h); 53 | void newFrame(); 54 | 55 | private: 56 | AVFormatContext *fmtCtx =NULL; 57 | const AVCodec *videoCodec =NULL; 58 | AVCodecContext *videoCodecCtx=NULL; 59 | AVPacket *pkt = NULL; 60 | AVFrame *yuvFrame = NULL; 61 | AVFrame *rgbFrame = NULL; 62 | 63 | struct SwsContext *img_ctx=NULL; 64 | 65 | uchar *out_buffer= nullptr; 66 | 67 | int videoStreamIndex =-1; 68 | int numBytes = -1; 69 | 70 | QString _url; 71 | 72 | bool isFirst = true; 73 | 74 | int w,h; 75 | }; 76 | 77 | #endif // FFMPEGDECODER_H 78 | -------------------------------------------------------------------------------- /10.08.1.video_decode_by_cpu_display_by_qopengl/mainwindow.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | MainWindow 4 | 5 | 6 | 7 | 0 8 | 0 9 | 696 10 | 574 11 | 12 | 13 | 14 | MainWindow 15 | 16 | 17 | 18 | 19 | 20 | 21 | Url 22 | 23 | 24 | 25 | 26 | 27 | 28 | C:\\Users\\hyper\\Videos\\Sample.wmv 29 | 30 | 31 | 32 | 33 | 34 | 35 | Play 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 0 48 | 0 49 | 696 50 | 22 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | I420Render 60 | QOpenGLWidget 61 |
i420render.h
62 |
63 |
64 | 65 | 66 |
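<!-- Note on the customwidget entry above: this is what Qt Designer's "Promote to..." feature
     generates. It tells uic to include i420render.h and instantiate the I420Render class in
     place of the plain QOpenGLWidget placeholder, which is how this form embeds the custom
     OpenGL video surface. -->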
67 | -------------------------------------------------------------------------------- /10.08.2.video_decode_by_cpu_display_by_qopengl/mainwindow.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | MainWindow 4 | 5 | 6 | 7 | 0 8 | 0 9 | 696 10 | 574 11 | 12 | 13 | 14 | MainWindow 15 | 16 | 17 | 18 | 19 | 20 | 21 | Url 22 | 23 | 24 | 25 | 26 | 27 | 28 | C:\\Users\\hyper\\Videos\\Sample.wmv 29 | 30 | 31 | 32 | 33 | 34 | 35 | Play 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 0 48 | 0 49 | 696 50 | 22 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | I420Render2 60 | QOpenGLWidget 61 |
i420render2.h
62 |
63 |
64 | 65 | 66 |
67 | -------------------------------------------------------------------------------- /10.07.video_decode_by_cpu_display_by_qwidget/ffmpegwidget.h: -------------------------------------------------------------------------------- 1 | #ifndef FFMPEGWIDGET_H 2 | #define FFMPEGWIDGET_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include 12 | 13 | extern "C"{ 14 | #include 15 | 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | 25 | #include 26 | 27 | #include 28 | } 29 | 30 | using namespace std; 31 | 32 | class FFmpegVideo : public QThread 33 | { 34 | Q_OBJECT 35 | public: 36 | explicit FFmpegVideo(); 37 | ~FFmpegVideo(); 38 | 39 | void setUrl(QString url); 40 | 41 | bool open_input_file(); 42 | 43 | protected: 44 | void run(); 45 | 46 | signals: 47 | void sendQImage(QImage); 48 | 49 | private: 50 | AVFormatContext *fmtCtx =NULL; 51 | const AVCodec *videoCodec =NULL; 52 | AVCodecContext *videoCodecCtx=NULL; 53 | AVPacket *pkt = NULL; 54 | AVFrame *yuvFrame = NULL; 55 | AVFrame *rgbFrame = NULL; 56 | 57 | struct SwsContext *img_ctx=NULL; 58 | 59 | unsigned char *out_buffer=nullptr; 60 | 61 | int videoStreamIndex =-1; 62 | int numBytes = -1; 63 | 64 | QString _url; 65 | }; 66 | 67 | class FFmpegWidget : public QWidget 68 | { 69 | Q_OBJECT 70 | public: 71 | explicit FFmpegWidget(QWidget *parent = nullptr); 72 | ~FFmpegWidget(); 73 | 74 | void setUrl(QString url); 75 | 76 | void play(); 77 | void stop(); 78 | 79 | protected: 80 | void paintEvent(QPaintEvent *); 81 | 82 | private slots: 83 | void receiveQImage(const QImage &rImg); 84 | 85 | private: 86 | FFmpegVideo *ffmpeg; 87 | 88 | QImage img; 89 | }; 90 | 91 | #endif // FFMPEGWIDGET_H 92 | -------------------------------------------------------------------------------- /10.10.1.video_decode_by_cuda_display_by_qwidget/mainwindow.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | MainWindow 4 | 5 | 6 | 7 | 0 8 | 0 9 | 800 10 | 600 11 | 12 | 13 | 14 | MainWindow 15 | 16 | 17 | 18 | 19 | 0 20 | 21 | 22 | 0 23 | 24 | 25 | 0 26 | 27 | 28 | 0 29 | 30 | 31 | 0 32 | 33 | 34 | 35 | 36 | C:\\Users\\hyper\\Videos\\Sample.wmv 37 | 38 | 39 | 40 | 41 | 42 | 43 | Play 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 0 56 | 0 57 | 800 58 | 22 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | FFmpegWidget 67 | QWidget 68 |
ffmpegvideo.h
69 | 1 70 |
71 |
72 | 73 | 74 |
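The form above promotes its video area to FFmpegWidget from ffmpegvideo.h, where the CUDA decoder declares the two static hooks get_hw_format() and hw_decoder_init() (the header appears later in this listing). Their definitions are not shown in this excerpt; the sketch below follows FFmpeg's standard hardware-decoding pattern (as in the official hw_decode example) and is illustrative only — names such as hw_pix_fmt are assumptions, not code copied from the repository.

// Illustrative sketch only (assumed implementation, based on FFmpeg's hw_decode pattern):
static enum AVPixelFormat hw_pix_fmt = AV_PIX_FMT_CUDA;   // assumed: CUDA surfaces

enum AVPixelFormat FFmpegVideo::get_hw_format(AVCodecContext *ctx,
                                              const enum AVPixelFormat *pix_fmts)
{
    (void)ctx;
    // pick the hardware pixel format from the list the decoder offers
    for (const enum AVPixelFormat *p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
        if (*p == hw_pix_fmt)
            return *p;
    }
    fprintf(stderr, "Failed to get HW surface format.\n");
    return AV_PIX_FMT_NONE;
}

int FFmpegVideo::hw_decoder_init(AVCodecContext *ctx, const enum AVHWDeviceType type)
{
    // create a device context (e.g. AV_HWDEVICE_TYPE_CUDA) and hand a reference to the codec
    AVBufferRef *hw_device_ctx = NULL;
    int err = av_hwdevice_ctx_create(&hw_device_ctx, type, NULL, NULL, 0);
    if (err < 0) {
        fprintf(stderr, "Failed to create HW device context.\n");
        return err;
    }
    ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);
    av_buffer_unref(&hw_device_ctx);   // the codec context keeps its own reference
    return 0;
}

The decoder would then presumably set videoCodecCtx->get_format = get_hw_format before calling avcodec_open2(), so FFmpeg negotiates CUDA surfaces instead of software frames.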
75 | -------------------------------------------------------------------------------- /10.10.2.video_decode_by_cuda_display_by_qopengl/mainwindow.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | MainWindow 4 | 5 | 6 | 7 | 0 8 | 0 9 | 800 10 | 600 11 | 12 | 13 | 14 | MainWindow 15 | 16 | 17 | 18 | 19 | 0 20 | 21 | 22 | 0 23 | 24 | 25 | 0 26 | 27 | 28 | 0 29 | 30 | 31 | 0 32 | 33 | 34 | 35 | 36 | C:\\Users\\hyper\\Videos\\Sample.wmv 37 | 38 | 39 | 40 | 41 | 42 | 43 | Play 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 0 56 | 0 57 | 800 58 | 22 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | FFmpegWidget 67 | QWidget 68 |
ffmpegvideo.h
69 | 1 70 |
71 |
72 | 73 | 74 |
75 | -------------------------------------------------------------------------------- /ffmpeg.pri: -------------------------------------------------------------------------------- 1 | 2 | # 包含FFmpeg库文件位置 3 | 4 | DEFINES += FFMPEG4 5 | 6 | # windows平台 VS2019 7 | win32{ 8 | DEFINES +=_CRT_SECURE_NO_WARNINGS #非安全函数警告 9 | QMAKE_CXXFLAGS += /WX#警告作为错误 10 | if(contains(DEFINES,FFMPEG6)){ 11 | #使用6.0版 12 | INCLUDEPATH+= . $$PWD/lib/6.x/include/ 13 | LIBS += -L$$PWD/lib/6.x/lib \ 14 | -lavcodec -lavdevice -lavfilter -lavformat -lavutil -lpostproc \ 15 | -lswresample -lswscale 16 | } 17 | if(contains(DEFINES,FFMPEG5)){ 18 | #使用5.1.3版 19 | INCLUDEPATH+= . $$PWD/lib/5.x/include/ 20 | LIBS += -L$$PWD/lib/5.x/lib \ 21 | -lavcodec -lavdevice -lavfilter -lavformat -lavutil -lpostproc \ 22 | -lswresample -lswscale 23 | } 24 | if(contains(DEFINES,FFMPEG4)){ 25 | #使用4.4.4版 26 | INCLUDEPATH+= . $$PWD/lib/4.x/include/ 27 | LIBS += -L$$PWD/lib/4.x/lib/ \ 28 | -lavcodec -lavdevice -lavfilter -lavformat -lavutil -lpostproc \ 29 | -lswresample -lswscale 30 | } 31 | 32 | if(contains(DEFINES,SYSTEM)){ 33 | #使用系统库 34 | message("no system lib on windows,please use other conf instead.") 35 | } 36 | } 37 | 38 | # linux平台 GCC 39 | unix{ 40 | QMAKE_CXXFLAGS += -Werror#警告作为错误 41 | if(contains(DEFINES,FFMPEG6)){ 42 | #使用6.0版 43 | INCLUDEPATH+= . $$PWD/lib/6.x/include/ /usr/include/mfx/ 44 | LIBS += -L$$PWD/lib/6.x/lib \ 45 | -lavcodec -lavdevice -lavfilter -lavformat -lavutil -lpostproc \ 46 | -lswresample -lswscale 47 | } 48 | if(contains(DEFINES,FFMPEG5)){ 49 | #使用5.1.3版 50 | INCLUDEPATH+= . $$PWD/lib/5.x/include/ 51 | LIBS += -L$$PWD/lib/5.x/lib \ 52 | -lavcodec -lavdevice -lavfilter -lavformat -lavutil -lpostproc \ 53 | -lswresample -lswscale 54 | } 55 | if(contains(DEFINES,FFMPEG4)){ 56 | #使用4.4.4版 57 | INCLUDEPATH+= . $$PWD/lib/4.x/include/ 58 | LIBS += -L$$PWD/lib/4.x/lib/ \ 59 | -lavcodec -lavdevice -lavfilter -lavformat -lavutil -lpostproc \ 60 | -lswresample -lswscale 61 | } 62 | 63 | if(contains(DEFINES,SYSTEM)){ 64 | #使用系统库 65 | INCLUDEPATH+= . /usr/include/x86_64-linux-gnu/ 66 | LIBS += -L/usr/lib/x86_64-linux-gnu/ \ 67 | -lavcodec -lavdevice -lavfilter -lavformat -lavutil -lpostproc \ 68 | -lswresample -lswscale 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /10.19.audio_video_sync/mainwindow.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | MainWindow 4 | 5 | 6 | 7 | 0 8 | 0 9 | 800 10 | 617 11 | 12 | 13 | 14 | MainWindow 15 | 16 | 17 | 18 | 19 | 1 20 | 21 | 22 | 1 23 | 24 | 25 | 1 26 | 27 | 28 | 1 29 | 30 | 31 | 0 32 | 33 | 34 | 35 | 36 | Play 37 | 38 | 39 | 40 | 41 | 42 | 43 | Url 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | Stop 54 | 55 | 56 | 57 | 58 | 59 | 60 | C:\\Users\\hyper\\Videos\\Sample.mkv 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | FFmpegWidget 71 | QWidget 72 |
ffmpegwidget.h
73 | 1 74 |
75 |
76 | 77 | 78 |
79 | -------------------------------------------------------------------------------- /10.07.video_decode_by_cpu_display_by_qwidget/mainwindow.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | MainWindow 4 | 5 | 6 | 7 | 0 8 | 0 9 | 800 10 | 617 11 | 12 | 13 | 14 | MainWindow 15 | 16 | 17 | 18 | 19 | 1 20 | 21 | 22 | 1 23 | 24 | 25 | 1 26 | 27 | 28 | 1 29 | 30 | 31 | 0 32 | 33 | 34 | 35 | 36 | Play 37 | 38 | 39 | 40 | 41 | 42 | 43 | Url 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | Stop 54 | 55 | 56 | 57 | 58 | 59 | 60 | C:\\Users\\hyper\\Videos\\Sample.wmv 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | FFmpegWidget 71 | QWidget 72 |
ffmpegwidget.h
73 | 1 74 |
75 |
76 | 77 | 78 |
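This form's display area is promoted to FFmpegWidget (declared in ffmpegwidget.h shown earlier), which caches the QImage sent by the FFmpegVideo decoder thread and repaints itself. The project's ffmpegwidget.cpp is not reproduced in this excerpt, so the following is only a minimal sketch of what that receive-and-paint path typically looks like, not necessarily the repository's exact code:

#include <QPainter>

// assumed sketch of the slot/paint pair declared in ffmpegwidget.h
void FFmpegWidget::receiveQImage(const QImage &rImg)
{
    img = rImg;   // cache the frame converted to RGB by the decoder thread
    update();     // schedule paintEvent() on the GUI thread
}

void FFmpegWidget::paintEvent(QPaintEvent *)
{
    QPainter painter(this);
    if (!img.isNull())
        painter.drawImage(rect(), img);   // scale the latest frame to the widget area
}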
79 | -------------------------------------------------------------------------------- /10.09.video_decode_by_cpu_display_by_qml/ffmpegdecoder.h: -------------------------------------------------------------------------------- 1 | #ifndef FFMPEGDECODER_H 2 | #define FFMPEGDECODER_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | extern "C"{ 12 | #include 13 | 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | 23 | #include 24 | 25 | #include 26 | } 27 | 28 | const static int bufferSize = 1280*720; 29 | 30 | struct YUVData{ 31 | YUVData(){ 32 | Y.reserve(bufferSize); 33 | U.reserve(bufferSize); 34 | V.reserve(bufferSize); 35 | } 36 | QByteArray Y; 37 | QByteArray U; 38 | QByteArray V; 39 | int yLineSize; 40 | int uLineSize; 41 | int vLineSize; 42 | int height; 43 | }; 44 | 45 | class FFmpegDecoder : public QThread 46 | { 47 | Q_OBJECT 48 | public: 49 | FFmpegDecoder(); 50 | ~FFmpegDecoder(); 51 | 52 | void setUrl(QString const url); 53 | 54 | int width(); 55 | int height(); 56 | 57 | /// 58 | /// \brief getFrame 从解码结果缓存队列中取第一帧显示 59 | /// \return 第一帧数据指针 60 | /// 61 | YUVData getFrame(){ 62 | if(frameBuffer.isEmpty()){ 63 | return YUVData{}; 64 | } 65 | return frameBuffer.takeFirst(); 66 | } 67 | 68 | protected: 69 | void run(); 70 | 71 | signals: 72 | void sigFirst(uchar* p,int w,int h); 73 | void newFrame(); 74 | void videoInfoReady(int w,int h); 75 | 76 | private: 77 | AVFormatContext *fmtCtx =NULL; 78 | const AVCodec *videoCodec =NULL; 79 | AVCodecContext *videoCodecCtx=NULL; 80 | AVPacket *pkt = NULL; 81 | AVFrame *yuvFrame = NULL; 82 | AVFrame *rgbFrame = NULL; 83 | 84 | int videoStreamIndex =-1; 85 | 86 | QString _url; 87 | 88 | bool isFirst = true; 89 | 90 | int w,h; 91 | 92 | YUVData m_yuvData; 93 | 94 | QContiguousCache frameBuffer; 95 | }; 96 | 97 | #endif // FFMPEGDECODER_H 98 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ffmpeg_beginner 2 | 3 | FFmpeg4入门系列教程代码 4 | 5 | FFmpeg5/6对应代码见其他分支 6 | 7 | ## 编译说明 8 | 9 | - 使用FFmpeg-BuildsLatest Auto-Build (2023-09-04 12:49) 15d0b26 测试 10 | - 尽量保证编译结果没有错误、没有警告、没有deprecated方法调用 11 | - 如果提示系统没有mfx头文件,可以从Intel Media SDK下载,下载后把api/include目录重命名为mfx,然后复制到include目录中 12 | 13 | ## 源码说明 14 | 15 | ### 10.02.get_lib_version 16 | 17 | 获取库版本信息并解析输出可读信息 18 | 19 | ### 10.03.get_stream_info 20 | 21 | 输出视频的基本信息(时长、码率、编码方式等等) 22 | 23 | ### 10.04.video_decode_flow 24 | 25 | 视频解码的基本流程 26 | 27 | ### 10.05.video_decode_frame_save 28 | 29 | 解码视频并保存其中的50帧画面为ppm格式图片 30 | 31 | ### 10.06.video_decode_mp42yuv 32 | 33 | 视频解码的基本流程并输出视频信息,将解码后的视频数据保存为YUV格式文件 34 | 35 | 分别解码为YUV420P/YUV420SP 36 | 37 | ### 10.07.video_decode_by_cpu_display_by_qwidget 38 | 39 | 使用CPU解码视频,然后使用Qt的QWidget显示画面 40 | 41 | ### 10.08.video_decode_by_cpu_display_by_qopengl 42 | 43 | 使用CPU解码视频,然后使用Qt的QOpenGL显示画面 44 | 45 | 两种方法仅供参考 46 | 47 | ### 10.09.video_decode_by_cpu_display_by_qml 48 | 49 | 使用CPU解码视频,然后使用QML显示画面 50 | 51 | ### 10.10.video_decode_by_cuda_display_by_qt 52 | 53 | 使用CUDA解码视频并使用Qt的QWidget/QOpenGL/QML显示视频 54 | 55 | ### 10.11.video_encode_yuv2h264 56 | 57 | 将yuv源视频文件编码为h264格式的文件 58 | 59 | ### 10.12.video_encode_h2642mp4 60 | 61 | 将h264编码为mp4格式文件 62 | 63 | ### 10.13.video_encode_camera2h264 64 | 65 | 将摄像头捕获的视频直接编码为H264格式 66 | 67 | ### 10.14.audio_decode_mp32pcm 68 | 69 | 将mp3文件解码为pcm文件 70 | 71 | ### 10.15.audio_decode_swr_mp32pcm 72 | 73 | 
将mp3音频重采样解码为pcm 74 | 75 | ### 10.16.audio_player_decode_by_ffmpeg_play_by_qt 76 | 77 | 使用FFmpeg解码音频,使用Qt播放音频 78 | 79 | ### 10.17.audio_player_decode_from_mem_play_by_qt 80 | 81 | 解码内存中的mp3数据并使用Qt播放 82 | 83 | ### 10.18.audio_encode_pcm2mp3 84 | 85 | 将pcm格式文件编码为mp3格式 86 | 87 | ### 10.19.audio_video_sync 88 | 89 | Qt简单视频播放器,带音视频同步 90 | 91 | ### 10.20.video_decode_add_filter_display_by_qwidget 92 | 93 | 使用CPU解码视频,并添加滤镜,然后使用QWidget显示画面 94 | 95 | ### 10.21.video_demuxer_mp42h264mp3 96 | 97 | 将mp4分解为h264和mp3 98 | 99 | ### 10.22.video_demuxer_mp42yuvpcm 100 | 101 | 将mp4分解为h264和mp3,并在此基础上将h264解码为yuv,将mp3解码为pcm 102 | 103 | ### 10.23.video_muxer_mp3h2642mp4 104 | 105 | 将h264和mp3合并为mp4 106 | 107 | ### RTSParser 108 | 109 | 收RTSP流,并解析流中的H264数据 110 | 111 | ### 待添加 112 | 113 | 本系列的目的就是将雷霄华同志的教程进行新版本适配,其在CSDN发布的所有文章涉及的代码都会进行适配 -------------------------------------------------------------------------------- /10.20.video_decode_add_filter_display_by_qwidget/ffmpegwidget.h: -------------------------------------------------------------------------------- 1 | #ifndef FFMPEGWIDGET_H 2 | #define FFMPEGWIDGET_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include 12 | 13 | extern "C"{ 14 | #include 15 | 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | 25 | #include 26 | #include 27 | #include 28 | 29 | #include 30 | 31 | #include 32 | } 33 | 34 | using namespace std; 35 | 36 | class FFmpegVideo : public QThread 37 | { 38 | Q_OBJECT 39 | public: 40 | explicit FFmpegVideo(); 41 | ~FFmpegVideo(); 42 | 43 | void init_variables(); 44 | void free_variables(); 45 | 46 | void setUrl(QString url); 47 | bool open_input_file(); 48 | 49 | bool initFilter(); 50 | 51 | void setCL(int c,int l); 52 | 53 | protected: 54 | void run(); 55 | 56 | signals: 57 | void sendQImage(QImage); 58 | 59 | private: 60 | AVFormatContext *fmtCtx =NULL; 61 | const AVCodec *videoCodec =NULL; 62 | AVCodecContext *videoCodecCtx=NULL; 63 | AVPacket *pkt = NULL; 64 | AVFrame *yuvFrame = NULL; 65 | AVFrame *rgbFrame = NULL; 66 | 67 | AVFilterContext *bufSinkCtx; 68 | AVFilterContext *bufSrcCtx; 69 | AVFilterGraph *filterGraph; 70 | 71 | QString filterDescr="eq=contrast=1:brightness=0"; 72 | QString _url; 73 | 74 | struct SwsContext *img_ctx=NULL; 75 | 76 | unsigned char *out_buffer=nullptr; 77 | 78 | int videoWidth=0,videoHeight=0; 79 | 80 | int videoStreamIndex =-1; 81 | int numBytes = -1; 82 | 83 | bool runFlag=true; 84 | int ret=-1; 85 | }; 86 | 87 | class FFmpegWidget : public QWidget 88 | { 89 | Q_OBJECT 90 | public: 91 | explicit FFmpegWidget(QWidget *parent = nullptr); 92 | ~FFmpegWidget(); 93 | 94 | void play(QString url); 95 | void stop(); 96 | 97 | void setFilterDescr(int c,int b); 98 | 99 | protected: 100 | void paintEvent(QPaintEvent *); 101 | 102 | private slots: 103 | void receiveQImage(const QImage &rImg); 104 | 105 | private: 106 | FFmpegVideo *ffmpeg; 107 | 108 | QImage img; 109 | }; 110 | 111 | #endif // FFMPEGWIDGET_H 112 | -------------------------------------------------------------------------------- /10.10.3.video_decode_by_cuda_display_by_qml/main.qml: -------------------------------------------------------------------------------- 1 | import QtQuick 2.11 2 | import QtQuick.Window 2.11 3 | import QtQuick.Dialogs.qml 1.0 4 | import QtQuick.Extras 1.4 5 | import QtMultimedia 5.9 6 | import QtQuick.Controls 2.3 7 | import QtQuick.Layouts 1.3 8 | import VideoItem 1.0 9 | 10 | Window { 11 | objectName: "mainWindow" 12 | 
width: 1280 13 | height: 720 14 | visible: true 15 | title: qsTr("Mpplayer") 16 | 17 | property string url: textField.text //地址变量 18 | 19 | Item{ 20 | id:root 21 | anchors.fill: parent 22 | VideoItem{//视频模块 23 | id: videoitem 24 | anchors.fill: parent 25 | } 26 | 27 | Label { 28 | id: label 29 | x: 24 30 | y: 372 31 | width: 51 32 | height: 31 33 | text: qsTr("Url") 34 | transformOrigin: Item.Center 35 | } 36 | 37 | TextField { 38 | id: textField 39 | x: 81 40 | y: 363 41 | width: 507 42 | height: 40 43 | text: qsTr("C:\\Users\\hyper\\Videos\\Sample.wmv") 44 | } 45 | 46 | Button { 47 | id: button 48 | x: 24 49 | y: 416 50 | text: qsTr("Play1") 51 | 52 | onClicked: { 53 | videoitem.setUrl(url) 54 | console.log(url) 55 | videoitem.start() 56 | } 57 | } 58 | 59 | Button { 60 | id: button1 61 | x: 139 62 | y: 416 63 | text: qsTr("Play2") 64 | onClicked: { 65 | videoitem.setUrl(url) 66 | console.log(url) 67 | videoitem.start() 68 | } 69 | } 70 | 71 | Button { 72 | id: button2 73 | x: 258 74 | y: 416 75 | text: qsTr("Play3") 76 | onClicked: { 77 | videoitem.setUrl(url) 78 | console.log(url) 79 | videoitem.start() 80 | } 81 | } 82 | 83 | Button { 84 | id: button3 85 | x: 390 86 | y: 416 87 | text: qsTr("Play4") 88 | onClicked: { 89 | videoitem.setUrl(url) 90 | console.log(url) 91 | videoitem.start() 92 | } 93 | } 94 | 95 | Button { 96 | id: button4 97 | x: 515 98 | y: 416 99 | text: qsTr("Stop") 100 | onClicked: { 101 | videoitem.stop(); 102 | } 103 | } 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /10.10.1.video_decode_by_cuda_display_by_qwidget/ffmpegvideo.h: -------------------------------------------------------------------------------- 1 | #ifndef FFMPEGVIDEO_H 2 | #define FFMPEGVIDEO_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include 12 | 13 | extern "C"{ 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | #include 28 | #include 29 | } 30 | 31 | using namespace std; 32 | 33 | class FFmpegVideo : public QThread 34 | { 35 | Q_OBJECT 36 | public: 37 | explicit FFmpegVideo(); 38 | ~FFmpegVideo(); 39 | 40 | void setPath(QString url); 41 | 42 | void ffmpeg_init_variables(); 43 | void ffmpeg_free_variables(); 44 | bool open_input_file(); 45 | static enum AVPixelFormat get_hw_format(AVCodecContext *ctx, 46 | const enum AVPixelFormat *pix_fmts); 47 | static int hw_decoder_init(AVCodecContext *ctx, const enum AVHWDeviceType type); 48 | 49 | void stopThread(); 50 | 51 | protected: 52 | void run(); 53 | 54 | signals: 55 | void sendQImage(const QImage &img); 56 | 57 | private: 58 | AVFormatContext *fmtCtx =NULL; 59 | const AVCodec *videoCodec =NULL; 60 | AVCodecContext *videoCodecCtx=NULL; 61 | AVPacket *pkt = NULL; 62 | AVFrame *yuvFrame = NULL; 63 | AVFrame *rgbFrame = NULL; 64 | AVFrame *nv12Frame = NULL; 65 | AVStream *videoStream = NULL; 66 | 67 | uchar *out_buffer; 68 | struct SwsContext *img_ctx=NULL; 69 | 70 | QString _filePath; 71 | 72 | int videoStreamIndex =-1; 73 | int numBytes = -1; 74 | 75 | int ret =0; 76 | 77 | bool initFlag=false,openFlag=false,stopFlag=false; 78 | }; 79 | 80 | 81 | class FFmpegWidget : public QWidget 82 | { 83 | Q_OBJECT 84 | public: 85 | explicit FFmpegWidget(QWidget *parent = nullptr); 86 | ~FFmpegWidget(); 87 | 88 | void play(QString url); 89 | void stop(); 90 | 91 | protected: 92 | void paintEvent(QPaintEvent *); 93 | 94 | 
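    // Note (added comment; the corresponding implementation is not shown in this excerpt):
    // sendQImage() is emitted from the FFmpegVideo decoder thread, so it is presumably
    // connected to receiveQImage() below with Qt's default automatic (queued) cross-thread
    // connection, e.g.:
    //     connect(ffmpeg, &FFmpegVideo::sendQImage, this, &FFmpegWidget::receiveQImage);
    // receiveQImage() can then cache the frame in `img` and call update() so that
    // paintEvent() repaints on the GUI thread.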
private slots: 95 | void receiveQImage(const QImage &rImg); 96 | 97 | private: 98 | FFmpegVideo *ffmpeg; 99 | 100 | QImage img; 101 | }; 102 | 103 | #endif // FFMPEGVIDEO_H 104 | -------------------------------------------------------------------------------- /10.10.2.video_decode_by_cuda_display_by_qopengl/ffmpegvideo.h: -------------------------------------------------------------------------------- 1 | #ifndef FFMPEGVIDEO_H 2 | #define FFMPEGVIDEO_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include 12 | 13 | extern "C"{ 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | #include 28 | #include 29 | } 30 | 31 | using namespace std; 32 | 33 | class FFmpegVideo : public QThread 34 | { 35 | Q_OBJECT 36 | public: 37 | explicit FFmpegVideo(); 38 | ~FFmpegVideo(); 39 | 40 | void setPath(QString url); 41 | 42 | void ffmpeg_init_variables(); 43 | void ffmpeg_free_variables(); 44 | bool open_input_file(); 45 | static enum AVPixelFormat get_hw_format(AVCodecContext *ctx, 46 | const enum AVPixelFormat *pix_fmts); 47 | static int hw_decoder_init(AVCodecContext *ctx, const enum AVHWDeviceType type); 48 | 49 | void stopThread(); 50 | 51 | protected: 52 | void run(); 53 | 54 | signals: 55 | void sendQImage(const QImage &img); 56 | 57 | private: 58 | AVFormatContext *fmtCtx =NULL; 59 | const AVCodec *videoCodec =NULL; 60 | AVCodecContext *videoCodecCtx=NULL; 61 | AVPacket *pkt = NULL; 62 | AVFrame *yuvFrame = NULL; 63 | AVFrame *rgbFrame = NULL; 64 | AVFrame *nv12Frame = NULL; 65 | AVStream *videoStream = NULL; 66 | 67 | uchar *out_buffer; 68 | struct SwsContext *img_ctx=NULL; 69 | 70 | QString _filePath; 71 | 72 | int videoStreamIndex =-1; 73 | int numBytes = -1; 74 | 75 | int ret =0; 76 | 77 | bool initFlag=false,openFlag=false,stopFlag=false; 78 | }; 79 | 80 | 81 | class FFmpegWidget : public QWidget 82 | { 83 | Q_OBJECT 84 | public: 85 | explicit FFmpegWidget(QWidget *parent = nullptr); 86 | ~FFmpegWidget(); 87 | 88 | void play(QString url); 89 | void stop(); 90 | 91 | protected: 92 | void paintEvent(QPaintEvent *); 93 | 94 | private slots: 95 | void receiveQImage(const QImage &rImg); 96 | 97 | private: 98 | FFmpegVideo *ffmpeg; 99 | 100 | QImage img; 101 | }; 102 | 103 | #endif // FFMPEGVIDEO_H 104 | -------------------------------------------------------------------------------- /10.09.video_decode_by_cpu_display_by_qml/videoitem.cpp: -------------------------------------------------------------------------------- 1 | #include "videoitem.h" 2 | #include "i420render.h" 3 | #include 4 | #include 5 | 6 | //************VideoItemRender************// 7 | class VideoFboItem : public QQuickFramebufferObject::Renderer 8 | { 9 | public: 10 | VideoFboItem(){ 11 | m_render.init(); 12 | } 13 | 14 | void render() override{ 15 | m_render.paint(); 16 | m_window->resetOpenGLState(); 17 | } 18 | QOpenGLFramebufferObject *createFramebufferObject(const QSize &size) override{ 19 | QOpenGLFramebufferObjectFormat format; 20 | format.setAttachment(QOpenGLFramebufferObject::CombinedDepthStencil); 21 | format.setSamples(4); 22 | m_render.resize(size.width(), size.height()); 23 | return new QOpenGLFramebufferObject(size, format); 24 | } 25 | void synchronize(QQuickFramebufferObject *item) override{ 26 | VideoItem *pItem = qobject_cast(item); 27 | if (pItem) 28 | { 29 | if (!m_window) 30 | { 31 | m_window = pItem->window(); 32 | } 33 | if 
(pItem->infoDirty()) 34 | { 35 | m_render.updateTextureInfo(pItem->videoWidth(), pItem->videoHeght()); 36 | pItem->makeInfoDirty(false); 37 | } 38 | ba = pItem->getFrame(); 39 | m_render.updateTextureData(ba); 40 | } 41 | } 42 | private: 43 | I420Render m_render; 44 | QQuickWindow *m_window = nullptr; 45 | 46 | YUVData ba; 47 | }; 48 | 49 | //************VideoItem************// 50 | VideoItem::VideoItem(QQuickItem *parent) : QQuickFramebufferObject (parent) 51 | { 52 | m_decoder = new FFmpegDecoder; 53 | connect(m_decoder,&FFmpegDecoder::videoInfoReady,this,&VideoItem::onVideoInfoReady); 54 | 55 | startTimer(24); 56 | } 57 | 58 | void VideoItem::timerEvent(QTimerEvent *) 59 | { 60 | update(); 61 | } 62 | 63 | YUVData VideoItem::getFrame() 64 | { 65 | return m_decoder->getFrame(); 66 | } 67 | 68 | void VideoItem::setUrl(const QString &url) 69 | { 70 | m_decoder->setUrl(url); 71 | } 72 | 73 | void VideoItem::start() 74 | { 75 | m_decoder->start(); 76 | } 77 | 78 | void VideoItem::stop() 79 | { 80 | if(m_decoder->isRunning()){ 81 | m_decoder->quit(); 82 | m_decoder->wait(1000); 83 | } 84 | } 85 | 86 | void VideoItem::onVideoInfoReady(int width, int height) 87 | { 88 | if (m_videoWidth != width) 89 | { 90 | m_videoWidth = width; 91 | makeInfoDirty(true); 92 | } 93 | if (m_videoHeight != height) 94 | { 95 | m_videoHeight = height; 96 | makeInfoDirty(true); 97 | } 98 | } 99 | 100 | QQuickFramebufferObject::Renderer *VideoItem::createRenderer() const 101 | { 102 | return new VideoFboItem; 103 | } 104 | 105 | -------------------------------------------------------------------------------- /10.10.3.video_decode_by_cuda_display_by_qml/videoitem.cpp: -------------------------------------------------------------------------------- 1 | #include "videoitem.h" 2 | #include 3 | #include 4 | 5 | #include "nv12render.h" 6 | 7 | class VideoFboItem 8 | :public QQuickFramebufferObject::Renderer 9 | { 10 | public: 11 | /// 12 | /// \brief VideoFboItem 构造函数,调用成员变量的构造函数 13 | /// 14 | VideoFboItem() 15 | { 16 | //qDebug()<<"VideoFboItem"; 17 | } 18 | 19 | /// 20 | /// \brief ~VideoFboItem 析构函数,释放资源 21 | /// 22 | ~VideoFboItem() override{ 23 | } 24 | 25 | /// 26 | /// \brief synchronize 数据同步函数,获取界面解码后的帧数据和宽度高度 27 | /// \param item 父界面的对象指针 28 | /// 29 | void synchronize(QQuickFramebufferObject *item) override{ 30 | VideoItem* pItem= dynamic_cast(item); 31 | if(pItem){ 32 | //qDebug()<<"synchroinze: "<getFrame(&ptr,&videoW,&videoH); 34 | } 35 | } 36 | 37 | /// 38 | /// \brief render 调用OpenGL的纹理绘制函数绘制图片到界面 39 | /// 40 | void render() override{ 41 | //qDebug()<<"render: "<stop(); 74 | delete m_timer; 75 | stop(); 76 | delete m_decoder; 77 | } 78 | 79 | QQuickFramebufferObject::Renderer *VideoItem::createRenderer() const 80 | { 81 | //qDebug()<<"Renderer"; 82 | return new VideoFboItem; 83 | } 84 | 85 | void VideoItem::getFrame(uchar **ptr, int *w, int *h) 86 | { 87 | //qDebug()<<"get frame ptr: "<getFrame(); 89 | *w = (int)m_decoder->width(); 90 | *h = (int)m_decoder->height(); 91 | 92 | return; 93 | } 94 | 95 | void VideoItem::setUrl(QString url) 96 | { 97 | if(m_url != url){ 98 | stop(); 99 | } 100 | 101 | m_url=url; 102 | m_decoder->setUrl(url); 103 | } 104 | 105 | void VideoItem::start() 106 | { 107 | stop(); 108 | m_decoder->start(); 109 | } 110 | 111 | void VideoItem::stop() 112 | { 113 | if(m_decoder->isRunning()){ 114 | m_decoder->requestInterruption(); 115 | m_decoder->quit(); 116 | m_decoder->wait(); 117 | } 118 | } 119 | -------------------------------------------------------------------------------- 
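main.qml for this example (shown earlier) imports VideoItem 1.0, so the VideoItem type defined above must be registered with the QML engine before the scene is loaded. The project's main.cpp is not reproduced in this excerpt; the following is only a minimal sketch of that registration using Qt's standard qmlRegisterType call, assuming the resource path qrc:/main.qml from qml.qrc:

#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <QtQml>
#include <QUrl>

#include "videoitem.h"

int main(int argc, char *argv[])
{
    QGuiApplication app(argc, argv);

    // make "import VideoItem 1.0" resolve to the C++ QQuickFramebufferObject item
    qmlRegisterType<VideoItem>("VideoItem", 1, 0, "VideoItem");

    QQmlApplicationEngine engine;
    engine.load(QUrl(QStringLiteral("qrc:/main.qml")));
    return app.exec();
}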
/10.19.audio_video_sync/ffmpegwidget.h: -------------------------------------------------------------------------------- 1 | #ifndef FFMPEGWIDGET_H 2 | #define FFMPEGWIDGET_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | 19 | #include 20 | 21 | extern "C"{ 22 | #include 23 | 24 | #include 25 | #include 26 | #include 27 | #include 28 | #include 29 | #include 30 | #include 31 | #include 32 | 33 | #include 34 | 35 | #include 36 | #include "libavutil/avutil.h" 37 | #include "libswresample/swresample.h" 38 | #include "libpostproc/postprocess.h" 39 | } 40 | 41 | using namespace std; 42 | 43 | #define MAX_AUDIO_FRAME_SIZE 192000 44 | 45 | class FFmpegAudio : public QThread{ 46 | Q_OBJECT 47 | public: 48 | explicit FFmpegAudio(); 49 | ~FFmpegAudio(); 50 | 51 | void setUrl(QString url); 52 | 53 | bool open_input_file(); 54 | 55 | protected: 56 | void run(); 57 | 58 | private: 59 | AVFormatContext *fmtCtx =NULL; 60 | const AVCodec *audioCodec =NULL; 61 | AVCodecContext *audioCodecCtx=NULL; 62 | AVPacket *pkt = NULL; 63 | AVFrame *audioFrame = NULL; 64 | 65 | struct SwrContext *swr_ctx=NULL; 66 | 67 | uint8_t *audio_out_buffer=nullptr; 68 | 69 | int out_channels; 70 | int out_sample_rate; 71 | enum AVSampleFormat out_sample_fmt = AV_SAMPLE_FMT_S16; 72 | 73 | int audioStreamIndex=-1; 74 | int numBytes = -1; 75 | 76 | QAudioOutput *audioOutput; 77 | QIODevice *streamOut; 78 | 79 | QString _url; 80 | }; 81 | 82 | class FFmpegVideo : public QThread 83 | { 84 | Q_OBJECT 85 | public: 86 | explicit FFmpegVideo(); 87 | ~FFmpegVideo(); 88 | 89 | void setUrl(QString url); 90 | 91 | bool open_input_file(); 92 | 93 | protected: 94 | void run(); 95 | 96 | signals: 97 | void sendQImage(QImage); 98 | 99 | private: 100 | AVFormatContext *fmtCtx =NULL; 101 | const AVCodec *videoCodec =NULL; 102 | AVCodecContext *videoCodecCtx=NULL; 103 | AVPacket *pkt = NULL; 104 | AVFrame *yuvFrame = NULL; 105 | AVFrame *rgbFrame = NULL; 106 | 107 | struct SwsContext *img_ctx=NULL; 108 | 109 | unsigned char *out_buffer=nullptr; 110 | 111 | int videoStreamIndex =-1; 112 | int numBytes = -1; 113 | 114 | QString _url; 115 | }; 116 | 117 | class FFmpegWidget : public QWidget 118 | { 119 | Q_OBJECT 120 | public: 121 | explicit FFmpegWidget(QWidget *parent = nullptr); 122 | ~FFmpegWidget(); 123 | 124 | void setUrl(QString url); 125 | 126 | void play(); 127 | void stop(); 128 | 129 | protected: 130 | void paintEvent(QPaintEvent *); 131 | 132 | private slots: 133 | void receiveQImage(const QImage &rImg); 134 | 135 | private: 136 | FFmpegAudio *fa; 137 | FFmpegVideo *ffmpeg; 138 | 139 | QImage img; 140 | }; 141 | 142 | #endif // FFMPEGWIDGET_H 143 | -------------------------------------------------------------------------------- /10.20.video_decode_add_filter_display_by_qwidget/mainwindow.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | MainWindow 4 | 5 | 6 | 7 | 0 8 | 0 9 | 1126 10 | 604 11 | 12 | 13 | 14 | MainWindow 15 | 16 | 17 | 18 | 19 | 1 20 | 21 | 22 | 1 23 | 24 | 25 | 1 26 | 27 | 28 | 1 29 | 30 | 31 | 0 32 | 33 | 34 | 35 | 36 | Contrast 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | Stop 47 | 48 | 49 | 50 | 51 | 52 | 53 | Play 54 | 55 | 56 | 57 | 58 | 59 | 60 | C:\\Users\\hyper\\Videos\\Sample.mkv 61 | 62 | 63 | 64 | 65 | 66 | 67 | 10 68 | 69 | 70 | 5 71 | 72 | 73 | 74 | 75 | 76 | 77 | Lightness 78 | 79 | 80 | 81 | 82 | 83 | 84 | 10 
85 | 86 | 87 | 5 88 | 89 | 90 | 91 | 92 | 93 | 94 | Qt::Horizontal 95 | 96 | 97 | 98 | 40 99 | 20 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | FFmpegWidget 111 | QWidget 112 |
ffmpegwidget.h
113 | 1 114 |
115 |
116 | 117 | 118 |
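The Contrast and Lightness sliders above (apparently 0–10 with 5 as the neutral default) feed FFmpegWidget::setFilterDescr(), which forwards to FFmpegVideo::setCL() declared in ffmpegwidget.h (shown earlier). The implementation is not reproduced in this excerpt, so the mapping below is only an illustrative guess at how slider values could be turned into an FFmpeg "eq" filter description; the decode thread would then presumably rebuild the graph via initFilter():

// assumed sketch, not the repository's code
void FFmpegVideo::setCL(int c, int l)
{
    // FFmpeg's eq filter treats contrast=1.0 and brightness=0.0 as neutral,
    // which matches the default filterDescr "eq=contrast=1:brightness=0"
    filterDescr = QString("eq=contrast=%1:brightness=%2")
                      .arg(c / 5.0)            // slider 5 -> 1.0
                      .arg((l - 5) / 10.0);    // slider 5 -> 0.0
}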
119 | -------------------------------------------------------------------------------- /RTSParser/rtspdata.h: -------------------------------------------------------------------------------- 1 | #ifndef RTSPDATA_H 2 | #define RTSPDATA_H 3 | 4 | #include 5 | #include 6 | #include 7 | 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include // gethostbyname 15 | #include // htons/ntohs 16 | 17 | #define RTSP_DEFAULT_PORT (554) 18 | #define RTSP_TIMEOUT (3000) // milliseconds 19 | 20 | typedef struct RtspCntHeader { 21 | uint8_t magic; // 0x24 22 | uint8_t channel; 23 | uint16_t length; 24 | uint8_t payload[0]; // >> RtpHeader 25 | } RtspCntHeader_st; 26 | 27 | typedef struct RtpCntHeader { 28 | #if 0 29 | uint8_t version:2; 30 | uint8_t padding:1; 31 | uint8_t externsion:1; 32 | uint8_t CSrcId:4; 33 | uint8_t marker:1; 34 | uint8_t pt:7; // payload type 35 | #else 36 | uint8_t exts; 37 | uint8_t type; 38 | #endif 39 | uint16_t seqNo; // Sequence number 40 | uint32_t ts; // Timestamp 41 | uint32_t SyncSrcId; // Synchronization source (SSRC) identifier 42 | // Contributing source (SSRC_n) identifier 43 | uint8_t payload[0]; // Frame data 44 | } RtpCntHeader_st; 45 | 46 | #define RTSP_VERSIION "RTSP/1.0" 47 | #define RTSP_SESSION_NAME "Session:" 48 | #define RTSP_USERAGENT "User-Agent: Darkise rtsp player 1.0\r\n" 49 | 50 | class RTSPData : public QThread 51 | { 52 | Q_OBJECT 53 | public: 54 | RTSPData(); 55 | // Init RTSP environment 56 | int rtspInit(char const* url); 57 | 58 | // Is the RTSP client started, if not try to start it 59 | int isStart(); 60 | int rtsp_read(); 61 | // Try to get a packet from socket 62 | int rtsp_packet(); 63 | 64 | /// Operations: 65 | // RTSP operations 66 | int options(int to); 67 | int describe(int to); 68 | int setup(int to); 69 | int play(int to); 70 | int get_params(int to); 71 | int teardown(int to); 72 | int _setup_interleaved(int to); 73 | int _set_range(int to); 74 | // 75 | int _send_request(char const* req, int size); 76 | int _wait_response(int to, char resp[], size_t size); 77 | int _parse_session(char const* resp); // SETUP 78 | int _parse_sdp(char const* resp); // DESCRIBE 79 | 80 | // Rtp content operations 81 | int rtsp_init(); 82 | 83 | protected: 84 | void run(); 85 | 86 | private: 87 | // Command handler 88 | char _url[256]; 89 | char host[64]; 90 | int port = RTSP_DEFAULT_PORT; 91 | int rtspSocket = -1; 92 | int rtspTimeout = RTSP_TIMEOUT; 93 | // RTSP control sequeue number 94 | int CSeq; 95 | // Use only UDP protocol 96 | int clientPort[2] = { 37477, 37478 }; 97 | int serverPort[2]; 98 | // Get from SETUP's response, PLAY and TEARDOWN need it 99 | char sessionId[32]; 100 | /* Media attributes: 101 | * a=x-dimensions:1920,1080 102 | * a=control:rtsp://192.168.199.30:554/h264/ch1/main/av_stream/trackID=1 103 | * a=rtpmap:96 H264/90000 104 | * a=fmtp:96 profile-level-id=420029; packetization-mode=1; sprop-parameter-sets=Z00AKY2NQDwBE/LCAAAOEAACvyAI,aO44gA== 105 | * a=Media_header:MEDIAINFO=494D4B48010100000400000100000000000000000000000000000000000000000000000000000000; 106 | * Concerned only 107 | */ 108 | int video_width, video_height; 109 | char control[256]; 110 | 111 | int rtp_size = 16*1024*1024; 112 | uint8_t* rtp_content; // Buffer for rtp 113 | int rtp_read, rtp_write; 114 | 115 | uint8_t *packet_buffer; 116 | uint32_t packet_wpos; 117 | }; 118 | 119 | #endif // RTSPDATA_H 120 | -------------------------------------------------------------------------------- /10.14.audio_decode_mp32pcm/main.c: 
-------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include "libavcodec/avcodec.h" 4 | #include "libavfilter/avfilter.h" 5 | #include "libavformat/avformat.h" 6 | #include "libavutil/avutil.h" 7 | #include "libavutil/ffversion.h" 8 | #include "libswresample/swresample.h" 9 | #include "libswscale/swscale.h" 10 | #include "libpostproc/postprocess.h" 11 | 12 | int main() 13 | { 14 | const char inFileName[] = "C:\\Users\\hyper\\Music\\Sample.mp3"; 15 | const char outFileName[] = "test.pcm"; 16 | FILE *file=fopen(outFileName,"w+b"); 17 | if(!file){ 18 | printf("Cannot open output file.\n"); 19 | return -1; 20 | } 21 | 22 | AVFormatContext *fmtCtx =avformat_alloc_context(); 23 | AVCodecContext *codecCtx = NULL; 24 | AVPacket *pkt=av_packet_alloc(); 25 | AVFrame *frame = av_frame_alloc(); 26 | 27 | int aStreamIndex = -1; 28 | 29 | do{ 30 | 31 | if(avformat_open_input(&fmtCtx,inFileName,NULL,NULL)<0){ 32 | printf("Cannot open input file.\n"); 33 | break; 34 | } 35 | if(avformat_find_stream_info(fmtCtx,NULL)<0){ 36 | printf("Cannot find any stream in file.\n"); 37 | break; 38 | } 39 | 40 | av_dump_format(fmtCtx,0,inFileName,0); 41 | 42 | for(size_t i=0;inb_streams;i++){ 43 | if(fmtCtx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_AUDIO){ 44 | aStreamIndex=(int)i; 45 | break; 46 | } 47 | } 48 | if(aStreamIndex==-1){ 49 | printf("Cannot find audio stream.\n"); 50 | break; 51 | } 52 | 53 | AVCodecParameters *aCodecPara = fmtCtx->streams[aStreamIndex]->codecpar; 54 | const AVCodec *codec = avcodec_find_decoder(aCodecPara->codec_id); 55 | if(!codec){ 56 | printf("Cannot find any codec for audio.\n"); 57 | break; 58 | } 59 | codecCtx = avcodec_alloc_context3(codec); 60 | if(avcodec_parameters_to_context(codecCtx,aCodecPara)<0){ 61 | printf("Cannot alloc codec context.\n"); 62 | break; 63 | } 64 | codecCtx->pkt_timebase = fmtCtx->streams[aStreamIndex]->time_base; 65 | 66 | if(avcodec_open2(codecCtx,codec,NULL)<0){ 67 | printf("Cannot open audio codec.\n"); 68 | break; 69 | } 70 | 71 | while(av_read_frame(fmtCtx,pkt)>=0){ 72 | if(pkt->stream_index==aStreamIndex){ 73 | if(avcodec_send_packet(codecCtx,pkt)>=0){ 74 | while(avcodec_receive_frame(codecCtx,frame)>=0){ 75 | /* 76 | Planar(平面),其数据格式排列方式为 (特别记住,该处是以点nb_samples采样点来交错,不是以字节交错): 77 | LLLLLLRRRRRRLLLLLLRRRRRRLLLLLLRRRRRRL...(每个LLLLLLRRRRRR为一个音频帧) 78 | 而不带P的数据格式(即交错排列)排列方式为: 79 | LRLRLRLRLRLRLRLRLRLRLRLRLRLRLRLRLRLRL...(每个LR为一个音频样本) 80 | */ 81 | if(av_sample_fmt_is_planar(codecCtx->sample_fmt)){ 82 | int numBytes =av_get_bytes_per_sample(codecCtx->sample_fmt); 83 | //pcm播放时是LRLRLR格式,所以要交错保存数据 84 | for(int i=0;inb_samples;i++){ 85 | for(int ch=0;chchannels;ch++){ 86 | fwrite((char*)frame->data[ch]+numBytes*i,1,numBytes,file); 87 | } 88 | } 89 | } 90 | } 91 | } 92 | } 93 | av_packet_unref(pkt); 94 | } 95 | }while(0); 96 | 97 | av_frame_free(&frame); 98 | av_packet_free(&pkt); 99 | avcodec_close(codecCtx); 100 | avcodec_free_context(&codecCtx); 101 | avformat_free_context(fmtCtx); 102 | 103 | fclose(file); 104 | 105 | return 0; 106 | } 107 | -------------------------------------------------------------------------------- /10.04.video_decode_flow/main.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include "libavcodec/avcodec.h" 4 | #include "libavfilter/avfilter.h" 5 | #include "libavformat/avformat.h" 6 | #include "libavutil/avutil.h" 7 | #include "libavutil/ffversion.h" 8 | #include "libswresample/swresample.h" 9 | #include 
"libswscale/swscale.h" 10 | #include "libpostproc/postprocess.h" 11 | 12 | int main() { 13 | char filePath[] = "C:\\Users\\hyper\\Videos\\Sample.wmv";//文件地址 14 | int videoStreamIndex = -1;//视频流所在流序列中的索引 15 | int ret=0;//默认返回值 16 | 17 | //需要的变量名并初始化 18 | AVFormatContext *fmtCtx=NULL; 19 | AVPacket *pkt =NULL; 20 | AVCodecContext *codecCtx=NULL; 21 | AVCodecParameters *avCodecPara=NULL; 22 | const AVCodec *codec=NULL; 23 | 24 | do{ 25 | //=========================== 创建AVFormatContext结构体 ===============================// 26 | //分配一个AVFormatContext,FFMPEG所有的操作都要通过这个AVFormatContext来进行 27 | fmtCtx = avformat_alloc_context(); 28 | //==================================== 打开文件 ======================================// 29 | if ((ret=avformat_open_input(&fmtCtx, filePath, NULL, NULL)) != 0) { 30 | printf("cannot open video file\n"); 31 | break; 32 | } 33 | 34 | //=================================== 获取视频流信息 ===================================// 35 | if ((ret=avformat_find_stream_info(fmtCtx, NULL)) < 0) { 36 | printf("cannot retrive video info\n"); 37 | break; 38 | } 39 | 40 | //循环查找视频中包含的流信息,直到找到视频类型的流 41 | //便将其记录下来 保存到videoStreamIndex变量中 42 | for (unsigned int i = 0; i < fmtCtx->nb_streams; i++) { 43 | if (fmtCtx->streams[ i ]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { 44 | videoStreamIndex = i; 45 | break;//找到视频流就退出 46 | } 47 | } 48 | 49 | //如果videoStream为-1 说明没有找到视频流 50 | if (videoStreamIndex == -1) { 51 | printf("cannot find video stream\n"); 52 | break; 53 | } 54 | 55 | //打印输入和输出信息:长度 比特率 流格式等 56 | av_dump_format(fmtCtx, 0, filePath, 0); 57 | 58 | //================================= 查找解码器 ===================================// 59 | avCodecPara = fmtCtx->streams[ videoStreamIndex ]->codecpar; 60 | codec = avcodec_find_decoder(avCodecPara->codec_id); 61 | if (codec == NULL) { 62 | printf("cannot find decoder\n"); 63 | break; 64 | } 65 | //根据解码器参数来创建解码器内容 66 | codecCtx = avcodec_alloc_context3(codec); 67 | avcodec_parameters_to_context(codecCtx, avCodecPara); 68 | if (codecCtx == NULL) { 69 | printf("Cannot alloc context."); 70 | break; 71 | } 72 | 73 | //================================ 打开解码器 ===================================// 74 | if ((ret=avcodec_open2(codecCtx, codec, NULL)) < 0) { // 具体采用什么解码器ffmpeg经过封装 我们无须知道 75 | printf("cannot open decoder\n"); 76 | break; 77 | } 78 | 79 | //=========================== 分配AVPacket结构体 ===============================// 80 | int i = 0;//用于帧计数 81 | pkt = av_packet_alloc(); //分配一个packet 82 | av_new_packet(pkt, codecCtx->width * codecCtx->height); //调整packet的数据 83 | 84 | //=========================== 读取视频信息 ===============================// 85 | while (av_read_frame(fmtCtx, pkt) >= 0) { //读取的是一帧视频 数据存入一个AVPacket的结构中 86 | if(pkt->stream_index==videoStreamIndex){ 87 | i++;//只计算视频帧 88 | } 89 | av_packet_unref(pkt);//重置pkt的内容 90 | } 91 | printf("There are %d frames int total.\n", i); 92 | }while(0); 93 | //===========================释放所有指针===============================// 94 | av_packet_free(&pkt); 95 | avcodec_close(codecCtx); 96 | avformat_close_input(&fmtCtx); 97 | avformat_free_context(fmtCtx); 98 | 99 | return ret; 100 | } 101 | -------------------------------------------------------------------------------- /10.10.3.video_decode_by_cuda_display_by_qml/nv12render.cpp: -------------------------------------------------------------------------------- 1 | #include "nv12render.h" 2 | 3 | NV12Render::NV12Render() 4 | { 5 | initializeOpenGLFunctions(); 6 | 7 | const char *vsrc = 8 | "attribute vec4 vertexIn; \ 9 | attribute vec4 textureIn; \ 10 | varying vec4 
textureOut; \ 11 | void main(void) \ 12 | { \ 13 | gl_Position = vertexIn; \ 14 | textureOut = textureIn; \ 15 | }"; 16 | 17 | const char *fsrc = 18 | "varying mediump vec4 textureOut;\n" 19 | "uniform sampler2D textureY;\n" 20 | "uniform sampler2D textureUV;\n" 21 | "void main(void)\n" 22 | "{\n" 23 | "highp vec3 yuv; \n" 24 | "highp vec3 rgb; \n" 25 | "yuv.x = texture2D(textureY, textureOut.st).r - 0.0625; \n" 26 | "yuv.y = texture2D(textureUV, textureOut.st).r - 0.5; \n" 27 | "yuv.z = texture2D(textureUV, textureOut.st).g - 0.5; \n" 28 | "rgb = mat3( 1, 1, 1, \n" 29 | "0, -0.39465, 2.03211, \n" 30 | "1.13983, -0.58060, 0) * yuv; \n" 31 | "gl_FragColor = vec4(rgb, 1); \n" 32 | "}\n"; 33 | 34 | program.addCacheableShaderFromSourceCode(QOpenGLShader::Vertex,vsrc); 35 | program.addCacheableShaderFromSourceCode(QOpenGLShader::Fragment,fsrc); 36 | program.link(); 37 | 38 | GLfloat points[]{ 39 | -1.0f,1.0f, 40 | -1.0f,-1.0f, 41 | 1.0f,1.0f, 42 | 1.0f,-1.0f, 43 | 44 | 0.0f,1.0f, 45 | 0.0f,0.0f, 46 | 1.0f,1.0f, 47 | 1.0f,0.0f 48 | }; 49 | 50 | vbo.create(); 51 | vbo.bind(); 52 | vbo.allocate(points,sizeof(points)); 53 | 54 | GLuint ids[2]; 55 | glGenTextures(2,ids); 56 | idY = ids[0]; 57 | idUV = ids[1]; 58 | } 59 | 60 | void NV12Render::render(uchar *p, int width, int height) 61 | { 62 | if(p==nullptr) { 63 | //qDebug()<<"No data in buffer."; 64 | return; 65 | } 66 | 67 | //qDebug()<<"opengl render: "<> 1,height >> 1,0,GL_RG,GL_UNSIGNED_BYTE,p + width*height); 90 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); 91 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); 92 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 93 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 94 | 95 | program.setUniformValue("textureUV",0); 96 | program.setUniformValue("textureY",1); 97 | glDrawArrays(GL_TRIANGLE_STRIP,0,4); 98 | program.disableAttributeArray("vertexIn"); 99 | program.disableAttributeArray("textureIn"); 100 | program.release(); 101 | 102 | //qDebug()<<"opengl render finished: "<nb_streams; 53 | for(int i=0;istreams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO){ 55 | videoStreamIndex = i; 56 | continue; 57 | } 58 | } 59 | 60 | if(videoStreamIndex==-1){ 61 | printf("Cannot find video stream in file %s.\n",_url.toLocal8Bit().data()); 62 | return; 63 | } 64 | 65 | AVCodecParameters *videoCodecPara = fmtCtx->streams[videoStreamIndex]->codecpar; 66 | 67 | if(!(videoCodec = avcodec_find_decoder(videoCodecPara->codec_id))){ 68 | printf("Cannot find valid decode codec.\n"); 69 | return; 70 | } 71 | 72 | if(!(videoCodecCtx = avcodec_alloc_context3(videoCodec))){ 73 | printf("Cannot find valid decode codec context.\n"); 74 | return; 75 | } 76 | 77 | if(avcodec_parameters_to_context(videoCodecCtx,videoCodecPara)<0){ 78 | printf("Cannot initialize parameters.\n"); 79 | return; 80 | } 81 | if(avcodec_open2(videoCodecCtx,videoCodec,NULL)<0){ 82 | printf("Cannot open codec.\n"); 83 | return; 84 | } 85 | 86 | w = videoCodecCtx->width; 87 | h = videoCodecCtx->height; 88 | emit videoInfoReady(w,h); 89 | 90 | while(av_read_frame(fmtCtx,pkt)>=0){ 91 | if(pkt->stream_index == videoStreamIndex){ 92 | if(avcodec_send_packet(videoCodecCtx,pkt)>=0){ 93 | int ret; 94 | while((ret=avcodec_receive_frame(videoCodecCtx,yuvFrame))>=0){ 95 | if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) 96 | return; 97 | else if (ret < 0) { 98 | fprintf(stderr, "Error during decoding\n"); 99 | continue; 100 | } 101 | 102 | 
m_yuvData.Y.resize(yuvFrame->linesize[0]*yuvFrame->height); 103 | m_yuvData.Y =QByteArray((char*)yuvFrame->data[0],m_yuvData.Y.size()); 104 | m_yuvData.U.resize(yuvFrame->linesize[1]*yuvFrame->height/2); 105 | m_yuvData.U =QByteArray((char*)yuvFrame->data[1],m_yuvData.Y.size()/4); 106 | m_yuvData.V.resize(yuvFrame->linesize[2]*yuvFrame->height/2); 107 | m_yuvData.V =QByteArray((char*)yuvFrame->data[2],m_yuvData.Y.size()/4); 108 | m_yuvData.yLineSize = yuvFrame->linesize[0]; 109 | m_yuvData.uLineSize = yuvFrame->linesize[1]; 110 | m_yuvData.vLineSize = yuvFrame->linesize[2]; 111 | m_yuvData.height = yuvFrame->height; 112 | 113 | frameBuffer.append(m_yuvData); 114 | 115 | QThread::msleep(33); 116 | } 117 | } 118 | av_packet_unref(pkt); 119 | } 120 | } 121 | } 122 | #endif 123 | -------------------------------------------------------------------------------- /10.08.1.video_decode_by_cpu_display_by_qopengl/ffmpegdecoder.cpp: -------------------------------------------------------------------------------- 1 | #ifndef __rasp_pi2__ 2 | 3 | #include "ffmpegdecoder.h" 4 | 5 | FFmpegDecoder::FFmpegDecoder() 6 | { 7 | fmtCtx = avformat_alloc_context(); 8 | pkt = av_packet_alloc(); 9 | yuvFrame = av_frame_alloc(); 10 | rgbFrame = av_frame_alloc(); 11 | } 12 | 13 | FFmpegDecoder::~FFmpegDecoder() 14 | { 15 | if(!pkt) av_packet_free(&pkt); 16 | if(!yuvFrame) av_frame_free(&yuvFrame); 17 | if(!rgbFrame) av_frame_free(&rgbFrame); 18 | if(!videoCodecCtx) avcodec_free_context(&videoCodecCtx); 19 | if(!videoCodecCtx) avcodec_close(videoCodecCtx); 20 | if(!fmtCtx) avformat_close_input(&fmtCtx); 21 | } 22 | 23 | void FFmpegDecoder::setUrl(const QString url) 24 | { 25 | _url= url; 26 | } 27 | 28 | int FFmpegDecoder::width() 29 | { 30 | return w; 31 | } 32 | 33 | int FFmpegDecoder::height() 34 | { 35 | return h; 36 | } 37 | 38 | void FFmpegDecoder::run() 39 | { 40 | if(avformat_open_input(&fmtCtx,_url.toLocal8Bit().data(),NULL,NULL)<0){ 41 | printf("Cannot open input file.\n"); 42 | return; 43 | } 44 | 45 | if(avformat_find_stream_info(fmtCtx,NULL)<0){ 46 | printf("Cannot find any stream in file.\n"); 47 | return; 48 | } 49 | 50 | int streamCnt=fmtCtx->nb_streams; 51 | for(int i=0;istreams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO){ 53 | videoStreamIndex = i; 54 | continue; 55 | } 56 | } 57 | 58 | if(videoStreamIndex==-1){ 59 | printf("Cannot find video stream in file %s.\n",_url.toLocal8Bit().data()); 60 | return; 61 | } 62 | 63 | AVCodecParameters *videoCodecPara = fmtCtx->streams[videoStreamIndex]->codecpar; 64 | 65 | if(!(videoCodec = avcodec_find_decoder(videoCodecPara->codec_id))){ 66 | printf("Cannot find valid decode codec.\n"); 67 | return; 68 | } 69 | 70 | if(!(videoCodecCtx = avcodec_alloc_context3(videoCodec))){ 71 | printf("Cannot find valid decode codec context.\n"); 72 | return; 73 | } 74 | 75 | if(avcodec_parameters_to_context(videoCodecCtx,videoCodecPara)<0){ 76 | printf("Cannot initialize parameters.\n"); 77 | return; 78 | } 79 | if(avcodec_open2(videoCodecCtx,videoCodec,NULL)<0){ 80 | printf("Cannot open codec.\n"); 81 | return; 82 | } 83 | 84 | w = videoCodecCtx->width; 85 | h = videoCodecCtx->height; 86 | 87 | numBytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,w,h,1); 88 | out_buffer = (unsigned char *)av_malloc(numBytes*sizeof(uchar)); 89 | 90 | while(av_read_frame(fmtCtx,pkt)>=0){ 91 | if(pkt->stream_index == videoStreamIndex){ 92 | if(avcodec_send_packet(videoCodecCtx,pkt)>=0){ 93 | int ret; 94 | while((ret=avcodec_receive_frame(videoCodecCtx,yuvFrame))>=0){ 95 | if (ret == 
AVERROR(EAGAIN) || ret == AVERROR_EOF) 96 | return; 97 | else if (ret < 0) { 98 | fprintf(stderr, "Error during decoding\n"); 99 | continue; 100 | } 101 | 102 | if(isFirst){ 103 | isFirst=false; 104 | emit sigFirst(out_buffer,w,h); 105 | } 106 | 107 | int bytes =0; 108 | for(int i=0;idata[0]+yuvFrame->linesize[0]*i,w); 110 | bytes+=w; 111 | } 112 | 113 | int u=h>>1; 114 | for(int i=0;idata[1]+yuvFrame->linesize[1]*i,w/2); 116 | bytes+=w/2; 117 | } 118 | 119 | for(int i=0;idata[2]+yuvFrame->linesize[2]*i,w/2); 121 | bytes+=w/2; 122 | } 123 | 124 | emit newFrame(); 125 | 126 | QThread::msleep(24); 127 | } 128 | } 129 | av_packet_unref(pkt); 130 | } 131 | } 132 | } 133 | #endif 134 | -------------------------------------------------------------------------------- /10.08.2.video_decode_by_cpu_display_by_qopengl/ffmpegdecoder.cpp: -------------------------------------------------------------------------------- 1 | #ifndef __rasp_pi2__ 2 | 3 | #include "ffmpegdecoder.h" 4 | 5 | FFmpegDecoder::FFmpegDecoder() 6 | { 7 | fmtCtx = avformat_alloc_context(); 8 | pkt = av_packet_alloc(); 9 | yuvFrame = av_frame_alloc(); 10 | rgbFrame = av_frame_alloc(); 11 | } 12 | 13 | FFmpegDecoder::~FFmpegDecoder() 14 | { 15 | if(!pkt) av_packet_free(&pkt); 16 | if(!yuvFrame) av_frame_free(&yuvFrame); 17 | if(!rgbFrame) av_frame_free(&rgbFrame); 18 | if(!videoCodecCtx) avcodec_free_context(&videoCodecCtx); 19 | if(!videoCodecCtx) avcodec_close(videoCodecCtx); 20 | if(!fmtCtx) avformat_close_input(&fmtCtx); 21 | } 22 | 23 | void FFmpegDecoder::setUrl(const QString url) 24 | { 25 | _url= url; 26 | } 27 | 28 | int FFmpegDecoder::width() 29 | { 30 | return w; 31 | } 32 | 33 | int FFmpegDecoder::height() 34 | { 35 | return h; 36 | } 37 | 38 | void FFmpegDecoder::run() 39 | { 40 | if(avformat_open_input(&fmtCtx,_url.toLocal8Bit().data(),NULL,NULL)<0){ 41 | printf("Cannot open input file.\n"); 42 | return; 43 | } 44 | 45 | if(avformat_find_stream_info(fmtCtx,NULL)<0){ 46 | printf("Cannot find any stream in file.\n"); 47 | return; 48 | } 49 | 50 | int streamCnt=fmtCtx->nb_streams; 51 | for(int i=0;istreams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO){ 53 | videoStreamIndex = i; 54 | continue; 55 | } 56 | } 57 | 58 | if(videoStreamIndex==-1){ 59 | printf("Cannot find video stream in file %s.\n",_url.toLocal8Bit().data()); 60 | return; 61 | } 62 | 63 | AVCodecParameters *videoCodecPara = fmtCtx->streams[videoStreamIndex]->codecpar; 64 | 65 | if(!(videoCodec = avcodec_find_decoder(videoCodecPara->codec_id))){ 66 | printf("Cannot find valid decode codec.\n"); 67 | return; 68 | } 69 | 70 | if(!(videoCodecCtx = avcodec_alloc_context3(videoCodec))){ 71 | printf("Cannot find valid decode codec context.\n"); 72 | return; 73 | } 74 | 75 | if(avcodec_parameters_to_context(videoCodecCtx,videoCodecPara)<0){ 76 | printf("Cannot initialize parameters.\n"); 77 | return; 78 | } 79 | if(avcodec_open2(videoCodecCtx,videoCodec,NULL)<0){ 80 | printf("Cannot open codec.\n"); 81 | return; 82 | } 83 | 84 | w = videoCodecCtx->width; 85 | h = videoCodecCtx->height; 86 | 87 | numBytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,w,h,1); 88 | out_buffer = (unsigned char *)av_malloc(numBytes*sizeof(uchar)); 89 | 90 | while(av_read_frame(fmtCtx,pkt)>=0){ 91 | if(pkt->stream_index == videoStreamIndex){ 92 | if(avcodec_send_packet(videoCodecCtx,pkt)>=0){ 93 | int ret; 94 | while((ret=avcodec_receive_frame(videoCodecCtx,yuvFrame))>=0){ 95 | if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) 96 | return; 97 | else if (ret < 0) { 98 | fprintf(stderr, 
"Error during decoding\n"); 99 | continue; 100 | } 101 | 102 | if(isFirst){ 103 | isFirst=false; 104 | emit sigFirst(out_buffer,w,h); 105 | } 106 | 107 | int bytes =0; 108 | for(int i=0;idata[0]+yuvFrame->linesize[0]*i,w); 110 | bytes+=w; 111 | } 112 | 113 | int u=h>>1; 114 | for(int i=0;idata[1]+yuvFrame->linesize[1]*i,w/2); 116 | bytes+=w/2; 117 | } 118 | 119 | for(int i=0;idata[2]+yuvFrame->linesize[2]*i,w/2); 121 | bytes+=w/2; 122 | } 123 | 124 | emit newFrame(); 125 | 126 | QThread::msleep(24); 127 | } 128 | } 129 | av_packet_unref(pkt); 130 | } 131 | } 132 | } 133 | #endif 134 | -------------------------------------------------------------------------------- /10.06.1.video_decode_mp42yuv420p/main.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include "libavcodec/avcodec.h" 4 | #include "libavfilter/avfilter.h" 5 | #include "libavformat/avformat.h" 6 | #include "libavutil/avutil.h" 7 | #include "libavutil/ffversion.h" 8 | #include "libswresample/swresample.h" 9 | #include "libswscale/swscale.h" 10 | #include "libpostproc/postprocess.h" 11 | 12 | int main() { 13 | FILE *fp=fopen("result.yuv","w+b"); 14 | if(fp==NULL){ 15 | printf("Cannot open file.\n"); 16 | return -1; 17 | } 18 | 19 | char filePath[] = "C:\\Users\\hyper\\Videos\\Sample.wmv";//文件地址 20 | int videoStreamIndex = -1;//视频流所在流序列中的索引 21 | int ret=0;//默认返回值 22 | 23 | //需要的变量名并初始化 24 | AVFormatContext *fmtCtx=NULL; 25 | AVPacket *pkt =NULL; 26 | AVCodecContext *codecCtx=NULL; 27 | AVCodecParameters *avCodecPara=NULL; 28 | const AVCodec *codec=NULL; 29 | AVFrame *yuvFrame = av_frame_alloc(); 30 | 31 | do{ 32 | //=========================== 创建AVFormatContext结构体 ===============================// 33 | //分配一个AVFormatContext,FFMPEG所有的操作都要通过这个AVFormatContext来进行 34 | fmtCtx = avformat_alloc_context(); 35 | //==================================== 打开文件 ======================================// 36 | if ((ret=avformat_open_input(&fmtCtx, filePath, NULL, NULL)) != 0) { 37 | printf("cannot open video file\n"); 38 | break; 39 | } 40 | 41 | //=================================== 获取视频流信息 ===================================// 42 | if ((ret=avformat_find_stream_info(fmtCtx, NULL)) < 0) { 43 | printf("cannot retrive video info\n"); 44 | break; 45 | } 46 | 47 | //循环查找视频中包含的流信息,直到找到视频类型的流 48 | //便将其记录下来 保存到videoStreamIndex变量中 49 | for (unsigned int i = 0; i < fmtCtx->nb_streams; i++) { 50 | if (fmtCtx->streams[ i ]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { 51 | videoStreamIndex = i; 52 | break;//找到视频流就退出 53 | } 54 | } 55 | 56 | //如果videoStream为-1 说明没有找到视频流 57 | if (videoStreamIndex == -1) { 58 | printf("cannot find video stream\n"); 59 | break; 60 | } 61 | 62 | //打印输入和输出信息:长度 比特率 流格式等 63 | av_dump_format(fmtCtx, 0, filePath, 0); 64 | 65 | //================================= 查找解码器 ===================================// 66 | avCodecPara = fmtCtx->streams[ videoStreamIndex ]->codecpar; 67 | codec = avcodec_find_decoder(avCodecPara->codec_id); 68 | if (codec == NULL) { 69 | printf("cannot find decoder\n"); 70 | break; 71 | } 72 | //根据解码器参数来创建解码器内容 73 | codecCtx = avcodec_alloc_context3(codec); 74 | avcodec_parameters_to_context(codecCtx, avCodecPara); 75 | if (codecCtx == NULL) { 76 | printf("Cannot alloc context."); 77 | break; 78 | } 79 | 80 | //================================ 打开解码器 ===================================// 81 | if ((ret=avcodec_open2(codecCtx, codec, NULL)) < 0) { // 具体采用什么解码器ffmpeg经过封装 我们无须知道 82 | printf("cannot open decoder\n"); 83 | break; 84 | } 85 | 86 | int 
w=codecCtx->width;//视频宽度 87 | int h=codecCtx->height;//视频高度 88 | 89 | //=========================== 分配AVPacket结构体 ===============================// 90 | pkt = av_packet_alloc(); //分配一个packet 91 | av_new_packet(pkt, codecCtx->width * codecCtx->height); //调整packet的数据 92 | 93 | //=========================== 读取视频信息 ===============================// 94 | int frameCnt = 0;//帧数 95 | while (av_read_frame(fmtCtx, pkt) >= 0) { //读取的是一帧视频 数据存入一个AVPacket的结构中 96 | if (pkt->stream_index == videoStreamIndex){ 97 | if (avcodec_send_packet(codecCtx, pkt) == 0){ 98 | while (avcodec_receive_frame(codecCtx, yuvFrame) == 0){ 99 | fwrite(yuvFrame->data[0],1,w*h,fp);//y 100 | fwrite(yuvFrame->data[1],1,w*h/4,fp);//u 101 | fwrite(yuvFrame->data[2],1,w*h/4,fp);//v 102 | 103 | printf("save frame %d to file.\n",frameCnt++); 104 | fflush(fp); 105 | } 106 | } 107 | } 108 | av_packet_unref(pkt);//重置pkt的内容 109 | } 110 | }while(0); 111 | //===========================释放所有指针===============================// 112 | av_packet_free(&pkt); 113 | avcodec_close(codecCtx); 114 | avformat_close_input(&fmtCtx); 115 | avformat_free_context(fmtCtx); 116 | av_frame_free(&yuvFrame); 117 | 118 | return ret; 119 | } 120 | -------------------------------------------------------------------------------- /10.08.1.video_decode_by_cpu_display_by_qopengl/i420render2.cpp: -------------------------------------------------------------------------------- 1 | #include "i420render2.h" 2 | 3 | I420Render2::I420Render2(QWidget *parent) 4 | :QOpenGLWidget(parent) 5 | { 6 | decoder = new FFmpegDecoder; 7 | connect(decoder,&FFmpegDecoder::sigFirst,[=](uchar* p,int w,int h){ 8 | ptr = p; 9 | width = w; 10 | height = h; 11 | }); 12 | connect(decoder,&FFmpegDecoder::newFrame,[=](){ 13 | update(); 14 | }); 15 | } 16 | 17 | I420Render2::~I420Render2() 18 | { 19 | 20 | } 21 | 22 | void I420Render2::setUrl(QString url) 23 | { 24 | decoder->setUrl(url); 25 | } 26 | 27 | void I420Render2::startVideo() 28 | { 29 | decoder->start(); 30 | } 31 | 32 | void I420Render2::initializeGL() 33 | { 34 | initializeOpenGLFunctions(); 35 | const char *vsrc = 36 | "attribute vec4 vertexIn; \ 37 | attribute vec4 textureIn; \ 38 | varying vec4 textureOut; \ 39 | void main(void) \ 40 | { \ 41 | gl_Position = vertexIn; \ 42 | textureOut = textureIn; \ 43 | }"; 44 | 45 | const char *fsrc = 46 | "varying vec4 textureOut;\n" 47 | "uniform sampler2D textureY;\n" 48 | "uniform sampler2D textureU;\n" 49 | "uniform sampler2D textureV;\n" 50 | "void main(void)\n" 51 | "{\n" 52 | "vec3 yuv; \n" 53 | "vec3 rgb; \n" 54 | "yuv.x = texture2D(textureY, textureOut.st).r; \n" 55 | "yuv.y = texture2D(textureU, textureOut.st).r - 0.5; \n" 56 | "yuv.z = texture2D(textureV, textureOut.st).r - 0.5; \n" 57 | "rgb = mat3( 1, 1, 1, \n" 58 | "0, -0.39465, 2.03211, \n" 59 | "1.13983, -0.58060, 0) * yuv; \n" 60 | "gl_FragColor = vec4(rgb, 1); \n" 61 | "}\n"; 62 | 63 | m_program.addCacheableShaderFromSourceCode(QOpenGLShader::Vertex,vsrc); 64 | m_program.addCacheableShaderFromSourceCode(QOpenGLShader::Fragment,fsrc); 65 | m_program.link(); 66 | 67 | GLfloat points[]{ 68 | -1.0f, 1.0f, 69 | 1.0f, 1.0f, 70 | 1.0f, -1.0f, 71 | -1.0f, -1.0f, 72 | 73 | 0.0f,0.0f, 74 | 1.0f,0.0f, 75 | 1.0f,1.0f, 76 | 0.0f,1.0f 77 | }; 78 | 79 | vbo.create(); 80 | vbo.bind(); 81 | vbo.allocate(points,sizeof(points)); 82 | 83 | GLuint ids[3]; 84 | glGenTextures(3,ids); 85 | idY = ids[0]; 86 | idU = ids[1]; 87 | idV = ids[2]; 88 | } 89 | 90 | void I420Render2::resizeGL(int w, int h) 91 | { 92 | if(h<=0) h=1; 93 | 94 | 
glViewport(0,0,w,h); 95 | } 96 | 97 | void I420Render2::paintGL() 98 | { 99 | if(!ptr) return; 100 | 101 | glClearColor(1.0f, 1.0f, 1.0f, 1.0f); 102 | glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); 103 | glDisable(GL_DEPTH_TEST); 104 | 105 | m_program.bind(); 106 | vbo.bind(); 107 | m_program.enableAttributeArray("vertexIn"); 108 | m_program.enableAttributeArray("textureIn"); 109 | m_program.setAttributeBuffer("vertexIn",GL_FLOAT, 0, 2, 2*sizeof(GLfloat)); 110 | m_program.setAttributeBuffer("textureIn",GL_FLOAT,2 * 4 * sizeof(GLfloat),2,2*sizeof(GLfloat)); 111 | 112 | glActiveTexture(GL_TEXTURE0); 113 | glBindTexture(GL_TEXTURE_2D,idY); 114 | glTexImage2D(GL_TEXTURE_2D,0,GL_RED,width,height,0,GL_RED,GL_UNSIGNED_BYTE,ptr); 115 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); 116 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); 117 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 118 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 119 | 120 | glActiveTexture(GL_TEXTURE1); 121 | glBindTexture(GL_TEXTURE_2D,idU); 122 | glTexImage2D(GL_TEXTURE_2D,0,GL_RED,width >> 1,height >> 1,0,GL_RED,GL_UNSIGNED_BYTE,ptr + width*height); 123 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); 124 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); 125 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 126 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 127 | 128 | glActiveTexture(GL_TEXTURE2); 129 | glBindTexture(GL_TEXTURE_2D,idV); 130 | glTexImage2D(GL_TEXTURE_2D,0,GL_RED,width >> 1,height >> 1,0,GL_RED,GL_UNSIGNED_BYTE,ptr + width*height*5/4); 131 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); 132 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); 133 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 134 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 135 | 136 | m_program.setUniformValue("textureY",0); 137 | m_program.setUniformValue("textureU",1); 138 | m_program.setUniformValue("textureV",2); 139 | glDrawArrays(GL_QUADS,0,4); 140 | m_program.disableAttributeArray("vertexIn"); 141 | m_program.disableAttributeArray("textureIn"); 142 | m_program.release(); 143 | } 144 | -------------------------------------------------------------------------------- /10.08.2.video_decode_by_cpu_display_by_qopengl/i420render2.cpp: -------------------------------------------------------------------------------- 1 | #include "i420render2.h" 2 | 3 | I420Render2::I420Render2(QWidget *parent) 4 | :QOpenGLWidget(parent) 5 | { 6 | decoder = new FFmpegDecoder; 7 | connect(decoder,&FFmpegDecoder::sigFirst,[=](uchar* p,int w,int h){ 8 | ptr = p; 9 | width = w; 10 | height = h; 11 | }); 12 | connect(decoder,&FFmpegDecoder::newFrame,[=](){ 13 | update(); 14 | }); 15 | } 16 | 17 | I420Render2::~I420Render2() 18 | { 19 | 20 | } 21 | 22 | void I420Render2::setUrl(QString url) 23 | { 24 | decoder->setUrl(url); 25 | } 26 | 27 | void I420Render2::startVideo() 28 | { 29 | decoder->start(); 30 | } 31 | 32 | void I420Render2::initializeGL() 33 | { 34 | initializeOpenGLFunctions(); 35 | const char *vsrc = 36 | "attribute vec4 vertexIn; \ 37 | attribute vec4 textureIn; \ 38 | varying vec4 textureOut; \ 39 | void main(void) \ 40 | { \ 41 | gl_Position = vertexIn; \ 42 | textureOut = textureIn; \ 43 | }"; 44 | 45 | const char *fsrc = 46 | "varying vec4 textureOut;\n" 47 | "uniform sampler2D textureY;\n" 48 | "uniform 
sampler2D textureU;\n" 49 | "uniform sampler2D textureV;\n" 50 | "void main(void)\n" 51 | "{\n" 52 | "vec3 yuv; \n" 53 | "vec3 rgb; \n" 54 | "yuv.x = texture2D(textureY, textureOut.st).r; \n" 55 | "yuv.y = texture2D(textureU, textureOut.st).r - 0.5; \n" 56 | "yuv.z = texture2D(textureV, textureOut.st).r - 0.5; \n" 57 | "rgb = mat3( 1, 1, 1, \n" 58 | "0, -0.39465, 2.03211, \n" 59 | "1.13983, -0.58060, 0) * yuv; \n" 60 | "gl_FragColor = vec4(rgb, 1); \n" 61 | "}\n"; 62 | 63 | m_program.addCacheableShaderFromSourceCode(QOpenGLShader::Vertex,vsrc); 64 | m_program.addCacheableShaderFromSourceCode(QOpenGLShader::Fragment,fsrc); 65 | m_program.link(); 66 | 67 | GLfloat points[]{ 68 | -1.0f, 1.0f, 69 | 1.0f, 1.0f, 70 | 1.0f, -1.0f, 71 | -1.0f, -1.0f, 72 | 73 | 0.0f,0.0f, 74 | 1.0f,0.0f, 75 | 1.0f,1.0f, 76 | 0.0f,1.0f 77 | }; 78 | 79 | vbo.create(); 80 | vbo.bind(); 81 | vbo.allocate(points,sizeof(points)); 82 | 83 | GLuint ids[3]; 84 | glGenTextures(3,ids); 85 | idY = ids[0]; 86 | idU = ids[1]; 87 | idV = ids[2]; 88 | } 89 | 90 | void I420Render2::resizeGL(int w, int h) 91 | { 92 | if(h<=0) h=1; 93 | 94 | glViewport(0,0,w,h); 95 | } 96 | 97 | void I420Render2::paintGL() 98 | { 99 | if(!ptr) return; 100 | 101 | glClearColor(1.0f, 1.0f, 1.0f, 1.0f); 102 | glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); 103 | glDisable(GL_DEPTH_TEST); 104 | 105 | m_program.bind(); 106 | vbo.bind(); 107 | m_program.enableAttributeArray("vertexIn"); 108 | m_program.enableAttributeArray("textureIn"); 109 | m_program.setAttributeBuffer("vertexIn",GL_FLOAT, 0, 2, 2*sizeof(GLfloat)); 110 | m_program.setAttributeBuffer("textureIn",GL_FLOAT,2 * 4 * sizeof(GLfloat),2,2*sizeof(GLfloat)); 111 | 112 | glActiveTexture(GL_TEXTURE0); 113 | glBindTexture(GL_TEXTURE_2D,idY); 114 | glTexImage2D(GL_TEXTURE_2D,0,GL_RED,width,height,0,GL_RED,GL_UNSIGNED_BYTE,ptr); 115 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); 116 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); 117 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 118 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 119 | 120 | glActiveTexture(GL_TEXTURE1); 121 | glBindTexture(GL_TEXTURE_2D,idU); 122 | glTexImage2D(GL_TEXTURE_2D,0,GL_RED,width >> 1,height >> 1,0,GL_RED,GL_UNSIGNED_BYTE,ptr + width*height); 123 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); 124 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); 125 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 126 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 127 | 128 | glActiveTexture(GL_TEXTURE2); 129 | glBindTexture(GL_TEXTURE_2D,idV); 130 | glTexImage2D(GL_TEXTURE_2D,0,GL_RED,width >> 1,height >> 1,0,GL_RED,GL_UNSIGNED_BYTE,ptr + width*height*5/4); 131 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); 132 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); 133 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 134 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 135 | 136 | m_program.setUniformValue("textureY",0); 137 | m_program.setUniformValue("textureU",1); 138 | m_program.setUniformValue("textureV",2); 139 | glDrawArrays(GL_QUADS,0,4); 140 | m_program.disableAttributeArray("vertexIn"); 141 | m_program.disableAttributeArray("textureIn"); 142 | m_program.release(); 143 | } 144 | -------------------------------------------------------------------------------- 
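The two i420render2.cpp variants above upload the three I420 planes as single-channel GL_RED textures and do the YUV-to-RGB conversion in the fragment shader with a BT.601-style matrix. As a plain-C cross-check of those coefficients (not code from this repository; the helper name yuv420p_pixel_to_rgb is purely illustrative), the same per-pixel conversion on the CPU would look like this:

#include <stdio.h>

/* Same math as the shader's column-major mat3 above:
 *   R = Y + 1.13983*(V - 0.5)
 *   G = Y - 0.39465*(U - 0.5) - 0.58060*(V - 0.5)
 *   B = Y + 2.03211*(U - 0.5)
 * y, u, v are normalized samples in [0,1], exactly what texture2D(...).r returns. */
static float clamp01(float x) { return x < 0.0f ? 0.0f : (x > 1.0f ? 1.0f : x); }

static void yuv420p_pixel_to_rgb(float y, float u, float v, float rgb[3])
{
    u -= 0.5f;
    v -= 0.5f;
    rgb[0] = clamp01(y + 1.13983f * v);
    rgb[1] = clamp01(y - 0.39465f * u - 0.58060f * v);
    rgb[2] = clamp01(y + 2.03211f * u);
}

int main(void)
{
    float rgb[3];
    yuv420p_pixel_to_rgb(0.5f, 0.5f, 0.5f, rgb); /* neutral chroma stays grey */
    printf("%.3f %.3f %.3f\n", rgb[0], rgb[1], rgb[2]);
    return 0;
}

The i420render.cpp that follows uses a slightly different set of BT.601-style constants (1.4075 / -0.3455 / -0.7169 / 1.779); both are common approximations, and only the matrix entries differ.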
/10.09.video_decode_by_cpu_display_by_qml/i420render.cpp: -------------------------------------------------------------------------------- 1 | #include "i420render.h" 2 | 3 | I420Render::I420Render() 4 | { 5 | mTexY = new QOpenGLTexture(QOpenGLTexture::Target2D); 6 | mTexY->setFormat(QOpenGLTexture::LuminanceFormat); 7 | mTexY->setMinificationFilter(QOpenGLTexture::Nearest); 8 | mTexY->setMagnificationFilter(QOpenGLTexture::Nearest); 9 | mTexY->setWrapMode(QOpenGLTexture::ClampToEdge); 10 | 11 | mTexU = new QOpenGLTexture(QOpenGLTexture::Target2D); 12 | mTexU->setFormat(QOpenGLTexture::LuminanceFormat); 13 | mTexU->setMinificationFilter(QOpenGLTexture::Nearest); 14 | mTexU->setMagnificationFilter(QOpenGLTexture::Nearest); 15 | mTexU->setWrapMode(QOpenGLTexture::ClampToEdge); 16 | 17 | mTexV = new QOpenGLTexture(QOpenGLTexture::Target2D); 18 | mTexV->setFormat(QOpenGLTexture::LuminanceFormat); 19 | mTexV->setMinificationFilter(QOpenGLTexture::Nearest); 20 | mTexV->setMagnificationFilter(QOpenGLTexture::Nearest); 21 | mTexV->setWrapMode(QOpenGLTexture::ClampToEdge); 22 | } 23 | 24 | I420Render::~I420Render() 25 | {} 26 | 27 | void I420Render::init() 28 | { 29 | initializeOpenGLFunctions(); 30 | const char *vsrc = 31 | "attribute vec4 vertexIn; \ 32 | attribute vec2 textureIn; \ 33 | varying vec2 textureOut; \ 34 | void main(void) \ 35 | { \ 36 | gl_Position = vertexIn; \ 37 | textureOut = textureIn; \ 38 | }"; 39 | 40 | const char *fsrc = 41 | "varying mediump vec2 textureOut;\n" 42 | "uniform sampler2D textureY;\n" 43 | "uniform sampler2D textureU;\n" 44 | "uniform sampler2D textureV;\n" 45 | "void main(void)\n" 46 | "{\n" 47 | "vec3 yuv; \n" 48 | "vec3 rgb; \n" 49 | "yuv.x = texture2D(textureY, textureOut).r; \n" 50 | "yuv.y = texture2D(textureU, textureOut).r - 0.5; \n" 51 | "yuv.z = texture2D(textureV, textureOut).r - 0.5; \n" 52 | "rgb = mat3( 1, 1, 1, \n" 53 | "0, -0.3455, 1.779, \n" 54 | "1.4075, -0.7169, 0) * yuv; \n" 55 | "gl_FragColor = vec4(rgb, 1); \n" 56 | "}\n"; 57 | 58 | m_program.addCacheableShaderFromSourceCode(QOpenGLShader::Vertex,vsrc); 59 | m_program.addCacheableShaderFromSourceCode(QOpenGLShader::Fragment,fsrc); 60 | m_program.bindAttributeLocation("vertexIn",0); 61 | m_program.bindAttributeLocation("textureIn",1); 62 | m_program.link(); 63 | m_program.bind(); 64 | 65 | vertices << QVector2D(-1.0f,1.0f) 66 | << QVector2D(1.0f,1.0f) 67 | << QVector2D(1.0f,-1.0f) 68 | << QVector2D(-1.0f,-1.0f); 69 | 70 | textures << QVector2D(0.0f,1.f) 71 | << QVector2D(1.0f,1.0f) 72 | << QVector2D(1.0f,0.0f) 73 | << QVector2D(0.0f,0.0f); 74 | } 75 | 76 | void I420Render::updateTextureInfo(int w, int h) 77 | { 78 | mTexY->setSize(w,h); 79 | mTexY->allocateStorage(QOpenGLTexture::Red,QOpenGLTexture::UInt8); 80 | 81 | mTexU->setSize(w/2,h/2); 82 | mTexU->allocateStorage(QOpenGLTexture::Red,QOpenGLTexture::UInt8); 83 | 84 | mTexV->setSize(w/2,h/2); 85 | mTexV->allocateStorage(QOpenGLTexture::Red,QOpenGLTexture::UInt8); 86 | 87 | mTextureAlloced=true; 88 | } 89 | 90 | void I420Render::updateTextureData(const YUVData &data) 91 | { 92 | if(data.Y.size()<=0 || data.U.size()<=0 || data.V.size()<=0) return; 93 | 94 | QOpenGLPixelTransferOptions options; 95 | options.setImageHeight(data.height); 96 | 97 | options.setRowLength(data.yLineSize); 98 | mTexY->setData(QOpenGLTexture::Luminance,QOpenGLTexture::UInt8,data.Y.data(),&options); 99 | 100 | options.setRowLength(data.uLineSize); 101 | mTexU->setData(QOpenGLTexture::Luminance,QOpenGLTexture::UInt8,data.U.data(),&options); 102 | 103 | 
options.setRowLength(data.vLineSize); 104 | mTexV->setData(QOpenGLTexture::Luminance,QOpenGLTexture::UInt8,data.V.data(),&options); 105 | } 106 | 107 | void I420Render::paint() 108 | { 109 | glClearColor(0.0f, 0.0f, 0.0f, 1.0f); 110 | glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); 111 | glDisable(GL_DEPTH_TEST); 112 | 113 | if(!mTextureAlloced) return; 114 | 115 | m_program.bind(); 116 | m_program.enableAttributeArray("vertexIn"); 117 | m_program.setAttributeArray("vertexIn",vertices.constData()); 118 | m_program.enableAttributeArray("textureIn"); 119 | m_program.setAttributeArray("textureIn",textures.constData()); 120 | 121 | glActiveTexture(GL_TEXTURE0); 122 | mTexY->bind(); 123 | 124 | glActiveTexture(GL_TEXTURE1); 125 | mTexU->bind(); 126 | 127 | glActiveTexture(GL_TEXTURE2); 128 | mTexV->bind(); 129 | 130 | m_program.setUniformValue("textureY",0); 131 | m_program.setUniformValue("textureU",1); 132 | m_program.setUniformValue("textureV",2); 133 | glDrawArrays(GL_QUADS,0,4); 134 | m_program.disableAttributeArray("vertexIn"); 135 | m_program.disableAttributeArray("textureIn"); 136 | m_program.release(); 137 | } 138 | 139 | void I420Render::resize(int w,int h) 140 | { 141 | glViewport(0,0,w,h); 142 | } 143 | -------------------------------------------------------------------------------- /10.07.video_decode_by_cpu_display_by_qwidget/ffmpegwidget.cpp: -------------------------------------------------------------------------------- 1 | #include "ffmpegwidget.h" 2 | 3 | FFmpegVideo::FFmpegVideo() 4 | { 5 | fmtCtx = avformat_alloc_context(); 6 | pkt = av_packet_alloc(); 7 | yuvFrame = av_frame_alloc(); 8 | rgbFrame = av_frame_alloc(); 9 | } 10 | 11 | FFmpegVideo::~FFmpegVideo() 12 | { 13 | if(pkt) av_packet_free(&pkt); 14 | if(yuvFrame) av_frame_free(&yuvFrame); 15 | if(rgbFrame) av_frame_free(&rgbFrame); 16 | if(videoCodecCtx) avcodec_close(videoCodecCtx); 17 | if(videoCodecCtx) avcodec_free_context(&videoCodecCtx); 18 | if(fmtCtx) avformat_close_input(&fmtCtx); 19 | } 20 | 21 | void FFmpegVideo::setUrl(QString url) 22 | { 23 | _url = url; 24 | } 25 | 26 | bool FFmpegVideo::open_input_file() 27 | { 28 | if(_url.isEmpty()) return 0; 29 | 30 | if(avformat_open_input(&fmtCtx,_url.toLocal8Bit().data(),NULL,NULL)<0){ 31 | printf("Cannot open input file.\n"); 32 | return 0; 33 | } 34 | 35 | if(avformat_find_stream_info(fmtCtx,NULL)<0){ 36 | printf("Cannot find any stream in file.\n"); 37 | return 0; 38 | } 39 | 40 | int streamCnt=fmtCtx->nb_streams; 41 | for(int i=0;i<streamCnt;i++){ 42 | if(fmtCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO){ 43 | videoStreamIndex = i; 44 | continue; 45 | } 46 | } 47 | 48 | if(videoStreamIndex==-1){ 49 | printf("Cannot find video stream in file.\n"); 50 | return 0; 51 | } 52 | 53 | AVCodecParameters *videoCodecPara = fmtCtx->streams[videoStreamIndex]->codecpar; 54 | 55 | if(!(videoCodec = avcodec_find_decoder(videoCodecPara->codec_id))){ 56 | printf("Cannot find valid decode codec.\n"); 57 | return 0; 58 | } 59 | 60 | if(!(videoCodecCtx = avcodec_alloc_context3(videoCodec))){ 61 | printf("Cannot find valid decode codec context.\n"); 62 | return 0; 63 | } 64 | 65 | if(avcodec_parameters_to_context(videoCodecCtx,videoCodecPara)<0){ 66 | printf("Cannot initialize parameters.\n"); 67 | return 0; 68 | } 69 | if(avcodec_open2(videoCodecCtx,videoCodec,NULL)<0){ 70 | printf("Cannot open codec.\n"); 71 | return 0; 72 | } 73 | img_ctx = sws_getContext(videoCodecCtx->width, 74 | videoCodecCtx->height, 75 | videoCodecCtx->pix_fmt, 76 | videoCodecCtx->width, 77 | videoCodecCtx->height,
78 | AV_PIX_FMT_RGB32, 79 | SWS_BICUBIC,NULL,NULL,NULL); 80 | 81 | numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB32,videoCodecCtx->width,videoCodecCtx->height,1); 82 | out_buffer = (unsigned char *)av_malloc(numBytes*sizeof(unsigned char)); 83 | 84 | int res = av_image_fill_arrays( 85 | rgbFrame->data,rgbFrame->linesize, 86 | out_buffer,AV_PIX_FMT_RGB32, 87 | videoCodecCtx->width,videoCodecCtx->height,1); 88 | if(res<0){ 89 | qDebug()<<"Fill arrays failed."; 90 | return 0; 91 | } 92 | 93 | return true; 94 | } 95 | 96 | void FFmpegVideo::run() 97 | { 98 | if(!open_input_file()){ 99 | qDebug()<<"Please open video file first."; 100 | return; 101 | } 102 | 103 | while(av_read_frame(fmtCtx,pkt)>=0){ 104 | if(pkt->stream_index == videoStreamIndex){ 105 | if(avcodec_send_packet(videoCodecCtx,pkt)>=0){ 106 | int ret; 107 | while((ret=avcodec_receive_frame(videoCodecCtx,yuvFrame))>=0){ 108 | if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) 109 | return; 110 | else if (ret < 0) { 111 | fprintf(stderr, "Error during decoding\n"); 112 | exit(1); 113 | } 114 | sws_scale(img_ctx, 115 | yuvFrame->data,yuvFrame->linesize, 116 | 0,videoCodecCtx->height, 117 | rgbFrame->data,rgbFrame->linesize); 118 | 119 | QImage img(out_buffer, 120 | videoCodecCtx->width,videoCodecCtx->height, 121 | QImage::Format_RGB32); 122 | emit sendQImage(img); 123 | QThread::msleep(30); 124 | } 125 | } 126 | av_packet_unref(pkt); 127 | } 128 | } 129 | 130 | qDebug()<<"All video play done"; 131 | } 132 | 133 | FFmpegWidget::FFmpegWidget(QWidget *parent) : QWidget(parent) 134 | { 135 | ffmpeg = new FFmpegVideo; 136 | connect(ffmpeg,SIGNAL(sendQImage(QImage)),this,SLOT(receiveQImage(QImage))); 137 | connect(ffmpeg,&FFmpegVideo::finished,ffmpeg,&FFmpegVideo::deleteLater); 138 | } 139 | 140 | FFmpegWidget::~FFmpegWidget() 141 | { 142 | if(ffmpeg->isRunning()){ 143 | stop(); 144 | } 145 | } 146 | 147 | void FFmpegWidget::setUrl(QString url) 148 | { 149 | ffmpeg->setUrl(url); 150 | } 151 | 152 | void FFmpegWidget::play() 153 | { 154 | stop(); 155 | ffmpeg->start(); 156 | } 157 | 158 | void FFmpegWidget::stop() 159 | { 160 | if(ffmpeg->isRunning()){ 161 | ffmpeg->requestInterruption(); 162 | ffmpeg->quit(); 163 | ffmpeg->wait(100); 164 | } 165 | img.fill(Qt::black); 166 | } 167 | 168 | void FFmpegWidget::paintEvent(QPaintEvent *) 169 | { 170 | QPainter painter(this); 171 | painter.drawImage(0,0,img); 172 | } 173 | 174 | void FFmpegWidget::receiveQImage(const QImage &rImg) 175 | { 176 | img = rImg.scaled(this->size()); 177 | update(); 178 | } 179 | -------------------------------------------------------------------------------- /10.08.1.video_decode_by_cpu_display_by_qopengl/i420render.cpp: -------------------------------------------------------------------------------- 1 | #include "i420render.h" 2 | 3 | #define ATTRIB_VERTEX 0 4 | #define ATTRIB_TEXTURE 1 5 | 6 | I420Render::I420Render(QWidget *parent) 7 | :QOpenGLWidget(parent) 8 | { 9 | decoder = new FFmpegDecoder; 10 | connect(decoder,&FFmpegDecoder::sigFirst,[=](uchar* p,int w,int h){ 11 | ptr = p; 12 | width = w; 13 | height = h; 14 | }); 15 | connect(decoder,&FFmpegDecoder::newFrame,[=](){ 16 | update(); 17 | }); 18 | } 19 | 20 | I420Render::~I420Render() 21 | { 22 | } 23 | 24 | void I420Render::setUrl(QString url) 25 | { 26 | decoder->setUrl(url); 27 | } 28 | 29 | void I420Render::startVideo() 30 | { 31 | decoder->start(); 32 | } 33 | 34 | void I420Render::initializeGL() 35 | { 36 | qDebug() << "initializeGL"; 37 | 38 | //初始化opengl (QOpenGLFunctions继承)函数 39 | 
initializeOpenGLFunctions(); 40 | 41 | //顶点shader 42 | const char *vString = 43 | "attribute vec4 vertexPosition;\ 44 | attribute vec2 textureCoordinate;\ 45 | varying vec2 texture_Out;\ 46 | void main(void)\ 47 | {\ 48 | gl_Position = vertexPosition;\ 49 | texture_Out = textureCoordinate;\ 50 | }"; 51 | //片元shader 52 | const char *tString = 53 | "varying vec2 texture_Out;\ 54 | uniform sampler2D tex_y;\ 55 | uniform sampler2D tex_u;\ 56 | uniform sampler2D tex_v;\ 57 | void main(void)\ 58 | {\ 59 | vec3 YUV;\ 60 | vec3 RGB;\ 61 | YUV.x = texture2D(tex_y, texture_Out).r;\ 62 | YUV.y = texture2D(tex_u, texture_Out).r - 0.5;\ 63 | YUV.z = texture2D(tex_v, texture_Out).r - 0.5;\ 64 | RGB = mat3(1.0, 1.0, 1.0,\ 65 | 0.0, -0.39465, 2.03211,\ 66 | 1.13983, -0.58060, 0.0) * YUV;\ 67 | gl_FragColor = vec4(RGB, 1.0);\ 68 | }"; 69 | 70 | //m_program加载shader(顶点和片元)脚本 71 | //片元(像素) 72 | qDebug()<setUrl(url); 27 | } 28 | 29 | void I420Render::startVideo() 30 | { 31 | decoder->start(); 32 | } 33 | 34 | void I420Render::initializeGL() 35 | { 36 | qDebug() << "initializeGL"; 37 | 38 | //初始化opengl (QOpenGLFunctions继承)函数 39 | initializeOpenGLFunctions(); 40 | 41 | //顶点shader 42 | const char *vString = 43 | "attribute vec4 vertexPosition;\ 44 | attribute vec2 textureCoordinate;\ 45 | varying vec2 texture_Out;\ 46 | void main(void)\ 47 | {\ 48 | gl_Position = vertexPosition;\ 49 | texture_Out = textureCoordinate;\ 50 | }"; 51 | //片元shader 52 | const char *tString = 53 | "varying vec2 texture_Out;\ 54 | uniform sampler2D tex_y;\ 55 | uniform sampler2D tex_u;\ 56 | uniform sampler2D tex_v;\ 57 | void main(void)\ 58 | {\ 59 | vec3 YUV;\ 60 | vec3 RGB;\ 61 | YUV.x = texture2D(tex_y, texture_Out).r;\ 62 | YUV.y = texture2D(tex_u, texture_Out).r - 0.5;\ 63 | YUV.z = texture2D(tex_v, texture_Out).r - 0.5;\ 64 | RGB = mat3(1.0, 1.0, 1.0,\ 65 | 0.0, -0.39465, 2.03211,\ 66 | 1.13983, -0.58060, 0.0) * YUV;\ 67 | gl_FragColor = vec4(RGB, 1.0);\ 68 | }"; 69 | 70 | //m_program加载shader(顶点和片元)脚本 71 | //片元(像素) 72 | qDebug()< 2 | 3 | #include "libavcodec/avcodec.h" 4 | #include "libavfilter/avfilter.h" 5 | #include "libavformat/avformat.h" 6 | #include "libavutil/avutil.h" 7 | #include "libavutil/ffversion.h" 8 | #include "libswresample/swresample.h" 9 | #include "libswscale/swscale.h" 10 | #include "libpostproc/postprocess.h" 11 | 12 | #define MAX_AUDIO_FRAME_SIZE 192000 13 | 14 | int main() 15 | { 16 | const char inFileName[] = "C:\\Users\\hyper\\Music\\Sample.mp3"; 17 | const char outFileName[] = "test.pcm"; 18 | FILE *file=fopen(outFileName,"w+b"); 19 | if(!file){ 20 | printf("Cannot open output file.\n"); 21 | return -1; 22 | } 23 | 24 | AVFormatContext *fmtCtx =avformat_alloc_context(); 25 | AVCodecContext *codecCtx = NULL; 26 | AVPacket *pkt=av_packet_alloc(); 27 | AVFrame *frame = av_frame_alloc(); 28 | 29 | int aStreamIndex = -1; 30 | 31 | do{ 32 | if(avformat_open_input(&fmtCtx,inFileName,NULL,NULL)<0){ 33 | printf("Cannot open input file.\n"); 34 | break; 35 | } 36 | if(avformat_find_stream_info(fmtCtx,NULL)<0){ 37 | printf("Cannot find any stream in file.\n"); 38 | break; 39 | } 40 | 41 | av_dump_format(fmtCtx,0,inFileName,0); 42 | 43 | for(size_t i=0;inb_streams;i++){ 44 | if(fmtCtx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_AUDIO){ 45 | aStreamIndex=(int)i; 46 | break; 47 | } 48 | } 49 | if(aStreamIndex==-1){ 50 | printf("Cannot find audio stream.\n"); 51 | break; 52 | } 53 | 54 | AVCodecParameters *aCodecPara = fmtCtx->streams[aStreamIndex]->codecpar; 55 | const AVCodec *codec = 
avcodec_find_decoder(aCodecPara->codec_id); 56 | if(!codec){ 57 | printf("Cannot find any codec for audio.\n"); 58 | break; 59 | } 60 | codecCtx = avcodec_alloc_context3(codec); 61 | if(avcodec_parameters_to_context(codecCtx,aCodecPara)<0){ 62 | printf("Cannot alloc codec context.\n"); 63 | break; 64 | } 65 | codecCtx->pkt_timebase=fmtCtx->streams[aStreamIndex]->time_base; 66 | 67 | if(avcodec_open2(codecCtx,codec,NULL)<0){ 68 | printf("Cannot open audio codec.\n"); 69 | break; 70 | } 71 | 72 | //设置转码参数 73 | uint64_t out_channel_layout = codecCtx->channel_layout; 74 | enum AVSampleFormat out_sample_fmt = AV_SAMPLE_FMT_S16; 75 | int out_sample_rate = codecCtx->sample_rate; 76 | int out_channels = av_get_channel_layout_nb_channels(out_channel_layout); 77 | 78 | uint8_t *audio_out_buffer = (uint8_t*)av_malloc(MAX_AUDIO_FRAME_SIZE*2); 79 | 80 | SwrContext *swr_ctx = swr_alloc_set_opts(NULL, 81 | out_channel_layout, 82 | out_sample_fmt, 83 | out_sample_rate, 84 | codecCtx->channel_layout, 85 | codecCtx->sample_fmt, 86 | codecCtx->sample_rate, 87 | 0,NULL); 88 | swr_init(swr_ctx); 89 | 90 | while(av_read_frame(fmtCtx,pkt)>=0){ 91 | if(pkt->stream_index==aStreamIndex){ 92 | if(avcodec_send_packet(codecCtx,pkt)>=0){ 93 | while(avcodec_receive_frame(codecCtx,frame)>=0){ 94 | /* 95 | Planar(平面),其数据格式排列方式为 (特别记住,该处是以点nb_samples采样点来交错,不是以字节交错): 96 | LLLLLLRRRRRRLLLLLLRRRRRRLLLLLLRRRRRRL...(每个LLLLLLRRRRRR为一个音频帧) 97 | 而不带P的数据格式(即交错排列)排列方式为: 98 | LRLRLRLRLRLRLRLRLRLRLRLRLRLRLRLRLRLRL...(每个LR为一个音频样本) 99 | */ 100 | if(av_sample_fmt_is_planar(codecCtx->sample_fmt)){ 101 | int len = swr_convert(swr_ctx, 102 | &audio_out_buffer, 103 | MAX_AUDIO_FRAME_SIZE*2, 104 | (const uint8_t**)frame->data, 105 | frame->nb_samples); 106 | if(len<=0){ 107 | continue; 108 | } 109 | 110 | int dst_bufsize = av_samples_get_buffer_size(0, 111 | out_channels, 112 | len, 113 | out_sample_fmt, 114 | 1); 115 | 116 | //int numBytes =av_get_bytes_per_sample(out_sample_fmt); 117 | //printf("number bytes is: %d.\n",numBytes); 118 | 119 | fwrite(audio_out_buffer,1,dst_bufsize,file); 120 | 121 | //pcm播放时是LRLRLR格式,所以要交错保存数据 122 | // for(int i=0;inb_samples;i++){ 123 | // for(int ch=0;ch<2;ch++){ 124 | // fwrite((char*)audio_out_buffer[ch]+numBytes*i,1,numBytes,file); 125 | // } 126 | // } 127 | } 128 | } 129 | } 130 | } 131 | av_packet_unref(pkt); 132 | } 133 | }while(0); 134 | 135 | av_frame_free(&frame); 136 | av_packet_free(&pkt); 137 | avcodec_close(codecCtx); 138 | avcodec_free_context(&codecCtx); 139 | avformat_free_context(fmtCtx); 140 | 141 | fclose(file); 142 | 143 | return 0; 144 | } 145 | -------------------------------------------------------------------------------- /10.06.2.video_decode_mp42yuv420sp/main.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include "libavcodec/avcodec.h" 4 | #include "libavfilter/avfilter.h" 5 | #include "libavformat/avformat.h" 6 | #include "libavutil/avutil.h" 7 | #include "libavutil/ffversion.h" 8 | #include "libavutil/imgutils.h" 9 | #include "libswresample/swresample.h" 10 | #include "libswscale/swscale.h" 11 | #include "libpostproc/postprocess.h" 12 | 13 | int main() { 14 | FILE *fp=fopen("result.yuv","w+b"); 15 | if(fp==NULL){ 16 | printf("Cannot open file.\n"); 17 | return -1; 18 | } 19 | 20 | char filePath[] = "C:\\Users\\hyper\\Videos\\Sample.wmv";//文件地址 21 | int videoStreamIndex = -1;//视频流所在流序列中的索引 22 | int ret=0;//默认返回值 23 | 24 | //需要的变量名并初始化 25 | AVFormatContext *fmtCtx=NULL; 26 | AVPacket *pkt =NULL; 27 | 
AVCodecContext *codecCtx=NULL; 28 | AVCodecParameters *avCodecPara=NULL; 29 | const AVCodec *codec=NULL; 30 | AVFrame *yuvFrame = av_frame_alloc(); 31 | AVFrame *nv12Frame = av_frame_alloc(); 32 | 33 | unsigned char *out_buffer=NULL; 34 | 35 | do{ 36 | //=========================== 创建AVFormatContext结构体 ===============================// 37 | //分配一个AVFormatContext,FFMPEG所有的操作都要通过这个AVFormatContext来进行 38 | fmtCtx = avformat_alloc_context(); 39 | //==================================== 打开文件 ======================================// 40 | if ((ret=avformat_open_input(&fmtCtx, filePath, NULL, NULL)) != 0) { 41 | printf("cannot open video file\n"); 42 | break; 43 | } 44 | 45 | //=================================== 获取视频流信息 ===================================// 46 | if ((ret=avformat_find_stream_info(fmtCtx, NULL)) < 0) { 47 | printf("cannot retrive video info\n"); 48 | break; 49 | } 50 | 51 | //循环查找视频中包含的流信息,直到找到视频类型的流 52 | //便将其记录下来 保存到videoStreamIndex变量中 53 | for (unsigned int i = 0; i < fmtCtx->nb_streams; i++) { 54 | if (fmtCtx->streams[ i ]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { 55 | videoStreamIndex = i; 56 | break;//找到视频流就退出 57 | } 58 | } 59 | 60 | //如果videoStream为-1 说明没有找到视频流 61 | if (videoStreamIndex == -1) { 62 | printf("cannot find video stream\n"); 63 | break; 64 | } 65 | 66 | //打印输入和输出信息:长度 比特率 流格式等 67 | av_dump_format(fmtCtx, 0, filePath, 0); 68 | 69 | //================================= 查找解码器 ===================================// 70 | avCodecPara = fmtCtx->streams[ videoStreamIndex ]->codecpar; 71 | codec = avcodec_find_decoder(avCodecPara->codec_id); 72 | if (codec == NULL) { 73 | printf("cannot find decoder\n"); 74 | break; 75 | } 76 | //根据解码器参数来创建解码器内容 77 | codecCtx = avcodec_alloc_context3(codec); 78 | avcodec_parameters_to_context(codecCtx, avCodecPara); 79 | if (codecCtx == NULL) { 80 | printf("Cannot alloc context."); 81 | break; 82 | } 83 | 84 | //================================ 打开解码器 ===================================// 85 | if ((ret=avcodec_open2(codecCtx, codec, NULL)) < 0) { // 具体采用什么解码器ffmpeg经过封装 我们无须知道 86 | printf("cannot open decoder\n"); 87 | break; 88 | } 89 | 90 | int w=codecCtx->width;//视频宽度 91 | int h=codecCtx->height;//视频高度 92 | 93 | //================================ 设置数据转换参数 ================================// 94 | struct SwsContext *img_ctx = sws_getContext( 95 | codecCtx->width, codecCtx->height, codecCtx->pix_fmt, //源地址长宽以及数据格式 96 | codecCtx->width, codecCtx->height, AV_PIX_FMT_NV12, //目的地址长宽以及数据格式 97 | SWS_BICUBIC, NULL, NULL, NULL); //算法类型 AV_PIX_FMT_YUVJ420P AV_PIX_FMT_BGR24 98 | 99 | //==================================== 分配空间 ==================================// 100 | //一帧图像数据大小 101 | int numBytes = av_image_get_buffer_size(AV_PIX_FMT_NV12, codecCtx->width, codecCtx->height, 1); 102 | out_buffer = (unsigned char *)av_malloc(numBytes * sizeof(unsigned char)); 103 | 104 | //=========================== 分配AVPacket结构体 ===============================// 105 | pkt = av_packet_alloc(); //分配一个packet 106 | av_new_packet(pkt, codecCtx->width * codecCtx->height); //调整packet的数据 107 | //会将pFrameRGB的数据按RGB格式自动"关联"到buffer 即nv12Frame中的数据改变了 108 | //out_buffer中的数据也会相应的改变 109 | av_image_fill_arrays(nv12Frame->data, nv12Frame->linesize, out_buffer, AV_PIX_FMT_NV12, 110 | codecCtx->width, codecCtx->height, 1); 111 | 112 | //=========================== 读取视频信息 ===============================// 113 | int frameCnt = 0;//帧数 114 | while (av_read_frame(fmtCtx, pkt) >= 0) { //读取的是一帧视频 数据存入一个AVPacket的结构中 115 | if (pkt->stream_index == videoStreamIndex){ 116 | if 
(avcodec_send_packet(codecCtx, pkt) == 0){ 117 | while (avcodec_receive_frame(codecCtx, yuvFrame) == 0){ 118 | sws_scale(img_ctx, 119 | (const uint8_t* const*)yuvFrame->data, 120 | yuvFrame->linesize, 121 | 0, 122 | h, 123 | nv12Frame->data, 124 | nv12Frame->linesize); 125 | fwrite(nv12Frame->data[0],1,w*h,fp);//y 126 | fwrite(nv12Frame->data[1],1,w*h/2,fp);//uv 127 | 128 | printf("save frame %d to file.\n",frameCnt++); 129 | fflush(fp); 130 | } 131 | } 132 | } 133 | av_packet_unref(pkt);//重置pkt的内容 134 | } 135 | }while(0); 136 | //===========================释放所有指针===============================// 137 | av_packet_free(&pkt); 138 | avcodec_close(codecCtx); 139 | avformat_close_input(&fmtCtx); 140 | avformat_free_context(fmtCtx); 141 | av_frame_free(&yuvFrame); 142 | av_frame_free(&nv12Frame); 143 | 144 | av_free(out_buffer); 145 | 146 | return ret; 147 | } 148 | -------------------------------------------------------------------------------- /10.05.video_decode_frame_save/main.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include "libavcodec/avcodec.h" 4 | #include "libavfilter/avfilter.h" 5 | #include "libavformat/avformat.h" 6 | #include "libavutil/avutil.h" 7 | #include "libavutil/ffversion.h" 8 | #include "libswresample/swresample.h" 9 | #include "libswscale/swscale.h" 10 | #include "libpostproc/postprocess.h" 11 | #include "libavutil/imgutils.h" 12 | 13 | //将FFmpeg解码后的数据保存到本地文件 14 | void saveFrame(AVFrame *pFrame, int width, int height, int iFrame) 15 | { 16 | FILE *pFile; 17 | char szFilename[32]; 18 | int y; 19 | 20 | // 打开文件 21 | sprintf(szFilename, "frame%d.ppm", iFrame); 22 | pFile = fopen(szFilename, "wb"); 23 | if (pFile == NULL) 24 | return; 25 | 26 | // 写入文件头 27 | fprintf(pFile, "P6\n%d %d\n255\n", width, height); 28 | 29 | // 写入像素数据 30 | for (y = 0; y < height; y++) 31 | fwrite(pFrame->data[0] + y * pFrame->linesize[0], 1, width * 3, pFile); 32 | 33 | // 关闭文件 34 | fclose(pFile); 35 | } 36 | 37 | int main() { 38 | char filePath[] = "C:\\Users\\hyper\\Videos\\Sample.wmv";//文件地址 39 | int videoStreamIndex = -1;//视频流所在流序列中的索引 40 | int ret=0;//默认返回值 41 | 42 | //需要的变量名并初始化 43 | AVFormatContext *fmtCtx=NULL; 44 | AVPacket *pkt =NULL; 45 | AVCodecContext *codecCtx=NULL; 46 | AVCodecParameters *avCodecPara=NULL; 47 | const AVCodec *codec=NULL; 48 | AVFrame *yuvFrame = av_frame_alloc(); 49 | AVFrame *rgbFrame = av_frame_alloc(); 50 | 51 | do{ 52 | //=========================== 创建AVFormatContext结构体 ===============================// 53 | //分配一个AVFormatContext,FFMPEG所有的操作都要通过这个AVFormatContext来进行 54 | fmtCtx = avformat_alloc_context(); 55 | //==================================== 打开文件 ======================================// 56 | if ((ret=avformat_open_input(&fmtCtx, filePath, NULL, NULL)) != 0) { 57 | printf("cannot open video file\n"); 58 | break; 59 | } 60 | 61 | //=================================== 获取视频流信息 ===================================// 62 | if ((ret=avformat_find_stream_info(fmtCtx, NULL)) < 0) { 63 | printf("cannot retrive video info\n"); 64 | break; 65 | } 66 | 67 | //循环查找视频中包含的流信息,直到找到视频类型的流 68 | //便将其记录下来 保存到videoStreamIndex变量中 69 | for (unsigned int i = 0; i < fmtCtx->nb_streams; i++) { 70 | if (fmtCtx->streams[ i ]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { 71 | videoStreamIndex = i; 72 | break;//找到视频流就退出 73 | } 74 | } 75 | 76 | //如果videoStream为-1 说明没有找到视频流 77 | if (videoStreamIndex == -1) { 78 | printf("cannot find video stream\n"); 79 | break; 80 | } 81 | 82 | //打印输入和输出信息:长度 比特率 流格式等 83 | 
av_dump_format(fmtCtx, 0, filePath, 0); 84 | 85 | //================================= 查找解码器 ===================================// 86 | avCodecPara = fmtCtx->streams[ videoStreamIndex ]->codecpar; 87 | codec = avcodec_find_decoder(avCodecPara->codec_id); 88 | if (codec == NULL) { 89 | printf("cannot find decoder\n"); 90 | break; 91 | } 92 | //根据解码器参数来创建解码器内容 93 | codecCtx = avcodec_alloc_context3(codec); 94 | avcodec_parameters_to_context(codecCtx, avCodecPara); 95 | if (codecCtx == NULL) { 96 | printf("Cannot alloc context."); 97 | break; 98 | } 99 | 100 | //================================ 打开解码器 ===================================// 101 | if ((ret=avcodec_open2(codecCtx, codec, NULL)) < 0) { // 具体采用什么解码器ffmpeg经过封装 我们无须知道 102 | printf("cannot open decoder\n"); 103 | break; 104 | } 105 | 106 | //================================ 设置数据转换参数 ================================// 107 | struct SwsContext *img_ctx = sws_getContext( 108 | codecCtx->width, codecCtx->height, codecCtx->pix_fmt, //源地址长宽以及数据格式 109 | codecCtx->width, codecCtx->height, AV_PIX_FMT_RGB32, //目的地址长宽以及数据格式 110 | SWS_BICUBIC, NULL, NULL, NULL); //算法类型 AV_PIX_FMT_YUVJ420P AV_PIX_FMT_BGR24 111 | 112 | //==================================== 分配空间 ==================================// 113 | //一帧图像数据大小 114 | int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB32, codecCtx->width, codecCtx->height, 1); 115 | unsigned char *out_buffer = (unsigned char *)av_malloc(numBytes * sizeof(unsigned char)); 116 | 117 | 118 | //=========================== 分配AVPacket结构体 ===============================// 119 | int i = 0;//用于帧计数 120 | pkt = av_packet_alloc(); //分配一个packet 121 | av_new_packet(pkt, codecCtx->width * codecCtx->height); //调整packet的数据 122 | 123 | //会将pFrameRGB的数据按RGB格式自动"关联"到buffer 即pFrameRGB中的数据改变了 124 | //out_buffer中的数据也会相应的改变 125 | av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, out_buffer, AV_PIX_FMT_RGB32, 126 | codecCtx->width, codecCtx->height, 1); 127 | 128 | //=========================== 读取视频信息 ===============================// 129 | while (av_read_frame(fmtCtx, pkt) >= 0) { //读取的是一帧视频 数据存入一个AVPacket的结构中 130 | if (pkt->stream_index == videoStreamIndex){ 131 | if (avcodec_send_packet(codecCtx, pkt) == 0){ 132 | while (avcodec_receive_frame(codecCtx, yuvFrame) == 0){ 133 | if (++i <= 500 && i >= 455){ 134 | sws_scale(img_ctx, 135 | (const uint8_t* const*)yuvFrame->data, 136 | yuvFrame->linesize, 137 | 0, 138 | codecCtx->height, 139 | rgbFrame->data, 140 | rgbFrame->linesize); 141 | saveFrame(rgbFrame, codecCtx->width, codecCtx->height, i); 142 | } 143 | } 144 | } 145 | } 146 | av_packet_unref(pkt);//重置pkt的内容 147 | } 148 | printf("There are %d frames int total.\n", i); 149 | }while(0); 150 | //===========================释放所有指针===============================// 151 | av_packet_free(&pkt); 152 | avcodec_close(codecCtx); 153 | avformat_close_input(&fmtCtx); 154 | avformat_free_context(fmtCtx); 155 | av_frame_free(&yuvFrame); 156 | av_frame_free(&rgbFrame); 157 | 158 | return ret; 159 | } 160 | -------------------------------------------------------------------------------- /10.12.video_encode_h2642mp4/main.c: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include "libavcodec/avcodec.h" 4 | #include "libavformat/avformat.h" 5 | #include "libavcodec/avcodec.h" 6 | #include "libswscale/swscale.h" 7 | #include "libavformat/avformat.h" 8 | #include "libavutil/avutil.h" 9 | #include "libavutil/mathematics.h" 10 | 11 | // 将H264转封装为MP4 12 | int main(){ 13 | int frame_index=0;//统计帧数 
14 | int inVStreamIndex=-1,outVStreamIndex=-1;//输入输出视频流在文件中的索引位置 15 | const char *inVFileName = "result.h264"; 16 | const char *outFileName = "result.mp4"; 17 | 18 | AVFormatContext *inVFmtCtx=NULL,*outFmtCtx=NULL; 19 | AVCodecParameters *codecPara=NULL; 20 | AVStream *outVStream=NULL; 21 | const AVCodec *outCodec=NULL; 22 | AVCodecContext *outCodecCtx=NULL; 23 | AVCodecParameters *outCodecPara=NULL; 24 | AVStream *inVStream=NULL; 25 | AVPacket *pkt=av_packet_alloc(); 26 | 27 | do{ 28 | //======================输入部分============================// 29 | //打开输入文件 30 | if(avformat_open_input(&inVFmtCtx,inVFileName,NULL,NULL)<0){ 31 | printf("Cannot open input file.\n"); 32 | break; 33 | } 34 | 35 | //查找输入文件中的流 36 | if(avformat_find_stream_info(inVFmtCtx,NULL)<0){ 37 | printf("Cannot find stream info in input file.\n"); 38 | break; 39 | } 40 | 41 | //查找视频流在文件中的位置 42 | for(size_t i=0;inb_streams;i++){ 43 | if(inVFmtCtx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_VIDEO){ 44 | inVStreamIndex=(int)i; 45 | break; 46 | } 47 | } 48 | 49 | codecPara = inVFmtCtx->streams[inVStreamIndex]->codecpar;//输入视频流的编码参数 50 | 51 | printf("===============Input information========>\n"); 52 | av_dump_format(inVFmtCtx, 0, inVFileName, 0); 53 | printf("===============Input information========<\n"); 54 | 55 | //=====================输出部分=========================// 56 | //打开输出文件并填充格式数据 57 | if(avformat_alloc_output_context2(&outFmtCtx,NULL,NULL,outFileName)<0){ 58 | printf("Cannot alloc output file context.\n"); 59 | break; 60 | } 61 | 62 | //打开输出文件并填充数据 63 | if(avio_open(&outFmtCtx->pb,outFileName,AVIO_FLAG_READ_WRITE)<0){ 64 | printf("output file open failed.\n"); 65 | break; 66 | } 67 | 68 | //在输出的mp4文件中创建一条视频流 69 | outVStream = avformat_new_stream(outFmtCtx,NULL); 70 | if(!outVStream){ 71 | printf("Failed allocating output stream.\n"); 72 | break; 73 | } 74 | outVStream->time_base.den=25; 75 | outVStream->time_base.num=1; 76 | outVStreamIndex=outVStream->index; 77 | 78 | //查找编码器 79 | outCodec = avcodec_find_encoder(codecPara->codec_id); 80 | if(outCodec==NULL){ 81 | printf("Cannot find any encoder.\n"); 82 | break; 83 | } 84 | 85 | //从输入的h264编码器数据复制一份到输出文件的编码器中 86 | outCodecCtx=avcodec_alloc_context3(outCodec); 87 | outCodecPara = outFmtCtx->streams[outVStream->index]->codecpar; 88 | if(avcodec_parameters_copy(outCodecPara,codecPara)<0){ 89 | printf("Cannot copy codec para.\n"); 90 | break; 91 | } 92 | if(avcodec_parameters_to_context(outCodecCtx,outCodecPara)<0){ 93 | printf("Cannot alloc codec ctx from para.\n"); 94 | break; 95 | } 96 | outCodecCtx->time_base.den=25; 97 | outCodecCtx->time_base.num=1; 98 | 99 | //打开输出文件需要的编码器 100 | if(avcodec_open2(outCodecCtx,outCodec,NULL)<0){ 101 | printf("Cannot open output codec.\n"); 102 | break; 103 | } 104 | 105 | printf("============Output Information=============>\n"); 106 | av_dump_format(outFmtCtx,0,outFileName,1); 107 | printf("============Output Information=============<\n"); 108 | 109 | //写入文件头 110 | if(avformat_write_header(outFmtCtx,NULL)<0){ 111 | printf("Cannot write header to file.\n"); 112 | return -1; 113 | } 114 | 115 | //===============编码部分===============// 116 | 117 | inVStream = inVFmtCtx->streams[inVStreamIndex]; 118 | while(av_read_frame(inVFmtCtx,pkt)>=0){//循环读取每一帧直到读完 119 | if(pkt->stream_index==inVStreamIndex){//确保处理的是视频流 120 | //FIXME:No PTS (Example: Raw H.264) 121 | //Simple Write PTS 122 | //如果当前处理帧的显示时间戳为0或者没有等等不是正常值 123 | if(pkt->pts==AV_NOPTS_VALUE){ 124 | printf("frame_index:%d\n", frame_index); 125 | //Write PTS 126 | AVRational time_base1 = 
inVStream->time_base; 127 | //Duration between 2 frames (us) 128 | int64_t calc_duration = (int)(AV_TIME_BASE / av_q2d(inVStream->r_frame_rate)); 129 | //Parameters 130 | pkt->pts = (int)((frame_index*calc_duration) / (av_q2d(time_base1)*AV_TIME_BASE)); 131 | pkt->dts = pkt->pts; 132 | pkt->duration = (int)(calc_duration / (av_q2d(time_base1)*AV_TIME_BASE)); 133 | frame_index++; 134 | } 135 | //Convert PTS/DTS 136 | pkt->pts = av_rescale_q_rnd(pkt->pts, inVStream->time_base, outVStream->time_base, (enum AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)); 137 | pkt->dts = av_rescale_q_rnd(pkt->dts, inVStream->time_base, outVStream->time_base, (enum AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)); 138 | pkt->duration = av_rescale_q(pkt->duration, inVStream->time_base, outVStream->time_base); 139 | pkt->pos = -1; 140 | pkt->stream_index = outVStreamIndex; 141 | printf("Write 1 Packet. size:%5d\tpts:%lld\n", pkt->size, pkt->pts); 142 | //Write 143 | if (av_interleaved_write_frame(outFmtCtx, pkt) < 0) { 144 | printf("Error muxing packet\n"); 145 | break; 146 | } 147 | av_packet_unref(pkt); 148 | } 149 | } 150 | 151 | av_write_trailer(outFmtCtx); 152 | }while(0); 153 | 154 | //=================释放所有指针======================= 155 | av_packet_free(&pkt); 156 | avformat_close_input(&outFmtCtx); 157 | avcodec_close(outCodecCtx); 158 | avcodec_free_context(&outCodecCtx); 159 | avformat_close_input(&inVFmtCtx); 160 | avformat_free_context(inVFmtCtx); 161 | avio_close(outFmtCtx->pb); 162 | 163 | return 0; 164 | } 165 | -------------------------------------------------------------------------------- /10.10.3.video_decode_by_cuda_display_by_qml/ffmpegdecoder.cpp: -------------------------------------------------------------------------------- 1 | #include "ffmpegdecoder.h" 2 | 3 | typedef struct DecodeContext{ 4 | AVBufferRef *hw_device_ref; 5 | }DecodeContext; 6 | 7 | DecodeContext decode = {NULL}; 8 | 9 | static enum AVPixelFormat hw_pix_fmt; 10 | static AVBufferRef* hw_device_ctx=NULL; 11 | 12 | FFmpegDecoder::FFmpegDecoder() 13 | {} 14 | 15 | FFmpegDecoder::~FFmpegDecoder() 16 | {} 17 | 18 | void FFmpegDecoder::setUrl(QString url) 19 | { 20 | _filePath = url; 21 | } 22 | 23 | AVPixelFormat FFmpegDecoder::get_hw_format(AVCodecContext *ctx, const AVPixelFormat *pix_fmts) 24 | { 25 | Q_UNUSED(ctx) 26 | const enum AVPixelFormat *p; 27 | 28 | for (p = pix_fmts; *p != -1; p++) { 29 | if (*p == hw_pix_fmt) 30 | return *p; 31 | } 32 | 33 | fprintf(stderr, "Failed to get HW surface format.\n"); 34 | return AV_PIX_FMT_NONE; 35 | } 36 | 37 | int FFmpegDecoder::hw_decoder_init(AVCodecContext *ctx, const AVHWDeviceType type) 38 | { 39 | int err = 0; 40 | 41 | if ((err = av_hwdevice_ctx_create(&hw_device_ctx, type, 42 | NULL, NULL, 0)) < 0) { 43 | fprintf(stderr, "Failed to create specified HW device.\n"); 44 | return err; 45 | } 46 | ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx); 47 | 48 | return err; 49 | } 50 | 51 | void FFmpegDecoder::run() 52 | { 53 | AVFormatContext *fmtCtx =NULL; 54 | const AVCodec *videoCodec =NULL; 55 | AVCodecContext *videoCodecCtx=NULL; 56 | AVPacket *pkt = NULL; 57 | AVFrame *yuvFrame = NULL; 58 | AVFrame *rgbFrame = NULL; 59 | AVFrame *nv12Frame = NULL; 60 | AVStream *videoStream = NULL; 61 | 62 | int videoStreamIndex =-1; 63 | int numBytes = -1; 64 | avformat_network_init(); 65 | fmtCtx = avformat_alloc_context(); 66 | pkt = av_packet_alloc(); 67 | yuvFrame = av_frame_alloc(); 68 | rgbFrame = av_frame_alloc(); 69 | nv12Frame = av_frame_alloc(); 70 | 71 | enum 
AVHWDeviceType type; 72 | int i; 73 | 74 | type = av_hwdevice_find_type_by_name("cuda"); 75 | if (type == AV_HWDEVICE_TYPE_NONE) { 76 | fprintf(stderr, "Device type %s is not supported.\n", "h264_cuvid"); 77 | fprintf(stderr, "Available device types:"); 78 | while((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE) 79 | fprintf(stderr, " %s", av_hwdevice_get_type_name(type)); 80 | fprintf(stderr, "\n"); 81 | return; 82 | } 83 | 84 | /* open the input file */ 85 | if (avformat_open_input(&fmtCtx, _filePath.toLocal8Bit().data(), NULL, NULL) != 0) { 86 | return; 87 | } 88 | 89 | if (avformat_find_stream_info(fmtCtx, NULL) < 0) { 90 | fprintf(stderr, "Cannot find input stream information.\n"); 91 | return; 92 | } 93 | 94 | /* find the video stream information */ 95 | ret = av_find_best_stream(fmtCtx, AVMEDIA_TYPE_VIDEO, -1, -1, (AVCodec**)&videoCodec, 0); 96 | if (ret < 0) { 97 | fprintf(stderr, "Cannot find a video stream in the input file\n"); 98 | return; 99 | } 100 | videoStreamIndex = ret; 101 | 102 | for (i = 0;; i++) { 103 | const AVCodecHWConfig *config = avcodec_get_hw_config(videoCodec, i); 104 | if (!config) { 105 | fprintf(stderr, "Decoder %s does not support device type %s.\n", 106 | videoCodec->name, av_hwdevice_get_type_name(type)); 107 | return; 108 | } 109 | if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX && 110 | config->device_type == type) { 111 | hw_pix_fmt = config->pix_fmt; 112 | break; 113 | } 114 | } 115 | 116 | if (!(videoCodecCtx = avcodec_alloc_context3(videoCodec))) 117 | return ; 118 | 119 | videoStream = fmtCtx->streams[videoStreamIndex]; 120 | if (avcodec_parameters_to_context(videoCodecCtx, videoStream->codecpar) < 0) 121 | return; 122 | 123 | videoCodecCtx->get_format = get_hw_format; 124 | 125 | if (hw_decoder_init(videoCodecCtx, type) < 0) 126 | return; 127 | 128 | if ((ret = avcodec_open2(videoCodecCtx, videoCodec, NULL)) < 0) { 129 | fprintf(stderr, "Failed to open codec for stream #%u\n", videoStreamIndex); 130 | return; 131 | } 132 | 133 | videoWidth = videoCodecCtx->width; 134 | videoHeight = videoCodecCtx->height; 135 | numBytes = av_image_get_buffer_size(AV_PIX_FMT_NV12,videoWidth,videoHeight,1); 136 | out_buffer = (uchar *)av_malloc(numBytes*sizeof(uchar)); 137 | 138 | while(av_read_frame(fmtCtx,pkt)>=0 && !isInterruptionRequested()){ 139 | if(pkt->stream_index == videoStreamIndex){ 140 | if(avcodec_send_packet(videoCodecCtx,pkt)>=0){ 141 | int ret; 142 | while((ret=avcodec_receive_frame(videoCodecCtx,yuvFrame))>=0){ 143 | if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) 144 | return; 145 | else if (ret < 0) { 146 | fprintf(stderr, "Error during decoding\n"); 147 | exit(1); 148 | } 149 | 150 | if(yuvFrame->format==videoCodecCtx->pix_fmt){ 151 | if(av_hwframe_transfer_data(nv12Frame,yuvFrame,0)<0){ 152 | continue; 153 | } 154 | } 155 | 156 | int bytes =0; 157 | for(int i=0;idata[0]+nv12Frame->linesize[0]*i,videoWidth); 159 | bytes+=videoWidth; 160 | } 161 | int uv=videoHeight>>1; 162 | for(int i=0;idata[1]+nv12Frame->linesize[1]*i,videoWidth); 164 | bytes+=videoWidth; 165 | } 166 | emit sigNewFrame(); 167 | 168 | QThread::msleep(30); 169 | } 170 | } 171 | av_packet_unref(pkt); 172 | } 173 | } 174 | qDebug()<<"Thread stop now"; 175 | 176 | if(!pkt) av_packet_free(&pkt); 177 | if(!yuvFrame) av_frame_free(&yuvFrame); 178 | if(!rgbFrame) av_frame_free(&rgbFrame); 179 | if(!nv12Frame) av_frame_free(&nv12Frame); 180 | if(!videoCodecCtx) avcodec_free_context(&videoCodecCtx); 181 | if(!videoCodecCtx) avcodec_close(videoCodecCtx); 
182 | if(!fmtCtx) avformat_close_input(&fmtCtx); 183 | } 184 | -------------------------------------------------------------------------------- /10.16.audio_player_decode_by_ffmpeg_play_by_qt/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | using namespace std; 4 | 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | 14 | extern "C"{ 15 | #include "libavcodec/avcodec.h" 16 | #include "libavfilter/avfilter.h" 17 | #include "libavformat/avformat.h" 18 | #include "libavutil/avutil.h" 19 | #include "libavutil/ffversion.h" 20 | #include "libswresample/swresample.h" 21 | #include "libswscale/swscale.h" 22 | #include "libpostproc/postprocess.h" 23 | } 24 | 25 | #define MAX_AUDIO_FRAME_SIZE 192000 26 | 27 | int main() 28 | { 29 | QString _url="C:\\Users\\hyper\\Music\\Sample.mp3"; 30 | 31 | QAudioOutput *audioOutput; 32 | QIODevice *streamOut; 33 | 34 | QAudioFormat audioFmt; 35 | audioFmt.setSampleRate(44100); 36 | audioFmt.setChannelCount(2); 37 | audioFmt.setSampleSize(16); 38 | audioFmt.setCodec("audio/pcm"); 39 | audioFmt.setByteOrder(QAudioFormat::LittleEndian); 40 | audioFmt.setSampleType(QAudioFormat::SignedInt); 41 | 42 | QAudioDeviceInfo info = QAudioDeviceInfo::defaultOutputDevice(); 43 | if(!info.isFormatSupported(audioFmt)){ 44 | audioFmt = info.nearestFormat(audioFmt); 45 | } 46 | audioOutput = new QAudioOutput(audioFmt); 47 | audioOutput->setVolume(100); 48 | 49 | streamOut = audioOutput->start(); 50 | 51 | AVFormatContext *fmtCtx =avformat_alloc_context(); 52 | AVCodecContext *codecCtx = NULL; 53 | AVPacket *pkt=av_packet_alloc(); 54 | AVFrame *frame = av_frame_alloc(); 55 | 56 | int aStreamIndex = -1; 57 | 58 | do{ 59 | if(avformat_open_input(&fmtCtx,_url.toLocal8Bit().data(),NULL,NULL)<0){ 60 | qDebug("Cannot open input file."); 61 | break; 62 | } 63 | if(avformat_find_stream_info(fmtCtx,NULL)<0){ 64 | qDebug("Cannot find any stream in file."); 65 | break; 66 | } 67 | 68 | av_dump_format(fmtCtx,0,_url.toLocal8Bit().data(),0); 69 | 70 | for(size_t i=0;inb_streams;i++){ 71 | if(fmtCtx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_AUDIO){ 72 | aStreamIndex=(int)i; 73 | break; 74 | } 75 | } 76 | if(aStreamIndex==-1){ 77 | qDebug("Cannot find audio stream."); 78 | break; 79 | } 80 | 81 | AVCodecParameters *aCodecPara = fmtCtx->streams[aStreamIndex]->codecpar; 82 | const AVCodec *codec = avcodec_find_decoder(aCodecPara->codec_id); 83 | if(!codec){ 84 | qDebug("Cannot find any codec for audio."); 85 | break; 86 | } 87 | codecCtx = avcodec_alloc_context3(codec); 88 | if(avcodec_parameters_to_context(codecCtx,aCodecPara)<0){ 89 | qDebug("Cannot alloc codec context."); 90 | break; 91 | } 92 | codecCtx->pkt_timebase = fmtCtx->streams[aStreamIndex]->time_base; 93 | 94 | if(avcodec_open2(codecCtx,codec,NULL)<0){ 95 | qDebug("Cannot open audio codec."); 96 | break; 97 | } 98 | 99 | //设置转码参数 100 | uint64_t out_channel_layout = codecCtx->channel_layout; 101 | enum AVSampleFormat out_sample_fmt = AV_SAMPLE_FMT_S16; 102 | int out_sample_rate = codecCtx->sample_rate; 103 | int out_channels = av_get_channel_layout_nb_channels(out_channel_layout); 104 | //printf("out rate : %d , out_channel is: %d\n",out_sample_rate,out_channels); 105 | 106 | uint8_t *audio_out_buffer = (uint8_t*)av_malloc(MAX_AUDIO_FRAME_SIZE*2); 107 | 108 | SwrContext *swr_ctx = swr_alloc_set_opts(NULL, 109 | out_channel_layout, 110 | out_sample_fmt, 111 | out_sample_rate, 112 | codecCtx->channel_layout, 113 | 
codecCtx->sample_fmt, 114 | codecCtx->sample_rate, 115 | 0,NULL); 116 | swr_init(swr_ctx); 117 | 118 | double sleep_time=0; 119 | 120 | while(av_read_frame(fmtCtx,pkt)>=0){ 121 | if(pkt->stream_index==aStreamIndex){ 122 | if(avcodec_send_packet(codecCtx,pkt)>=0){ 123 | while(avcodec_receive_frame(codecCtx,frame)>=0){ 124 | if(av_sample_fmt_is_planar(codecCtx->sample_fmt)){ 125 | int len = swr_convert(swr_ctx, 126 | &audio_out_buffer, 127 | MAX_AUDIO_FRAME_SIZE*2, 128 | (const uint8_t**)frame->data, 129 | frame->nb_samples); 130 | if(len<=0){ 131 | continue; 132 | } 133 | //qDebug("convert length is: %d.\n",len); 134 | 135 | int out_size = av_samples_get_buffer_size(0, 136 | out_channels, 137 | len, 138 | out_sample_fmt, 139 | 1); 140 | //qDebug("buffer size is: %d.",dst_bufsize); 141 | 142 | sleep_time=(out_sample_rate*16*2/8)/out_size; 143 | 144 | if(audioOutput->bytesFree()write((char*)audio_out_buffer,out_size); 147 | }else { 148 | streamOut->write((char*)audio_out_buffer,out_size); 149 | } 150 | //将数据写入PCM文件 151 | //fwrite(audio_out_buffer,1,dst_bufsize,file); 152 | } 153 | } 154 | } 155 | } 156 | av_packet_unref(pkt); 157 | } 158 | }while(0); 159 | 160 | av_frame_free(&frame); 161 | av_packet_free(&pkt); 162 | avcodec_close(codecCtx); 163 | avcodec_free_context(&codecCtx); 164 | avformat_free_context(fmtCtx); 165 | 166 | streamOut->close(); 167 | 168 | return 0; 169 | } 170 | -------------------------------------------------------------------------------- /10.18.audio_encode_pcm2mp3/main.c: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include "libavcodec/avcodec.h" 4 | #include "libavdevice/avdevice.h" 5 | #include "libavformat/avformat.h" 6 | #include "libavformat/avio.h" 7 | #include "libavutil/imgutils.h" 8 | #include "libswresample/swresample.h" 9 | #include "libswscale/swscale.h" 10 | 11 | int flush_encoder(AVFormatContext *fmtCtx,AVCodecContext *codecCtx,int aStreamIndex){ 12 | int ret=0; 13 | AVPacket *enc_pkt=av_packet_alloc(); 14 | enc_pkt->data=NULL; 15 | enc_pkt->size=0; 16 | 17 | if (!(codecCtx->codec->capabilities & AV_CODEC_CAP_DELAY)) 18 | return 0; 19 | 20 | printf("Flushing stream #%u encoder\n",aStreamIndex); 21 | if((ret=avcodec_send_frame(codecCtx,0))>=0){ 22 | while(avcodec_receive_packet(codecCtx,enc_pkt)>=0){ 23 | printf("success encoder 1 frame.\n"); 24 | /* mux encoded frame */ 25 | ret = av_write_frame(fmtCtx,enc_pkt); 26 | if(ret<0){ 27 | break; 28 | } 29 | } 30 | } 31 | 32 | return ret; 33 | } 34 | 35 | int main() 36 | { 37 | AVFormatContext *fmtCtx = NULL; 38 | AVCodecContext *codecCtx =NULL; 39 | const AVCodec *codec = NULL; 40 | AVFrame *frame = NULL; 41 | AVPacket *pkt = NULL; 42 | 43 | fmtCtx = avformat_alloc_context(); 44 | frame = av_frame_alloc(); 45 | pkt = av_packet_alloc(); 46 | pkt->data = NULL; 47 | pkt->size = 0; 48 | 49 | const char *inFileName = "s16le.pcm"; 50 | const char *outFileName = "output.mp3"; 51 | 52 | int ret=0; 53 | 54 | do{ 55 | //==========Output information============ 56 | 57 | if(avformat_alloc_output_context2(&fmtCtx,NULL,NULL,outFileName)<0){ 58 | printf("Cannot alloc output file context.\n"); 59 | break; 60 | } 61 | const AVOutputFormat *outFmt = fmtCtx->oformat; 62 | 63 | if(avio_open(&fmtCtx->pb,outFileName,AVIO_FLAG_READ_WRITE)<0){ 64 | printf("Cannot open output file.\n"); 65 | break; 66 | } 67 | 68 | AVStream *outStream = avformat_new_stream(fmtCtx,NULL); 69 | if(!outStream){ 70 | printf("Cannot create a new stream to output file.\n"); 71 | break; 72 | } 73 | 
74 | //设置参数 75 | AVCodecParameters *codecPara = fmtCtx->streams[outStream->index]->codecpar; 76 | codecPara->codec_type = AVMEDIA_TYPE_AUDIO; 77 | codecPara->codec_id = outFmt->audio_codec; 78 | codecPara->sample_rate=44100; 79 | codecPara->channel_layout = AV_CH_LAYOUT_STEREO; 80 | codecPara->bit_rate = 128000; 81 | codecPara->format = AV_SAMPLE_FMT_FLTP; 82 | codecPara->channels = av_get_channel_layout_nb_channels(codecPara->channel_layout); 83 | 84 | //查找编码器 85 | codec = avcodec_find_encoder(outFmt->audio_codec); 86 | if(codec==NULL){ 87 | printf("Cannot find audio encoder.\n"); 88 | break; 89 | } 90 | 91 | codecCtx = avcodec_alloc_context3(codec); 92 | avcodec_parameters_to_context(codecCtx,codecPara); 93 | if(codecCtx==NULL){ 94 | printf("Cannot alloc codec ctx from para.\n"); 95 | break; 96 | } 97 | 98 | //打开编码器 99 | if(avcodec_open2(codecCtx,codec,NULL)<0){ 100 | printf("Cannot open encoder.\n"); 101 | break; 102 | } 103 | 104 | av_dump_format(fmtCtx,0,outFileName,1); 105 | 106 | //=========== 107 | frame->nb_samples = codecCtx->frame_size; 108 | frame->format = codecCtx->sample_fmt; 109 | frame->channels = 2; 110 | 111 | // PCM重采样 112 | struct SwrContext *swrCtx = swr_alloc_set_opts(NULL, 113 | av_get_default_channel_layout(codecCtx->channels), 114 | codecCtx->sample_fmt, 115 | codecCtx->sample_rate, 116 | av_get_default_channel_layout(frame->channels), 117 | AV_SAMPLE_FMT_S16,// PCM源文件的采样格式 118 | 44100,0,NULL); 119 | swr_init(swrCtx); 120 | 121 | /* 分配空间 */ 122 | uint8_t **convert_data = (uint8_t**)calloc(codecCtx->channels,sizeof(*convert_data)); 123 | av_samples_alloc(convert_data,NULL,codecCtx->channels,codecCtx->frame_size, 124 | codecCtx->sample_fmt,0); 125 | 126 | int size = av_samples_get_buffer_size(NULL,codecCtx->channels, 127 | codecCtx->frame_size,codecCtx->sample_fmt,1); 128 | uint8_t *frameBuf = (uint8_t*)av_malloc(size); 129 | avcodec_fill_audio_frame(frame,codecCtx->channels,codecCtx->sample_fmt, 130 | (const uint8_t*)frameBuf,size,1); 131 | 132 | //写帧头 133 | ret = avformat_write_header(fmtCtx,NULL); 134 | 135 | FILE *inFile = fopen(inFileName,"rb"); 136 | if(!inFile){ 137 | printf("Cannot open input file.\n"); 138 | break; 139 | } 140 | 141 | for(int i=0;;i++){ 142 | //输入一帧数据的长度 143 | int length = frame->nb_samples*av_get_bytes_per_sample(AV_SAMPLE_FMT_S16)*frame->channels; 144 | //读PCM:特意注意读取的长度,否则可能出现转码之后声音变快或者变慢 145 | if(fread(frameBuf,1,length,inFile)<=0){ 146 | printf("Cannot read raw data from file.\n"); 147 | return -1; 148 | }else if(feof(inFile)){ 149 | break; 150 | } 151 | 152 | swr_convert(swrCtx,convert_data,codecCtx->frame_size, 153 | (const uint8_t**)frame->data, 154 | frame->nb_samples); 155 | 156 | //输出一帧数据的长度 157 | length = codecCtx->frame_size * av_get_bytes_per_sample(codecCtx->sample_fmt); 158 | //双通道赋值(输出的AAC为双通道) 159 | memcpy(frame->data[0],convert_data[0],length); 160 | memcpy(frame->data[1],convert_data[1],length); 161 | 162 | frame->pts = i*100; 163 | if(avcodec_send_frame(codecCtx,frame)<0){ 164 | while(avcodec_receive_packet(codecCtx,pkt)>=0){ 165 | pkt->stream_index = outStream->index; 166 | printf("write %4d frame, size=%d, length=%d\n",i,size,length); 167 | av_write_frame(fmtCtx,pkt); 168 | } 169 | } 170 | av_packet_unref(pkt); 171 | } 172 | 173 | // flush encoder 174 | if(flush_encoder(fmtCtx,codecCtx,outStream->index)<0){ 175 | printf("Cannot flush encoder.\n"); 176 | return -1; 177 | } 178 | 179 | // write trailer 180 | av_write_trailer(fmtCtx); 181 | 182 | fclose(inFile); 183 | av_free(frameBuf); 184 | }while(0); 185 | 186 | 
avcodec_close(codecCtx); 187 | av_free(frame); 188 | avio_close(fmtCtx->pb); 189 | avformat_free_context(fmtCtx); 190 | 191 | return ret; 192 | } 193 | --------------------------------------------------------------------------------
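One detail worth flagging in 10.18.audio_encode_pcm2mp3/main.c above: the encode loop only calls avcodec_receive_packet() when avcodec_send_frame() returns a value below zero, so in the normal case no packets appear to be drained after a successful send. With FFmpeg's send/receive API the usual shape is to drain after every successful send (and once more at flush time with a NULL frame), treating AVERROR(EAGAIN) as "feed more input". The sketch below shows that generic pattern; it is not code from this repository, and the helper name encode_and_write and its parameters are illustrative only.

#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"

/* Feed one frame to the encoder (frame == NULL flushes it), then write out
 * every packet the encoder has ready. Returns 0 on success, a negative
 * AVERROR code otherwise. */
static int encode_and_write(AVCodecContext *enc, AVFrame *frame,
                            AVPacket *pkt, AVFormatContext *fmtCtx, int streamIndex)
{
    int ret = avcodec_send_frame(enc, frame);
    if (ret < 0)
        return ret;

    for (;;) {
        ret = avcodec_receive_packet(enc, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;              /* encoder wants more input, or is fully drained */
        if (ret < 0)
            return ret;            /* genuine encoding error */

        pkt->stream_index = streamIndex;
        ret = av_write_frame(fmtCtx, pkt);
        av_packet_unref(pkt);
        if (ret < 0)
            return ret;
    }
}

The same send/receive shape applies to the decode loops earlier in the repository, with avcodec_send_packet() and avcodec_receive_frame() in place of the encoder calls.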