├── .gitignore ├── CMakeLists.txt ├── LICENSE ├── README.md ├── ffmpeg_nvmpi.patch ├── nvmpi.h ├── nvmpi.pc.in ├── nvmpi_dec.cpp └── nvmpi_enc.cpp /.gitignore: -------------------------------------------------------------------------------- 1 | # Prerequisites 2 | *.d 3 | 4 | # Compiled Object files 5 | *.slo 6 | *.lo 7 | *.o 8 | *.obj 9 | 10 | # Precompiled Headers 11 | *.gch 12 | *.pch 13 | 14 | # Compiled Dynamic libraries 15 | *.so 16 | *.dylib 17 | *.dll 18 | 19 | # Fortran module files 20 | *.mod 21 | *.smod 22 | 23 | # Compiled Static libraries 24 | *.lai 25 | *.la 26 | *.a 27 | *.lib 28 | 29 | # Executables 30 | *.exe 31 | *.out 32 | *.app 33 | build 34 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.9) 2 | project(nvmpi VERSION 1.0.0 DESCRIPTION "nvidia multimedia api") 3 | 4 | set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fPIC") 5 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC") 6 | #set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,-rpath-link=/lib") 7 | #set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,-rpath-link=/usr/lib/aarch64-linux-gnu") 8 | set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,-rpath-link=/usr/lib/aarch64-linux-gnu/tegra") 9 | set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,-rpath-link=/usr/local/cuda/lib64") 10 | 11 | find_library(LIB_NVBUF nvbuf_utils PATHS /usr/lib/aarch64-linux-gnu/tegra) 12 | find_library(LIB_V4L2 nvv4l2 PATHS /usr/lib/aarch64-linux-gnu/tegra) 13 | find_package (Threads) 14 | #find_library(LIB_DRM drm PATHS /usr/lib/aarch64-linux-gnu/tegra) 15 | #find_library(LIB_EGL EGL PATHS /usr/lib/aarch64-linux-gnu/tegra) 16 | 17 | add_library(nvmpi SHARED 18 | nvmpi_dec.cpp 19 | nvmpi_enc.cpp 20 | /usr/src/jetson_multimedia_api/samples/common/classes/NvBuffer.cpp 21 | /usr/src/jetson_multimedia_api/samples/common/classes/NvElement.cpp 22 | /usr/src/jetson_multimedia_api/samples/common/classes/NvElementProfiler.cpp 23 | /usr/src/jetson_multimedia_api/samples/common/classes/NvLogging.cpp 24 | /usr/src/jetson_multimedia_api/samples/common/classes/NvV4l2Element.cpp 25 | /usr/src/jetson_multimedia_api/samples/common/classes/NvV4l2ElementPlane.cpp 26 | /usr/src/jetson_multimedia_api/samples/common/classes/NvVideoDecoder.cpp 27 | /usr/src/jetson_multimedia_api/samples/common/classes/NvVideoEncoder.cpp 28 | #common/NvVideoConverter.cpp 29 | #common/NvApplicationProfiler.cpp 30 | #common/NvEglRenderer.cpp 31 | #common/NvUtils.cpp 32 | ) 33 | 34 | add_library(nvmpi_static STATIC 35 | nvmpi_dec.cpp 36 | nvmpi_enc.cpp 37 | /usr/src/jetson_multimedia_api/samples/common/classes/NvBuffer.cpp 38 | /usr/src/jetson_multimedia_api/samples/common/classes/NvElement.cpp 39 | /usr/src/jetson_multimedia_api/samples/common/classes/NvElementProfiler.cpp 40 | /usr/src/jetson_multimedia_api/samples/common/classes/NvLogging.cpp 41 | /usr/src/jetson_multimedia_api/samples/common/classes/NvV4l2Element.cpp 42 | /usr/src/jetson_multimedia_api/samples/common/classes/NvV4l2ElementPlane.cpp 43 | /usr/src/jetson_multimedia_api/samples/common/classes/NvVideoDecoder.cpp 44 | /usr/src/jetson_multimedia_api/samples/common/classes/NvVideoEncoder.cpp 45 | #common/NvVideoConverter.cpp 46 | #common/NvApplicationProfiler.cpp 47 | #common/NvEglRenderer.cpp 48 | #common/NvUtils.cpp 49 | ) 50 | 51 | set_target_properties(nvmpi_static PROPERTIES OUTPUT_NAME nvmpi) 52 |
set_target_properties(nvmpi PROPERTIES VERSION ${PROJECT_VERSION}) 53 | set_target_properties(nvmpi PROPERTIES SOVERSION 1) 54 | set_target_properties(nvmpi nvmpi_static PROPERTIES PUBLIC_HEADER nvmpi.h) 55 | target_link_libraries(nvmpi PRIVATE ${CMAKE_THREAD_LIBS_INIT} ${LIB_NVBUF} ${LIB_V4L2}) 56 | target_include_directories(nvmpi PRIVATE /usr/src/jetson_multimedia_api/include) 57 | target_include_directories(nvmpi PRIVATE /usr/local/cuda/include) 58 | target_include_directories(nvmpi_static PRIVATE /usr/src/jetson_multimedia_api/include) 59 | target_include_directories(nvmpi_static PRIVATE /usr/local/cuda/include) 60 | configure_file(nvmpi.pc.in nvmpi.pc @ONLY) 61 | include(GNUInstallDirs) 62 | install(TARGETS nvmpi nvmpi_static 63 | LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} 64 | ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} 65 | PUBLIC_HEADER DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) 66 | install(FILES ${CMAKE_BINARY_DIR}/nvmpi.pc 67 | DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/pkgconfig) 68 | install(FILES ${CMAKE_BINARY_DIR}/nvmpi.pc 69 | DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig) 70 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) [2020] [jiangwei] 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # jetson-ffmpeg 2 | L4T Multimedia API for ffmpeg 3 | 4 | **1.build and install library** 5 | 6 | git clone https://github.com/jocover/jetson-ffmpeg.git 7 | cd jetson-ffmpeg 8 | mkdir build 9 | cd build 10 | cmake ..
11 | make 12 | sudo make install 13 | sudo ldconfig 14 | 15 | **2.patch ffmpeg and build** 16 | 17 | git clone git://source.ffmpeg.org/ffmpeg.git -b release/4.2 --depth=1 18 | cd ffmpeg 19 | wget https://github.com/jocover/jetson-ffmpeg/raw/master/ffmpeg_nvmpi.patch 20 | git apply ffmpeg_nvmpi.patch 21 | ./configure --enable-nvmpi 22 | make 23 | 24 | **3.using** 25 | 26 | ### Supports Decoding 27 | - MPEG2 28 | - H.264/AVC 29 | - HEVC 30 | - VP8 31 | - VP9 32 | 33 | **example** 34 | 35 | ffmpeg -c:v h264_nvmpi -i input_file -f null - 36 | 37 | ### Supports Encoding 38 | - H.264/AVC 39 | - HEVC 40 | 41 | **example** 42 | 43 | ffmpeg -i input_file -c:v h264_nvmpi 44 | -------------------------------------------------------------------------------- /ffmpeg_nvmpi.patch: -------------------------------------------------------------------------------- 1 | diff --git a/configure b/configure 2 | index 6a7a85c..5810ab5 100755 3 | --- a/configure 4 | +++ b/configure 5 | @@ -340,6 +340,7 @@ External library support: 6 | --disable-vaapi disable Video Acceleration API (mainly Unix/Intel) code [autodetect] 7 | --disable-vdpau disable Nvidia Video Decode and Presentation API for Unix code [autodetect] 8 | --disable-videotoolbox disable VideoToolbox code [autodetect] 9 | + --enable-nvmpi enable nvmpi code 10 | 11 | Toolchain options: 12 | --arch=ARCH select architecture [$arch] 13 | @@ -1851,6 +1852,7 @@ HWACCEL_LIBRARY_LIST=" 14 | mmal 15 | omx 16 | opencl 17 | + nvmpi 18 | " 19 | 20 | DOCUMENT_LIST=" 21 | @@ -3014,11 +3016,14 @@ h264_mediacodec_decoder_deps="mediacodec" 22 | h264_mediacodec_decoder_select="h264_mp4toannexb_bsf h264_parser" 23 | h264_mmal_decoder_deps="mmal" 24 | h264_nvenc_encoder_deps="nvenc" 25 | +h264_nvmpi_encoder_deps="nvmpi" 26 | h264_omx_encoder_deps="omx" 27 | h264_qsv_decoder_select="h264_mp4toannexb_bsf h264_parser qsvdec" 28 | h264_qsv_encoder_select="qsvenc" 29 | h264_rkmpp_decoder_deps="rkmpp" 30 | h264_rkmpp_decoder_select="h264_mp4toannexb_bsf" 31 | +h264_nvmpi_decoder_deps="nvmpi" 32 | +h264_nvmpi_decoder_select="h264_mp4toannexb_bsf" 33 | h264_vaapi_encoder_select="cbs_h264 vaapi_encode" 34 | h264_v4l2m2m_decoder_deps="v4l2_m2m h264_v4l2_m2m" 35 | h264_v4l2m2m_decoder_select="h264_mp4toannexb_bsf" 36 | @@ -3029,10 +3034,13 @@ hevc_cuvid_decoder_select="hevc_mp4toannexb_bsf" 37 | hevc_mediacodec_decoder_deps="mediacodec" 38 | hevc_mediacodec_decoder_select="hevc_mp4toannexb_bsf hevc_parser" 39 | hevc_nvenc_encoder_deps="nvenc" 40 | +hevc_nvmpi_encoder_deps="nvmpi" 41 | hevc_qsv_decoder_select="hevc_mp4toannexb_bsf hevc_parser qsvdec" 42 | hevc_qsv_encoder_select="hevcparse qsvenc" 43 | hevc_rkmpp_decoder_deps="rkmpp" 44 | hevc_rkmpp_decoder_select="hevc_mp4toannexb_bsf" 45 | +hevc_nvmpi_decoder_deps="nvmpi" 46 | +hevc_nvmpi_decoder_select="hevc_mp4toannexb_bsf" 47 | hevc_vaapi_encoder_deps="VAEncPictureParameterBufferHEVC" 48 | hevc_vaapi_encoder_select="cbs_h265 vaapi_encode" 49 | hevc_v4l2m2m_decoder_deps="v4l2_m2m hevc_v4l2_m2m" 50 | @@ -3047,6 +3055,7 @@ mpeg1_cuvid_decoder_deps="cuvid" 51 | mpeg1_v4l2m2m_decoder_deps="v4l2_m2m mpeg1_v4l2_m2m" 52 | mpeg2_crystalhd_decoder_select="crystalhd" 53 | mpeg2_cuvid_decoder_deps="cuvid" 54 | +mpeg2_nvmpi_decoder_deps="nvmpi" 55 | mpeg2_mmal_decoder_deps="mmal" 56 | mpeg2_mediacodec_decoder_deps="mediacodec" 57 | mpeg2_qsv_decoder_select="qsvdec mpegvideo_parser" 58 | @@ -3055,6 +3064,7 @@ mpeg2_vaapi_encoder_select="cbs_mpeg2 vaapi_encode" 59 | mpeg2_v4l2m2m_decoder_deps="v4l2_m2m mpeg2_v4l2_m2m" 60 | 
mpeg4_crystalhd_decoder_select="crystalhd" 61 | mpeg4_cuvid_decoder_deps="cuvid" 62 | +mpeg4_nvmpi_decoder_deps="nvmpi" 63 | mpeg4_mediacodec_decoder_deps="mediacodec" 64 | mpeg4_mmal_decoder_deps="mmal" 65 | mpeg4_omx_encoder_deps="omx" 66 | @@ -3069,6 +3079,7 @@ vc1_mmal_decoder_deps="mmal" 67 | vc1_qsv_decoder_select="qsvdec vc1_parser" 68 | vc1_v4l2m2m_decoder_deps="v4l2_m2m vc1_v4l2_m2m" 69 | vp8_cuvid_decoder_deps="cuvid" 70 | +vp8_nvmpi_decoder_deps="nvmpi" 71 | vp8_mediacodec_decoder_deps="mediacodec" 72 | vp8_qsv_decoder_select="qsvdec vp8_parser" 73 | vp8_rkmpp_decoder_deps="rkmpp" 74 | @@ -3077,6 +3088,7 @@ vp8_vaapi_encoder_select="vaapi_encode" 75 | vp8_v4l2m2m_decoder_deps="v4l2_m2m vp8_v4l2_m2m" 76 | vp8_v4l2m2m_encoder_deps="v4l2_m2m vp8_v4l2_m2m" 77 | vp9_cuvid_decoder_deps="cuvid" 78 | +vp9_nvmpi_decoder_deps="nvmpi" 79 | vp9_mediacodec_decoder_deps="mediacodec" 80 | vp9_rkmpp_decoder_deps="rkmpp" 81 | vp9_vaapi_encoder_deps="VAEncPictureParameterBufferVP9" 82 | @@ -6366,6 +6378,7 @@ enabled rkmpp && { require_pkg_config rkmpp rockchip_mpp rockchip/r 83 | die "ERROR: rkmpp requires --enable-libdrm"; } 84 | } 85 | enabled vapoursynth && require_pkg_config vapoursynth "vapoursynth-script >= 42" VSScript.h vsscript_init 86 | +enabled nvmpi && require_pkg_config nvmpi nvmpi nvmpi.h nvmpi_create_decoder 87 | 88 | 89 | if enabled gcrypt; then 90 | diff --git a/libavcodec/Makefile b/libavcodec/Makefile 91 | index 3cd73fb..c3ed5cc 100644 92 | --- a/libavcodec/Makefile 93 | +++ b/libavcodec/Makefile 94 | @@ -354,6 +354,8 @@ OBJS-$(CONFIG_H264_MMAL_DECODER) += mmaldec.o 95 | OBJS-$(CONFIG_H264_NVENC_ENCODER) += nvenc_h264.o 96 | OBJS-$(CONFIG_NVENC_ENCODER) += nvenc_h264.o 97 | OBJS-$(CONFIG_NVENC_H264_ENCODER) += nvenc_h264.o 98 | +OBJS-$(CONFIG_H264_NVMPI_DECODER) += nvmpi_dec.o 99 | +OBJS-$(CONFIG_H264_NVMPI_ENCODER) += nvmpi_enc.o 100 | OBJS-$(CONFIG_H264_OMX_ENCODER) += omx.o 101 | OBJS-$(CONFIG_H264_QSV_DECODER) += qsvdec_h2645.o 102 | OBJS-$(CONFIG_H264_QSV_ENCODER) += qsvenc_h264.o 103 | @@ -379,6 +381,8 @@ OBJS-$(CONFIG_HEVC_QSV_ENCODER) += qsvenc_hevc.o hevc_ps_enc.o \ 104 | OBJS-$(CONFIG_HEVC_RKMPP_DECODER) += rkmppdec.o 105 | OBJS-$(CONFIG_HEVC_VAAPI_ENCODER) += vaapi_encode_h265.o h265_profile_level.o 106 | OBJS-$(CONFIG_HEVC_V4L2M2M_DECODER) += v4l2_m2m_dec.o 107 | +OBJS-$(CONFIG_HEVC_NVMPI_DECODER) += nvmpi_dec.o 108 | +OBJS-$(CONFIG_HEVC_NVMPI_ENCODER) += nvmpi_enc.o 109 | OBJS-$(CONFIG_HEVC_V4L2M2M_ENCODER) += v4l2_m2m_enc.o 110 | OBJS-$(CONFIG_HNM4_VIDEO_DECODER) += hnm4video.o 111 | OBJS-$(CONFIG_HQ_HQA_DECODER) += hq_hqa.o hq_hqadata.o hq_hqadsp.o \ 112 | @@ -464,11 +468,13 @@ OBJS-$(CONFIG_MPEG2_QSV_ENCODER) += qsvenc_mpeg2.o 113 | OBJS-$(CONFIG_MPEG2VIDEO_DECODER) += mpeg12dec.o mpeg12.o mpeg12data.o 114 | OBJS-$(CONFIG_MPEG2VIDEO_ENCODER) += mpeg12enc.o mpeg12.o 115 | OBJS-$(CONFIG_MPEG2_CUVID_DECODER) += cuviddec.o 116 | +OBJS-$(CONFIG_MPEG2_NVMPI_DECODER) += nvmpi_dec.o 117 | OBJS-$(CONFIG_MPEG2_MEDIACODEC_DECODER) += mediacodecdec.o 118 | OBJS-$(CONFIG_MPEG2_VAAPI_ENCODER) += vaapi_encode_mpeg2.o 119 | OBJS-$(CONFIG_MPEG2_V4L2M2M_DECODER) += v4l2_m2m_dec.o 120 | OBJS-$(CONFIG_MPEG4_DECODER) += xvididct.o 121 | OBJS-$(CONFIG_MPEG4_CUVID_DECODER) += cuviddec.o 122 | +OBJS-$(CONFIG_MPEG4_NVMPI_DECODER) += nvmpi_dec.o 123 | OBJS-$(CONFIG_MPEG4_MEDIACODEC_DECODER) += mediacodecdec.o 124 | OBJS-$(CONFIG_MPEG4_OMX_ENCODER) += omx.o 125 | OBJS-$(CONFIG_MPEG4_V4L2M2M_DECODER) += v4l2_m2m_dec.o 126 | @@ -669,6 +675,7 @@ OBJS-$(CONFIG_VP6_DECODER) += vp6.o vp56.o 
vp56data.o \ 127 | OBJS-$(CONFIG_VP7_DECODER) += vp8.o vp56rac.o 128 | OBJS-$(CONFIG_VP8_DECODER) += vp8.o vp56rac.o 129 | OBJS-$(CONFIG_VP8_CUVID_DECODER) += cuviddec.o 130 | +OBJS-$(CONFIG_VP8_NVMPI_DECODER) += nvmpi_dec.o 131 | OBJS-$(CONFIG_VP8_MEDIACODEC_DECODER) += mediacodecdec.o 132 | OBJS-$(CONFIG_VP8_QSV_DECODER) += qsvdec_other.o 133 | OBJS-$(CONFIG_VP8_RKMPP_DECODER) += rkmppdec.o 134 | @@ -679,6 +686,7 @@ OBJS-$(CONFIG_VP9_DECODER) += vp9.o vp9data.o vp9dsp.o vp9lpf.o vp9r 135 | vp9block.o vp9prob.o vp9mvs.o vp56rac.o \ 136 | vp9dsp_8bpp.o vp9dsp_10bpp.o vp9dsp_12bpp.o 137 | OBJS-$(CONFIG_VP9_CUVID_DECODER) += cuviddec.o 138 | +OBJS-$(CONFIG_VP9_NVMPI_DECODER) += nvmpi_dec.o 139 | OBJS-$(CONFIG_VP9_MEDIACODEC_DECODER) += mediacodecdec.o 140 | OBJS-$(CONFIG_VP9_RKMPP_DECODER) += rkmppdec.o 141 | OBJS-$(CONFIG_VP9_VAAPI_ENCODER) += vaapi_encode_vp9.o 142 | diff --git a/libavcodec/allcodecs.c b/libavcodec/allcodecs.c 143 | index d2f9a39..04dc62b 100644 144 | --- a/libavcodec/allcodecs.c 145 | +++ b/libavcodec/allcodecs.c 146 | @@ -143,11 +143,15 @@ extern AVCodec ff_h264_mediacodec_decoder; 147 | extern AVCodec ff_h264_mmal_decoder; 148 | extern AVCodec ff_h264_qsv_decoder; 149 | extern AVCodec ff_h264_rkmpp_decoder; 150 | +extern AVCodec ff_h264_nvmpi_decoder; 151 | +extern AVCodec ff_h264_nvmpi_encoder; 152 | extern AVCodec ff_hap_encoder; 153 | extern AVCodec ff_hap_decoder; 154 | extern AVCodec ff_hevc_decoder; 155 | extern AVCodec ff_hevc_qsv_decoder; 156 | extern AVCodec ff_hevc_rkmpp_decoder; 157 | +extern AVCodec ff_hevc_nvmpi_decoder; 158 | +extern AVCodec ff_hevc_nvmpi_encoder; 159 | extern AVCodec ff_hevc_v4l2m2m_decoder; 160 | extern AVCodec ff_hnm4_video_decoder; 161 | extern AVCodec ff_hq_hqa_decoder; 162 | @@ -766,18 +770,22 @@ extern AVCodec ff_mjpeg_qsv_encoder; 163 | extern AVCodec ff_mjpeg_vaapi_encoder; 164 | extern AVCodec ff_mpeg1_cuvid_decoder; 165 | extern AVCodec ff_mpeg2_cuvid_decoder; 166 | +extern AVCodec ff_mpeg2_nvmpi_decoder; 167 | extern AVCodec ff_mpeg2_qsv_encoder; 168 | extern AVCodec ff_mpeg2_vaapi_encoder; 169 | extern AVCodec ff_mpeg4_cuvid_decoder; 170 | +extern AVCodec ff_mpeg4_nvmpi_decoder; 171 | extern AVCodec ff_mpeg4_mediacodec_decoder; 172 | extern AVCodec ff_mpeg4_v4l2m2m_encoder; 173 | extern AVCodec ff_vc1_cuvid_decoder; 174 | extern AVCodec ff_vp8_cuvid_decoder; 175 | +extern AVCodec ff_vp8_nvmpi_decoder; 176 | extern AVCodec ff_vp8_mediacodec_decoder; 177 | extern AVCodec ff_vp8_qsv_decoder; 178 | extern AVCodec ff_vp8_v4l2m2m_encoder; 179 | extern AVCodec ff_vp8_vaapi_encoder; 180 | extern AVCodec ff_vp9_cuvid_decoder; 181 | +extern AVCodec ff_vp9_nvmpi_decoder; 182 | extern AVCodec ff_vp9_mediacodec_decoder; 183 | extern AVCodec ff_vp9_vaapi_encoder; 184 | 185 | diff --git a/libavcodec/nvmpi_dec.c b/libavcodec/nvmpi_dec.c 186 | new file mode 100644 187 | index 0000000..f82aa61 188 | --- /dev/null 189 | +++ b/libavcodec/nvmpi_dec.c 190 | @@ -0,0 +1,171 @@ 191 | +#include 192 | +#include 193 | +#include 194 | + 195 | +#include 196 | +#include "avcodec.h" 197 | +#include "decode.h" 198 | +#include "internal.h" 199 | +#include "libavutil/buffer.h" 200 | +#include "libavutil/common.h" 201 | +#include "libavutil/frame.h" 202 | +#include "libavutil/hwcontext.h" 203 | +#include "libavutil/hwcontext_drm.h" 204 | +#include "libavutil/imgutils.h" 205 | +#include "libavutil/log.h" 206 | + 207 | + 208 | + 209 | + 210 | +typedef struct { 211 | + char eos_reached; 212 | + nvmpictx* ctx; 213 | + AVClass *av_class; 214 | +} nvmpiDecodeContext; 
215 | + 216 | +static nvCodingType nvmpi_get_codingtype(AVCodecContext *avctx) 217 | +{ 218 | + switch (avctx->codec_id) { 219 | + case AV_CODEC_ID_H264: return NV_VIDEO_CodingH264; 220 | + case AV_CODEC_ID_HEVC: return NV_VIDEO_CodingHEVC; 221 | + case AV_CODEC_ID_VP8: return NV_VIDEO_CodingVP8; 222 | + case AV_CODEC_ID_VP9: return NV_VIDEO_CodingVP9; 223 | + case AV_CODEC_ID_MPEG4: return NV_VIDEO_CodingMPEG4; 224 | + case AV_CODEC_ID_MPEG2VIDEO: return NV_VIDEO_CodingMPEG2; 225 | + default: return NV_VIDEO_CodingUnused; 226 | + } 227 | +}; 228 | + 229 | + 230 | +static int nvmpi_init_decoder(AVCodecContext *avctx){ 231 | + 232 | + nvmpiDecodeContext *nvmpi_context = avctx->priv_data; 233 | + nvCodingType codectype=NV_VIDEO_CodingUnused; 234 | + 235 | + codectype =nvmpi_get_codingtype(avctx); 236 | + if (codectype == NV_VIDEO_CodingUnused) { 237 | + av_log(avctx, AV_LOG_ERROR, "Unknown codec type (%d).\n", avctx->codec_id); 238 | + return AVERROR_UNKNOWN; 239 | + } 240 | + 241 | + //Workaround for default pix_fmt not being set, so check if it isnt set and set it, 242 | + //or if it is set, but isnt set to something we can work with. 243 | + 244 | + if(avctx->pix_fmt ==AV_PIX_FMT_NONE){ 245 | + avctx->pix_fmt=AV_PIX_FMT_YUV420P; 246 | + }else if(avctx-> pix_fmt != AV_PIX_FMT_YUV420P){ 247 | + av_log(avctx, AV_LOG_ERROR, "Invalid Pix_FMT for NVMPI Only yuv420p is supported\n"); 248 | + return AVERROR_INVALIDDATA; 249 | + } 250 | + 251 | + nvmpi_context->ctx=nvmpi_create_decoder(codectype,NV_PIX_YUV420); 252 | + 253 | + if(!nvmpi_context->ctx){ 254 | + av_log(avctx, AV_LOG_ERROR, "Failed to nvmpi_create_decoder (code = %d).\n", AVERROR_EXTERNAL); 255 | + return AVERROR_EXTERNAL; 256 | + } 257 | + return 0; 258 | + 259 | +} 260 | + 261 | + 262 | + 263 | +static int nvmpi_close(AVCodecContext *avctx){ 264 | + 265 | + nvmpiDecodeContext *nvmpi_context = avctx->priv_data; 266 | + return nvmpi_decoder_close(nvmpi_context->ctx); 267 | + 268 | +} 269 | + 270 | + 271 | + 272 | +static int nvmpi_decode(AVCodecContext *avctx,void *data,int *got_frame, AVPacket *avpkt){ 273 | + 274 | + nvmpiDecodeContext *nvmpi_context = avctx->priv_data; 275 | + AVFrame *frame = data; 276 | + nvFrame _nvframe={0}; 277 | + nvPacket packet; 278 | + uint8_t* ptrs[3]; 279 | + int res,linesize[3]; 280 | + 281 | + if(avpkt->size){ 282 | + packet.payload_size=avpkt->size; 283 | + packet.payload=avpkt->data; 284 | + packet.pts=avpkt->pts; 285 | + 286 | + res=nvmpi_decoder_put_packet(nvmpi_context->ctx,&packet); 287 | + } 288 | + 289 | + res=nvmpi_decoder_get_frame(nvmpi_context->ctx,&_nvframe,avctx->flags & AV_CODEC_FLAG_LOW_DELAY); 290 | + 291 | + if(res<0) 292 | + return avpkt->size; 293 | + 294 | + if (ff_get_buffer(avctx, frame, 0) < 0) { 295 | + return AVERROR(ENOMEM); 296 | + 297 | + } 298 | + 299 | + linesize[0]=_nvframe.linesize[0]; 300 | + linesize[1]=_nvframe.linesize[1]; 301 | + linesize[2]=_nvframe.linesize[2]; 302 | + 303 | + ptrs[0]=_nvframe.payload[0]; 304 | + ptrs[1]=_nvframe.payload[1]; 305 | + ptrs[2]=_nvframe.payload[2]; 306 | + 307 | + av_image_copy(frame->data, frame->linesize, (const uint8_t **) ptrs, linesize, avctx->pix_fmt, _nvframe.width,_nvframe.height); 308 | + 309 | + frame->width=_nvframe.width; 310 | + frame->height=_nvframe.height; 311 | + 312 | + frame->format=AV_PIX_FMT_YUV420P; 313 | + frame->pts=_nvframe.timestamp; 314 | + frame->pkt_dts = AV_NOPTS_VALUE; 315 | + 316 | + avctx->coded_width=_nvframe.width; 317 | + avctx->coded_height=_nvframe.height; 318 | + avctx->width=_nvframe.width; 319 
| + avctx->height=_nvframe.height; 320 | + 321 | + *got_frame = 1; 322 | + 323 | + return avpkt->size; 324 | +} 325 | + 326 | + 327 | + 328 | + 329 | +#define NVMPI_DEC_CLASS(NAME) \ 330 | + static const AVClass nvmpi_##NAME##_dec_class = { \ 331 | + .class_name = "nvmpi_" #NAME "_dec", \ 332 | + .version = LIBAVUTIL_VERSION_INT, \ 333 | + }; 334 | + 335 | +#define NVMPI_DEC(NAME, ID, BSFS) \ 336 | + NVMPI_DEC_CLASS(NAME) \ 337 | + AVCodec ff_##NAME##_nvmpi_decoder = { \ 338 | + .name = #NAME "_nvmpi", \ 339 | + .long_name = NULL_IF_CONFIG_SMALL(#NAME " (nvmpi)"), \ 340 | + .type = AVMEDIA_TYPE_VIDEO, \ 341 | + .id = ID, \ 342 | + .priv_data_size = sizeof(nvmpiDecodeContext), \ 343 | + .init = nvmpi_init_decoder, \ 344 | + .close = nvmpi_close, \ 345 | + .decode = nvmpi_decode, \ 346 | + .priv_class = &nvmpi_##NAME##_dec_class, \ 347 | + .capabilities = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_AVOID_PROBING | AV_CODEC_CAP_HARDWARE, \ 348 | + .pix_fmts =(const enum AVPixelFormat[]){AV_PIX_FMT_YUV420P,AV_PIX_FMT_NV12,AV_PIX_FMT_NONE},\ 349 | + .bsfs = BSFS, \ 350 | + .wrapper_name = "nvmpi", \ 351 | + }; 352 | + 353 | + 354 | + 355 | +NVMPI_DEC(h264, AV_CODEC_ID_H264,"h264_mp4toannexb"); 356 | +NVMPI_DEC(hevc, AV_CODEC_ID_HEVC,"hevc_mp4toannexb"); 357 | +NVMPI_DEC(mpeg2, AV_CODEC_ID_MPEG2VIDEO,NULL); 358 | +NVMPI_DEC(mpeg4, AV_CODEC_ID_MPEG4,NULL); 359 | +NVMPI_DEC(vp9, AV_CODEC_ID_VP9,NULL); 360 | +NVMPI_DEC(vp8, AV_CODEC_ID_VP8,NULL); 361 | + 362 | diff --git a/libavcodec/nvmpi_enc.c b/libavcodec/nvmpi_enc.c 363 | new file mode 100644 364 | index 0000000..0e1390e 365 | --- /dev/null 366 | +++ b/libavcodec/nvmpi_enc.c 367 | @@ -0,0 +1,272 @@ 368 | +#include 369 | +#include "avcodec.h" 370 | +#include "internal.h" 371 | +#include 372 | +#include "libavutil/avstring.h" 373 | +#include "libavutil/avutil.h" 374 | +#include "libavutil/common.h" 375 | +#include "libavutil/imgutils.h" 376 | +#include "libavutil/log.h" 377 | +#include "libavutil/opt.h" 378 | + 379 | + 380 | +typedef struct { 381 | + const AVClass *class; 382 | + nvmpictx* ctx; 383 | + int num_capture_buffers; 384 | + int profile; 385 | + int level; 386 | + int rc; 387 | + int preset; 388 | +}nvmpiEncodeContext; 389 | + 390 | +static av_cold int nvmpi_encode_init(AVCodecContext *avctx){ 391 | + 392 | + nvmpiEncodeContext * nvmpi_context = avctx->priv_data; 393 | + 394 | + nvEncParam param={0}; 395 | + 396 | + param.width=avctx->width; 397 | + param.height=avctx->height; 398 | + param.bitrate=avctx->bit_rate; 399 | + param.mode_vbr=0; 400 | + param.idr_interval=60; 401 | + param.iframe_interval=30; 402 | + param.peak_bitrate=0; 403 | + param.fps_n=avctx->framerate.num; 404 | + param.fps_d=avctx->framerate.den; 405 | + param.profile=nvmpi_context->profile& ~FF_PROFILE_H264_INTRA; 406 | + param.level=nvmpi_context->level; 407 | + param.capture_num=nvmpi_context->num_capture_buffers; 408 | + param.hw_preset_type=nvmpi_context->preset; 409 | + param.insert_spspps_idr=(avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER)?0:1; 410 | + 411 | + if(nvmpi_context->rc==1){ 412 | + param.mode_vbr=1; 413 | + } 414 | + 415 | + if(avctx->qmin >= 0 && avctx->qmax >= 0){ 416 | + param.qmin=avctx->qmin; 417 | + param.qmax=avctx->qmax; 418 | + } 419 | + 420 | + if (avctx->refs >= 0){ 421 | + param.refs=avctx->refs; 422 | + 423 | + } 424 | + 425 | + if(avctx->max_b_frames > 0 && avctx->max_b_frames < 3){ 426 | + param.max_b_frames=avctx->max_b_frames; 427 | + } 428 | + 429 | + if(avctx->gop_size>0){ 430 | + param.idr_interval=param.iframe_interval=avctx->gop_size; 431 
| + 432 | + } 433 | + 434 | + 435 | + if ((avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) && (avctx->codec->id == AV_CODEC_ID_H264)){ 436 | + 437 | + uint8_t *dst[4]; 438 | + int linesize[4]; 439 | + nvFrame _nvframe={0}; 440 | + nvPacket packet={0}; 441 | + int i; 442 | + int ret; 443 | + nvmpictx* _ctx; 444 | + av_image_alloc(dst, linesize,avctx->width,avctx->height,avctx->pix_fmt,1); 445 | + 446 | + _ctx=nvmpi_create_encoder(NV_VIDEO_CodingH264,¶m); 447 | + i=0; 448 | + 449 | + while(1){ 450 | + 451 | + _nvframe.payload[0]=dst[0]; 452 | + _nvframe.payload[1]=dst[1]; 453 | + _nvframe.payload[2]=dst[2]; 454 | + _nvframe.payload_size[0]=linesize[0]*avctx->height; 455 | + _nvframe.payload_size[1]=linesize[1]*avctx->height/2; 456 | + _nvframe.payload_size[2]=linesize[2]*avctx->height/2; 457 | + 458 | + nvmpi_encoder_put_frame(_ctx,&_nvframe); 459 | + 460 | + ret=nvmpi_encoder_get_packet(_ctx,&packet); 461 | + 462 | + if(ret<0) 463 | + continue; 464 | + 465 | + //find idr index 0x0000000165 466 | + while((packet.payload[i]!=0||packet.payload[i+1]!=0||packet.payload[i+2]!=0||packet.payload[i+3]!=0x01||packet.payload[i+4]!=0x65)){ 467 | + i++; 468 | + 469 | + } 470 | + 471 | + avctx->extradata_size=i; 472 | + avctx->extradata = av_mallocz( avctx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE ); 473 | + memcpy( avctx->extradata, packet.payload,avctx->extradata_size); 474 | + memset( avctx->extradata + avctx->extradata_size, 0, AV_INPUT_BUFFER_PADDING_SIZE ); 475 | + 476 | + break; 477 | + 478 | + } 479 | + 480 | + nvmpi_encoder_close(_ctx); 481 | + 482 | + 483 | + } 484 | + 485 | + if(avctx->codec->id == AV_CODEC_ID_H264) 486 | + nvmpi_context->ctx=nvmpi_create_encoder(NV_VIDEO_CodingH264,¶m); 487 | + else if(avctx->codec->id == AV_CODEC_ID_HEVC){ 488 | + nvmpi_context->ctx=nvmpi_create_encoder(NV_VIDEO_CodingHEVC,¶m); 489 | + } 490 | + 491 | + 492 | + return 0; 493 | +} 494 | + 495 | + 496 | +static int nvmpi_encode_frame(AVCodecContext *avctx, AVPacket *pkt,const AVFrame *frame, int *got_packet){ 497 | + 498 | + nvmpiEncodeContext * nvmpi_context = avctx->priv_data; 499 | + nvFrame _nvframe={0}; 500 | + nvPacket packet={0}; 501 | + int res; 502 | + 503 | + if(frame){ 504 | + 505 | + _nvframe.payload[0]=frame->data[0]; 506 | + _nvframe.payload[1]=frame->data[1]; 507 | + _nvframe.payload[2]=frame->data[2]; 508 | + 509 | + _nvframe.payload_size[0]=frame->linesize[0]*frame->height; 510 | + _nvframe.payload_size[1]=frame->linesize[1]*frame->height/2; 511 | + _nvframe.payload_size[2]=frame->linesize[2]*frame->height/2; 512 | + 513 | + _nvframe.linesize[0]=frame->linesize[0]; 514 | + _nvframe.linesize[1]=frame->linesize[1]; 515 | + _nvframe.linesize[2]=frame->linesize[2]; 516 | + 517 | + _nvframe.timestamp=frame->pts; 518 | + 519 | + res=nvmpi_encoder_put_frame(nvmpi_context->ctx,&_nvframe); 520 | + 521 | + if(res<0) 522 | + return res; 523 | + } 524 | + 525 | + 526 | + if(nvmpi_encoder_get_packet(nvmpi_context->ctx,&packet)<0) 527 | + return 0; 528 | + 529 | + 530 | + ff_alloc_packet2(avctx,pkt,packet.payload_size,packet.payload_size); 531 | + 532 | + memcpy(pkt->data,packet.payload,packet.payload_size); 533 | + pkt->dts=pkt->pts=packet.pts; 534 | + 535 | + if(packet.flags& AV_PKT_FLAG_KEY) 536 | + pkt->flags = AV_PKT_FLAG_KEY; 537 | + 538 | + 539 | + *got_packet = 1; 540 | + 541 | + return 0; 542 | +} 543 | + 544 | +static av_cold int nvmpi_encode_close(AVCodecContext *avctx){ 545 | + 546 | + nvmpiEncodeContext *nvmpi_context = avctx->priv_data; 547 | + nvmpi_encoder_close(nvmpi_context->ctx); 548 | + 
549 | + return 0; 550 | +} 551 | + 552 | +static const AVCodecDefault defaults[] = { 553 | + { "b", "2M" }, 554 | + { "qmin", "-1" }, 555 | + { "qmax", "-1" }, 556 | + { "qdiff", "-1" }, 557 | + { "qblur", "-1" }, 558 | + { "qcomp", "-1" }, 559 | + { "g", "50" }, 560 | + { "bf", "0" }, 561 | + { "refs", "0" }, 562 | + { NULL }, 563 | +}; 564 | + 565 | + 566 | +#define OFFSET(x) offsetof(nvmpiEncodeContext, x) 567 | +#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM 568 | + 569 | +static const AVOption options[] = { 570 | + { "num_capture_buffers", "Number of buffers in the capture context", OFFSET(num_capture_buffers), AV_OPT_TYPE_INT, {.i64 = 10 }, 1, 32, AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM }, 571 | + /// Profile, 572 | + 573 | + { "profile", "Set the encoding profile", OFFSET(profile), AV_OPT_TYPE_INT, { .i64 = FF_PROFILE_UNKNOWN }, FF_PROFILE_UNKNOWN, FF_PROFILE_H264_HIGH, VE, "profile" }, 574 | + { "baseline", "", 0, AV_OPT_TYPE_CONST, { .i64 = FF_PROFILE_H264_BASELINE }, 0, 0, VE, "profile" }, 575 | + { "main", "", 0, AV_OPT_TYPE_CONST, { .i64 = FF_PROFILE_H264_MAIN }, 0, 0, VE, "profile" }, 576 | + { "high", "", 0, AV_OPT_TYPE_CONST, { .i64 = FF_PROFILE_H264_HIGH }, 0, 0, VE, "profile" }, 577 | + 578 | + /// Profile Level 579 | + { "level", "Profile Level", OFFSET(level), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 62, VE, "level" }, 580 | + { "auto", "", 0, AV_OPT_TYPE_CONST, { .i64 = 0 }, 0, 0, VE, "level" }, 581 | + { "1.0", "", 0, AV_OPT_TYPE_CONST, { .i64 = 10 }, 0, 0, VE, "level" }, 582 | + { "1.1", "", 0, AV_OPT_TYPE_CONST, { .i64 = 11 }, 0, 0, VE, "level" }, 583 | + { "1.2", "", 0, AV_OPT_TYPE_CONST, { .i64 = 12 }, 0, 0, VE, "level" }, 584 | + { "1.3", "", 0, AV_OPT_TYPE_CONST, { .i64 = 13 }, 0, 0, VE, "level" }, 585 | + { "2.0", "", 0, AV_OPT_TYPE_CONST, { .i64 = 20 }, 0, 0, VE, "level" }, 586 | + { "2.1", "", 0, AV_OPT_TYPE_CONST, { .i64 = 21 }, 0, 0, VE, "level" }, 587 | + { "2.2", "", 0, AV_OPT_TYPE_CONST, { .i64 = 22 }, 0, 0, VE, "level" }, 588 | + { "3.0", "", 0, AV_OPT_TYPE_CONST, { .i64 = 30 }, 0, 0, VE, "level" }, 589 | + { "3.1", "", 0, AV_OPT_TYPE_CONST, { .i64 = 31 }, 0, 0, VE, "level" }, 590 | + { "3.2", "", 0, AV_OPT_TYPE_CONST, { .i64 = 32 }, 0, 0, VE, "level" }, 591 | + { "4.0", "", 0, AV_OPT_TYPE_CONST, { .i64 = 40 }, 0, 0, VE, "level" }, 592 | + { "4.1", "", 0, AV_OPT_TYPE_CONST, { .i64 = 41 }, 0, 0, VE, "level" }, 593 | + { "4.2", "", 0, AV_OPT_TYPE_CONST, { .i64 = 42 }, 0, 0, VE, "level" }, 594 | + { "5.0", "", 0, AV_OPT_TYPE_CONST, { .i64 = 50 }, 0, 0, VE, "level" }, 595 | + { "5.1", "", 0, AV_OPT_TYPE_CONST, { .i64 = 51 }, 0, 0, VE, "level" }, 596 | + 597 | + { "rc", "Override the preset rate-control", OFFSET(rc), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, INT_MAX, VE, "rc" }, 598 | + { "cbr", "Constant bitrate mode", 0, AV_OPT_TYPE_CONST, { .i64 = 0 }, 0, 0, VE, "rc" }, 599 | + { "vbr", "Variable bitrate mode", 0, AV_OPT_TYPE_CONST, { .i64 = 1 }, 0, 0, VE, "rc" }, 600 | + 601 | + { "preset", "Set the encoding preset", OFFSET(preset), AV_OPT_TYPE_INT, { .i64 = 3 }, 1, 4, VE, "preset" }, 602 | + { "default", "", 0, AV_OPT_TYPE_CONST, { .i64 = 3 }, 0, 0, VE, "preset" }, 603 | + { "slow", "", 0, AV_OPT_TYPE_CONST, { .i64 = 4 }, 0, 0, VE, "preset" }, 604 | + { "medium", "", 0, AV_OPT_TYPE_CONST, { .i64 = 3 }, 0, 0, VE, "preset" }, 605 | + { "fast", "", 0, AV_OPT_TYPE_CONST, { .i64 = 2 }, 0, 0, VE, "preset" }, 606 | + { "ultrafast", "", 0, AV_OPT_TYPE_CONST, { .i64 = 1 }, 0, 0, VE, "preset" }, 607 | + { NULL } 608 | +}; 609 | + 610 | + 611 | 
+#define NVMPI_ENC_CLASS(NAME) \ 612 | + static const AVClass nvmpi_ ## NAME ## _enc_class = { \ 613 | + .class_name = #NAME "_nvmpi_encoder", \ 614 | + .item_name = av_default_item_name, \ 615 | + .option = options, \ 616 | + .version = LIBAVUTIL_VERSION_INT, \ 617 | + }; 618 | + 619 | + 620 | +#define NVMPI_ENC(NAME, LONGNAME, CODEC) \ 621 | + NVMPI_ENC_CLASS(NAME) \ 622 | + AVCodec ff_ ## NAME ## _nvmpi_encoder = { \ 623 | + .name = #NAME "_nvmpi" , \ 624 | + .long_name = NULL_IF_CONFIG_SMALL("nvmpi " LONGNAME " encoder wrapper"), \ 625 | + .type = AVMEDIA_TYPE_VIDEO, \ 626 | + .id = CODEC , \ 627 | + .priv_data_size = sizeof(nvmpiEncodeContext), \ 628 | + .priv_class = &nvmpi_ ## NAME ##_enc_class, \ 629 | + .init = nvmpi_encode_init, \ 630 | + .encode2 = nvmpi_encode_frame, \ 631 | + .close = nvmpi_encode_close, \ 632 | + .pix_fmts = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE },\ 633 | + .capabilities = AV_CODEC_CAP_HARDWARE | AV_CODEC_CAP_DELAY, \ 634 | + .defaults = defaults,\ 635 | + .wrapper_name = "nvmpi", \ 636 | + }; 637 | + 638 | +NVMPI_ENC(h264, "H.264", AV_CODEC_ID_H264); 639 | +NVMPI_ENC(hevc, "HEVC", AV_CODEC_ID_HEVC); 640 | -------------------------------------------------------------------------------- /nvmpi.h: -------------------------------------------------------------------------------- 1 | #ifndef __NVMPI_H__ 2 | #define __NVMPI_H__ 3 | #include 4 | #include 5 | 6 | typedef struct nvmpictx nvmpictx; 7 | 8 | typedef enum { 9 | NV_PIX_NV12, 10 | NV_PIX_YUV420 11 | }nvPixFormat; 12 | 13 | typedef struct _NVENCPARAM{ 14 | unsigned int width; 15 | unsigned int height; 16 | unsigned int profile; 17 | unsigned int level; 18 | unsigned int bitrate; 19 | unsigned int peak_bitrate; 20 | char enableLossless; 21 | char mode_vbr; 22 | char insert_spspps_idr; 23 | unsigned int iframe_interval; 24 | unsigned int idr_interval; 25 | unsigned int fps_n; 26 | unsigned int fps_d; 27 | int capture_num; 28 | unsigned int max_b_frames; 29 | unsigned int refs; 30 | unsigned int qmax; 31 | unsigned int qmin; 32 | unsigned int hw_preset_type; 33 | 34 | } nvEncParam; 35 | 36 | typedef struct _NVPACKET{ 37 | unsigned long flags; 38 | unsigned long payload_size; 39 | unsigned char *payload; 40 | unsigned long pts; 41 | } nvPacket; 42 | 43 | typedef struct _NVFRAME{ 44 | unsigned long flags; 45 | unsigned long payload_size[3]; 46 | unsigned char *payload[3]; 47 | unsigned int linesize[3]; 48 | nvPixFormat type; 49 | unsigned int width; 50 | unsigned int height; 51 | time_t timestamp; 52 | }nvFrame; 53 | 54 | 55 | 56 | typedef enum { 57 | NV_VIDEO_CodingUnused, 58 | NV_VIDEO_CodingH264, /**< H.264 */ 59 | NV_VIDEO_CodingMPEG4, /**< MPEG-4 */ 60 | NV_VIDEO_CodingMPEG2, /**< MPEG-2 */ 61 | NV_VIDEO_CodingVP8, /**< VP8 */ 62 | NV_VIDEO_CodingVP9, /**< VP9 */ 63 | NV_VIDEO_CodingHEVC, /**< H.265/HEVC */ 64 | } nvCodingType; 65 | 66 | #ifdef __cplusplus 67 | extern "C" { 68 | #endif 69 | 70 | nvmpictx* nvmpi_create_decoder(nvCodingType codingType,nvPixFormat pixFormat); 71 | 72 | int nvmpi_decoder_put_packet(nvmpictx* ctx,nvPacket* packet); 73 | 74 | int nvmpi_decoder_get_frame(nvmpictx* ctx,nvFrame* frame,bool wait); 75 | 76 | int nvmpi_decoder_close(nvmpictx* ctx); 77 | 78 | nvmpictx* nvmpi_create_encoder(nvCodingType codingType,nvEncParam * param); 79 | 80 | int nvmpi_encoder_put_frame(nvmpictx* ctx,nvFrame* frame); 81 | 82 | int nvmpi_encoder_get_packet(nvmpictx* ctx,nvPacket* packet); 83 | 84 | int nvmpi_encoder_close(nvmpictx* ctx); 85 | 86 | #ifdef __cplusplus 87 | } 88 
| #endif 89 | 90 | #endif /*__NVMPI_H__*/ 91 | -------------------------------------------------------------------------------- /nvmpi.pc.in: -------------------------------------------------------------------------------- 1 | prefix=@CMAKE_INSTALL_PREFIX@ 2 | exec_prefix=@CMAKE_INSTALL_PREFIX@ 3 | libdir=${exec_prefix}/lib 4 | includedir=${prefix}/include 5 | 6 | Name: @PROJECT_NAME@ 7 | Description: @PROJECT_DESCRIPTION@ 8 | Version: @PROJECT_VERSION@ 9 | 10 | Requires: 11 | Libs: -L${libdir} -lnvmpi 12 | Cflags: -I${includedir} 13 | -------------------------------------------------------------------------------- /nvmpi_dec.cpp: -------------------------------------------------------------------------------- 1 | 2 | #include "nvmpi.h" 3 | #include "NvVideoDecoder.h" 4 | #include "nvbuf_utils.h" 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | 13 | #define CHUNK_SIZE 4000000 14 | #define MAX_BUFFERS 32 15 | 16 | #define TEST_ERROR(condition, message, errorCode) \ 17 | if (condition) \ 18 | { \ 19 | std::cout<< message; \ 20 | } 21 | 22 | using namespace std; 23 | 24 | struct nvmpictx 25 | { 26 | NvVideoDecoder *dec{nullptr}; 27 | bool eos{false}; 28 | bool got_res_event{false}; 29 | int index{0}; 30 | unsigned int coded_width{0}; 31 | unsigned int coded_height{0}; 32 | int dst_dma_fd{0}; 33 | int numberCaptureBuffers{0}; 34 | int dmaBufferFileDescriptor[MAX_BUFFERS]; 35 | nvPixFormat out_pixfmt; 36 | unsigned int decoder_pixfmt{0}; 37 | std::thread * dec_capture_loop{nullptr}; 38 | std::mutex* mutex{nullptr}; 39 | std::condition_variable* has_frame_cv{nullptr}; 40 | std::queue * frame_pools{nullptr}; 41 | unsigned char * bufptr_0[MAX_BUFFERS]; 42 | unsigned char * bufptr_1[MAX_BUFFERS]; 43 | unsigned char * bufptr_2[MAX_BUFFERS]; 44 | unsigned int frame_size[MAX_NUM_PLANES]; 45 | unsigned int frame_linesize[MAX_NUM_PLANES]; 46 | unsigned long long timestamp[MAX_BUFFERS]; 47 | }; 48 | 49 | void respondToResolutionEvent(v4l2_format &format, v4l2_crop &crop,nvmpictx* ctx){ 50 | 51 | int32_t minimumDecoderCaptureBuffers; 52 | int ret=0; 53 | NvBufferCreateParams input_params = {0}; 54 | NvBufferCreateParams cParams = {0}; 55 | 56 | ret = ctx->dec->capture_plane.getFormat(format); 57 | TEST_ERROR(ret < 0, "Error: Could not get format from decoder capture plane", ret); 58 | 59 | ret = ctx->dec->capture_plane.getCrop(crop); 60 | TEST_ERROR(ret < 0, "Error: Could not get crop from decoder capture plane", ret); 61 | 62 | ctx->coded_width=crop.c.width; 63 | ctx->coded_height=crop.c.height; 64 | 65 | if(ctx->dst_dma_fd != -1) 66 | { 67 | NvBufferDestroy(ctx->dst_dma_fd); 68 | ctx->dst_dma_fd = -1; 69 | } 70 | 71 | input_params.payloadType = NvBufferPayload_SurfArray; 72 | input_params.width = crop.c.width; 73 | input_params.height = crop.c.height; 74 | input_params.layout = NvBufferLayout_Pitch; 75 | input_params.colorFormat = ctx->out_pixfmt==NV_PIX_NV12?NvBufferColorFormat_NV12: NvBufferColorFormat_YUV420; 76 | input_params.nvbuf_tag = NvBufferTag_VIDEO_DEC; 77 | 78 | ctx->dec->capture_plane.deinitPlane(); 79 | 80 | for (int index = 0; index < ctx->numberCaptureBuffers; index++) 81 | { 82 | if (ctx->dmaBufferFileDescriptor[index] != 0) 83 | { 84 | ret = NvBufferDestroy(ctx->dmaBufferFileDescriptor[index]); 85 | TEST_ERROR(ret < 0, "Failed to Destroy NvBuffer", ret); 86 | } 87 | 88 | } 89 | 90 | 91 | ret=ctx->dec->setCapturePlaneFormat(format.fmt.pix_mp.pixelformat,format.fmt.pix_mp.width,format.fmt.pix_mp.height); 92 | TEST_ERROR(ret < 0, "Error in 
setting decoder capture plane format", ret); 93 | 94 | ctx->dec->getMinimumCapturePlaneBuffers(minimumDecoderCaptureBuffers); 95 | TEST_ERROR(ret < 0, "Error while getting value of minimum capture plane buffers",ret); 96 | 97 | ctx->numberCaptureBuffers = minimumDecoderCaptureBuffers + 5; 98 | 99 | 100 | 101 | switch (format.fmt.pix_mp.colorspace) 102 | { 103 | case V4L2_COLORSPACE_SMPTE170M: 104 | if (format.fmt.pix_mp.quantization == V4L2_QUANTIZATION_DEFAULT) 105 | { 106 | // "Decoder colorspace ITU-R BT.601 with standard range luma (16-235)" 107 | cParams.colorFormat = NvBufferColorFormat_NV12; 108 | } 109 | else 110 | { 111 | //"Decoder colorspace ITU-R BT.601 with extended range luma (0-255)"; 112 | cParams.colorFormat = NvBufferColorFormat_NV12_ER; 113 | } 114 | break; 115 | case V4L2_COLORSPACE_REC709: 116 | if (format.fmt.pix_mp.quantization == V4L2_QUANTIZATION_DEFAULT) 117 | { 118 | //"Decoder colorspace ITU-R BT.709 with standard range luma (16-235)"; 119 | cParams.colorFormat = NvBufferColorFormat_NV12_709; 120 | } 121 | else 122 | { 123 | //"Decoder colorspace ITU-R BT.709 with extended range luma (0-255)"; 124 | cParams.colorFormat = NvBufferColorFormat_NV12_709_ER; 125 | } 126 | break; 127 | case V4L2_COLORSPACE_BT2020: 128 | { 129 | //"Decoder colorspace ITU-R BT.2020"; 130 | cParams.colorFormat = NvBufferColorFormat_NV12_2020; 131 | } 132 | break; 133 | default: 134 | if (format.fmt.pix_mp.quantization == V4L2_QUANTIZATION_DEFAULT) 135 | { 136 | //"Decoder colorspace ITU-R BT.601 with standard range luma (16-235)"; 137 | cParams.colorFormat = NvBufferColorFormat_NV12; 138 | } 139 | else 140 | { 141 | //"Decoder colorspace ITU-R BT.601 with extended range luma (0-255)"; 142 | cParams.colorFormat = NvBufferColorFormat_NV12_ER; 143 | } 144 | break; 145 | } 146 | 147 | 148 | 149 | ret = NvBufferCreateEx (&ctx->dst_dma_fd, &input_params); 150 | TEST_ERROR(ret == -1, "create dst_dmabuf failed", error); 151 | 152 | for (int index = 0; index < ctx->numberCaptureBuffers; index++) 153 | { 154 | cParams.width = crop.c.width; 155 | cParams.height = crop.c.height; 156 | cParams.layout = NvBufferLayout_BlockLinear; 157 | cParams.payloadType = NvBufferPayload_SurfArray; 158 | cParams.nvbuf_tag = NvBufferTag_VIDEO_DEC; 159 | 160 | ret = NvBufferCreateEx(&ctx->dmaBufferFileDescriptor[index], &cParams); 161 | TEST_ERROR(ret < 0, "Failed to create buffers", ret); 162 | 163 | } 164 | 165 | ctx->dec->capture_plane.reqbufs(V4L2_MEMORY_DMABUF, ctx->numberCaptureBuffers); 166 | TEST_ERROR(ret < 0, "Error in decoder capture plane streamon", ret); 167 | 168 | ctx->dec->capture_plane.setStreamStatus(true); 169 | TEST_ERROR(ret < 0, "Error in decoder capture plane streamon", ret); 170 | 171 | 172 | for (uint32_t i = 0; i < ctx->dec->capture_plane.getNumBuffers(); i++) 173 | { 174 | struct v4l2_buffer v4l2_buf; 175 | struct v4l2_plane planes[MAX_PLANES]; 176 | 177 | memset(&v4l2_buf, 0, sizeof(v4l2_buf)); 178 | memset(planes, 0, sizeof(planes)); 179 | 180 | v4l2_buf.index = i; 181 | v4l2_buf.m.planes = planes; 182 | v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 183 | v4l2_buf.memory = V4L2_MEMORY_DMABUF; 184 | v4l2_buf.m.planes[0].m.fd = ctx->dmaBufferFileDescriptor[i]; 185 | 186 | ret = ctx->dec->capture_plane.qBuffer(v4l2_buf, NULL); 187 | TEST_ERROR(ret < 0, "Error Qing buffer at output plane", ret); 188 | } 189 | 190 | ctx->got_res_event = true; 191 | } 192 | 193 | void *dec_capture_loop_fcn(void *arg){ 194 | nvmpictx* ctx=(nvmpictx*)arg; 195 | 196 | struct v4l2_format v4l2Format; 197 | struct 
v4l2_crop v4l2Crop; 198 | struct v4l2_event v4l2Event; 199 | int ret,buf_index=0; 200 | 201 | while (!(ctx->dec->isInError()||ctx->eos)){ 202 | NvBuffer *dec_buffer; 203 | 204 | ret = ctx->dec->dqEvent(v4l2Event, ctx->got_res_event ? 0 : 500); 205 | if (ret == 0) 206 | { 207 | switch (v4l2Event.type) 208 | { 209 | case V4L2_EVENT_RESOLUTION_CHANGE: 210 | respondToResolutionEvent(v4l2Format, v4l2Crop,ctx); 211 | continue; 212 | } 213 | } 214 | 215 | if (!ctx->got_res_event) { 216 | continue; 217 | } 218 | 219 | while(!ctx->eos){ 220 | struct v4l2_buffer v4l2_buf; 221 | struct v4l2_plane planes[MAX_PLANES]; 222 | v4l2_buf.m.planes = planes; 223 | 224 | if (ctx->dec->capture_plane.dqBuffer(v4l2_buf, &dec_buffer, NULL, 0)){ 225 | if (errno == EAGAIN) 226 | { 227 | usleep(1000); 228 | } 229 | else 230 | { 231 | 232 | ERROR_MSG("Error while calling dequeue at capture plane"); 233 | ctx->eos=true; 234 | } 235 | break; 236 | 237 | } 238 | 239 | dec_buffer->planes[0].fd = ctx->dmaBufferFileDescriptor[v4l2_buf.index]; 240 | NvBufferRect src_rect, dest_rect; 241 | src_rect.top = 0; 242 | src_rect.left = 0; 243 | src_rect.width = ctx->coded_width; 244 | src_rect.height = ctx->coded_height; 245 | dest_rect.top = 0; 246 | dest_rect.left = 0; 247 | dest_rect.width = ctx->coded_width; 248 | dest_rect.height = ctx->coded_height; 249 | 250 | NvBufferTransformParams transform_params; 251 | memset(&transform_params,0,sizeof(transform_params)); 252 | transform_params.transform_flag = NVBUFFER_TRANSFORM_FILTER; 253 | transform_params.transform_flip = NvBufferTransform_None; 254 | transform_params.transform_filter = NvBufferTransform_Filter_Smart; 255 | transform_params.src_rect = src_rect; 256 | transform_params.dst_rect = dest_rect; 257 | 258 | ctx->mutex->lock(); 259 | 260 | if(!ctx->eos){ 261 | 262 | ret = NvBufferTransform(dec_buffer->planes[0].fd, ctx->dst_dma_fd, &transform_params); 263 | TEST_ERROR(ret==-1, "Transform failed",ret); 264 | 265 | NvBufferParams parm; 266 | ret = NvBufferGetParams(ctx->dst_dma_fd, &parm); 267 | 268 | if(!ctx->frame_size[0]){ 269 | 270 | for(int index=0;indexbufptr_0[index]=new unsigned char[parm.psize[0]];//Y 272 | ctx->bufptr_1[index]=new unsigned char[parm.psize[1]];//UV or UU 273 | ctx->bufptr_2[index]=new unsigned char[parm.psize[2]];//VV 274 | } 275 | } 276 | 277 | 278 | ctx->frame_linesize[0]=parm.width[0]; 279 | ctx->frame_size[0]=parm.psize[0]; 280 | 281 | ctx->frame_linesize[1]=parm.width[1]; 282 | ctx->frame_size[1]=parm.psize[1]; 283 | ctx->frame_linesize[2]=parm.width[2]; 284 | ctx->frame_size[2]=parm.psize[2]; 285 | 286 | 287 | ret=NvBuffer2Raw(ctx->dst_dma_fd,0,parm.width[0],parm.height[0],ctx->bufptr_0[buf_index]); 288 | ret=NvBuffer2Raw(ctx->dst_dma_fd,1,parm.width[1],parm.height[1],ctx->bufptr_1[buf_index]); 289 | if(ctx->out_pixfmt==NV_PIX_YUV420) 290 | ret=NvBuffer2Raw(ctx->dst_dma_fd,2,parm.width[2],parm.height[2],ctx->bufptr_2[buf_index]); 291 | 292 | ctx->frame_pools->push(buf_index); 293 | ctx->timestamp[buf_index]= (v4l2_buf.timestamp.tv_usec % 1000000) + (v4l2_buf.timestamp.tv_sec * 1000000UL); 294 | 295 | buf_index=(buf_index+1)%MAX_BUFFERS; 296 | 297 | } 298 | 299 | ctx->mutex->unlock(); 300 | 301 | if (ctx->eos) { 302 | break; 303 | } 304 | 305 | ctx->has_frame_cv->notify_one(); 306 | 307 | v4l2_buf.m.planes[0].m.fd = ctx->dmaBufferFileDescriptor[v4l2_buf.index]; 308 | if (ctx->dec->capture_plane.qBuffer(v4l2_buf, NULL) < 0){ 309 | ERROR_MSG("Error while queueing buffer at decoder capture plane"); 310 | } 311 | } 312 | } 313 | 314 | // Wake all 
waiting threads at EOS or decoder error 315 | ctx->has_frame_cv->notify_all(); 316 | } 317 | 318 | nvmpictx* nvmpi_create_decoder(nvCodingType codingType,nvPixFormat pixFormat){ 319 | 320 | int ret; 321 | log_level = LOG_LEVEL_INFO; 322 | 323 | nvmpictx* ctx=new nvmpictx; 324 | 325 | ctx->dec = NvVideoDecoder::createVideoDecoder("dec0"); 326 | TEST_ERROR(!ctx->dec, "Could not create decoder",ret); 327 | 328 | ret=ctx->dec->subscribeEvent(V4L2_EVENT_RESOLUTION_CHANGE, 0, 0); 329 | TEST_ERROR(ret < 0, "Could not subscribe to V4L2_EVENT_RESOLUTION_CHANGE", ret); 330 | 331 | switch(codingType){ 332 | case NV_VIDEO_CodingH264: 333 | ctx->decoder_pixfmt=V4L2_PIX_FMT_H264; 334 | break; 335 | case NV_VIDEO_CodingHEVC: 336 | ctx->decoder_pixfmt=V4L2_PIX_FMT_H265; 337 | break; 338 | case NV_VIDEO_CodingMPEG4: 339 | ctx->decoder_pixfmt=V4L2_PIX_FMT_MPEG4; 340 | break; 341 | case NV_VIDEO_CodingMPEG2: 342 | ctx->decoder_pixfmt=V4L2_PIX_FMT_MPEG2; 343 | break; 344 | case NV_VIDEO_CodingVP8: 345 | ctx->decoder_pixfmt=V4L2_PIX_FMT_VP8; 346 | break; 347 | case NV_VIDEO_CodingVP9: 348 | ctx->decoder_pixfmt=V4L2_PIX_FMT_VP9; 349 | break; 350 | default: 351 | ctx->decoder_pixfmt=V4L2_PIX_FMT_H264; 352 | break; 353 | } 354 | 355 | ret=ctx->dec->setOutputPlaneFormat(ctx->decoder_pixfmt, CHUNK_SIZE); 356 | 357 | TEST_ERROR(ret < 0, "Could not set output plane format", ret); 358 | 359 | //ctx->nalu_parse_buffer = new char[CHUNK_SIZE]; 360 | ret = ctx->dec->setFrameInputMode(0); 361 | TEST_ERROR(ret < 0, "Error in decoder setFrameInputMode for NALU", ret); 362 | 363 | ret = ctx->dec->output_plane.setupPlane(V4L2_MEMORY_USERPTR, 10, false, true); 364 | TEST_ERROR(ret < 0, "Error while setting up output plane", ret); 365 | 366 | ctx->dec->output_plane.setStreamStatus(true); 367 | TEST_ERROR(ret < 0, "Error in output plane stream on", ret); 368 | 369 | ctx->out_pixfmt=pixFormat; 370 | ctx->dst_dma_fd=-1; 371 | ctx->eos=false; 372 | ctx->got_res_event=false; 373 | ctx->index=0; 374 | ctx->frame_size[0]=0; 375 | ctx->frame_pools=new std::queue; 376 | ctx->mutex = new std::mutex(); 377 | ctx->has_frame_cv = new std::condition_variable(); 378 | for(int index=0;indexdmaBufferFileDescriptor[index]=0; 380 | for(int index=0;indexbufptr_0[index] = nullptr; 382 | ctx->bufptr_1[index] = nullptr; 383 | ctx->bufptr_2[index] = nullptr; 384 | } 385 | ctx->numberCaptureBuffers=0; 386 | ctx->dec_capture_loop=new thread(dec_capture_loop_fcn,ctx); 387 | 388 | return ctx; 389 | } 390 | 391 | 392 | 393 | 394 | int nvmpi_decoder_put_packet(nvmpictx* ctx,nvPacket* packet){ 395 | int ret; 396 | 397 | struct v4l2_buffer v4l2_buf; 398 | struct v4l2_plane planes[MAX_PLANES]; 399 | NvBuffer *nvBuffer; 400 | 401 | memset(&v4l2_buf, 0, sizeof(v4l2_buf)); 402 | memset(planes, 0, sizeof(planes)); 403 | 404 | v4l2_buf.m.planes = planes; 405 | 406 | if (ctx->index < (int)ctx->dec->output_plane.getNumBuffers()) { 407 | nvBuffer = ctx->dec->output_plane.getNthBuffer(ctx->index); 408 | } else { 409 | ret = ctx->dec->output_plane.dqBuffer(v4l2_buf, &nvBuffer, NULL, -1); 410 | if (ret < 0) { 411 | cout << "Error DQing buffer at output plane" << std::endl; 412 | return false; 413 | } 414 | } 415 | 416 | memcpy(nvBuffer->planes[0].data,packet->payload,packet->payload_size); 417 | nvBuffer->planes[0].bytesused=packet->payload_size; 418 | 419 | 420 | 421 | if (ctx->index < ctx->dec->output_plane.getNumBuffers()) 422 | { 423 | v4l2_buf.index = ctx->index ; 424 | v4l2_buf.m.planes = planes; 425 | } 426 | 427 | v4l2_buf.m.planes[0].bytesused = 
nvBuffer->planes[0].bytesused; 428 | 429 | v4l2_buf.flags |= V4L2_BUF_FLAG_TIMESTAMP_COPY; 430 | v4l2_buf.timestamp.tv_sec = packet->pts / 1000000; 431 | v4l2_buf.timestamp.tv_usec = packet->pts % 1000000; 432 | 433 | 434 | ret = ctx->dec->output_plane.qBuffer(v4l2_buf, NULL); 435 | if (ret < 0) { 436 | std::cout << "Error Qing buffer at output plane" << std::endl; 437 | return false; 438 | } 439 | 440 | if (ctx->index < ctx->dec->output_plane.getNumBuffers()) 441 | ctx->index++; 442 | 443 | if (v4l2_buf.m.planes[0].bytesused == 0) { 444 | ctx->eos=true; 445 | std::cout << "Input file read complete" << std::endl; 446 | } 447 | 448 | 449 | return 0; 450 | 451 | } 452 | 453 | 454 | int nvmpi_decoder_get_frame(nvmpictx* ctx,nvFrame* frame,bool wait){ 455 | 456 | int ret,picture_index; 457 | std::unique_lock lock(*ctx->mutex); 458 | 459 | if (wait) { 460 | while (ctx->frame_pools->empty() && !ctx->eos && !ctx->dec->isInError()) { 461 | ctx->has_frame_cv->wait(lock); 462 | } 463 | } 464 | 465 | if (ctx->frame_pools->empty()) { 466 | return -1; 467 | } 468 | 469 | picture_index=ctx->frame_pools->front(); 470 | ctx->frame_pools->pop(); 471 | 472 | frame->width=ctx->coded_width; 473 | frame->height=ctx->coded_height; 474 | 475 | frame->linesize[0]=ctx->frame_linesize[0]; 476 | frame->linesize[1]=ctx->frame_linesize[1]; 477 | frame->linesize[2]=ctx->frame_linesize[2]; 478 | 479 | frame->payload[0]=ctx->bufptr_0[picture_index]; 480 | frame->payload[1]=ctx->bufptr_1[picture_index]; 481 | frame->payload[2]=ctx->bufptr_2[picture_index]; 482 | 483 | frame->payload_size[0]=ctx->frame_size[0]; 484 | frame->payload_size[1]=ctx->frame_size[1]; 485 | frame->payload_size[2]=ctx->frame_size[2]; 486 | frame->timestamp=ctx->timestamp[picture_index]; 487 | 488 | return 0; 489 | 490 | } 491 | 492 | int nvmpi_decoder_close(nvmpictx* ctx){ 493 | 494 | ctx->mutex->lock(); 495 | ctx->eos=true; 496 | ctx->mutex->unlock(); 497 | 498 | ctx->dec->capture_plane.setStreamStatus(false); 499 | 500 | if (ctx->dec_capture_loop) { 501 | ctx->dec_capture_loop->join(); 502 | delete ctx->dec_capture_loop; 503 | ctx->dec_capture_loop = nullptr; 504 | } 505 | 506 | if(ctx->dst_dma_fd != -1) 507 | { 508 | NvBufferDestroy(ctx->dst_dma_fd); 509 | ctx->dst_dma_fd = -1; 510 | } 511 | 512 | for (int index = 0; index < ctx->numberCaptureBuffers; index++) 513 | { 514 | if (ctx->dmaBufferFileDescriptor[index] != 0) 515 | { 516 | int ret = NvBufferDestroy(ctx->dmaBufferFileDescriptor[index]); 517 | TEST_ERROR(ret < 0, "Failed to Destroy NvBuffer", ret); 518 | } 519 | 520 | } 521 | 522 | delete ctx->dec; ctx->dec = nullptr; 523 | 524 | for(int index=0;indexbufptr_0[index]; 526 | delete[] ctx->bufptr_1[index]; 527 | delete[] ctx->bufptr_2[index]; 528 | } 529 | 530 | delete ctx->mutex; ctx->mutex = nullptr; 531 | delete ctx->has_frame_cv; ctx->has_frame_cv = nullptr; 532 | delete ctx->frame_pools; ctx->frame_pools = nullptr; 533 | 534 | delete ctx; ctx = nullptr; 535 | 536 | return 0; 537 | } 538 | 539 | 540 | -------------------------------------------------------------------------------- /nvmpi_enc.cpp: -------------------------------------------------------------------------------- 1 | #include "nvmpi.h" 2 | #include "NvVideoEncoder.h" 3 | #include "nvbuf_utils.h" 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | 10 | #define CHUNK_SIZE 2*1024*1024 11 | #define MAX_BUFFERS 32 12 | #define TEST_ERROR(condition, message, errorCode) \ 13 | if (condition) \ 14 | { \ 15 | std::cout<< message; \ 16 | } 17 | 18 | 19 | using 
namespace std; 20 | 21 | struct nvmpictx{ 22 | NvVideoEncoder *enc; 23 | int index; 24 | std::queue * packet_pools; 25 | uint32_t width; 26 | uint32_t height; 27 | uint32_t profile; 28 | bool enableLossless; 29 | uint32_t bitrate; 30 | uint32_t peak_bitrate; 31 | uint32_t raw_pixfmt; 32 | uint32_t encoder_pixfmt; 33 | enum v4l2_mpeg_video_bitrate_mode ratecontrol; 34 | enum v4l2_mpeg_video_h264_level level; 35 | enum v4l2_enc_hw_preset_type hw_preset_type; 36 | uint32_t iframe_interval; 37 | uint32_t idr_interval; 38 | uint32_t fps_n; 39 | uint32_t fps_d; 40 | bool enable_extended_colorformat; 41 | uint32_t qmax; 42 | uint32_t qmin; 43 | uint32_t num_b_frames; 44 | uint32_t num_reference_frames; 45 | bool insert_sps_pps_at_idr; 46 | 47 | uint32_t packets_buf_size; 48 | uint32_t packets_num; 49 | unsigned char * packets[MAX_BUFFERS]; 50 | uint32_t packets_size[MAX_BUFFERS]; 51 | bool packets_keyflag[MAX_BUFFERS]; 52 | uint64_t timestamp[MAX_BUFFERS]; 53 | int buf_index; 54 | }; 55 | 56 | 57 | static bool encoder_capture_plane_dq_callback(struct v4l2_buffer *v4l2_buf, NvBuffer * buffer, NvBuffer * shared_buffer, void *arg){ 58 | 59 | nvmpictx *ctx = (nvmpictx *) arg; 60 | NvVideoEncoder *enc = ctx->enc; 61 | //uint32_t frame_num = ctx->enc->capture_plane.getTotalDequeuedBuffers() - 1; 62 | 63 | if (v4l2_buf == NULL) 64 | { 65 | cout << "Error while dequeing buffer from output plane" << endl; 66 | return false; 67 | } 68 | 69 | if (buffer->planes[0].bytesused == 0) 70 | { 71 | cout << "Got 0 size buffer in capture \n"; 72 | return false; 73 | } 74 | 75 | if(ctx->packets_buf_size < buffer->planes[0].bytesused){ 76 | 77 | ctx->packets_buf_size=buffer->planes[0].bytesused; 78 | 79 | for(int index=0;index< ctx->packets_num;index++){ 80 | delete[] ctx->packets[index]; 81 | ctx->packets[index]=new unsigned char[ctx->packets_buf_size]; 82 | } 83 | } 84 | 85 | ctx->packets_size[ctx->buf_index]=buffer->planes[0].bytesused; 86 | memcpy(ctx->packets[ctx->buf_index],buffer->planes[0].data,buffer->planes[0].bytesused); 87 | 88 | ctx->timestamp[ctx->buf_index] = (v4l2_buf->timestamp.tv_usec % 1000000) + (v4l2_buf->timestamp.tv_sec * 1000000UL); 89 | 90 | ctx->packet_pools->push(ctx->buf_index); 91 | 92 | v4l2_ctrl_videoenc_outputbuf_metadata enc_metadata; 93 | ctx->enc->getMetadata(v4l2_buf->index, enc_metadata); 94 | if(enc_metadata.KeyFrame){ 95 | ctx->packets_keyflag[ctx->buf_index]=true; 96 | }else{ 97 | ctx->packets_keyflag[ctx->buf_index]=false; 98 | } 99 | 100 | ctx->buf_index=(ctx->buf_index+1)%ctx->packets_num; 101 | 102 | if (ctx->enc->capture_plane.qBuffer(*v4l2_buf, NULL) < 0) 103 | { 104 | 105 | ERROR_MSG("Error while Qing buffer at capture plane"); 106 | return false; 107 | } 108 | 109 | return true; 110 | } 111 | 112 | 113 | nvmpictx* nvmpi_create_encoder(nvCodingType codingType,nvEncParam * param){ 114 | 115 | int ret; 116 | log_level = LOG_LEVEL_INFO; 117 | nvmpictx *ctx=new nvmpictx; 118 | ctx->index=0; 119 | ctx->width=param->width; 120 | ctx->height=param->height; 121 | ctx->enableLossless=false; 122 | ctx->bitrate=param->bitrate; 123 | ctx->ratecontrol = V4L2_MPEG_VIDEO_BITRATE_MODE_CBR; 124 | ctx->idr_interval = param->idr_interval; 125 | ctx->fps_n = param->fps_n; 126 | ctx->fps_d = param->fps_d; 127 | ctx->iframe_interval = param->iframe_interval; 128 | ctx->packet_pools=new std::queue; 129 | ctx->buf_index=0; 130 | ctx->enable_extended_colorformat=false; 131 | ctx->packets_num=param->capture_num; 132 | ctx->qmax=param->qmax; 133 | ctx->qmin=param->qmin; 134 | 
ctx->num_b_frames=param->max_b_frames; 135 | ctx->num_reference_frames=param->refs; 136 | ctx->insert_sps_pps_at_idr=(param->insert_spspps_idr==1)?true:false; 137 | 138 | switch(param->profile){ 139 | case 77://FF_PROFILE_H264_MAIN 140 | ctx->profile=V4L2_MPEG_VIDEO_H264_PROFILE_MAIN; 141 | break; 142 | case 66://FF_PROFILE_H264_BASELINE 143 | ctx->profile=V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE; 144 | break; 145 | case 100://FF_PROFILE_H264_HIGH 146 | ctx->profile=V4L2_MPEG_VIDEO_H264_PROFILE_HIGH; 147 | break; 148 | 149 | default: 150 | ctx->profile=V4L2_MPEG_VIDEO_H264_PROFILE_MAIN; 151 | break; 152 | 153 | } 154 | 155 | switch(param->level){ 156 | case 10: 157 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_1_0; 158 | break; 159 | case 11: 160 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_1_1; 161 | break; 162 | case 12: 163 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_1_2; 164 | break; 165 | case 13: 166 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_1_3; 167 | break; 168 | case 20: 169 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_2_0; 170 | break; 171 | case 21: 172 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_2_1; 173 | break; 174 | case 22: 175 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_2_2; 176 | break; 177 | case 30: 178 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_3_0; 179 | break; 180 | case 31: 181 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_3_1; 182 | break; 183 | case 32: 184 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_3_2; 185 | break; 186 | case 40: 187 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_4_0; 188 | break; 189 | case 41: 190 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_4_1; 191 | break; 192 | case 42: 193 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_4_2; 194 | break; 195 | case 50: 196 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_5_0; 197 | break; 198 | case 51: 199 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_5_1; 200 | break; 201 | default: 202 | ctx->level=V4L2_MPEG_VIDEO_H264_LEVEL_5_1; 203 | break; 204 | } 205 | 206 | switch(param->hw_preset_type){ 207 | case 1: 208 | ctx->hw_preset_type = V4L2_ENC_HW_PRESET_ULTRAFAST; 209 | break; 210 | case 2: 211 | ctx->hw_preset_type = V4L2_ENC_HW_PRESET_FAST; 212 | break; 213 | case 3: 214 | ctx->hw_preset_type = V4L2_ENC_HW_PRESET_MEDIUM; 215 | break; 216 | case 4: 217 | ctx->hw_preset_type = V4L2_ENC_HW_PRESET_SLOW; 218 | break; 219 | default: 220 | ctx->hw_preset_type = V4L2_ENC_HW_PRESET_MEDIUM; 221 | break; 222 | 223 | } 224 | 225 | 226 | 227 | if(param->enableLossless) 228 | ctx->enableLossless=true; 229 | 230 | if(param->mode_vbr) 231 | ctx->ratecontrol=V4L2_MPEG_VIDEO_BITRATE_MODE_VBR; 232 | 233 | ctx->packets_buf_size=CHUNK_SIZE; 234 | 235 | for(int index=0;indexpackets[index]=new unsigned char[ctx->packets_buf_size]; 237 | 238 | if(codingType==NV_VIDEO_CodingH264){ 239 | ctx->encoder_pixfmt=V4L2_PIX_FMT_H264; 240 | }else if(codingType==NV_VIDEO_CodingHEVC){ 241 | ctx->encoder_pixfmt=V4L2_PIX_FMT_H265; 242 | } 243 | ctx->enc=NvVideoEncoder::createVideoEncoder("enc0"); 244 | TEST_ERROR(!ctx->enc, "Could not create encoder",ret); 245 | 246 | ret = ctx->enc->setCapturePlaneFormat(ctx->encoder_pixfmt, ctx->width,ctx->height, CHUNK_SIZE); 247 | 248 | TEST_ERROR(ret < 0, "Could not set output plane format", ret); 249 | 250 | switch (ctx->profile) 251 | { 252 | case V4L2_MPEG_VIDEO_H265_PROFILE_MAIN10: 253 | ctx->raw_pixfmt = V4L2_PIX_FMT_P010M; 254 | break; 255 | case V4L2_MPEG_VIDEO_H265_PROFILE_MAIN: 256 | default: 257 | ctx->raw_pixfmt = V4L2_PIX_FMT_YUV420M; 258 | } 259 | 260 | if (ctx->enableLossless && codingType == NV_VIDEO_CodingH264) 261 | { 262 | ctx->profile = 
	if (ctx->enableLossless && codingType == NV_VIDEO_CodingH264)
	{
		ctx->profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE;
		ret = ctx->enc->setOutputPlaneFormat(V4L2_PIX_FMT_YUV444M, ctx->width,ctx->height);
	}
	else{
		ret = ctx->enc->setOutputPlaneFormat(ctx->raw_pixfmt, ctx->width,ctx->height);
	}

	TEST_ERROR(ret < 0, "Could not set output plane format", ret);

	ret = ctx->enc->setBitrate(ctx->bitrate);
	TEST_ERROR(ret < 0, "Could not set encoder bitrate", ret);

	ret=ctx->enc->setHWPresetType(ctx->hw_preset_type);
	TEST_ERROR(ret < 0, "Could not set encoder HW Preset Type", ret);

	if(ctx->num_reference_frames){
		ret = ctx->enc->setNumReferenceFrames(ctx->num_reference_frames);
		TEST_ERROR(ret < 0, "Could not set num reference frames", ret);
	}

	if(ctx->num_b_frames != (uint32_t) -1 && codingType == NV_VIDEO_CodingH264 ){
		ret = ctx->enc->setNumBFrames(ctx->num_b_frames);
		TEST_ERROR(ret < 0, "Could not set number of B Frames", ret);
	}

	if (codingType == NV_VIDEO_CodingH264 || codingType == NV_VIDEO_CodingHEVC)
	{
		ret = ctx->enc->setProfile(ctx->profile);
		TEST_ERROR(ret < 0, "Could not set encoder profile", ret);
	}

	if( codingType== NV_VIDEO_CodingH264){
		ret = ctx->enc->setLevel(ctx->level);
		TEST_ERROR(ret < 0, "Could not set encoder level", ret);
	}

	if (ctx->enableLossless){
		ret = ctx->enc->setConstantQp(0);
		TEST_ERROR(ret < 0, "Could not set encoder constant qp=0", ret);
	}else{
		ret = ctx->enc->setRateControlMode(ctx->ratecontrol);
		TEST_ERROR(ret < 0, "Could not set encoder rate control mode", ret);

		if (ctx->ratecontrol == V4L2_MPEG_VIDEO_BITRATE_MODE_VBR){

			uint32_t peak_bitrate;
			if (ctx->peak_bitrate < ctx->bitrate)
				peak_bitrate = 1.2f * ctx->bitrate;
			else
				peak_bitrate = ctx->peak_bitrate;
			ret = ctx->enc->setPeakBitrate(peak_bitrate);
			TEST_ERROR(ret < 0, "Could not set encoder peak bitrate", ret);
		}
	}

	ret = ctx->enc->setIDRInterval(ctx->idr_interval);
	TEST_ERROR(ret < 0, "Could not set encoder IDR interval", ret);

	if(ctx->qmax>0 ||ctx->qmin >0){
		ctx->enc->setQpRange(ctx->qmin, ctx->qmax, ctx->qmin,ctx->qmax, ctx->qmin, ctx->qmax);
	}

	ret = ctx->enc->setIFrameInterval(ctx->iframe_interval);
	TEST_ERROR(ret < 0, "Could not set encoder I-Frame interval", ret);

	if(ctx->insert_sps_pps_at_idr){
		ret = ctx->enc->setInsertSpsPpsAtIdrEnabled(true);
		TEST_ERROR(ret < 0, "Could not set insertSPSPPSAtIDR", ret);
	}

	ret = ctx->enc->setFrameRate(ctx->fps_n, ctx->fps_d);
	TEST_ERROR(ret < 0, "Could not set framerate", ret);

	ret = ctx->enc->output_plane.setupPlane(V4L2_MEMORY_USERPTR, ctx->packets_num, false, true);
	TEST_ERROR(ret < 0, "Could not setup output plane", ret);

	ret = ctx->enc->capture_plane.setupPlane(V4L2_MEMORY_MMAP, ctx->packets_num, true, false);
	TEST_ERROR(ret < 0, "Could not setup capture plane", ret);

	ret = ctx->enc->subscribeEvent(V4L2_EVENT_EOS,0,0);
	TEST_ERROR(ret < 0, "Could not subscribe EOS event", ret);

	ret = ctx->enc->output_plane.setStreamStatus(true);
	TEST_ERROR(ret < 0, "Error in output plane streamon", ret);

	ret = ctx->enc->capture_plane.setStreamStatus(true);
	TEST_ERROR(ret < 0, "Error in capture plane streamon", ret);

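	// With both planes streaming, register the capture-plane dequeue callback and start its DQ thread,
	// then pre-queue every empty capture buffer so encoded output can be collected asynchronously.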
	ctx->enc->capture_plane.setDQThreadCallback(encoder_capture_plane_dq_callback);

	ctx->enc->capture_plane.startDQThread(ctx);

	// Enqueue all the empty capture plane buffers
	for (uint32_t i = 0; i < ctx->enc->capture_plane.getNumBuffers(); i++){
		struct v4l2_buffer v4l2_buf;
		struct v4l2_plane planes[MAX_PLANES];
		memset(&v4l2_buf, 0, sizeof(v4l2_buf));
		memset(planes, 0, MAX_PLANES * sizeof(struct v4l2_plane));

		v4l2_buf.index = i;
		v4l2_buf.m.planes = planes;

		ret = ctx->enc->capture_plane.qBuffer(v4l2_buf, NULL);
		TEST_ERROR(ret < 0, "Error while queueing buffer at capture plane", ret);
	}

	return ctx;
}


int nvmpi_encoder_put_frame(nvmpictx* ctx,nvFrame* frame){
	int ret;

	struct v4l2_buffer v4l2_buf;
	struct v4l2_plane planes[MAX_PLANES];
	NvBuffer *nvBuffer;

	memset(&v4l2_buf, 0, sizeof(v4l2_buf));
	memset(planes, 0, sizeof(planes));

	v4l2_buf.m.planes = planes;

	if(ctx->enc->isInError())
		return -1;

	// Use the pre-allocated output-plane buffers first, then start dequeuing already-used ones.
	if(ctx->index < ctx->enc->output_plane.getNumBuffers()){

		nvBuffer=ctx->enc->output_plane.getNthBuffer(ctx->index);
		v4l2_buf.index = ctx->index ;
		ctx->index++;

	}else{
		ret = ctx->enc->output_plane.dqBuffer(v4l2_buf, &nvBuffer, NULL, -1);
		if (ret < 0) {
			cout << "Error DQing buffer at output plane" << std::endl;
			return -1;
		}
	}

	memcpy(nvBuffer->planes[0].data,frame->payload[0],frame->payload_size[0]);
	memcpy(nvBuffer->planes[1].data,frame->payload[1],frame->payload_size[1]);
	memcpy(nvBuffer->planes[2].data,frame->payload[2],frame->payload_size[2]);
	nvBuffer->planes[0].bytesused=frame->payload_size[0];
	nvBuffer->planes[1].bytesused=frame->payload_size[1];
	nvBuffer->planes[2].bytesused=frame->payload_size[2];

	v4l2_buf.flags |= V4L2_BUF_FLAG_TIMESTAMP_COPY;
	v4l2_buf.timestamp.tv_usec = frame->timestamp % 1000000;
	v4l2_buf.timestamp.tv_sec = frame->timestamp / 1000000;

	ret = ctx->enc->output_plane.qBuffer(v4l2_buf, NULL);
	TEST_ERROR(ret < 0, "Error while queueing buffer at output plane", ret);

	return 0;
}

int nvmpi_encoder_get_packet(nvmpictx* ctx,nvPacket* packet){

	int ret,packet_index;

	if(ctx->packet_pools->empty())
		return -1;

	packet_index= ctx->packet_pools->front();

	auto ts = ctx->timestamp[packet_index];
	auto size = ctx->packets_size[packet_index];
	if((ts > 0) && (size == 0)) // stale entry whose size was already cleared; skip it
	{
		return -1;
	}

	packet->payload=ctx->packets[packet_index];
	packet->pts=ts;

	packet->payload_size=size;
	if(ctx->packets_keyflag[packet_index])
		packet->flags|= 0x0001;//AV_PKT_FLAG_KEY 0x0001
	ctx->packets_size[packet_index] = 0; // mark as read
	ctx->packet_pools->pop();
	return 0;
}

int nvmpi_encoder_close(nvmpictx* ctx){

	ctx->enc->capture_plane.stopDQThread();
	ctx->enc->capture_plane.waitForDQThread(1000);
	delete ctx->enc;
	// Release the packet ring buffers allocated in nvmpi_create_encoder.
	for(int index=0;index<MAX_BUFFERS;index++)
		delete[] ctx->packets[index];
	delete ctx->packet_pools;
	delete ctx;
	return 0;
}

--------------------------------------------------------------------------------
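For orientation, here is a minimal sketch of how the encoder side of this library can be driven. It is illustrative only and not part of the repository: it assumes a caller-provided, already-filled YUV420M nvFrame, assumes nvEncParam and nvPacket are plain structs (as declared in nvmpi.h) that may be zero-initialized, and the numeric parameter values are arbitrary examples.

    // example_encode.cpp -- hypothetical usage sketch, not part of jetson-ffmpeg
    #include "nvmpi.h"
    #include <cstring>
    #include <cstdio>

    int encode_one_frame_example(nvFrame *frame){
        nvEncParam param;
        memset(&param, 0, sizeof(param));      // assumption: nvEncParam is a POD struct
        param.width = 1280;                    // dimensions of the raw YUV420M input
        param.height = 720;
        param.bitrate = 4000000;               // 4 Mbit/s, CBR since mode_vbr stays 0
        param.fps_n = 30;
        param.fps_d = 1;
        param.iframe_interval = 30;
        param.idr_interval = 30;
        param.capture_num = 10;                // capture-plane buffers / packet ring slots
        param.profile = 100;                   // FF_PROFILE_H264_HIGH (see switch in nvmpi_create_encoder)
        param.level = 41;
        param.hw_preset_type = 3;              // maps to V4L2_ENC_HW_PRESET_MEDIUM
        param.max_b_frames = 0;
        param.refs = 1;
        param.insert_spspps_idr = 1;

        nvmpictx *ctx = nvmpi_create_encoder(NV_VIDEO_CodingH264, &param);
        if(!ctx)
            return -1;

        // Feed one raw frame (payload/payload_size per plane, timestamp in microseconds).
        if(nvmpi_encoder_put_frame(ctx, frame) < 0)
            return -1;

        // Poll for an encoded packet; nvmpi_encoder_get_packet() returns -1 while nothing is ready.
        nvPacket packet;
        memset(&packet, 0, sizeof(packet));
        if(nvmpi_encoder_get_packet(ctx, &packet) == 0)
            printf("packet: %u bytes, pts=%llu, keyframe=%d\n",
                   (unsigned)packet.payload_size,
                   (unsigned long long)packet.pts,
                   (int)(packet.flags & 0x0001));

        nvmpi_encoder_close(ctx);
        return 0;
    }

In the real integration, ffmpeg_nvmpi.patch wires these same calls into FFmpeg's encoder interface, so the put_frame/get_packet polling loop above is handled by FFmpeg itself.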