├── CMakeLists.txt
├── src
│   ├── rgb2yuv
│   │   ├── rgb2yuv.cpp
│   │   └── rgb2yuv.h
│   └── rtsp
│       ├── DD_H264VideoFileServerMediaSubsession.cpp
│       ├── RTSP.cpp
│       ├── StreamEncoder.cpp
│       ├── demo.cpp
│       ├── include
│       │   ├── DD_H264VideoFileServerMediaSubsession.hh
│       │   ├── RTSP.hh
│       │   └── StreamEncoder.hh
│       └── testOnDemandRTSPServer.cpp
└── 收流结果.mp4
-------------------------------------------------------------------------------- /CMakeLists.txt: --------------------------------------------------------------------------------
# Build configuration for the RK (Rockchip) RTSP streaming demo.
# FIX: cmake_minimum_required must be the first command, before project()
# (CMake documents that calling project() first leaves policies unset).
CMAKE_MINIMUM_REQUIRED(VERSION 3.5.1)
PROJECT(ZSXL)
set(CMAKE_CXX_STANDARD 14)
if(COMMAND cmake_policy)
  cmake_policy(SET CMP0003 NEW)
endif(COMMAND cmake_policy)

set(CMAKE_BUILD_TYPE "Debug")
# set(CMAKE_BUILD_TYPE "Release")

# NOTE(review): demo.cpp defines main(), and testOnDemandRTSPServer.cpp is a
# live555 sample that normally also defines main() — linking both into the
# single "rtsp" executable will fail with a duplicate-symbol error unless one
# of them has its main() compiled out.  TODO: confirm and drop one of the two.
set(RTSP_SRCS
    ${CMAKE_CURRENT_SOURCE_DIR}/src/rtsp/DD_H264VideoFileServerMediaSubsession.cpp
    ${CMAKE_CURRENT_SOURCE_DIR}/src/rtsp/StreamEncoder.cpp
    ${CMAKE_CURRENT_SOURCE_DIR}/src/rtsp/testOnDemandRTSPServer.cpp
    ${CMAKE_CURRENT_SOURCE_DIR}/src/rtsp/RTSP.cpp
    ${CMAKE_CURRENT_SOURCE_DIR}/src/rtsp/demo.cpp
    ${CMAKE_CURRENT_SOURCE_DIR}/src/rgb2yuv/rgb2yuv.cpp
)
# opencv
find_package(OpenCV REQUIRED)
message(STATUS ${OpenCV_LIBRARY_DIRS})
# include paths: ffmpeg plus the four live555 module header directories
include_directories(
    ${OpenCV_INCLUDE_DIRS}
    /usr/include
    /usr/include/ffmpeg
    /usr/include/groupsock
    /usr/include/liveMedia
    /usr/include/BasicUsageEnvironment
    /usr/include/UsageEnvironment
)

# add_executable(zsxl ${SRCS})
# target_link_libraries(zsxl ${OpenCV_LIBS} pthread )

LINK_DIRECTORIES(/usr/lib64)
add_executable(rtsp ${RTSP_SRCS})
target_link_libraries(rtsp ${OpenCV_LIBS} pthread -lrockchip_mpp -lrockchip_rga swscale avformat avcodec UsageEnvironment groupsock BasicUsageEnvironment liveMedia)
-------------------------------------------------------------------------------- /src/rgb2yuv/rgb2yuv.cpp: --------------------------------------------------------------------------------
// ===================== /src/rgb2yuv/rgb2yuv.h =====================
#ifndef RGB2YUV_H
#define RGB2YUV_H

// FIX: the original declared `typedef unsigned char uint8_t;` here.
// Redefining a standard <cstdint> name is ill-formed as soon as any other
// translation unit pulls in the real typedef; include the standard header
// instead (the underlying type is identical, so callers are unaffected).
#include <cstdint>

#define COLORSIZE 256

// Fixed-point RGB -> planar YUV420 (I420) converter.  The per-channel
// multiplications are precomputed once into Q12 lookup tables by the ctor.
class Rgb2YUV
{
public:

    typedef struct RGB
    {
        unsigned char r;
        unsigned char g;
        unsigned char b;
    } RGB;

    Rgb2YUV();

    /**
     * @brief Convert a packed 3-byte-per-pixel image to planar YUV420
     *        (I420 layout: W*H bytes of Y, then W/2*H/2 bytes of U, then
     *        W/2*H/2 bytes of V).  The image is flipped vertically and the
     *        first/third channels are swapped during conversion, i.e. the
     *        routine effectively consumes bottom-up BGR input.
     * @param rgbBufIn  input buffer, nWidth*nHeight*3 bytes
     * @param yuvBufOut output buffer, at least nWidth*nHeight*3/2 bytes
     * @param nWidth    image width in pixels  (even values expected)
     * @param nHeight   image height in pixels (even values expected)
     *
     * NOTE(review): the caller in RTSP.cpp feeds this output to an encoder
     * configured for NV12 (interleaved UV plane), but this routine emits
     * I420 (separate U and V planes) — confirm which layout is wanted.
     */
    void RGB2YUV420(uint8_t* rgbBufIn, uint8_t* yuvBufOut, int nWidth, int nHeight);

private:
    // Q12 fixed-point lookup tables.  There is no V_R table because the V
    // coefficient for R is 0.5, identical to U's B coefficient, so U_B is
    // reused for that term (see RGB2YUV420).
    unsigned short Y_R[COLORSIZE], Y_G[COLORSIZE], Y_B[COLORSIZE],
                   U_R[COLORSIZE], U_G[COLORSIZE], U_B[COLORSIZE],
                   V_G[COLORSIZE], V_B[COLORSIZE];
    void table_init();

};

#endif // RGB2YUV_H

// ===================== /src/rgb2yuv/rgb2yuv.cpp =====================

Rgb2YUV::Rgb2YUV()
{
    table_init();
}

// Build the Q12 (coefficient * 4096, then >> 12) multiplication tables for
// the conversion coefficients noted on each line.
void Rgb2YUV::table_init()
{
    for (int i = 0; i < COLORSIZE; i++)
    {
        Y_R[i] = (i * 1224) >> 12; // 0.2988
        Y_G[i] = (i * 2404) >> 12; // 0.5869
        Y_B[i] = (i * 469)  >> 12; // 0.1162
        U_R[i] = (i * 692)  >> 12; // 0.1688
        U_G[i] = (i * 1356) >> 12; // 0.3312
        U_B[i] = i >> 1;           // 0.5 — also serves as V_R (same coefficient)
        V_G[i] = (i * 1731) >> 12; // 0.4184
        V_B[i] = (i * 334)  >> 12; // 0.0816
    }
}

void Rgb2YUV::RGB2YUV420(uint8_t *rgbBufIn, uint8_t *yuvBufOut, int nWidth, int nHeight)
{
    int pix = 0;                            // linear write index into the Y plane
    const int IMGSIZE = nWidth * nHeight;   // size of the Y plane

    RGB *in = (RGB *)rgbBufIn;

    for (int y = 0; y < nHeight; y++)       // line
    {
        for (int x = 0; x < nWidth; x++)    // pixel
        {
            // Read mirrored vertically: a straight in[pix] read produced an
            // upside-down image, so sample from the opposite row instead.
            RGB rgbByte = in[(nHeight - y - 1) * nWidth + x];

            // Swap channels 0 and 2: the source frames are BGR-ordered, and
            // without this swap the resulting YUV colors are wrong.
            uint8_t temp = rgbByte.r;
            rgbByte.r = rgbByte.b;
            rgbByte.b = temp;

            int i = Y_R[rgbByte.r] + Y_G[rgbByte.g] + Y_B[rgbByte.b];
            yuvBufOut[pix] = i; // Y

            // 4:2:0 chroma subsampling: take only the bottom-right pixel of
            // each 2x2 block (no averaging — cheaper, slightly noisier).
            if ((x % 2 == 1) && (y % 2 == 1))
            {
                int pixP4 = (nWidth >> 1) * (y >> 1) + (x >> 1);

                // U = 0.5*B - 0.1688*R - 0.3312*G + 128
                i = U_B[rgbByte.b] - U_R[rgbByte.r] - U_G[rgbByte.g] + 128;
                yuvBufOut[pixP4 + IMGSIZE] = i;

                // V = 0.5*R - 0.4184*G - 0.0816*B + 128
                // (U_B doubles as the 0.5 table — see table_init)
                i = U_B[rgbByte.r] - V_G[rgbByte.g] - V_B[rgbByte.b] + 128;
                yuvBufOut[pixP4 + 5 * IMGSIZE / 4] = i;
            }

            pix++;
        }
    }
}
11 | 12 | You should have received a copy of the GNU Lesser General Public License 13 | along with this library; if not, write to the Free Software Foundation, Inc., 14 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 15 | **********/ 16 | // "liveMedia" 17 | // Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 18 | // A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s 19 | // on demand, from a H264 video file. 20 | // Implementation 21 | 22 | #include "include/DD_H264VideoFileServerMediaSubsession.hh" 23 | #include "H264VideoRTPSink.hh" 24 | #include "include/StreamEncoder.hh" 25 | #include "include/RTSP.hh" 26 | #include "H264VideoStreamFramer.hh" 27 | #include 28 | 29 | DD_H264VideoFileServerMediaSubsession* 30 | DD_H264VideoFileServerMediaSubsession::createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource) 31 | { 32 | return new DD_H264VideoFileServerMediaSubsession(env, fileName, reuseFirstSource); 33 | } 34 | 35 | DD_H264VideoFileServerMediaSubsession::DD_H264VideoFileServerMediaSubsession(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource) 36 | : OnDemandServerMediaSubsession(env, reuseFirstSource), 37 | fAuxSDPLine(NULL),camera(NULL) 38 | { 39 | 40 | } 41 | 42 | DD_H264VideoFileServerMediaSubsession::~DD_H264VideoFileServerMediaSubsession() 43 | { 44 | delete[] fAuxSDPLine; 45 | } 46 | 47 | char const* DD_H264VideoFileServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) 48 | { 49 | char const* auxSDPLine = rtpSink->auxSDPLine(); 50 | if (auxSDPLine != NULL) 51 | { 52 | return auxSDPLine; 53 | } 54 | else 55 | { 56 | char const* fmtpFmt = "a=fmtp:%d packetization-mode=1" 57 | ";profile-level-id=000000" 58 | ";sprop-parameter-sets=H264\r\n"; 59 | 60 | unsigned fmtpFmtSize = strlen(fmtpFmt)+3/* max char len */; 61 | 62 | char* fmtp = new char[fmtpFmtSize]; 63 | delete[] fAuxSDPLine; 64 | //缺少下面这一句,调了3天,F**K!! 
65 | memcpy(fmtp,fmtpFmt,fmtpFmtSize*sizeof(char)); 66 | fAuxSDPLine = fmtp; 67 | return fAuxSDPLine; 68 | } 69 | } 70 | 71 | FramedSource* DD_H264VideoFileServerMediaSubsession::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) 72 | { 73 | std::cout << "createNewStreamSource!\n" << std::endl; 74 | RTSPFramedSource *rtsp_; 75 | 76 | estBitrate = 10000; // kbps, estimate 77 | 78 | rtsp_ = new RTSPFramedSource(envir()); 79 | if(rtsp_==NULL) 80 | return NULL; 81 | 82 | if(camera!=NULL) 83 | { 84 | camera=NULL; 85 | } 86 | camera=rtsp_; 87 | 88 | StreamEncoder* fileSource = StreamEncoder::createNew(envir(),rtsp_,1000000); 89 | if (fileSource == NULL) 90 | return NULL; 91 | std::cout << "111!\n" << std::endl; 92 | FramedSource*tmp=H264VideoStreamFramer::createNew(envir(), fileSource); 93 | std::cout << "111!\n" << std::endl; 94 | return tmp; 95 | } 96 | 97 | RTPSink* DD_H264VideoFileServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock, 98 | unsigned char rtpPayloadTypeIfDynamic, 99 | FramedSource* /*inputSource*/) 100 | { 101 | return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic); 102 | } 103 | 104 | void DD_H264VideoFileServerMediaSubsession::startStream(unsigned clientSessionId, 105 | void* streamToken, 106 | TaskFunc* rtcpRRHandler, 107 | void* rtcpRRHandlerClientData, 108 | unsigned short& rtpSeqNum, 109 | unsigned& rtpTimestamp, 110 | ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler, 111 | void* serverRequestAlternativeByteHandlerClientData) 112 | { 113 | std::cout << "Subsession::startStream!\n" << std::endl; 114 | StreamState* streamState = (StreamState*)streamToken; 115 | 116 | OnDemandServerMediaSubsession::startStream(clientSessionId,streamToken,rtcpRRHandler, 117 | rtcpRRHandlerClientData,rtpSeqNum,rtpTimestamp,serverRequestAlternativeByteHandler, 118 | serverRequestAlternativeByteHandlerClientData); 119 | } 120 | 121 | void 
DD_H264VideoFileServerMediaSubsession::pauseStream(unsigned /*clientSessionId*/, void* streamToken) 122 | { 123 | StreamState* streamState = (StreamState*)streamToken; 124 | 125 | OnDemandServerMediaSubsession::pauseStream(0,streamToken); 126 | } 127 | -------------------------------------------------------------------------------- /src/rtsp/RTSP.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include "include/RTSP.hh" 13 | #include "../rgb2yuv/rgb2yuv.h" 14 | #include 15 | #include 16 | #include 17 | 18 | extern "C" { 19 | #include 20 | } 21 | 22 | using namespace std; 23 | 24 | 25 | unsigned long get_time(void) 26 | { 27 | struct timeval ts; 28 | gettimeofday(&ts, NULL); 29 | return (ts.tv_sec * 1000 + ts.tv_usec / 1000); 30 | } 31 | 32 | RTSPFramedSource::RTSPFramedSource(UsageEnvironment& env): 33 | FramedSource(env), 34 | white_pos(0), 35 | fOutputBuffer(NULL), 36 | fMaxOfFrameToSend(1),fTotOfFrameToSend(0) 37 | { 38 | } 39 | 40 | inline void RTSPFramedSource::registerOutputInterest(void) { 41 | fOutputBuffer=fTo; 42 | fTotOfFrameToSend=0;//缓冲中的图像帧数 43 | } 44 | 45 | inline void RTSPFramedSource::reset(void) 46 | { 47 | fOutputBuffer=NULL; 48 | fTotOfFrameToSend=0;//缓冲中的图像帧数 49 | } 50 | 51 | inline void RTSPFramedSource::convernt_to_OutputBuffer(void) 52 | { 53 | static int frame_size = 0; 54 | frame_size = 640 * 480 * 1.5*sizeof(unsigned char); 55 | #if 0 //这一段是模拟推YUYV 56 | 57 | int line_size= 640 * 2*sizeof(unsigned char); 58 | memset(fOutputBuffer, 0,480*line_size); 59 | if(white_pos>479) white_pos=0; 60 | memset(fOutputBuffer+line_size*white_pos++, 255,line_size); 61 | #else//下面模拟推mat 62 | int line_size= 640 * 3*sizeof(unsigned char); 63 | cv::Mat tmp; 64 | tmp=cv::Mat::zeros(640,480,CV_8UC3); 65 | if(white_pos>479) white_pos=0; 66 | memset(tmp.data+line_size*white_pos++, 
255,line_size); 67 | #endif 68 | 69 | unsigned char *frame_nv12 = NULL; 70 | frame_nv12 = (unsigned char *)malloc(frame_size); 71 | if (!frame_nv12) 72 | return; 73 | #ifdef USE_RGA 74 | RockchipRga *rga; 75 | rga = RgaCreate(); 76 | if (!rga) { 77 | std::cout << "rgaCreate error!\n" << std::endl; 78 | return; 79 | } 80 | // std::cout << "convernt_to_OutputBuffer!\n" << std::endl; 81 | rga->ops->initCtx(rga); 82 | rga->ops->setRotate(rga, RGA_ROTATE_NONE); 83 | // rga->ops->setSrcFormat(rga, V4L2_PIX_FMT_YUYV, 640, 480); 84 | // rga->ops->setSrcFormat(rga, V4L2_PIX_FMT_YUYV, 640, 480);//推YUYV,v4l2 85 | rga->ops->setSrcFormat(rga, V4L2_PIX_FMT_RGB24, 640, 480);//推mat 86 | rga->ops->setDstFormat(rga, V4L2_PIX_FMT_NV12, 640, 480); 87 | // cv::imwrite("../1.jpg",tmp); 88 | 89 | rga->ops->setDstBufferPtr(rga, frame_nv12); 90 | // rga->ops->setSrcBufferPtr(rga, (unsigned char *) camera_buffers[index].start);//推v4l2 91 | // rga->ops->setSrcBufferPtr(rga, (unsigned char *)fOutputBuffer);//推YUYV 92 | rga->ops->setSrcBufferPtr(rga, (unsigned char *)tmp.data);//推mat 93 | int ret = rga->ops->go(rga); 94 | if (ret) 95 | { 96 | printf("rga->ops->go fail! 
\n"); 97 | } 98 | RgaDestroy(rga); 99 | #else 100 | Rgb2YUV r2y; 101 | r2y.RGB2YUV420((unsigned char *)tmp.data,frame_nv12,640,480); 102 | #endif 103 | 104 | memcpy(fOutputBuffer, frame_nv12, frame_size); 105 | fOutputBuffer += frame_size; 106 | fTotOfFrameToSend++; 107 | 108 | free(frame_nv12); 109 | } 110 | 111 | void RTSPFramedSource::doGetNextFrame() 112 | { 113 | // std::cout << "doGetNextFrame!\n" << std::endl; 114 | long bigin = get_time(); 115 | 116 | registerOutputInterest(); 117 | 118 | while(fTotOfFrameToSend 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include "rockchip/rk_mpi.h" 12 | #include "rockchip/mpp_buffer.h" 13 | #include "rockchip/mpp_err.h" 14 | #include "rockchip/mpp_frame.h" 15 | #include "rockchip/mpp_meta.h" 16 | #include "rockchip/mpp_packet.h" 17 | #include "rockchip/mpp_task.h" 18 | 19 | 20 | 21 | #define MPP_ALIGN(x, a) (((x)+(a)-1)&~((a)-1)) 22 | #define mpp_malloc(type, count) \ 23 | (type*)malloc(sizeof(type) * (count)) 24 | #define mpp_calloc(type, count) \ 25 | (type*)malloc(sizeof(type) * (count)) 26 | #define mpp_free(ptr) \ 27 | free(ptr) 28 | 29 | #define MPP_FREE(ptr) do { if(ptr) mpp_free(ptr); ptr = NULL; } while (0) 30 | 31 | #define MAX_FILE_NAME_LENGTH 256 32 | 33 | export unsigned long get_time(void); 34 | 35 | typedef struct { 36 | char file_input[MAX_FILE_NAME_LENGTH]; 37 | char file_output[MAX_FILE_NAME_LENGTH]; 38 | MppCodingType type; 39 | RK_U32 width; 40 | RK_U32 height; 41 | MppFrameFormat format; 42 | RK_U32 debug; 43 | RK_U32 num_frames; 44 | 45 | RK_U32 have_input; 46 | RK_U32 have_output; 47 | } MpiEncTestCmd; 48 | 49 | typedef struct { 50 | // global flow control flag 51 | RK_U32 frm_eos; 52 | RK_U32 pkt_eos; 53 | RK_U32 frame_count; 54 | RK_U64 stream_size; 55 | 56 | // src and dst 57 | FILE *fp_input; 58 | FILE *fp_output; 59 | 60 | // base flow context 61 | MppCtx ctx; 62 | MppApi *mpi; 63 | MppEncPrepCfg prep_cfg; 64 | MppEncRcCfg 
rc_cfg; 65 | MppEncCodecCfg codec_cfg; 66 | 67 | // input / output 68 | MppBuffer frm_buf; 69 | MppEncSeiMode sei_mode; 70 | 71 | // paramter for resource malloc 72 | RK_U32 width; 73 | RK_U32 height; 74 | RK_U32 hor_stride; 75 | RK_U32 ver_stride; 76 | MppFrameFormat fmt; 77 | MppCodingType type; 78 | RK_U32 num_frames; 79 | 80 | // resources 81 | size_t frame_size; 82 | /* NOTE: packet buffer may overflow */ 83 | size_t packet_size; 84 | 85 | // rate control runtime parameter 86 | RK_S32 gop; 87 | RK_S32 fps; 88 | RK_S32 bps; 89 | } MpiEncTestData; 90 | 91 | MPP_RET test_ctx_init(MpiEncTestData **data, MpiEncTestCmd *cmd) 92 | { 93 | MpiEncTestData *p = NULL; 94 | MPP_RET ret = MPP_OK; 95 | 96 | if (!data || !cmd) { 97 | printf("invalid input data %p cmd %p\n", data, cmd); 98 | return MPP_ERR_NULL_PTR; 99 | } 100 | p = mpp_calloc(MpiEncTestData, 1); 101 | if (!p) { 102 | printf("create MpiEncTestData failed\n"); 103 | ret = MPP_ERR_MALLOC; 104 | goto RET; 105 | } 106 | memset(p, 0, sizeof(MpiEncTestData)); 107 | 108 | // get paramter from cmd 109 | p->width = cmd->width; 110 | p->height = cmd->height; 111 | p->hor_stride = MPP_ALIGN(cmd->width, 16); 112 | p->ver_stride = MPP_ALIGN(cmd->height, 16); 113 | p->fmt = cmd->format; 114 | p->type = cmd->type; 115 | if (cmd->type == MPP_VIDEO_CodingMJPEG) 116 | cmd->num_frames = 1; 117 | p->num_frames = cmd->num_frames; 118 | 119 | if (cmd->have_input) { 120 | p->fp_input = fopen(cmd->file_input, "rb"); 121 | if (NULL == p->fp_input) { 122 | printf("failed to open input file %s\n", cmd->file_input); 123 | printf("create default yuv image for test\n"); 124 | } 125 | } 126 | 127 | if (cmd->have_output) { 128 | p->fp_output = fopen(cmd->file_output, "w+b"); 129 | if (NULL == p->fp_output) { 130 | printf("failed to open output file %s\n", cmd->file_output); 131 | ret = MPP_ERR_OPEN_FILE; 132 | } 133 | } 134 | 135 | // update resource parameter 136 | if (p->fmt <= MPP_FMT_YUV420SP_VU) 137 | p->frame_size = p->hor_stride * 
p->ver_stride * 3 / 2; 138 | else if (p->fmt <= MPP_FMT_YUV422_UYVY) { 139 | // NOTE: yuyv and uyvy need to double stride 140 | p->hor_stride *= 2; 141 | p->frame_size = p->hor_stride * p->ver_stride; 142 | } else 143 | p->frame_size = p->hor_stride * p->ver_stride * 4; 144 | p->packet_size = p->width * p->height; 145 | 146 | RET: 147 | *data = p; 148 | return ret; 149 | } 150 | MPP_RET test_ctx_deinit(MpiEncTestData **data) 151 | { 152 | MpiEncTestData *p = NULL; 153 | 154 | if (!data) { 155 | printf("invalid input data %p\n", data); 156 | return MPP_ERR_NULL_PTR; 157 | } 158 | 159 | p = *data; 160 | if (p) { 161 | if (p->fp_input) { 162 | fclose(p->fp_input); 163 | p->fp_input = NULL; 164 | } 165 | if (p->fp_output) { 166 | fclose(p->fp_output); 167 | p->fp_output = NULL; 168 | } 169 | MPP_FREE(p); 170 | *data = NULL; 171 | } 172 | 173 | return MPP_OK; 174 | } 175 | MPP_RET test_mpp_setup(MpiEncTestData *p) 176 | { 177 | MPP_RET ret; 178 | MppApi *mpi; 179 | MppCtx ctx; 180 | MppEncCodecCfg *codec_cfg; 181 | MppEncPrepCfg *prep_cfg; 182 | MppEncRcCfg *rc_cfg; 183 | 184 | if (NULL == p) 185 | return MPP_ERR_NULL_PTR; 186 | 187 | mpi = p->mpi; 188 | ctx = p->ctx; 189 | codec_cfg = &p->codec_cfg; 190 | prep_cfg = &p->prep_cfg; 191 | rc_cfg = &p->rc_cfg; 192 | 193 | /* setup default parameter */ 194 | p->fps = 30; 195 | p->gop = 60; 196 | p->bps = p->width * p->height / 8 * p->fps; 197 | 198 | prep_cfg->change = MPP_ENC_PREP_CFG_CHANGE_INPUT | 199 | MPP_ENC_PREP_CFG_CHANGE_ROTATION | 200 | MPP_ENC_PREP_CFG_CHANGE_FORMAT; 201 | prep_cfg->width = p->width; 202 | prep_cfg->height = p->height; 203 | prep_cfg->hor_stride = p->hor_stride; 204 | prep_cfg->ver_stride = p->ver_stride; 205 | prep_cfg->format = p->fmt; 206 | prep_cfg->rotation = MPP_ENC_ROT_0; 207 | 208 | ret = mpi->control(ctx, MPP_ENC_SET_PREP_CFG, prep_cfg); 209 | if (ret) { 210 | printf("mpi control enc set prep cfg failed ret %d\n", ret); 211 | goto RET; 212 | } 213 | 214 | rc_cfg->change = 
MPP_ENC_RC_CFG_CHANGE_ALL; 215 | rc_cfg->rc_mode = MPP_ENC_RC_MODE_CBR; 216 | rc_cfg->quality = MPP_ENC_RC_QUALITY_MEDIUM; 217 | 218 | if (rc_cfg->rc_mode == MPP_ENC_RC_MODE_CBR) { 219 | /* constant bitrate has very small bps range of 1/16 bps */ 220 | rc_cfg->bps_target = p->bps; 221 | rc_cfg->bps_max = p->bps * 17 / 16; 222 | rc_cfg->bps_min = p->bps * 15 / 16; 223 | } else if (rc_cfg->rc_mode == MPP_ENC_RC_MODE_VBR) { 224 | if (rc_cfg->quality == MPP_ENC_RC_QUALITY_CQP) { 225 | /* constant QP does not have bps */ 226 | rc_cfg->bps_target = -1; 227 | rc_cfg->bps_max = -1; 228 | rc_cfg->bps_min = -1; 229 | } else { 230 | /* variable bitrate has large bps range */ 231 | rc_cfg->bps_target = p->bps; 232 | rc_cfg->bps_max = p->bps * 17 / 16; 233 | rc_cfg->bps_min = p->bps * 1 / 16; 234 | } 235 | } 236 | 237 | /* fix input / output frame rate */ 238 | rc_cfg->fps_in_flex = 0; 239 | rc_cfg->fps_in_num = p->fps; 240 | rc_cfg->fps_in_denorm = 1; 241 | rc_cfg->fps_out_flex = 0; 242 | rc_cfg->fps_out_num = p->fps; 243 | rc_cfg->fps_out_denorm = 1; 244 | 245 | rc_cfg->gop = p->gop; 246 | rc_cfg->skip_cnt = 0; 247 | 248 | //printf("mpi_enc_test bps %d fps %d gop %d\n", rc_cfg->bps_target, rc_cfg->fps_out_num, rc_cfg->gop); 249 | 250 | ret = mpi->control(ctx, MPP_ENC_SET_RC_CFG, rc_cfg); 251 | if (ret) { 252 | printf("mpi control enc set rc cfg failed ret %d\n", ret); 253 | goto RET; 254 | } 255 | 256 | codec_cfg->coding = p->type; 257 | switch (codec_cfg->coding) { 258 | case MPP_VIDEO_CodingAVC : { 259 | codec_cfg->h264.change = MPP_ENC_H264_CFG_CHANGE_PROFILE | 260 | MPP_ENC_H264_CFG_CHANGE_ENTROPY | 261 | MPP_ENC_H264_CFG_CHANGE_TRANS_8x8; 262 | /* 263 | * H.264 profile_idc parameter 264 | * 66 - Baseline profile 265 | * 77 - Main profile 266 | * 100 - High profile 267 | */ 268 | codec_cfg->h264.profile = 100; 269 | /* 270 | * H.264 level_idc parameter 271 | * 10 / 11 / 12 / 13 - qcif@15fps / cif@7.5fps / cif@15fps / cif@30fps 272 | * 20 / 21 / 22 - cif@30fps / 
half-D1@@25fps / D1@12.5fps 273 | * 30 / 31 / 32 - D1@25fps / 720p@30fps / 720p@60fps 274 | * 40 / 41 / 42 - 1080p@30fps / 1080p@30fps / 1080p@60fps 275 | * 50 / 51 / 52 - 4K@30fps 276 | */ 277 | codec_cfg->h264.level = 31; 278 | codec_cfg->h264.entropy_coding_mode = 1; 279 | codec_cfg->h264.cabac_init_idc = 0; 280 | codec_cfg->h264.transform8x8_mode = 1; 281 | } break; 282 | case MPP_VIDEO_CodingMJPEG : { 283 | codec_cfg->jpeg.change = MPP_ENC_JPEG_CFG_CHANGE_QP; 284 | codec_cfg->jpeg.quant = 10; 285 | } break; 286 | case MPP_VIDEO_CodingVP8 : { 287 | } break; 288 | case MPP_VIDEO_CodingHEVC : { 289 | codec_cfg->h265.change = MPP_ENC_H265_CFG_INTRA_QP_CHANGE; 290 | codec_cfg->h265.intra_qp = 26; 291 | } break; 292 | default : { 293 | printf("support encoder coding type %d\n", codec_cfg->coding); 294 | } break; 295 | } 296 | 297 | ret = mpi->control(ctx, MPP_ENC_SET_CODEC_CFG, codec_cfg); 298 | if (ret) { 299 | printf("mpi control enc set codec cfg failed ret %d\n", ret); 300 | goto RET; 301 | } 302 | 303 | /* optional */ 304 | p->sei_mode = MPP_ENC_SEI_MODE_ONE_FRAME; 305 | 306 | ret = mpi->control(ctx, MPP_ENC_SET_SEI_CFG, &p->sei_mode); 307 | if (ret) { 308 | printf("mpi control enc set sei cfg failed ret %d\n", ret); 309 | goto RET; 310 | } 311 | 312 | RET: 313 | return ret; 314 | } 315 | MPP_RET test_mpp_run(MpiEncTestData *p, void *input, void *output, int *out_size) 316 | { 317 | MPP_RET ret; 318 | MppApi *mpi; 319 | MppCtx ctx; 320 | 321 | if (NULL == p) 322 | return MPP_ERR_NULL_PTR; 323 | 324 | mpi = p->mpi; 325 | ctx = p->ctx; 326 | 327 | if (p->type == MPP_VIDEO_CodingAVC) { 328 | MppPacket packet = NULL; 329 | ret = mpi->control(ctx, MPP_ENC_GET_EXTRA_INFO, &packet); 330 | if (ret) { 331 | printf("mpi control enc get extra info failed\n"); 332 | goto RET; 333 | } 334 | 335 | /* get and write sps/pps for H.264 */ 336 | if (packet) { 337 | void *ptr = mpp_packet_get_pos(packet); 338 | size_t len = mpp_packet_get_length(packet); 339 | 340 | if 
(p->fp_output) 341 | fwrite(ptr, 1, len, p->fp_output); 342 | else { 343 | memcpy(output, ptr, len); 344 | *out_size = len; 345 | } 346 | 347 | 348 | packet = NULL; 349 | } 350 | } 351 | 352 | while (!p->pkt_eos) { 353 | MppFrame frame = NULL; 354 | MppPacket packet = NULL; 355 | void *buf = mpp_buffer_get_ptr(p->frm_buf); 356 | 357 | if (p->fp_input) { 358 | 359 | /*ret = read_yuv_image(buf, p->fp_input, p->width, p->height, 360 | p->hor_stride, p->ver_stride, p->fmt);*/ 361 | if (ret == MPP_NOK || feof(p->fp_input)) { 362 | printf("found last frame. feof %d\n", feof(p->fp_input)); 363 | p->frm_eos = 1; 364 | } else if (ret == MPP_ERR_VALUE) 365 | goto RET; 366 | } else { 367 | /*ret = fill_yuv_image(buf, p->width, p->height, p->hor_stride, 368 | p->ver_stride, p->fmt, p->frame_count);*/ 369 | 370 | memcpy(buf, input, p->frame_size);//hisping 371 | if (ret) 372 | goto RET; 373 | } 374 | 375 | ret = mpp_frame_init(&frame); 376 | if (ret) { 377 | printf("mpp_frame_init failed\n"); 378 | goto RET; 379 | } 380 | 381 | mpp_frame_set_width(frame, p->width); 382 | mpp_frame_set_height(frame, p->height); 383 | mpp_frame_set_hor_stride(frame, p->hor_stride); 384 | mpp_frame_set_ver_stride(frame, p->ver_stride); 385 | mpp_frame_set_fmt(frame, p->fmt); 386 | mpp_frame_set_eos(frame, p->frm_eos); 387 | 388 | if (p->fp_input && feof(p->fp_input)){ 389 | mpp_frame_set_buffer(frame, NULL); 390 | }else{ 391 | 392 | mpp_frame_set_buffer(frame, p->frm_buf); 393 | } 394 | ret = mpi->encode_put_frame(ctx, frame); 395 | if (ret) { 396 | printf("mpp encode put frame failed\n"); 397 | goto RET; 398 | } 399 | 400 | ret = mpi->encode_get_packet(ctx, &packet); 401 | if (ret) { 402 | printf("mpp encode get packet failed\n"); 403 | goto RET; 404 | } 405 | 406 | if (packet) { 407 | // write packet to file here 408 | void *ptr = mpp_packet_get_pos(packet); 409 | size_t len = mpp_packet_get_length(packet); 410 | 411 | p->pkt_eos = mpp_packet_get_eos(packet); 412 | 413 | if (p->fp_output) 414 | 
fwrite(ptr, 1, len, p->fp_output); 415 | else { 416 | 417 | memcpy(output + (*out_size), ptr, len); 418 | *out_size += len; 419 | }//hisping 420 | 421 | mpp_packet_deinit(&packet); 422 | #ifdef SHOW_LOG 423 | printf("encoded frame %d size %d\n", p->frame_count, len); 424 | #endif 425 | p->stream_size += len; 426 | p->frame_count++; 427 | 428 | if (p->pkt_eos) { 429 | printf("found last packet\n"); 430 | //mpp_assert(p->frm_eos); 431 | } 432 | } 433 | 434 | if (p->num_frames && p->frame_count >= p->num_frames) { 435 | #ifdef SHOW_LOG 436 | printf("encode max %d frames", p->frame_count); 437 | #endif 438 | break; 439 | } 440 | if (p->frm_eos && p->pkt_eos) 441 | break; 442 | } 443 | RET: 444 | 445 | return ret; 446 | } 447 | int mpi_enc_test(MpiEncTestCmd *cmd, void *input, void *output, int *out_size) 448 | { 449 | MPP_RET ret = MPP_OK; 450 | MpiEncTestData *p = NULL; 451 | 452 | //printf("mpi_enc_test start\n"); 453 | 454 | ret = test_ctx_init(&p, cmd); 455 | if (ret) { 456 | printf("test data init failed ret %d\n", ret); 457 | goto MPP_TEST_OUT; 458 | } 459 | 460 | ret = mpp_buffer_get(NULL, &p->frm_buf, p->frame_size); 461 | if (ret) { 462 | printf("failed to get buffer for input frame ret %d\n", ret); 463 | goto MPP_TEST_OUT; 464 | } 465 | 466 | //printf("mpi_enc_test encoder test start w %d h %d type %d\n", p->width, p->height, p->type); 467 | 468 | // encoder demo 469 | ret = mpp_create(&p->ctx, &p->mpi); 470 | if (ret) { 471 | printf("mpp_create failed ret %d\n", ret); 472 | goto MPP_TEST_OUT; 473 | } 474 | 475 | ret = mpp_init(p->ctx, MPP_CTX_ENC, p->type); 476 | if (ret) { 477 | printf("mpp_init failed ret %d\n", ret); 478 | goto MPP_TEST_OUT; 479 | } 480 | 481 | ret = test_mpp_setup(p); 482 | if (ret) { 483 | printf("test mpp setup failed ret %d\n", ret); 484 | goto MPP_TEST_OUT; 485 | } 486 | 487 | ret = test_mpp_run(p, input, output, out_size); 488 | if (ret) { 489 | printf("test mpp run failed ret %d\n", ret); 490 | goto MPP_TEST_OUT; 491 | } 492 | 493 
| ret = p->mpi->reset(p->ctx); 494 | if (ret) { 495 | printf("mpi->reset failed\n"); 496 | goto MPP_TEST_OUT; 497 | } 498 | 499 | MPP_TEST_OUT: 500 | if (p->ctx) { 501 | mpp_destroy(p->ctx); 502 | p->ctx = NULL; 503 | } 504 | 505 | if (p->frm_buf) { 506 | mpp_buffer_put(p->frm_buf); 507 | p->frm_buf = NULL; 508 | } 509 | 510 | if (MPP_OK == ret) 511 | ; 512 | // printf("mpi_enc_test success total frame %d bps %lld\n", p->frame_count, (RK_U64)((p->stream_size * 8 * p->fps) / p->frame_count)); 513 | else 514 | printf("mpi_enc_test failed ret %d\n", ret); 515 | 516 | test_ctx_deinit(&p); 517 | 518 | return ret; 519 | } 520 | void StreamEncoder::flushInput() { 521 | 522 | } 523 | 524 | ////////////////////////// H264or5Fragmenter implementation //////////////////////////////// 525 | 526 | StreamEncoder *StreamEncoder::createNew(UsageEnvironment& env,FramedSource* inputSource, 527 | unsigned inputBufferMax) 528 | { 529 | return new StreamEncoder(env,inputSource,inputBufferMax); 530 | } 531 | 532 | StreamEncoder::StreamEncoder(UsageEnvironment& env, FramedSource* inputSource, unsigned inputBufferMax) 533 | : FramedFilter(env, inputSource), 534 | fOutputBufferSize(0),fOutputBuffer(NULL), 535 | fNumValidDataBytes(0),fMaxOfFrameToSend(1),fTotOfFrameToSend(0), 536 | width(640),height(480), 537 | pictureSize(0),srcfmt(AV_PIX_FMT_NV12) 538 | { 539 | 540 | pictureSize = avpicture_get_size(srcfmt, width, height); 541 | printf("pictureSize=%d\n", pictureSize); 542 | srcbuf = (unsigned char *)malloc(pictureSize); 543 | 544 | } 545 | 546 | StreamEncoder::~StreamEncoder() { 547 | printf("__StreamEncoder::H264_encode_close__\n"); 548 | free(srcbuf); 549 | } 550 | 551 | 552 | unsigned StreamEncoder::maxFrameSize() const { 553 | return 80 * 1024; 554 | } 555 | 556 | 557 | void StreamEncoder::registerInputInterest(void) { 558 | fOutputBufferSize=fMaxSize; 559 | fOutputBuffer=fTo; 560 | fTotOfFrameToSend=0; 561 | fNumValidDataBytes=0; 562 | } 563 | 564 | void StreamEncoder::reset() 565 | 
{ 566 | fNumValidDataBytes=0; 567 | fOutputBufferSize=0; 568 | fOutputBuffer=NULL; 569 | fTotOfFrameToSend=0; 570 | } 571 | 572 | 573 | 574 | void StreamEncoder::copy_to_outputbuffer(void) 575 | { 576 | memmove(fOutputBuffer, dstbuf, dstsize); 577 | fNumValidDataBytes += dstsize; 578 | fOutputBuffer += dstsize; 579 | fTotOfFrameToSend++; 580 | //printf("__fNumValidDataBytes:%d\n",fNumValidDataBytes); 581 | //printf("dstsize:%d\n",dstsize); 582 | if(dstbuf != NULL) 583 | free(dstbuf); 584 | } 585 | 586 | 587 | void StreamEncoder::encoder_to_h264(void) 588 | { 589 | MpiEncTestCmd cmd_ctx; 590 | MpiEncTestCmd* cmd = &cmd_ctx; 591 | memset((void*)cmd, 0, sizeof(*cmd)); 592 | 593 | cmd->width = width; 594 | cmd->height = height; 595 | cmd->format = MPP_FMT_YUV420SP; 596 | cmd->type = MPP_VIDEO_CodingAVC; 597 | cmd->num_frames = 1; 598 | 599 | 600 | dstbuf = (uint8_t *)malloc(pictureSize); 601 | dstsize = 0; 602 | mpi_enc_test(cmd, srcbuf, dstbuf, &dstsize); 603 | } 604 | 605 | 606 | 607 | 608 | void StreamEncoder::continueReadProcessing1(unsigned frameSize,unsigned numTruncatedBytes, 609 | struct timeval presentationTime, 610 | unsigned durationInMicroseconds){ 611 | 612 | fNumTruncatedBytes = numTruncatedBytes; 613 | fPresentationTime = presentationTime; 614 | fDurationInMicroseconds = durationInMicroseconds; 615 | 616 | encoder_to_h264(); 617 | 618 | if(fNumValidDataBytes+dstsizecontinueReadProcessing1(frameSize,numTruncatedBytes, 636 | presentationTime, 637 | durationInMicroseconds); 638 | 639 | } 640 | 641 | void StreamEncoder::readAndProcessing(void) 642 | { 643 | fInputSource->getNextFrame(srcbuf,pictureSize, 644 | StreamEncoder::continueReadProcessing, this, 645 | FramedSource::handleClosure, this); 646 | 647 | } 648 | 649 | void StreamEncoder::doGetNextFrame() { 650 | 651 | long begin = get_time(); 652 | registerInputInterest(); 653 | readAndProcessing(); 654 | long end = get_time(); 655 | 656 | } 657 | 658 | void StreamEncoder::doStopGettingFrames() 659 | { 660 
| 661 | } 662 | -------------------------------------------------------------------------------- /src/rtsp/demo.cpp: -------------------------------------------------------------------------------- 1 | #include "./include/DD_H264VideoFileServerMediaSubsession.hh" 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | rtsp_process rtsp_; 9 | void *rtsp_thread(void *threadarg) 10 | { 11 | rtsp_.process(); 12 | } 13 | int main(int argc, char *argv[]) 14 | { 15 | pthread_t threads_rtsp; 16 | //多线程 17 | pthread_create(&threads_rtsp, NULL, rtsp_thread, NULL); 18 | 19 | while(1) 20 | { 21 | //do something else 22 | ; 23 | } 24 | 25 | return 0; 26 | } -------------------------------------------------------------------------------- /src/rtsp/include/DD_H264VideoFileServerMediaSubsession.hh: -------------------------------------------------------------------------------- 1 | /********** 2 | This library is free software; you can redistribute it and/or modify it under 3 | the terms of the GNU Lesser General Public License as published by the 4 | Free Software Foundation; either version 2.1 of the License, or (at your 5 | option) any later version. (See .) 6 | 7 | This library is distributed in the hope that it will be useful, but WITHOUT 8 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 9 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for 10 | more details. 11 | 12 | You should have received a copy of the GNU Lesser General Public License 13 | along with this library; if not, write to the Free Software Foundation, Inc., 14 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 15 | **********/ 16 | // "liveMedia" 17 | // Copyright (c) 1996-2014 Live Networks, Inc. All rights reserved. 18 | // A 'ServerMediaSubsession' object that creates new, unicast, "RTPSink"s 19 | // on demand, from a H264 Elementary Stream video file. 
20 | // C++ header 21 | 22 | #ifndef _DD_H264_VIDEO_FILE_SERVER_MEDIA_SUBSESSION_HH 23 | #define _DD_H264_VIDEO_FILE_SERVER_MEDIA_SUBSESSION_HH 24 | 25 | #ifndef _ON_DEMAND_SERVER_MEDIA_SUBSESSION_HH 26 | #include "OnDemandServerMediaSubsession.hh" 27 | #endif 28 | #include "RTSP.hh" 29 | #include 30 | #include 31 | class rtsp_process{ 32 | public: 33 | char eventLoopWatchVariable; 34 | UsageEnvironment* env; 35 | unsigned long get_time(void); 36 | void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms, 37 | char const* streamName, char const* inputFileName); 38 | void init(); 39 | int process(); 40 | }; 41 | 42 | class DD_H264VideoFileServerMediaSubsession: public OnDemandServerMediaSubsession { 43 | public: 44 | static DD_H264VideoFileServerMediaSubsession* 45 | createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource); 46 | 47 | 48 | protected: 49 | DD_H264VideoFileServerMediaSubsession(UsageEnvironment& env, 50 | char const* fileName, Boolean reuseFirstSource); 51 | 52 | virtual ~DD_H264VideoFileServerMediaSubsession(); 53 | 54 | 55 | protected: 56 | virtual char const* getAuxSDPLine(RTPSink* rtpSink, 57 | FramedSource* inputSource); 58 | virtual FramedSource* createNewStreamSource(unsigned clientSessionId, 59 | unsigned& estBitrate); 60 | virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, 61 | unsigned char rtpPayloadTypeIfDynamic, 62 | FramedSource* inputSource); 63 | virtual void startStream(unsigned clientSessionId, void* streamToken, 64 | TaskFunc* rtcpRRHandler, 65 | void* rtcpRRHandlerClientData, 66 | unsigned short& rtpSeqNum, 67 | unsigned& rtpTimestamp, 68 | ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler, 69 | void* serverRequestAlternativeByteHandlerClientData); 70 | virtual void pauseStream(unsigned clientSessionId, void* streamToken); 71 | 72 | private: 73 | char* fAuxSDPLine; 74 | RTSPFramedSource *camera; 75 | }; 76 | 77 | #endif 78 | 
-------------------------------------------------------------------------------- /src/rtsp/include/RTSP.hh: -------------------------------------------------------------------------------- 1 | 2 | #ifndef RTSP_H_ 3 | #define RTSP_H_ 4 | 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | 22 | #ifndef _FRAMED_SOURCE_HH 23 | #include "FramedSource.hh" 24 | #endif 25 | 26 | extern "C" 27 | { 28 | #include 29 | #include 30 | #include "libavutil/opt.h" 31 | #include 32 | } 33 | 34 | #define CLEAR(x) memset(&(x),0,sizeof(x)) 35 | 36 | 37 | class RTSPFramedSource: public FramedSource 38 | { 39 | public: 40 | int white_pos; 41 | RTSPFramedSource(UsageEnvironment& env); 42 | 43 | protected: 44 | virtual void doGetNextFrame(); 45 | 46 | private: 47 | inline void reset(void); 48 | inline void registerOutputInterest(void); 49 | inline void convernt_to_OutputBuffer(void); 50 | 51 | unsigned char* fOutputBuffer;//缓存起始地址 52 | unsigned fMaxOfFrameToSend;//最大输出图像帧数 53 | unsigned fTotOfFrameToSend;//图像输出总帧数 54 | 55 | }; 56 | 57 | #endif /* V4L2_H_ */ 58 | -------------------------------------------------------------------------------- /src/rtsp/include/StreamEncoder.hh: -------------------------------------------------------------------------------- 1 | 2 | #ifndef _STREAM_ENCODER_HH 3 | #define _STREAM_ENCODER_HH 4 | 5 | #ifndef _FRAMED_FILTER_HH 6 | #include "FramedFilter.hh" 7 | #endif 8 | 9 | 10 | extern "C" 11 | { 12 | #include 13 | #include 14 | #include "libavutil/opt.h" 15 | #include 16 | } 17 | 18 | class StreamEncoder: public FramedFilter { 19 | public: 20 | virtual void flushInput(); 21 | static StreamEncoder *createNew(UsageEnvironment& env,FramedSource* inputSource, 22 | unsigned inputBufferMax); 23 | StreamEncoder(UsageEnvironment& env, FramedSource* inputSource,unsigned inputBufferMax); 24 | virtual 
~StreamEncoder(); 25 | virtual unsigned maxFrameSize() const; 26 | 27 | private: 28 | virtual void doGetNextFrame(); 29 | virtual void doStopGettingFrames(); 30 | 31 | 32 | private: 33 | void reset(); 34 | void registerInputInterest(void); 35 | static void continueReadProcessing(void* clientData, 36 | unsigned frameSize,unsigned numTruncatedBytes, 37 | struct timeval presentationTime, 38 | unsigned durationInMicroseconds); 39 | void continueReadProcessing1(unsigned frameSize,unsigned numTruncatedBytes, 40 | struct timeval presentationTime, 41 | unsigned durationInMicroseconds); 42 | void encoder_to_h264(void); 43 | void copy_to_outputbuffer(void); 44 | void readAndProcessing(void); 45 | 46 | private: 47 | unsigned fOutputBufferSize; 48 | unsigned char* fOutputBuffer; 49 | unsigned fNumValidDataBytes; 50 | unsigned fMaxOfFrameToSend; 51 | unsigned fTotOfFrameToSend; 52 | 53 | int width; 54 | int height; 55 | unsigned int pictureSize; 56 | AVPixelFormat srcfmt; 57 | unsigned char *srcbuf; 58 | 59 | unsigned char *dstbuf; 60 | int dstsize; 61 | }; 62 | #endif 63 | -------------------------------------------------------------------------------- /src/rtsp/testOnDemandRTSPServer.cpp: -------------------------------------------------------------------------------- 1 | /********** 2 | This library is free software; you can redistribute it and/or modify it under 3 | the terms of the GNU Lesser General Public License as published by the 4 | Free Software Foundation; either version 2.1 of the License, or (at your 5 | option) any later version. (See .) 6 | 7 | This library is distributed in the hope that it will be useful, but WITHOUT 8 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 9 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for 10 | more details. 
11 | 12 | You should have received a copy of the GNU Lesser General Public License 13 | along with this library; if not, write to the Free Software Foundation, Inc., 14 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 15 | **********/ 16 | // Copyright (c) 1996-2014, Live Networks, Inc. All rights reserved 17 | // A test program that demonstrates how to stream - via unicast RTP 18 | // - various kinds of file on demand, using a built-in RTSP server. 19 | // main program 20 | // #include "../process/include.h" 21 | // #include "../menu_config/menu_config.h" 22 | // #include "../funs/funs.h" 23 | #include "./include/DD_H264VideoFileServerMediaSubsession.hh" 24 | #include 25 | #include 26 | using namespace std; 27 | // #define ACCESS_CONTROL 28 | 29 | unsigned long rtsp_process::get_time(void) 30 | { 31 | struct timeval ts; 32 | gettimeofday(&ts, NULL); 33 | return (ts.tv_sec * 1000 + ts.tv_usec / 1000); 34 | } 35 | 36 | void rtsp_process::init() 37 | { 38 | // Begin by setting up our usage environment: 39 | TaskScheduler* scheduler = BasicTaskScheduler::createNew(); 40 | env = BasicUsageEnvironment::createNew(*scheduler); 41 | Boolean reuseFirstSource = True; 42 | UserAuthenticationDatabase* authDB = NULL; 43 | #ifdef ACCESS_CONTROL 44 | // To implement client access control to the RTSP server, do the following: 45 | authDB = new UserAuthenticationDatabase; 46 | authDB->addUserRecord("admin", "1234"); // replace these with real strings 47 | // Repeat the above with each , that you wish to allow 48 | // access to the server. 
49 | #endif 50 | 51 | // Create the RTSP server: 52 | RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB); 53 | if (rtspServer == NULL) { 54 | *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n"; 55 | exit(1); 56 | } 57 | 58 | char const* descriptionString 59 | = "Session streamed by \"testOnDemandRTSPServer\""; 60 | 61 | // Set up each of the possible streams that can be served by the 62 | // RTSP server. Each such stream is implemented using a 63 | // "ServerMediaSession" object, plus one or more 64 | // "ServerMediaSubsession" objects for each audio/video substream. 65 | // A H.264 video elementary stream: 66 | { 67 | char const* streamName = "stream"; 68 | char const* inputFileName = "test.264"; 69 | ServerMediaSession* sms 70 | = ServerMediaSession::createNew(*env, streamName, streamName, 71 | descriptionString); 72 | sms->addSubsession(DD_H264VideoFileServerMediaSubsession 73 | ::createNew(*env, inputFileName, reuseFirstSource)); 74 | rtspServer->addServerMediaSession(sms); 75 | 76 | announceStream(rtspServer, sms, streamName, inputFileName); 77 | } 78 | } 79 | int rtsp_process::process() { 80 | init(); 81 | eventLoopWatchVariable=0; 82 | env->taskScheduler().doEventLoop(&eventLoopWatchVariable); // does not return 83 | 84 | return 0; // only to prevent compiler warning 85 | } 86 | 87 | //有时候sms识别ip不上,收流地址是rtsp://ip:8554/h264ESVideoTest 88 | void rtsp_process::announceStream(RTSPServer* rtspServer, ServerMediaSession* sms, 89 | char const* streamName, char const* inputFileName) { 90 | char* url = rtspServer->rtspURL(sms); 91 | UsageEnvironment& env = rtspServer->envir(); 92 | env << "\n\"" << streamName << "\" stream, from the file \"" 93 | << inputFileName << "\"\n"; 94 | env << "Play this stream using the URL \"" << url << "\"\n"; 95 | delete[] url; 96 | } 97 | -------------------------------------------------------------------------------- /收流结果.mp4: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/yijiesun/3399pro_rtsp/6ed219ddb3639b7c0ba535fe0f1aa5c4387bf71a/收流结果.mp4 --------------------------------------------------------------------------------