├── .github └── workflows │ └── cmake.yml ├── .gitignore ├── .gitmodules ├── CMakeLists.txt ├── LICENSE ├── README.md ├── capture.png ├── src ├── CMakeLists.txt ├── DummySink.cpp ├── DummySink.cpp.bak ├── DummySink.h ├── DummySink.h.bak ├── EventLoop.cpp.bak ├── EventLoop.h.bak ├── FFMpeg.cpp ├── FFMpeg.h ├── FFMpegDecoder.cpp ├── FFMpegDecoder.h ├── FFMpegEncoder.cpp ├── FFMpegEncoder.h ├── Frame.h ├── H264Decoder.cpp ├── H264Decoder.cpp.bak ├── H264Decoder.h ├── H264Decoder.h.bak ├── H264ReadCameraEncoder.cpp ├── H264ReadCameraEncoder.h ├── H264ReadScreenEncoder.cpp ├── H264ReadScreenEncoder.h ├── MediaBasicUsageEnvironment.cpp ├── MediaBasicUsageEnvironment.h ├── MediaH264MediaSink.cpp ├── MediaH264MediaSink.h ├── MediaH264VideoRTPSink.cpp ├── MediaH264VideoRTPSink.h ├── MediaRTSPClient.cpp ├── MediaRTSPClient.h ├── MediaRTSPServer.cpp ├── MediaRTSPServer.h ├── MediaRTSPSession.cpp ├── MediaRTSPSession.h ├── MediaVideoFragmenter.cpp ├── MediaVideoFragmenter.h ├── MediaVideoRTPSink.cpp ├── MediaVideoRTPSink.h ├── MediaVideoServerMediaSubsession.cpp ├── MediaVideoServerMediaSubsession.h ├── MediaVideoStreamSource.cpp ├── MediaVideoStreamSource.h ├── StreamClientState.cpp ├── StreamClientState.h ├── YUV420P_Player.cpp ├── YUV420P_Player.h ├── bs.h ├── h264_avcc.cpp ├── h264_avcc.h ├── h264_sei.cpp ├── h264_sei.h ├── h264_stream.cpp ├── h264_stream.h ├── log_utils.c ├── log_utils.h └── rtspclient_with_opengl.cpp ├── test.264 └── test └── testDecoder.cpp /.github/workflows/cmake.yml: -------------------------------------------------------------------------------- 1 | name: CMake 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | env: 10 | # Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.) 11 | BUILD_TYPE: Release 12 | OUT_PATH: install 13 | jobs: 14 | build: 15 | # The CMake configure and build commands are platform agnostic and should work equally well on Windows or Mac. 16 | # You can convert this to a matrix build if you need cross-platform coverage. 17 | # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - uses: actions/checkout@v2 22 | - name: Submodule Init 23 | run: 24 | git submodule init 25 | 26 | - name: Submodule Update 27 | run: 28 | git submodule update 29 | 30 | - name: Update package 31 | run: 32 | sudo apt update 33 | 34 | - name: Install requirement package 35 | run: 36 | sudo apt install -y libxi-dev build-essential libdbus-1-dev libfontconfig1-dev libfreetype6-dev libx11-dev libgl-dev 37 | # sudo apt-get install -y mesa-common-dev 38 | # sudo apt-get install -y zlib1g-dev libqt4-opengl-dev 39 | # sudo apt-get install -y libglew-dev libglfw3-dev 40 | 41 | - name: Install OpenGL package 42 | run: 43 | sudo apt install -y libgl-dev libglu1-mesa-dev freeglut3-dev mesa-common-dev libgl1-mesa-dev libglew-dev libglfw3-dev 44 | 45 | - name: Install FFMpeg package 46 | run: 47 | sudo apt install -y ffmpeg libavcodec-dev libavutil-dev libavformat-dev libavfilter-dev libswscale-dev libavdevice-dev libavdevice-dev libavresample-dev libpostproc-dev libswresample-dev libswscale-dev 48 | 49 | - name: Install SDL2 package 50 | run: 51 | sudo apt install -y libsdl2-dev libsdl-dev libsdl-image1.2-dev 52 | 53 | 54 | - name: Configure CMake 55 | # Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make. 
56 | # See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type 57 | run: 58 | 59 | cmake -B ${{github.workspace}}/build -DCMAKE_VERBOSE_MAKEFILE=ON -DUSE_SDL2_LIBS=ON -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -DCMAKE_INSTALL_PREFIX=${{env.OUT_PATH}} 60 | 61 | - name: Build 62 | # Build your program with the given configuration 63 | run: cmake --build ${{github.workspace}}/build --config ${{env.BUILD_TYPE}} 64 | 65 | - name: Test 66 | working-directory: ${{github.workspace}}/build 67 | # Execute tests defined by the CMake configuration. 68 | # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail 69 | run: ctest -C ${{env.BUILD_TYPE}} 70 | 71 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | test 2 | linux 3 | build 4 | .vs 5 | CMakeSettings.json 6 | x64 7 | win32 8 | .vscode/*.log -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "tinylib"] 2 | path = tinylib 3 | url = https://github.com/diederickh/tinylib.git 4 | [submodule "live555"] 5 | path = live555 6 | url = https://github.com/melchi45/live555.git 7 | [submodule "vcpkg"] 8 | path = vcpkg 9 | url = https://github.com/melchi45/vcpkg.git 10 | [submodule "cmake"] 11 | path = cmake 12 | url = https://github.com/melchi45/cmake.git 13 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # rtspclient_with_opengl 2 | RTSP Client with live555 and opengl 3 | 4 | ![test application image](https://github.com/melchi45/rtspclient_with_opengl/blob/master/capture.png?raw=true) 5 | 6 | ## Overview 7 | Vcpkg helps you get C and C++ libraries on Windows. This tool and ecosystem are currently in a preview state; your involvement is vital to its success. 8 | 9 | For short description of available commands, run `vcpkg help`. 10 | 11 | 12 | ## Prerequisites: 13 | 14 | - Windows 10, 8.1, or 7 15 | - Visual Studio 2017 or Visual Studio 2015 Update 3 16 | - Git 17 | - *Optional: CMake 3.10.2 18 | 19 | ``` 20 | set DXSDK_PATH="D:\Tools\DXSDK" 21 | set PKG_CONFIG_EXECUTABLE="D:\Tools\pkg-config\bin\pkg-config.exe" 22 | set PKG_CONFIG_PATH="D:\workspace\ffmpeg-example\vcpkg\installed\x86-windows\lib" 23 | ``` 24 | 25 | ## vcpkg 26 | 27 | about vcpkg from this url. 28 | - https://blogs.msdn.microsoft.com/vcblog/2016/09/19/vcpkg-a-tool-to-acquire-and-build-c-open-source-libraries-on-windows/ 29 | - https://docs.microsoft.com/en-us/cpp/vcpkg 30 | 31 | install vcpkg to D:\Tools folder 32 | ``` 33 | d: 34 | mkdir Tools 35 | cd D:\Tools\ 36 | ``` 37 | 38 | Clone vcpkg repository, then run 39 | ``` 40 | git clone https://github.com/Microsoft/vcpkg 41 | cd vcpkg 42 | .\bootstrap-vcpkg.bat 43 | ``` 44 | 45 | Then, to hook up user-wide integration, run (note: requires admin on first use) 46 | ``` 47 | .\vcpkg integrate install 48 | PS D:\Tools\vcpkg> .\vcpkg integrate install 49 | Applied user-wide integration for this vcpkg root. 50 | 51 | All MSBuild C++ projects can now #include any installed libraries. 52 | Linking will be handled automatically. 53 | Installing new libraries will make them instantly available. 
54 | 
55 | CMake projects should use: "-DCMAKE_TOOLCHAIN_FILE=D:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake" 
56 | ``` 
57 | 
58 | Install packages for the x86 (x86-windows), x64 (x64-windows), arm (arm-windows), and arm64 (arm64-windows) Windows targets with 
59 | ``` 
60 | .\vcpkg install ffmpeg:x64-windows ffmpeg:x86-windows --recurse 
61 | .\vcpkg install openssl:x86-windows openssl:x64-windows 
62 | .\vcpkg install sdl2:x64-windows sdl2:x86-windows 
63 | .\vcpkg install pthreads:x64-windows pthreads:x86-windows 
64 | .\vcpkg install opengl:x86-windows opengl:x64-windows 
65 | .\vcpkg install glew:x86-windows glew:x64-windows 
66 | .\vcpkg install glfw3:x86-windows glfw3:x64-windows 
67 | .\vcpkg install glad:x86-windows glad:x64-windows 
68 | .\vcpkg install libpng:x86-windows libpng:x64-windows 
69 | .\vcpkg install zlib:x86-windows zlib:x64-windows 
70 | ``` 
71 | 
72 | Finally, create a new project (or open an existing one) in Visual Studio 2017 or 2015. All installed libraries are immediately ready to be `#include`'d and used in your project. 
73 | For CMake projects, simply include the vcpkg toolchain file. See the [using a package](docs/examples/using-sqlite.md) example for the specifics. 
74 | ## Tab-Completion / Auto-Completion 
75 | `Vcpkg` supports auto-completion of commands, package names, options, etc. To enable tab-completion in PowerShell, use 
76 | ``` 
77 | .\vcpkg integrate powershell 
78 | ``` 
79 | and restart PowerShell. 
80 | 
81 | Check the list of installed packages with 
82 | ``` 
83 | .\vcpkg list 
84 | ``` 
85 | 
86 | ## Dependency packages for Linux 
87 | ### For OpenGL 
88 | ``` 
89 | sudo apt-get install -y libgl-dev 
90 | sudo apt-get install -y mesa-common-dev 
91 | sudo apt-get install -y libglu1-mesa-dev freeglut3-dev mesa-common-dev libgl1-mesa-dev 
92 | sudo apt-get install -y libxi-dev build-essential libdbus-1-dev libfontconfig1-dev libfreetype6-dev libx11-dev 
93 | sudo apt-get install -y libqt4-dev zlib1g-dev libqt4-opengl-dev 
94 | sudo apt-get install -y libglew-dev libglfw3-dev 
95 | ``` 
96 | 
97 | ### For FFMpeg 
98 | ``` 
99 | sudo apt-get install -y libavcodec-dev libavformat-dev libavdevice-dev 
100 | sudo apt-get install -y ffmpeg libavcodec-dev libavutil-dev libavformat-dev libavfilter-dev libswscale-dev libavdevice-dev libavresample-dev libpostproc-dev libswresample-dev 
101 | ``` 
102 | 
103 | ### For SDL2 
104 | ``` 
105 | sudo apt-get install -y libsdl2-2.0 libsdl2-dev libsdl-dev libsdl-image1.2-dev 
106 | ``` 
107 | 
108 | ## Build for Windows 
109 | 
110 | ### Build with an IDE 
111 | Change the vcpkg path [E:\Tools\vcpkg\installed\x64-windows] in the following commands to match your environment. 
112 | ``` 
113 | cmake -B x64 -G "Visual Studio 16 2019" -A x64 -DCMAKE_VERBOSE_MAKEFILE=ON -DCMAKE_PREFIX_PATH="E:\Tools\vcpkg\installed\x64-windows" -DUSE_SDL2_LIBS=ON -DVCPKG_TARGET_TRIPLET=x64-windows -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=install 
114 | cmake --build x64 --config Release --target install 
115 | ``` 
116 | 
117 | ### Options 
118 | Add the vcpkg toolchain file so CMake can locate ffmpeg, pthreads, and the other installed packages: 
119 | ``` 
120 | -DCMAKE_TOOLCHAIN_FILE="D:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake" 
121 | ``` 
122 | 
123 | Build type 
124 | ``` 
125 | -DCMAKE_BUILD_TYPE=Release 
126 | ``` 
127 | 
128 | Install path 
129 | ``` 
130 | -DCMAKE_INSTALL_PREFIX=install 
131 | ``` 
132 | 
133 | ### Build without an IDE 
134 | If you omit the -G option, CMake generates the build files with the default generator for your platform.
135 | ``` 
136 | cmake -B windows -A x64 -DCMAKE_VERBOSE_MAKEFILE=ON -DCMAKE_PREFIX_PATH="E:\Tools\vcpkg\installed\x64-windows" -DUSE_SDL2_LIBS=ON -DVCPKG_TARGET_TRIPLET=x64-windows -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=install 
137 | cmake --build windows --config Release --target install 
138 | ``` 
139 | 
140 | ## Build for Linux 
141 | ``` 
142 | export OUT_PATH=./install 
143 | cmake -B linux -G "Unix Makefiles" \ 
144 | -DUSE_SDL2_LIBS=ON \ 
145 | -DCMAKE_VERBOSE_MAKEFILE=ON \ 
146 | -DCMAKE_BUILD_TYPE=Release \ 
147 | -DCMAKE_INSTALL_PREFIX=${OUT_PATH} 
148 | cmake --build linux --config Release --target install 
149 | ``` 
150 | 
151 | ## Executable binary 
152 | Usage 
153 | 
154 | - `-r`: RTSP camera or video server URL, e.g. a Wowza media server URL such as rtsp://<host>:1935/vod/sample.mp4 
155 | - `-u`: account for authentication, given as username and password 
156 | - `-i`: interleaved mode, carrying audio, video, and texture data over a single TCP channel 
157 | - `-U`: upstream transport mode using the RTSP ANNOUNCE and RECORD methods; this mode uploads the video stream to a media server 
158 | 
159 | `./rtspclient_with_opengl -r <url> -u <username> <password> [-i] [-U]` 
160 | 
161 | ``` 
162 | ./rtspclient_with_opengl -r rtsp://example.com:1935/vod/sample.mp4 -u admin password 
163 | ``` 
164 | 
165 | 
166 | ## Reference 
167 | http://roxlu.com/2014/039/decoding-h264-and-yuv420p-playback 
168 | http://www.voidcn.com/article/p-tlcacfpn-bs.html 
169 | https://github.com/sipsorcery/mediafoundationsamples/tree/master/MFWebCamRtp 
170 | http://en.pudn.com/Download/item/id/1303846.html 
171 | http://blog.chinaunix.net/uid-15063109-id-4482932.html 
172 | https://stackoverflow.com/questions/34619418/ffmpeg-does-not-decode-some-h264-streams 
173 | https://stackoverflow.com/questions/17579286/sdl2-0-alternative-for-sdl-overlay 
174 | https://github.com/sipsorcery/mediafoundationsamples/blob/master/MFWebCamRtp/MFWebCamRtp.cpp 
175 | https://stackoverflow.com/questions/19427576/live555-x264-stream-live-source-based-on-testondemandrtspserver 
176 | 
177 | H.264 Decoder using ffmpeg 
178 | https://gist.github.com/roxlu/9329339 
179 | https://github.com/tzyluen/h.264tzy/blob/master/cpp/H264_Decoder.cpp 
180 | https://github.com/xiongziliang/ZLMediaKit 
181 | https://github.com/royshil/KinectAtHomeExtension 
182 | https://github.com/flowerinthenight/ffmpeg-encode-h264mp4/tree/master/H264Encoder 
183 | https://codegists.com/snippet/c/h264_decodercpp_alenstar_c 
184 | https://github.com/MrKepzie/openfx-io/blob/master/FFmpeg/FFmpegFile.h 
185 | https://github.com/ChaoticConundrum/h264-roi/blob/master/zh264decoder.cpp 
186 | https://github.com/shengbinmeng/ffmpeg-h264-dec 
187 | 
188 | Tinylib 
189 | http://tiny-lib.readthedocs.io/en/latest/guide.html#introduction-to-tiny-lib 
190 | 
191 | https://www.bountysource.com/issues/46787191-can-not-find-ffmpeg-by-find_package 
-------------------------------------------------------------------------------- 
/capture.png: 
-------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/melchi45/rtspclient_with_opengl/593eb17f7549279995a870d740ee2daed23c94a4/capture.png 
-------------------------------------------------------------------------------- 
/src/CMakeLists.txt: 
-------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/melchi45/rtspclient_with_opengl/593eb17f7549279995a870d740ee2daed23c94a4/src/CMakeLists.txt 
-------------------------------------------------------------------------------- 
/src/DummySink.cpp: 
-------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include "DummySink.h" 35 | 36 | #define DUMMY_SINK_RECEIVE_BUFFER_SIZE 2764800 37 | 38 | DummySink* DummySink::createNew(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId) { 39 | return new DummySink(env, subsession, streamId); 40 | } 41 | 42 | DummySink::DummySink(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId) 43 | : MediaSink(env), 44 | fSubsession(subsession) { 45 | fStreamId = strDup(streamId); 46 | fReceiveBuffer = new u_int8_t[DUMMY_SINK_RECEIVE_BUFFER_SIZE]; 47 | memset(fReceiveBuffer, 0, DUMMY_SINK_RECEIVE_BUFFER_SIZE); 48 | } 49 | 50 | DummySink::~DummySink() { 51 | delete[] fReceiveBuffer; 52 | delete[] fStreamId; 53 | } 54 | 55 | void DummySink::afterGettingFrame(void* clientData, unsigned frameSize, unsigned numTruncatedBytes, 56 | struct timeval presentationTime, unsigned durationInMicroseconds) { 57 | DummySink* sink = (DummySink*)clientData; 58 | sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds); 59 | } 60 | 61 | // If you don't want to see debugging output for each received frame, then comment out the following line: 62 | //#define DEBUG_PRINT_EACH_RECEIVED_FRAME 1 63 | #define DEBUG_PRINT_NPT 1 64 | 65 | void DummySink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, 66 | struct timeval presentationTime, unsigned /*durationInMicroseconds*/) { 67 | // We've just received a frame of data. (Optionally) print out information about it: 68 | #ifdef DEBUG_PRINT_EACH_RECEIVED_FRAME 69 | if (fStreamId != NULL) envir() << "Stream \"" << fStreamId << "\"; "; 70 | envir() << fSubsession.mediumName() << "/" << fSubsession.codecName() << ":\tReceived " << frameSize << " bytes"; 71 | 72 | if (numTruncatedBytes > 0) { 73 | envir() << " (with " << numTruncatedBytes << " bytes truncated)"; 74 | } 75 | 76 | // calculate microseconds 77 | char uSecsStr[6+1]; // used to output the 'microseconds' part of the presentation time 78 | sprintf(uSecsStr, "%06u", (unsigned)presentationTime.tv_usec); 79 | 80 | envir() << ".\tPresentation time: " << (unsigned)presentationTime.tv_sec << "." 
<< uSecsStr; 81 | 82 | if (fSubsession.rtpSource() != NULL && !fSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP()) { 83 | envir() << "!"; // mark the debugging output to indicate that this presentation time is not RTCP-synchronized 84 | } 85 | #ifdef DEBUG_PRINT_NPT 86 | envir() << "\tNPT: " << fSubsession.getNormalPlayTime(presentationTime); 87 | #endif 88 | envir() << "\n"; 89 | #endif 90 | 91 | // Then continue, to request the next frame of data: 92 | continuePlaying(); 93 | } 94 | 95 | Boolean DummySink::continuePlaying() { 96 | if (fSource == NULL) return False; // sanity check (should not happen) 97 | 98 | // Request the next frame of data from our input source. "afterGettingFrame()" will get called later, when it arrives: 99 | fSource->getNextFrame(fReceiveBuffer, DUMMY_SINK_RECEIVE_BUFFER_SIZE, 100 | afterGettingFrame, this, 101 | onSourceClosure, this); 102 | 103 | return True; 104 | } -------------------------------------------------------------------------------- /src/DummySink.cpp.bak: -------------------------------------------------------------------------------- 1 | // Implementation of "DummySink": 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include "DummySink.h" 7 | 8 | ////////////////////////////////////////////////////////////////////////// 9 | // my variable 10 | extern std::vector data; 11 | extern std::map inds; 12 | extern int nowind; 13 | extern std::string nowstr; 14 | 15 | // Even though we're not going to be doing anything with the incoming data, we still need to receive it. 16 | // Define the size of the buffer that we'll use: 17 | #define DUMMY_SINK_RECEIVE_BUFFER_SIZE 100000 18 | 19 | DummySink* DummySink::createNew(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId) { 20 | return new DummySink(env, subsession, streamId); 21 | } 22 | 23 | DummySink::DummySink(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId) 24 | : MediaSink(env), 25 | fSubsession(subsession) { 26 | fStreamId = strDup(streamId); 27 | fReceiveBuffer = new u_int8_t[DUMMY_SINK_RECEIVE_BUFFER_SIZE]; 28 | 29 | ////////////////////////////////////////////////////////////////////////// 30 | // my dcde 31 | fReceiveBufferadd4 = new u_int8_t[DUMMY_SINK_RECEIVE_BUFFER_SIZE + 4]; 32 | fReceiveBufferadd4[0] = 0; 33 | fReceiveBufferadd4[1] = 0; 34 | fReceiveBufferadd4[2] = 0; 35 | fReceiveBufferadd4[3] = 1; 36 | 37 | // my code 38 | ////////////////////////////////////////////////////////////////////////// 39 | 40 | } 41 | 42 | DummySink::~DummySink() { 43 | delete[] fReceiveBuffer; 44 | delete[] fStreamId; 45 | 46 | delete[] fReceiveBufferadd4; 47 | } 48 | 49 | void DummySink::afterGettingFrame(void* clientData, unsigned frameSize, unsigned numTruncatedBytes, 50 | struct timeval presentationTime, unsigned durationInMicroseconds) { 51 | DummySink* sink = (DummySink*)clientData; 52 | sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds); 53 | } 54 | 55 | // If you don't want to see debugging output for each received frame, then comment out the following line: 56 | // #define DEBUG_PRINT_EACH_RECEIVED_FRAME 1 57 | 58 | ////////////////////////////////////////////////////////////////////////// 59 | // my code 60 | void DummySink::setSprop(u_int8_t const* prop, unsigned size) 61 | { 62 | u_int8_t *buf; 63 | u_int8_t *buf_start; 64 | buf = new u_int8_t[1000]; 65 | buf_start = buf + 4; 66 | 67 | buf[0] = 0; 68 | buf[1] = 0; 69 | buf[2] = 0; 70 | buf[3] = 1; 71 | memcpy(buf_start, prop, size); 72 | 73 | 
std::stringstream stream; 74 | for (int i = 0; i< size + 4; i++) 75 | { 76 | stream << buf[i]; 77 | } 78 | 79 | nowstr = stream.str(); 80 | data[nowind] = data[nowind] + nowstr; 81 | 82 | delete[] buf; 83 | 84 | // envir() << "after setSprop\n"; 85 | } 86 | // my code end 87 | ////////////////////////////////////////////////////////////////////////// 88 | 89 | void DummySink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, 90 | struct timeval presentationTime, unsigned /*durationInMicroseconds*/) { 91 | // We've just received a frame of data. (Optionally) print out information about it: 92 | #ifdef DEBUG_PRINT_EACH_RECEIVED_FRAME 93 | if (fStreamId != NULL) envir() << "Stream \"" << fStreamId << "\"; "; 94 | envir() << fSubsession.mediumName() << "/" << fSubsession.codecName() << ":\tReceived " << frameSize << " bytes"; 95 | if (numTruncatedBytes > 0) envir() << " (with " << numTruncatedBytes << " bytes truncated)"; 96 | char uSecsStr[6 + 1]; // used to output the 'microseconds' part of the presentation time 97 | sprintf(uSecsStr, "%06u", (unsigned)presentationTime.tv_usec); 98 | envir() << ".\tPresentation time: " << (int)presentationTime.tv_sec << "." << uSecsStr; 99 | if (fSubsession.rtpSource() != NULL && !fSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP()) { 100 | envir() << "!"; // mark the debugging output to indicate that this presentation time is not RTCP-synchronized 101 | } 102 | #ifdef DEBUG_PRINT_NPT 103 | envir() << "\tNPT: " << fSubsession.getNormalPlayTime(presentationTime); 104 | #endif 105 | envir() << "\n"; 106 | #endif 107 | 108 | ////////////////////////////////////////////////////////////////////////// 109 | // my code 110 | if (!strcmp("video", fSubsession.mediumName()) && 111 | !strcmp("H264", fSubsession.codecName())) 112 | { 113 | if (frameSize + 4 != 0) 114 | { 115 | memcpy(fReceiveBufferadd4 + 4, fReceiveBuffer, frameSize); 116 | 117 | std::stringstream stream; 118 | for (int i = 0; i< frameSize + 4; i++) 119 | { 120 | stream << fReceiveBufferadd4[i]; 121 | } 122 | 123 | char name[256]; 124 | sprintf(name, "%s", fStreamId); 125 | int strl = strlen(name); 126 | name[strl - 1] = '\0'; 127 | nowind = inds[name]; 128 | 129 | nowstr = stream.str(); 130 | data[nowind] = data[nowind] + nowstr; 131 | } 132 | 133 | int height = fSubsession.videoHeight(); 134 | int width = fSubsession.videoWidth(); 135 | } 136 | // ,y code end 137 | ////////////////////////////////////////////////////////////////////////// 138 | 139 | // Then continue, to request the next frame of data: 140 | continuePlaying(); 141 | } 142 | 143 | Boolean DummySink::continuePlaying() { 144 | if (fSource == NULL) return False; // sanity check (should not happen) 145 | 146 | // Request the next frame of data from our input source. "afterGettingFrame()" will get called later, when it arrives: 147 | fSource->getNextFrame(fReceiveBuffer, DUMMY_SINK_RECEIVE_BUFFER_SIZE, 148 | afterGettingFrame, this, 149 | onSourceClosure, this); 150 | return True; 151 | } -------------------------------------------------------------------------------- /src/DummySink.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef DUMMYSINK_H_ 35 | #define DUMMYSINK_H_ 36 | 37 | #include 38 | #include "liveMedia.hh" 39 | 40 | // Define a data sink (a subclass of "MediaSink") to receive the data for each subsession (i.e., each audio or video 'substream'). 41 | // In practice, this might be a class (or a chain of classes) that decodes and then renders the incoming audio or video. 42 | // Or it might be a "FileSink", for outputting the received data into a file (as is done by the "openRTSP" application). 43 | // In this example code, however, we define a simple 'dummy' sink that receives incoming data, but does nothing with it. 44 | class DummySink : public MediaSink { 45 | public: 46 | static DummySink* createNew(UsageEnvironment& env, 47 | MediaSubsession& subsession, // identifies the kind of data that's being received 48 | char const* streamId = NULL); // identifies the stream itself (optional) 49 | 50 | private: 51 | DummySink(UsageEnvironment& env, MediaSubsession& subsession, 52 | char const* streamId); 53 | // called only by "createNew()" 54 | virtual ~DummySink(); 55 | 56 | static void afterGettingFrame(void* clientData, unsigned frameSize, 57 | unsigned numTruncatedBytes, struct timeval presentationTime, 58 | unsigned durationInMicroseconds); 59 | void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, 60 | struct timeval presentationTime, unsigned durationInMicroseconds); 61 | 62 | private: 63 | // redefined virtual functions: 64 | virtual Boolean continuePlaying(); 65 | 66 | private: 67 | u_int8_t* fReceiveBuffer; 68 | MediaSubsession& fSubsession; 69 | char* fStreamId; 70 | }; 71 | 72 | #endif /* DUMMYSINK_H_ */ 73 | -------------------------------------------------------------------------------- /src/DummySink.h.bak: -------------------------------------------------------------------------------- 1 | #ifndef _DUMMY_SINK_HH 2 | #define _DUMMY_SINK_HH 3 | 4 | #include "liveMedia.hh" 5 | 6 | // Define a data sink (a subclass of "MediaSink") to receive the data for each subsession (i.e., each audio or video 'substream'). 7 | // In practice, this might be a class (or a chain of classes) that decodes and then renders the incoming audio or video. 8 | // Or it might be a "FileSink", for outputting the received data into a file (as is done by the "openRTSP" application). 9 | // In this example code, however, we define a simple 'dummy' sink that receives incoming data, but does nothing with it. 
10 | 11 | class DummySink : public MediaSink { 12 | public: 13 | static DummySink* createNew(UsageEnvironment& env, 14 | MediaSubsession& subsession, // identifies the kind of data that's being received 15 | char const* streamId = NULL); // identifies the stream itself (optional) 16 | 17 | private: 18 | DummySink(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId); 19 | // called only by "createNew()" 20 | virtual ~DummySink(); 21 | 22 | static void afterGettingFrame(void* clientData, unsigned frameSize, 23 | unsigned numTruncatedBytes, 24 | struct timeval presentationTime, 25 | unsigned durationInMicroseconds); 26 | void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, 27 | struct timeval presentationTime, unsigned durationInMicroseconds); 28 | 29 | private: 30 | // redefined virtual functions: 31 | virtual Boolean continuePlaying(); 32 | 33 | private: 34 | u_int8_t * fReceiveBuffer; 35 | MediaSubsession& fSubsession; 36 | char* fStreamId; 37 | 38 | ////////////////////////////////////////////////////////////////////////// 39 | // my code 40 | 41 | private: //H264 42 | u_int8_t * fReceiveBufferadd4; 43 | 44 | u_int8_t const* sps; 45 | unsigned spsSize; 46 | u_int8_t const* pps; 47 | unsigned ppsSize; 48 | 49 | public: 50 | void setSprop(u_int8_t const* prop, unsigned size); 51 | // mycode end 52 | ////////////////////////////////////////////////////////////////////////// 53 | }; 54 | 55 | #endif // _DUMMY_SINK_HH -------------------------------------------------------------------------------- /src/EventLoop.cpp.bak: -------------------------------------------------------------------------------- 1 | // Implementation of "EventLoop": 2 | 3 | #include "EventLoop.h" 4 | 5 | extern bool isend; 6 | 7 | /// ////////////////////////////////////////////////////////////////////////// 8 | EventLoop::EventLoop() 9 | { 10 | 11 | } 12 | 13 | void EventLoop::doEventLoop(BasicTaskScheduler0* Basicscheduler) 14 | { // Repeatedly loop, handling readble sockets and timed events: 15 | while (isend) { 16 | //printf("zjk\n"); 17 | Basicscheduler->SingleStep(); 18 | //ADD Sth else 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/EventLoop.h.bak: -------------------------------------------------------------------------------- 1 | #ifndef _EVENT_LOOP_HH 2 | #define _EVENT_LOOP_HH 3 | 4 | #ifndef _BASIC_USAGE_ENVIRONMENT0_HH 5 | #include "BasicUsageEnvironment0.hh" 6 | #endif 7 | 8 | class EventLoop 9 | { 10 | public: 11 | EventLoop(); 12 | void doEventLoop(BasicTaskScheduler0* Basicscheduler); 13 | }; 14 | 15 | #endif // _EVENT_LOOP_HH -------------------------------------------------------------------------------- /src/FFMpeg.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _FFMPEG_H_ 35 | #define _FFMPEG_H_ 36 | 37 | // reference from 38 | // http://blog.chinaunix.net/uid-15063109-id-4482932.html 39 | 40 | #include // for std::function 41 | #include 42 | #include 43 | 44 | extern "C" { 45 | // disable warnings about badly formed documentation from ffmpeg, which don't need at all 46 | #pragma warning(disable:4635) 47 | // disable warning about conversion int64 to int32 48 | #pragma warning(disable:4244) 49 | #include 50 | #include 51 | #include 52 | #include 53 | #include 54 | 55 | #pragma comment(lib, "avutil.lib") 56 | #pragma comment(lib, "avcodec.lib") 57 | #pragma comment(lib, "avformat.lib") 58 | #pragma comment(lib, "avdevice.lib") 59 | #pragma comment(lib, "swscale.lib") 60 | } 61 | 62 | /* Fallback support for older libavcodec versions */ 63 | #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(54, 59, 100) 64 | #define AV_CODEC_ID_H264 CODEC_ID_H264 65 | #endif 66 | 67 | #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(56, 34, 2) 68 | #define AV_CODEC_FLAG_LOOP_FILTER CODEC_FLAG_LOOP_FILTER 69 | #define AV_CODEC_CAP_TRUNCATED CODEC_CAP_TRUNCATED 70 | #define AV_CODEC_FLAG_TRUNCATED CODEC_FLAG_TRUNCATED 71 | #endif 72 | 73 | #if LIBAVUTIL_VERSION_MAJOR < 52 74 | #define AV_PIX_FMT_YUV420P PIX_FMT_YUV420P 75 | #endif 76 | 77 | #define USE_LIVE555 1 78 | 79 | class Frame; 80 | class Listener 81 | { 82 | private: 83 | 84 | }; 85 | 86 | class DecodeListener : public Listener 87 | { 88 | public: 89 | virtual void onDecoded(void* frame) = 0; 90 | }; 91 | 92 | class EncodeListener : public Listener 93 | { 94 | public: 95 | virtual void onEncoded() = 0; 96 | }; 97 | 98 | #if USE_LIVE555 99 | class UsageEnvironment; 100 | #endif 101 | class FFMpeg 102 | { 103 | public: 104 | #if USE_LIVE555 105 | FFMpeg(UsageEnvironment& env); 106 | #else 107 | FFMpeg(); 108 | #endif 109 | virtual ~FFMpeg(); 110 | 111 | virtual int intialize(); 112 | virtual int finalize(); 113 | 114 | //virtual int decode(uint8_t* input, int nLen, bool bWaitIFrame = false) = 0; 115 | 116 | void setOnDecodedCallbackFunction(std::function func); 117 | void setOnEncodedCallbackFunction(std::function func); 118 | 119 | void setListener(Listener* listener) { m_plistener = listener; } 120 | 121 | void setDstWidth(int width) { dstWidth = width; } 122 | void setDstHeight(int height) { dstHeight = height; } 123 | void setRescaleSize(int width, int height) { dstWidth = width; dstHeight = height; } 124 | int getWidth() { return dstWidth; } 125 | int getHeight() { return dstHeight; } 126 | 127 | void setSWSType(int type) { sws_flags = type; } 128 | int getSWSType() { return sws_flags; } 129 | 130 | protected: 131 | int save_frame_as_jpeg(AVFrame *pframe); 132 | int save_frame_as_png(AVFrame *pframe); 133 | int save_frame_as_ppm(AVFrame *pframe); 134 | int save_frame_as_yuv420p(AVFrame *pFrame); 135 | int save_frame_as_yuv422p(AVFrame *pFrame); 136 | 137 | AVFrame* frame_rgb_yuv420p(AVFrame* pFrame); 138 | AVFrame* frame_yuv420p_rgb(AVFrame* pFrame); 139 | protected: 140 | 
bool m_bInit; 141 | AVCodecContext *pCodecCtx; 142 | AVFormatContext *pFormatCtx; 143 | struct SwsContext *img_convert_ctx; 144 | AVCodecID codec_id; 145 | AVFrame *pFrame; 146 | AVCodec * pCodec; 147 | AVStream* pStream; 148 | #if USE_LIVE555 149 | UsageEnvironment& fEnviron; 150 | #endif 151 | Listener* m_plistener; 152 | std::function onDecoded; 153 | std::function onEncoded; 154 | 155 | int srcWidth, dstWidth; 156 | int srcHeight, dstHeight; 157 | 158 | int frame_count; 159 | 160 | std::queue inqueue; 161 | pthread_mutex_t inqueue_mutex; 162 | std::queue outqueue; 163 | pthread_mutex_t outqueue_mutex; 164 | 165 | private: 166 | int sws_flags; 167 | }; 168 | #endif // _FFMPEG_H_ -------------------------------------------------------------------------------- /src/FFMpegDecoder.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include "FFMpegDecoder.h" 35 | //#include "log_utils.h" 36 | #include "liveMedia.hh" 37 | 38 | //#define SAVE_AVFRAME_TO_JPEG 1 39 | 40 | static int sws_flags = SWS_BICUBIC; 41 | 42 | /* Fallback support for older libavcodec versions */ 43 | #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(54, 59, 100) 44 | #define AV_CODEC_ID_H264 CODEC_ID_H264 45 | #endif 46 | 47 | #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(56, 34, 2) 48 | #define AV_CODEC_FLAG_LOOP_FILTER CODEC_FLAG_LOOP_FILTER 49 | #define AV_CODEC_CAP_TRUNCATED CODEC_CAP_TRUNCATED 50 | #define AV_CODEC_FLAG_TRUNCATED CODEC_FLAG_TRUNCATED 51 | #endif 52 | 53 | #if LIBAVUTIL_VERSION_MAJOR < 52 54 | #define AV_PIX_FMT_YUV420P PIX_FMT_YUV420P 55 | #endif 56 | 57 | FFmpegDecoder::FFmpegDecoder(UsageEnvironment& env) 58 | : frame_count(0) 59 | , m_bInit(false) 60 | , img_convert_ctx(NULL) 61 | , pClient(NULL) 62 | , fEnviron(env) 63 | { 64 | } 65 | 66 | FFmpegDecoder::~FFmpegDecoder() 67 | { 68 | //av_lockmgr_register(NULL); 69 | } 70 | 71 | 72 | int FFmpegDecoder::intialize() 73 | { 74 | // Intialize FFmpeg enviroment 75 | av_register_all(); 76 | avdevice_register_all(); 77 | avcodec_register_all(); 78 | avformat_network_init(); 79 | 80 | //if (av_lockmgr_register(lockmgr)) 81 | { 82 | // m_state = RC_STATE_INIT_ERROR; 83 | // return -1; 84 | } 85 | return 0; 86 | } 87 | 88 | int FFmpegDecoder::openDecoder(int width, int height, CDecodeCB* pCB) 
89 | { 90 | m_nWidth = width; 91 | m_nHeight = height; 92 | m_pCB = pCB; 93 | if (m_bInit) 94 | return -1; 95 | 96 | // FORMAT CONTEXT SETUP 97 | // format_context = avformat_alloc_context(); 98 | // format_context->flags = AVFMT_NOFILE; 99 | 100 | // DECODER SETUP 101 | decoder = avcodec_find_decoder(AV_CODEC_ID_H264); 102 | if (!decoder) 103 | { 104 | //log_error("codec not found"); 105 | fEnviron << "codec not found"; 106 | return -2; 107 | } 108 | 109 | 110 | decoder_context = avcodec_alloc_context3(decoder); 111 | if (!decoder_context) 112 | { 113 | //log_error("codec context not found"); 114 | fEnviron << "codec context not found"; 115 | return -3; 116 | } 117 | // decoder_context->codec_id = AV_CODEC_ID_H264; 118 | // decoder_context->codec_type = AVMEDIA_TYPE_VIDEO; 119 | // decoder_context->pix_fmt = AV_PIX_FMT_YUV420P; 120 | // decoder_context->width = width; 121 | // decoder_context->height = height; 122 | // avcodec_parameters_to_context(decoder_context, st->codecpar); 123 | 124 | #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 45, 101) 125 | decoder_picture = av_frame_alloc(); 126 | #else 127 | decode_picture = avcodec_alloc_frame(); 128 | #endif 129 | 130 | if (avcodec_open2(decoder_context, decoder, NULL) < 0) 131 | { 132 | //log_error("could not open codec"); 133 | fEnviron << "could not open codec"; 134 | return -4; 135 | } 136 | m_bInit = true; 137 | return 0; 138 | } 139 | 140 | int FFmpegDecoder::finalize() 141 | { 142 | if (decoder_context) 143 | { 144 | avcodec_close(decoder_context); 145 | //avcodec_free_context(&decoder_context); 146 | av_free(decoder_context); 147 | } 148 | if (decoder_picture) 149 | av_free(decoder_picture); 150 | 151 | m_bInit = false; 152 | 153 | return 0; 154 | } 155 | 156 | int FFmpegDecoder::decode_rtsp_frame(uint8_t* input, int nLen, bool bWaitIFrame /*= false*/) 157 | { 158 | if (!m_bInit) 159 | return -1; 160 | 161 | if (input == NULL || nLen <= 0) 162 | return -2; 163 | 164 | try { 165 | int got_picture; 166 | int size = nLen; 167 | 168 | AVPacket avpkt; 169 | av_init_packet(&avpkt); 170 | avpkt.size = size; 171 | avpkt.data = input; 172 | 173 | //while (avpkt.size > 0) 174 | { 175 | #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(52, 72, 2) 176 | int len = avcodec_decode_video(decoder_context, decoder_picture, &got_picture, avpkt->data, avpkt->size); 177 | #else 178 | int len = avcodec_decode_video2(decoder_context, decoder_picture, &got_picture, &avpkt); // libavcodec >= 52.72.2 (0.6) 179 | #endif 180 | 181 | if (len == -1) 182 | { 183 | return -3; 184 | } 185 | 186 | if (got_picture) 187 | { 188 | ++frame_count; 189 | // int w = decode_context->width; 190 | // int h = decode_context->height; 191 | int numBytes = avpicture_get_size(AV_PIX_FMT_RGB24, decoder_context->width, decoder_context->height); 192 | uint8_t * buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t)); 193 | 194 | // YUV to RGB Color 195 | // reference 196 | // https://gist.github.com/lkraider/832062 197 | #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 45, 101) 198 | AVFrame *pFrameRGB = av_frame_alloc(); 199 | #else 200 | AVFrame *pFrameRGB = avcodec_alloc_frame(); 201 | #endif 202 | avpicture_fill((AVPicture *)pFrameRGB, buffer, AV_PIX_FMT_RGB24, decoder_context->width, decoder_context->height); 203 | 204 | img_convert_ctx = sws_getCachedContext(img_convert_ctx, 205 | decoder_context->width, decoder_context->height, decoder_context->pix_fmt, decoder_context->width, decoder_context->height, AV_PIX_FMT_RGB24, sws_flags, NULL, NULL, NULL); 206 | if (img_convert_ctx == 
NULL) 207 | { 208 | //log_error("Cannot initialize the conversion context"); 209 | fEnviron << "Cannot initialize the conversion context"; 210 | //exit(1); 211 | return -4; 212 | } 213 | sws_scale(img_convert_ctx, decoder_picture->data, decoder_picture->linesize, 214 | 0, decoder_context->height, pFrameRGB->data, pFrameRGB->linesize); 215 | 216 | #if defined(SAVE_AVFRAME_TO_PPM) 217 | save_frame_as_ppm(pFrameRGB); 218 | #elif defined(SAVE_AVFRAME_TO_JPEG) 219 | save_frame_as_jpeg(decoder_picture); 220 | #endif 221 | if (m_pCB) 222 | { 223 | int pitch = pFrameRGB->linesize[0]; 224 | m_pCB->videoCB(decoder_context->width, decoder_context->height, pFrameRGB->data[0], numBytes * sizeof(uint8_t), pitch, pClient); 225 | } 226 | 227 | av_free(buffer); 228 | av_free(pFrameRGB); 229 | return 0; 230 | 231 | if (avpkt.data) 232 | { 233 | avpkt.size -= len; 234 | avpkt.data += len; 235 | } 236 | } 237 | else 238 | { 239 | return -5; 240 | } 241 | //return 0; 242 | } 243 | 244 | //return 0; 245 | } 246 | catch (...) 247 | { 248 | } 249 | 250 | return -6; 251 | } 252 | 253 | int FFmpegDecoder::save_frame_as_ppm(AVFrame *pframe) 254 | { 255 | FILE *pFile; 256 | char szFilename[32]; 257 | int y; 258 | 259 | // Open file 260 | sprintf(szFilename, "frame_%06ld.ppm", frame_count); 261 | pFile = fopen(szFilename, "wb"); 262 | if (pFile == NULL) 263 | return -7; 264 | 265 | // Write header 266 | fprintf(pFile, "P6\n%d %d\n255\n", decoder_context->width, decoder_context->height); 267 | 268 | // Write pixel data 269 | // Write pixel data 270 | for (int y = 0; yheight; y++) 271 | fwrite(pframe->data[0] + y * pframe->linesize[0], 1, decoder_context->width * 3, pFile); 272 | 273 | // Close file 274 | fclose(pFile); 275 | } 276 | 277 | int FFmpegDecoder::save_frame_as_jpeg(AVFrame *pframe) 278 | { 279 | char szFilename[32]; 280 | //sprintf_s(szFilename, sizeof(szFilename), "frame_%06ld.jpg", frame_count); 281 | sprintf(szFilename, "frame_%06ld.jpg", frame_count); 282 | 283 | AVFormatContext* pFormatCtx = avformat_alloc_context(); 284 | 285 | pFormatCtx->oformat = av_guess_format("mjpeg", NULL, NULL); 286 | if (avio_open(&pFormatCtx->pb, szFilename, AVIO_FLAG_READ_WRITE) < 0) { 287 | //printf("Couldn't open output file."); 288 | fEnviron << "Couldn't open output file."; 289 | return -1; 290 | } 291 | 292 | // Begin Output some information 293 | av_dump_format(pFormatCtx, 0, szFilename, 1); 294 | // End Output some information 295 | 296 | AVStream* pAVStream = avformat_new_stream(pFormatCtx, 0); 297 | if (pAVStream == NULL) { 298 | return -1; 299 | } 300 | 301 | AVCodecContext* pCodecCtx = pAVStream->codec; 302 | 303 | pCodecCtx->codec_id = pFormatCtx->oformat->video_codec; 304 | pCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO; 305 | pCodecCtx->pix_fmt = AV_PIX_FMT_YUVJ420P; 306 | pCodecCtx->width = decoder_context->width; 307 | pCodecCtx->height = decoder_context->height; 308 | pCodecCtx->time_base.num = 1; 309 | pCodecCtx->time_base.den = 25; 310 | 311 | AVCodec* pCodec = avcodec_find_encoder(pCodecCtx->codec_id); 312 | if (!pCodec) { 313 | //printf("Codec not found."); 314 | fEnviron << "Codec not found."; 315 | return -1; 316 | } 317 | if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) { 318 | //printf("Could not open codec."); 319 | fEnviron << "Could not open codec."; 320 | return -1; 321 | } 322 | 323 | //Write Header 324 | avformat_write_header(pFormatCtx, NULL); 325 | 326 | int y_size = pCodecCtx->width * pCodecCtx->height; 327 | 328 | //Encode 329 | AVPacket pkt; 330 | av_new_packet(&pkt, y_size * 3); 331 | 332 | // 
333 | int got_picture = 0; 334 | int ret = avcodec_encode_video2(pCodecCtx, &pkt, pframe, &got_picture); 335 | if (ret < 0) { 336 | //printf("Encode Error.\n"); 337 | fEnviron << "Encode Error."; 338 | return -1; 339 | } 340 | if (got_picture == 1) { 341 | //pkt.stream_index = pAVStream->index; 342 | ret = av_write_frame(pFormatCtx, &pkt); 343 | } 344 | 345 | av_free_packet(&pkt); 346 | 347 | //Write Trailer 348 | av_write_trailer(pFormatCtx); 349 | 350 | //printf("Encode Successful.\n"); 351 | fEnviron << "Image Frame Encode Successful."; 352 | 353 | if (pAVStream) { 354 | avcodec_close(pAVStream->codec); 355 | } 356 | avio_close(pFormatCtx->pb); 357 | avformat_free_context(pFormatCtx); 358 | 359 | return 0; 360 | } 361 | 362 | void FFmpegDecoder::setClient(RTSPClient* client) 363 | { 364 | this->pClient = client; 365 | } -------------------------------------------------------------------------------- /src/FFMpegDecoder.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _FFMPEG_DECODER_H_ 35 | #define _FFMPEG_DECODER_H_ 36 | 37 | // reference from 38 | // http://blog.chinaunix.net/uid-15063109-id-4482932.html 39 | 40 | #include // for std::function 41 | 42 | extern "C" { 43 | // disable warnings about badly formed documentation from ffmpeg, which don't need at all 44 | #pragma warning(disable:4635) 45 | // disable warning about conversion int64 to int32 46 | #pragma warning(disable:4244) 47 | #include 48 | #include 49 | #include 50 | #include 51 | #include 52 | 53 | #pragma comment(lib, "avutil.lib") 54 | #pragma comment(lib, "avcodec.lib") 55 | #pragma comment(lib, "avformat.lib") 56 | #pragma comment(lib, "swscale.lib") 57 | } 58 | 59 | class RTSPClient; 60 | class UsageEnvironment; 61 | class CDecodeCB 62 | { 63 | public: 64 | virtual void videoCB(int width, int height, uint8_t* buff, int len, int pitch, RTSPClient* pClient) = 0; 65 | }; 66 | 67 | class FFmpegDecoder 68 | { 69 | public: 70 | FFmpegDecoder(UsageEnvironment& env); 71 | ~FFmpegDecoder(); 72 | 73 | int intialize(); 74 | int finalize(); 75 | 76 | int openDecoder(int width, int height, CDecodeCB* pCB); 77 | void setClient(RTSPClient* client); 78 | int decode_rtsp_frame(uint8_t* input, int nLen, bool bWaitIFrame = false); 79 | 80 | private: 81 | int 
save_frame_as_jpeg(AVFrame *pframe); 82 | int save_frame_as_ppm(AVFrame *pframe); 83 | 84 | private: 85 | bool m_bInit; 86 | AVCodec *decoder; 87 | AVCodecContext *decoder_context; 88 | AVFrame *decoder_picture; 89 | long frame_count; 90 | // AVFormatContext *format_context; 91 | struct SwsContext *img_convert_ctx; 92 | CDecodeCB* m_pCB; 93 | int m_nWidth; 94 | int m_nHeight; 95 | RTSPClient* pClient; 96 | UsageEnvironment& fEnviron; 97 | }; 98 | #endif // _FFMPEG_DECODER_H_ -------------------------------------------------------------------------------- /src/FFMpegEncoder.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include "FFMpegEncoder.h" 35 | #include "Frame.h" 36 | 37 | #define USE_YUV_FRAME 1 38 | //#define USE_RGB_FRAME 1 39 | #define FPS 30 40 | 41 | #if USE_LIVE555 42 | FFMpegEncoder::FFMpegEncoder(UsageEnvironment& env) 43 | : FFMpeg(env) 44 | , thread_exit(0) 45 | { 46 | } 47 | #else 48 | FFMpegEncoder::FFMpegEncoder() 49 | : FFMpeg() 50 | , thread_exit(0) 51 | { 52 | } 53 | #endif 54 | 55 | FFMpegEncoder::~FFMpegEncoder() 56 | { 57 | } 58 | 59 | int FFMpegEncoder::intialize() 60 | { 61 | FFMpeg::intialize(); 62 | 63 | /// create codec context for encoder 64 | /* find the h264 video encoder */ 65 | pCodec = avcodec_find_encoder(codec_id); 66 | if (!pCodec) { 67 | fprintf(stderr, "Codec not found\n"); 68 | return -1; 69 | } 70 | 71 | pCodecCtx = avcodec_alloc_context3(pCodec); 72 | if (!pCodecCtx) { 73 | fprintf(stderr, "Could not allocate video codec context\n"); 74 | return -2; 75 | } 76 | 77 | return SetupCodec(); 78 | } 79 | 80 | int FFMpegEncoder::finalize() 81 | { 82 | if (pSourceCodecCtx) 83 | { 84 | avcodec_close(pSourceCodecCtx); 85 | av_free(pSourceCodecCtx); 86 | } 87 | 88 | if (pSourceFormatCtx) { 89 | avformat_free_context(pSourceFormatCtx); 90 | } 91 | 92 | return FFMpeg::finalize(); 93 | } 94 | 95 | void* FFMpegEncoder::run(void *param) 96 | { 97 | FFMpegEncoder *pThis = (FFMpegEncoder*)param; 98 | pThis->ReadFrame(); 99 | return NULL; 100 | } 101 | 102 | char FFMpegEncoder::GetFrame(uint8_t** FrameBuffer, unsigned int *FrameSize) 103 | { 104 | if (!outqueue.empty()) 105 | { 106 | Frame * data; 107 | data = outqueue.front(); 108 | *FrameBuffer = (uint8_t*)data->dataPointer; 109 | 
*FrameSize = data->dataSize; 110 | return 1; 111 | } 112 | else 113 | { 114 | *FrameBuffer = 0; 115 | *FrameSize = 0; 116 | return 0; 117 | } 118 | } 119 | 120 | char FFMpegEncoder::ReleaseFrame() 121 | { 122 | pthread_mutex_lock(&outqueue_mutex); 123 | if (!outqueue.empty()) 124 | { 125 | Frame * data = outqueue.front(); 126 | outqueue.pop(); 127 | delete data; 128 | } 129 | pthread_mutex_unlock(&outqueue_mutex); 130 | return 1; 131 | } -------------------------------------------------------------------------------- /src/FFMpegEncoder.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _FFMPEG_ENCODER_H_ 35 | #define _FFMPEG_ENCODER_H_ 36 | 37 | #include "FFmpeg.h" 38 | 39 | class FFMpegEncoder : public FFMpeg 40 | { 41 | public: 42 | #if USE_LIVE555 43 | FFMpegEncoder(UsageEnvironment& env); 44 | #else 45 | FFMpegEncoder(); 46 | #endif 47 | virtual ~FFMpegEncoder(); 48 | 49 | virtual int intialize(); 50 | virtual int finalize(); 51 | 52 | char GetFrame(uint8_t** FrameBuffer, unsigned int *FrameSize); 53 | char ReleaseFrame(); 54 | 55 | private: 56 | pthread_t thread_id; 57 | int thread_exit; 58 | int videoindex; 59 | int fps; 60 | 61 | virtual int ReadFrame() = 0; 62 | virtual int WriteFrame(AVFrame* frame) = 0; 63 | 64 | protected: 65 | static void *run(void *param); 66 | virtual int SetupCodec() = 0; 67 | 68 | protected: 69 | AVFormatContext* pSourceFormatCtx; 70 | AVCodecContext* pSourceCodecCtx; 71 | }; 72 | #endif // _FFMPEG_ENCODER_H_ -------------------------------------------------------------------------------- /src/Frame.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 
14 | * limitations under the License. 
15 | * 
16 | * Module Name: 
17 | * 
18 | * Revision History: 
19 | * 
20 | * Date Ver Name Description 
21 | * ---------- --- --------------------- ----------------------------------------- 
22 | * 07-Jun-2016 0.1 Youngho Kim Created 
23 | * ---------- --- --------------------- ----------------------------------------- 
24 | * 
25 | * DESCRIPTION: 
26 | * 
27 | * $Author: 
28 | * $LastChangedBy: 
29 | * $Date: 
30 | * $Revision: 2949 $ 
31 | * $Id: 
32 | * $HeadURL: 
33 | *******************************************************************************/ 
34 | #ifndef _FRAME_H_ 
35 | #define _FRAME_H_ 
36 | 
37 | #include <stdint.h> // for uint8_t 
38 | 
39 | class Frame { 
40 | public: 
41 | uint8_t * dataPointer; 
42 | int dataSize; 
43 | int width; 
44 | int height; 
45 | int pitch; 
46 | int frameID; 
47 | ~Frame() 
48 | { 
49 | delete[] dataPointer; // dataPointer is allocated with new[], so release it with delete[] 
50 | } 
51 | }; 
52 | 
53 | #endif // _FRAME_H_ 
-------------------------------------------------------------------------------- 
/src/H264Decoder.cpp: 
-------------------------------------------------------------------------------- 
1 | /******************************************************************************* 
2 | * Copyright (c) 1998 MFC Forum 
3 | * 
4 | * Licensed under the Apache License, Version 2.0 (the "License"); 
5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 
7 | * 
8 | * http://www.apache.org/licenses/LICENSE-2.0 
9 | * 
10 | * Unless required by applicable law or agreed to in writing, software 
11 | * distributed under the License is distributed on an "AS IS" BASIS, 
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 
14 | * limitations under the License. 
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include "H264Decoder.h" 35 | #include "Frame.h" 36 | 37 | #define SAVE_AVFRAME_TO_JPEG 1 38 | 39 | #if USE_LIVE555 40 | H264Decoder * 41 | H264Decoder::createNew(UsageEnvironment &env) 42 | { 43 | return new H264Decoder(env); 44 | } 45 | 46 | H264Decoder::H264Decoder(UsageEnvironment &env) 47 | : FFMpeg(env) 48 | { 49 | codec_id = AV_CODEC_ID_H264; 50 | dstWidth = 608; 51 | dstHeight = 800; 52 | 53 | intialize(); 54 | } 55 | #else 56 | H264Decoder::H264Decoder() 57 | : FFMpeg() 58 | { 59 | codec_id = AV_CODEC_ID_H264; 60 | dstWidth = 640; 61 | dstHeight = 320; 62 | 63 | intialize(); 64 | } 65 | #endif 66 | 67 | H264Decoder::~H264Decoder() 68 | { 69 | finalize(); 70 | } 71 | 72 | int H264Decoder::intialize() 73 | { 74 | FFMpeg::intialize(); 75 | 76 | // DECODER SETUP 77 | pCodec = avcodec_find_decoder(codec_id); 78 | if (!pCodec) 79 | { 80 | //log_error("codec not found"); 81 | //fEnviron << "codec not found"; 82 | return -2; 83 | } 84 | 85 | pCodecCtx = avcodec_alloc_context3(pCodec); 86 | if (!pCodecCtx) 87 | { 88 | //log_error("codec context not found"); 89 | //fEnviron << "codec context not found"; 90 | return -3; 91 | } 92 | 93 | if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) 94 | { 95 | //log_error("could not open codec"); 96 | //fEnviron << "could not open codec"; 97 | return -4; 98 | } 99 | m_bInit = true; 100 | 101 | return 0; 102 | } 103 | 104 | int H264Decoder::finalize() 105 | { 106 | 107 | return FFMpeg::finalize(); 108 | } 109 | 110 | int H264Decoder::decode(uint8_t* input, int nLen, bool bWaitIFrame /*= false*/) 111 | { 112 | if (!m_bInit) 113 | return -1; 114 | 115 | if (input == NULL || nLen <= 0) 116 | return -2; 117 | 118 | try { 119 | int got_picture; 120 | int size = nLen; 121 | 122 | // set packet 123 | AVPacket avpkt; 124 | av_init_packet(&avpkt); 125 | avpkt.size = size; 126 | avpkt.data = input; 127 | 128 | //while (avpkt.size > 0) 129 | { 130 | #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(52, 72, 2) 131 | int len = avcodec_decode_video(decoder_context, decoder_picture, &got_picture, avpkt->data, avpkt->size); 132 | #else 133 | int len = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, &avpkt); // libavcodec >= 52.72.2 (0.6) 134 | #endif 135 | 136 | if (len == -1) 137 | { 138 | return -3; 139 | } 140 | 141 | if (got_picture) 142 | { 143 | // set image size 144 | srcWidth = pCodecCtx->width; 145 | srcHeight = pCodecCtx->height; 146 | // calculate byte size from rgb image by width and height 147 | int numBytes = avpicture_get_size(AV_PIX_FMT_RGB24, dstWidth, dstHeight); 148 | uint8_t * buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t)); 149 | 150 | // YUV to RGB Color 151 | // reference 152 | // https://gist.github.com/lkraider/832062 153 | #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 45, 101) 154 | AVFrame *pFrameRGB = av_frame_alloc(); 155 | #else 156 | AVFrame *pFrameRGB = avcodec_alloc_frame(); 157 | #endif 158 | // initialize RGB frame 159 | avpicture_fill((AVPicture *)pFrameRGB, buffer, AV_PIX_FMT_RGB24, 
dstWidth, dstHeight); 160 | 161 | img_convert_ctx = sws_getCachedContext(img_convert_ctx, 162 | pCodecCtx->width, // width of source image 163 | pCodecCtx->height, // height of source image 164 | pCodecCtx->pix_fmt, 165 | dstWidth, // width of destination image 166 | dstHeight, // height of destination image 167 | AV_PIX_FMT_RGB24, getSWSType(), NULL, NULL, NULL); 168 | if (img_convert_ctx == NULL) 169 | { 170 | //log_error("Cannot initialize the conversion context"); 171 | //fEnviron << "Cannot initialize the conversion context"; 172 | return -4; 173 | } 174 | sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 175 | 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize); 176 | 177 | pFrameRGB->format = AV_PIX_FMT_RGB24; 178 | pFrameRGB->width = dstWidth; 179 | pFrameRGB->height = dstHeight; 180 | 181 | #if defined(SAVE_AVFRAME_TO_PPM) 182 | save_frame_as_ppm(pFrameRGB); 183 | #elif defined(SAVE_AVFRAME_TO_JPEG) 184 | save_frame_as_jpeg(pFrameRGB); 185 | #endif 186 | Frame* frame = new Frame(); 187 | // memory initialize 188 | frame->dataPointer = new uint8_t[numBytes * sizeof(uint8_t)]; 189 | memset(frame->dataPointer, 0x00, numBytes * sizeof(uint8_t)); 190 | // image copy 191 | memcpy(frame->dataPointer, pFrameRGB->data[0], numBytes * sizeof(uint8_t)); 192 | // set image data 193 | frame->dataSize = numBytes * sizeof(uint8_t); 194 | frame->frameID = frame_count++; 195 | frame->width = dstWidth; 196 | frame->height = dstHeight; 197 | frame->pitch = pFrameRGB->linesize[0]; 198 | 199 | if (m_plistener != NULL) { 200 | ((DecodeListener*)m_plistener)->onDecoded(frame); 201 | } else if (onDecoded != NULL) { 202 | onDecoded(frame); 203 | } 204 | 205 | av_free(buffer); 206 | av_free(pFrameRGB); 207 | return 0; 208 | 209 | if (avpkt.data) 210 | { 211 | avpkt.size -= len; 212 | avpkt.data += len; 213 | } 214 | /* 215 | pthread_mutex_lock(&outqueue_mutex); 216 | 217 | if (outqueue.size() < 30) 218 | { 219 | outqueue.push(frame); 220 | } 221 | else 222 | { 223 | delete frame; 224 | } 225 | 226 | pthread_mutex_unlock(&outqueue_mutex); 227 | */ 228 | } 229 | else 230 | { 231 | return -5; 232 | } 233 | } 234 | } 235 | catch (...) 
236 | { 237 | } 238 | 239 | return -6; 240 | } -------------------------------------------------------------------------------- /src/H264Decoder.cpp.bak: -------------------------------------------------------------------------------- 1 | #include "H264Decoder.h" 2 | #include "log_utils.h" 3 | 4 | #define ROXLU_IMPLEMENTATION 5 | #define ROXLU_USE_MATH 6 | #include 7 | 8 | H264_Decoder::H264_Decoder(h264_decoder_callback frameCallback, void* user) 9 | : codec(NULL) 10 | , codec_context(NULL) 11 | , parser(NULL) 12 | , fp(NULL) 13 | , framecount(0) 14 | , cb_frame(frameCallback) 15 | , cb_user(user) 16 | , frame_timeout(0) 17 | , frame_delay(0) 18 | { 19 | // avcodec init 20 | //avcodec_register_all(); 21 | //av_register_all(); 22 | } 23 | 24 | H264_Decoder::~H264_Decoder() { 25 | 26 | if (parser) { 27 | av_parser_close(parser); 28 | parser = NULL; 29 | } 30 | 31 | if (codec_context) { 32 | avcodec_close(codec_context); 33 | av_free(codec_context); 34 | codec_context = NULL; 35 | } 36 | 37 | if (picture) { 38 | av_free(picture); 39 | picture = NULL; 40 | } 41 | 42 | if (fp) { 43 | fclose(fp); 44 | fp = NULL; 45 | } 46 | 47 | cb_frame = NULL; 48 | cb_user = NULL; 49 | framecount = 0; 50 | frame_timeout = 0; 51 | } 52 | 53 | AVPixelFormat pickDecodeFormat(AVCodecContext *s, const AVPixelFormat *fmt) 54 | { 55 | return AV_PIX_FMT_YUV420P; 56 | } 57 | 58 | bool H264_Decoder::load(const char* filepath, float fps) 59 | { 60 | // create AVCodec 61 | codec = avcodec_find_decoder(AV_CODEC_ID_H264); 62 | if (!codec) { 63 | log_error("Error: cannot find the h264 codec: %s", filepath); 64 | return false; 65 | } 66 | 67 | // alloc context 68 | codec_context = avcodec_alloc_context3(codec); 69 | if (codec->capabilities & CODEC_CAP_TRUNCATED) { 70 | codec_context->flags |= CODEC_FLAG_TRUNCATED; 71 | } 72 | 73 | codec_context->get_format = pickDecodeFormat; 74 | 75 | // verify 76 | if (avcodec_open2(codec_context, codec, NULL) < 0) { 77 | log_error("Error: could not open codec."); 78 | return false; 79 | } 80 | 81 | fp = fopen(filepath, "rb"); 82 | if (!fp) { 83 | log_error("Error: cannot open: %s\n", filepath); 84 | return false; 85 | } 86 | 87 | // init parser 88 | picture = av_frame_alloc(); 89 | parser = av_parser_init(AV_CODEC_ID_H264); 90 | if (!parser) { 91 | log_error("Erorr: cannot create H264 parser.\n"); 92 | return false; 93 | } 94 | 95 | framecount = 0; 96 | 97 | if (fps > 0.0001f) { 98 | frame_delay = (1.0f / fps) * 1000ull * 1000ull * 1000ull; 99 | frame_timeout = rx_hrtime() + frame_delay; 100 | } 101 | 102 | // kickoff reading... 
103 | readBuffer(); 104 | 105 | return true; 106 | } 107 | 108 | void H264_Decoder::forceFPS(float fps) 109 | { 110 | forcefps = fps; 111 | } 112 | 113 | double H264_Decoder::getFPS() const 114 | { 115 | if (forcefps) 116 | return forcefps; 117 | 118 | AVRational rational = codec_context->time_base; 119 | return av_q2d(rational); 120 | } 121 | 122 | bool H264_Decoder::readFrame() 123 | { 124 | uint64_t now = rx_hrtime(); 125 | if (now < frame_timeout) { 126 | return false; 127 | } 128 | 129 | bool needs_more = false; 130 | 131 | while (!update(needs_more)) { 132 | if (needs_more) { 133 | readBuffer(); 134 | } 135 | } 136 | 137 | // it may take some 'reads' before we can set the fps 138 | if (frame_timeout == 0 && frame_delay == 0) { 139 | double fps = av_q2d(codec_context->time_base); 140 | if (fps > 0.0) { 141 | frame_delay = fps * 1000ull * 1000ull * 1000ull; 142 | } 143 | } 144 | 145 | if (frame_delay > 0) { 146 | frame_timeout = rx_hrtime() + frame_delay; 147 | } 148 | 149 | return true; 150 | } 151 | 152 | void H264_Decoder::decodeFrame(uint8_t* data, int size) 153 | { 154 | AVPacket pkt; 155 | int got_picture = 0; 156 | int len = 0; 157 | 158 | av_init_packet(&pkt); 159 | 160 | pkt.data = data; 161 | pkt.size = size; 162 | 163 | len = avcodec_decode_video2(codec_context, picture, &got_picture, &pkt); 164 | if (len < 0) { 165 | log_error("Error while decoding a frame.\n"); 166 | } 167 | 168 | if (got_picture == 0) { 169 | return; 170 | } 171 | 172 | ++framecount; 173 | 174 | if (cb_frame) { 175 | cb_frame(codec_context, picture, &pkt, framecount, cb_user); 176 | } 177 | } 178 | 179 | int H264_Decoder::readBuffer() 180 | { 181 | int bytes_read = (int)fread(inbuf, 1, H264_INBUF_SIZE, fp); 182 | 183 | if (bytes_read) { 184 | std::copy(inbuf, inbuf + bytes_read, std::back_inserter(buffer)); 185 | } 186 | 187 | return bytes_read; 188 | } 189 | 190 | bool H264_Decoder::update(bool& needsMoreBytes) 191 | { 192 | needsMoreBytes = false; 193 | 194 | if (!fp) { 195 | log_error("Cannot update .. file not opened...\n"); 196 | return false; 197 | } 198 | 199 | if (buffer.size() == 0) { 200 | needsMoreBytes = true; 201 | return false; 202 | } 203 | 204 | uint8_t* data = NULL; 205 | int size = 0; 206 | int len = av_parser_parse2(parser, codec_context, &data, &size, 207 | &buffer[0], buffer.size(), 0, 0, AV_NOPTS_VALUE); 208 | 209 | if (size == 0 && len >= 0) { 210 | needsMoreBytes = true; 211 | return false; 212 | } 213 | 214 | if (len) { 215 | decodeFrame(&buffer[0], size); 216 | buffer.erase(buffer.begin(), buffer.begin() + len); 217 | return true; 218 | } 219 | 220 | return false; 221 | } -------------------------------------------------------------------------------- /src/H264Decoder.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
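Both decoder variants above (H264Decoder.cpp and the .bak version) drive `avcodec_decode_video2()`, which current FFmpeg releases have removed in favour of the send/receive API. A minimal sketch of the same parse-then-decode step against the newer API, assuming `codec_context`, `parser` and the byte `buffer` are set up as in H264_Decoder::load() above, and with `handle_decoded()` as a hypothetical stand-in for the existing frame callbacks:

```
AVPacket* pkt = av_packet_alloc();
AVFrame* picture = av_frame_alloc();

// Split the raw Annex-B stream into one packet, as av_parser_parse2 already does above.
int used = av_parser_parse2(parser, codec_context,
                            &pkt->data, &pkt->size,
                            &buffer[0], (int)buffer.size(),
                            AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
if (used > 0) buffer.erase(buffer.begin(), buffer.begin() + used);

if (pkt->size > 0 && avcodec_send_packet(codec_context, pkt) == 0) {
    // One packet may yield zero or more decoded frames.
    while (avcodec_receive_frame(codec_context, picture) == 0) {
        handle_decoded(picture);   // hypothetical; stands in for cb_frame()/onDecoded()
    }
}

av_frame_free(&picture);
av_packet_free(&pkt);
```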
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _H264_DECODER_H_ 35 | #define _H264_DECODER_H_ 36 | 37 | #include "FFmpeg.h" 38 | 39 | class H264Decoder : public FFMpeg 40 | { 41 | public: 42 | #if USE_LIVE555 43 | static H264Decoder * 44 | createNew(UsageEnvironment &env); 45 | H264Decoder(UsageEnvironment &env); 46 | #else 47 | H264Decoder(); 48 | #endif 49 | virtual ~H264Decoder(); 50 | 51 | virtual int intialize(); 52 | virtual int finalize(); 53 | 54 | // int openDecoder(int width, int height, CDecodeCB* pCB); 55 | virtual int decode(uint8_t* input, int nLen, bool bWaitIFrame = false); 56 | }; 57 | #endif /* _H264_DECODER_H_ */ -------------------------------------------------------------------------------- /src/H264Decoder.h.bak: -------------------------------------------------------------------------------- 1 | #ifndef H264_DECODER_H 2 | #define H264_DECODER_H 3 | 4 | #define H264_INBUF_SIZE 16384 /* number of bytes we read per chunk */ 5 | 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | extern "C" { 12 | #include 13 | #include 14 | #include 15 | } 16 | 17 | #pragma comment(lib, "avutil.lib") 18 | #pragma comment(lib, "avcodec.lib") 19 | #pragma comment(lib, "avformat.lib") 20 | 21 | typedef void(*h264_decoder_callback)(AVCodecContext *pCodecCtx, AVFrame* frame, AVPacket* pkt, int framecount, void* user); /* the decoder callback, which will be called when we have decoded a frame */ 22 | 23 | class H264_Decoder { 24 | 25 | public: 26 | H264_Decoder(h264_decoder_callback frameCallback, void* user); /* pass in a callback function that is called whenever we decoded a video frame, make sure to call `readFrame()` repeatedly */ 27 | ~H264_Decoder(); /* d'tor, cleans up the allocated objects and closes the codec context */ 28 | bool load(const char* filepath, float fps = 0.0f); /* load a video file which is encoded with x264 */ 29 | bool readFrame(); /* read a frame if necessary */ 30 | 31 | void forceFPS(float fps); 32 | double getFPS() const; 33 | 34 | private: 35 | bool update(bool& needsMoreBytes); /* internally used to update/parse the data we read from the buffer or file */ 36 | int readBuffer(); /* read a bit more data from the buffer */ 37 | void decodeFrame(uint8_t* data, int size); /* decode a frame we read from the buffer */ 38 | 39 | public: 40 | AVCodec * codec; /* the AVCodec* which represents the H264 decoder */ 41 | AVCodecContext* codec_context; /* the context; keeps generic state */ 42 | AVCodecParserContext* parser; /* parser that is used to decode the h264 bitstream */ 43 | AVFrame* picture; /* will contain a decoded picture */ 44 | uint8_t inbuf[H264_INBUF_SIZE + FF_INPUT_BUFFER_PADDING_SIZE]; /* used to read chunks from the file */ 45 | FILE* fp; /* file pointer to the file from which we read the h264 data */ 46 | int framecount; /* the number of decoded frames */ 47 | h264_decoder_callback cb_frame; /* the callback function which will receive the frame/packet data */ 48 | void* cb_user; /* the void* with user data that is passed into the set callback */ 49 
| uint64_t frame_timeout; /* timeout when we need to parse a new frame */ 50 | uint64_t frame_delay; /* delay between frames (in ns) */ 51 | std::vector buffer; /* buffer we use to keep track of read/unused bitstream data */ 52 | float forcefps; 53 | }; 54 | 55 | #endif -------------------------------------------------------------------------------- /src/H264ReadCameraEncoder.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include "H264ReadCameraEncoder.h" 35 | #include "Frame.h" 36 | 37 | #define USE_YUV_FRAME 1 38 | //#define USE_RGB_FRAME 1 39 | #define FPS 30 40 | 41 | #if USE_LIVE555 42 | FFMpegEncoder * 43 | H264ReadCameraEncoder::createNew(UsageEnvironment &env) 44 | { 45 | return new H264ReadCameraEncoder(env); 46 | } 47 | 48 | H264ReadCameraEncoder::H264ReadCameraEncoder(UsageEnvironment &env) 49 | : FFMpegEncoder(env) 50 | , thread_exit(0) 51 | , videoindex(-1) 52 | , fps(30) 53 | { 54 | codec_id = AV_CODEC_ID_H264; 55 | dstWidth = 640; 56 | dstHeight = 320; 57 | 58 | intialize(); 59 | } 60 | #else 61 | H264ReadCameraEncoder::H264ReadCameraEncoder() 62 | : FFMpegEncoder() 63 | , thread_exit(0) 64 | , videoindex(-1) 65 | , fps(30) 66 | { 67 | codec_id = AV_CODEC_ID_H264; 68 | dstWidth = 640; 69 | dstHeight = 320; 70 | 71 | intialize(); 72 | } 73 | #endif 74 | 75 | H264ReadCameraEncoder::~H264ReadCameraEncoder() 76 | { 77 | finalize(); 78 | } 79 | 80 | int H264ReadCameraEncoder::intialize() 81 | { 82 | return FFMpegEncoder::intialize(); 83 | } 84 | 85 | int H264ReadCameraEncoder::SetupCodec() 86 | { 87 | pCodecCtx->codec_id = AV_CODEC_ID_H264; 88 | pCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO; 89 | pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P; 90 | 91 | /* put sample parameters */ 92 | pCodecCtx->bit_rate = 400000; 93 | /* resolution must be a multiple of two */ 94 | pCodecCtx->width = dstWidth; 95 | pCodecCtx->height = dstHeight; 96 | /* frames per second */ 97 | AVRational timebase; 98 | timebase.num = 1; 99 | timebase.den = fps; 100 | pCodecCtx->time_base = timebase; 101 | 102 | AVRational framerate; 103 | framerate.num; 104 | framerate.den = fps; 105 | 106 | pCodecCtx->framerate = framerate; 107 | 108 | pCodecCtx->gop_size = 12; /* emit one intra frame every ten frames */ 109 | pCodecCtx->max_b_frames = 2; 110 
| // pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER; 111 | pCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; 112 | 113 | if (codec_id == AV_CODEC_ID_H264) { 114 | //av_opt_set(pCodecCtx->priv_data, "preset", "slow", 0); 115 | av_opt_set(pCodecCtx->priv_data, "profile", "baseline", 0); 116 | } 117 | 118 | /* open it */ 119 | if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) { 120 | fprintf(stderr, "avcodec_open failed for h.264 encode\n"); 121 | return - 3; 122 | } 123 | 124 | /// create codec context for screen capture 125 | pSourceFormatCtx = avformat_alloc_context(); 126 | if (pSourceFormatCtx == NULL) { 127 | // throw AVException(ENOMEM, "can not alloc av context"); 128 | return -4; 129 | } 130 | 131 | #ifdef _WIN32 132 | #if USE_DSHOW 133 | AVInputFormat *ifmt = av_find_input_format("dshow"); 134 | //Set own video device's name 135 | if (avformat_open_input(&pFormatCtx, "video=Integrated Camera", ifmt, NULL) != 0) { 136 | //printf("Couldn't open input stream.\n"); 137 | return -1; 138 | } 139 | #else 140 | AVInputFormat *ifmt = av_find_input_format("vfwcap"); 141 | if (ifmt == NULL) 142 | { 143 | //printf("can not find_input_format\n"); 144 | return -5; 145 | } 146 | char *dev_name = "0"; 147 | if (avformat_open_input(&pSourceFormatCtx, dev_name, ifmt, NULL) != 0) { 148 | //printf("Couldn't open input stream.\n"); 149 | return -6; 150 | } 151 | #endif 152 | #elif defined linux 153 | //Linux 154 | AVInputFormat *ifmt = av_find_input_format("video4linux2"); 155 | if (avformat_open_input(&pSourceFormatCtx, "/dev/video0", ifmt, NULL) != 0) { 156 | printf("Couldn't open input stream.\n"); 157 | return -1; 158 | } 159 | #else 160 | show_avfoundation_device(); 161 | //Mac 162 | AVInputFormat *ifmt = av_find_input_format("avfoundation"); 163 | //Avfoundation 164 | //[video]:[audio] 165 | if (avformat_open_input(&pSourceFormatCtx, "0", ifmt, NULL) != 0) { 166 | printf("Couldn't open input stream.\n"); 167 | return -1; 168 | } 169 | #endif 170 | 171 | if (avformat_find_stream_info(pSourceFormatCtx, NULL)<0) 172 | { 173 | printf("Couldn't find stream information.\n"); 174 | return -7; 175 | } 176 | 177 | for (int i = 0; i < pSourceFormatCtx->nb_streams; i++) 178 | if (pSourceFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) 179 | { 180 | videoindex = i; 181 | break; 182 | } 183 | if (videoindex == -1) 184 | { 185 | printf("Couldn't find a video stream.\n"); 186 | return -1; 187 | } 188 | pSourceCodecCtx = pSourceFormatCtx->streams[videoindex]->codec; 189 | AVCodec* codec = avcodec_find_decoder(pSourceCodecCtx->codec_id); 190 | if (codec == NULL) 191 | { 192 | printf("Codec not found.\n"); 193 | return -8; 194 | } 195 | if (avcodec_open2(pSourceCodecCtx, codec, NULL)<0) 196 | { 197 | printf("Could not open codec.\n"); 198 | return -9; 199 | } 200 | 201 | pthread_attr_t attr1; 202 | pthread_attr_init(&attr1); 203 | pthread_attr_setdetachstate(&attr1, PTHREAD_CREATE_DETACHED); 204 | int r = pthread_create(&thread_id, NULL, run, this); 205 | if (r) 206 | { 207 | perror("pthread_create()"); 208 | return -10; 209 | } 210 | 211 | // wait for threads to finish 212 | pthread_join(thread_id, NULL); 213 | 214 | m_bInit = true; 215 | 216 | return 0; 217 | } 218 | 219 | int H264ReadCameraEncoder::finalize() 220 | { 221 | return FFMpegEncoder::finalize(); 222 | } 223 | 224 | int H264ReadCameraEncoder::ReadFrame() 225 | { 226 | int ret, got_picture; 227 | 228 | try { 229 | AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket)); 230 | img_convert_ctx = sws_getContext( 231 | pSourceCodecCtx->width, 
232 | pSourceCodecCtx->height, 233 | pSourceCodecCtx->pix_fmt, 234 | pSourceCodecCtx->width, 235 | pSourceCodecCtx->height, 236 | AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL); 237 | 238 | while (!thread_exit) { 239 | if (av_read_frame(pSourceFormatCtx, packet) >= 0) { 240 | if (packet->stream_index == videoindex) { 241 | ret = avcodec_decode_video2(pSourceCodecCtx, pFrame, &got_picture, packet); 242 | if (ret < 0) { 243 | printf("Decode Error.\n"); 244 | return -1; 245 | } 246 | if (got_picture) { 247 | // set image size 248 | srcWidth = pSourceCodecCtx->width; 249 | srcHeight = pSourceCodecCtx->height; 250 | // calculate byte size from rgb image by width and height 251 | 252 | int numBytes = avpicture_get_size(AV_PIX_FMT_YUV420P, dstWidth, dstHeight); 253 | uint8_t * buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t)); 254 | 255 | #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 45, 101) 256 | AVFrame *pFrameYUV = av_frame_alloc(); 257 | #else 258 | AVFrame *pFrameYUV = avcodec_alloc_frame(); 259 | #endif 260 | // set H.264 context info to frame for encoder 261 | pFrameYUV->format = pCodecCtx->pix_fmt; 262 | pFrameYUV->width = pCodecCtx->width; 263 | pFrameYUV->height = pCodecCtx->height; 264 | pFrameYUV->pts = frame_count++; 265 | 266 | avpicture_fill((AVPicture *)pFrameYUV, buffer, AV_PIX_FMT_YUV420P, dstWidth, dstHeight); 267 | 268 | // get scaling context from context image size to destination image size 269 | img_convert_ctx = sws_getCachedContext(img_convert_ctx, 270 | pSourceCodecCtx->width, // width of source image 271 | pSourceCodecCtx->height, // height of source image 272 | pSourceCodecCtx->pix_fmt, 273 | dstWidth, // width of destination image 274 | dstHeight, // height of destination image 275 | AV_PIX_FMT_YUV420P, getSWSType(), NULL, NULL, NULL); 276 | if (img_convert_ctx == NULL) 277 | { 278 | fprintf(stderr, "Cannot initialize the conversion context [%s:%d]\n", __FILE__, __LINE__); 279 | return -4; 280 | } 281 | // scaling 282 | sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 283 | 0, pSourceCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize); 284 | 285 | save_frame_as_jpeg(pFrameYUV); 286 | 287 | if (WriteFrame(pFrameYUV) < 0) { 288 | fprintf(stderr, "Cannot encode frame [%s:%d]\n", __FILE__, __LINE__); 289 | return -4; 290 | } 291 | 292 | av_free(pFrameYUV); 293 | av_free(buffer); 294 | } 295 | else 296 | { 297 | return -5; 298 | } 299 | } 300 | if (packet != NULL) 301 | av_free_packet(packet); 302 | } 303 | else { 304 | //Exit Thread 305 | thread_exit = 1; 306 | } 307 | } 308 | } 309 | catch (...) 
310 | { 311 | } 312 | 313 | return 0; 314 | } 315 | 316 | /// 317 | /// 318 | /// 319 | /// yuv frame pointer 320 | /// error number 321 | /// 322 | /// https://stackoverflow.com/questions/2940671/how-does-one-encode-a-series-of-images-into-h264-using-the-x264-c-api 323 | /// https://stackoverflow.com/questions/28727772/ffmpeg-c-api-h-264-encoding-mpeg2-ts-streaming-problems 324 | /// 325 | int H264ReadCameraEncoder::WriteFrame(AVFrame* frame) 326 | { 327 | AVPacket avpkt; 328 | int got_output; 329 | int ret; 330 | 331 | // ready to packet data from H.264 encoder 332 | av_init_packet(&avpkt); 333 | avpkt.size = 0; 334 | avpkt.data = NULL; 335 | 336 | // set to i frame for encoder when frame_count divide with fps 337 | if((frame->pts % fps) == 0) { 338 | frame->key_frame = 1; 339 | frame->pict_type = AV_PICTURE_TYPE_I; 340 | } else { 341 | frame->key_frame = 0; 342 | frame->pict_type = AV_PICTURE_TYPE_P; 343 | } 344 | 345 | /* encode the image */ 346 | ret = avcodec_encode_video2(pCodecCtx, &avpkt, frame, &got_output); 347 | if (ret < 0) 348 | { 349 | fprintf(stderr, "Error encoding frame [%s:%d]\n", __FILE__, __LINE__); 350 | return -1; 351 | } 352 | 353 | if (got_output) 354 | { 355 | // avpkt.stream_index = frame_count; 356 | Frame * data = new Frame(); 357 | data->dataPointer = new uint8_t[avpkt.size]; 358 | data->dataSize = avpkt.size - 4; 359 | data->frameID = frame_count; 360 | data->width = dstWidth; 361 | data->height = dstHeight; 362 | 363 | memcpy(data->dataPointer, avpkt.data + 4, avpkt.size - 4); 364 | 365 | pthread_mutex_lock(&outqueue_mutex); 366 | 367 | if (outqueue.size()<30) { 368 | printf("complete add frame: %zd", outqueue.size()); 369 | outqueue.push(data); 370 | } else { 371 | delete data; 372 | } 373 | 374 | pthread_mutex_unlock(&outqueue_mutex); 375 | 376 | av_free_packet(&avpkt); 377 | 378 | if (m_plistener != NULL) { 379 | ((EncodeListener*)m_plistener)->onEncoded(); 380 | } 381 | else if (onEncoded != NULL) { 382 | onEncoded(); 383 | } 384 | } 385 | 386 | return 0; 387 | } -------------------------------------------------------------------------------- /src/H264ReadCameraEncoder.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
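WriteFrame() above strips the Annex-B start code by skipping a fixed four bytes (`avpkt.data + 4`, `avpkt.size - 4`) before queueing the NAL unit for the live555 side, which expects NAL units without start codes. Annex-B streams may use three- or four-byte start codes, so a slightly more defensive sketch of the same copy checks which prefix is actually present and only allocates what it copies:

```
// Determine the start-code length actually present before stripping it.
int offset = 0;
if (avpkt.size >= 4 &&
    avpkt.data[0] == 0 && avpkt.data[1] == 0 && avpkt.data[2] == 0 && avpkt.data[3] == 1) {
    offset = 4;                                   // 00 00 00 01
} else if (avpkt.size >= 3 &&
           avpkt.data[0] == 0 && avpkt.data[1] == 0 && avpkt.data[2] == 1) {
    offset = 3;                                   // 00 00 01
}

data->dataSize = avpkt.size - offset;
data->dataPointer = new uint8_t[data->dataSize];  // allocate exactly what is copied
memcpy(data->dataPointer, avpkt.data + offset, data->dataSize);
```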
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _H264_READ_CAMERA_ENCODER_H_ 35 | #define _H264_READ_CAMERA_ENCODER_H_ 36 | 37 | #include "FFMpegEncoder.h" 38 | 39 | class H264ReadCameraEncoder : public FFMpegEncoder 40 | { 41 | public: 42 | #if USE_LIVE555 43 | static FFMpegEncoder * 44 | createNew(UsageEnvironment &env); 45 | H264ReadCameraEncoder(UsageEnvironment &env); 46 | #else 47 | H264ReadCameraEncoder(); 48 | #endif 49 | virtual ~H264ReadCameraEncoder(); 50 | 51 | virtual int intialize(); 52 | virtual int finalize(); 53 | 54 | protected: 55 | virtual int SetupCodec(); 56 | 57 | private: 58 | pthread_t thread_id; 59 | int thread_exit; 60 | int videoindex; 61 | int fps; 62 | 63 | virtual int ReadFrame(); 64 | virtual int WriteFrame(AVFrame* frame); 65 | }; 66 | #endif // _H264_READ_CAMERA_ENCODER_H_ -------------------------------------------------------------------------------- /src/H264ReadScreenEncoder.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _H264_READ_SCREEN_ENCODER_H_ 35 | #define _H264_READ_SCREEN_ENCODER_H_ 36 | 37 | #include "FFMpegEncoder.h" 38 | 39 | class H264ReadScreenEncoder : public FFMpegEncoder 40 | { 41 | public: 42 | #if USE_LIVE555 43 | static FFMpegEncoder * 44 | createNew(UsageEnvironment &env); 45 | H264ReadScreenEncoder(UsageEnvironment &env); 46 | #else 47 | H264ReadScreenEncoder(); 48 | #endif 49 | virtual ~H264ReadScreenEncoder(); 50 | 51 | virtual int intialize(); 52 | virtual int finalize(); 53 | 54 | protected: 55 | virtual int SetupCodec(); 56 | 57 | private: 58 | pthread_t thread_id; 59 | int thread_exit; 60 | int videoindex; 61 | int fps; 62 | 63 | virtual int ReadFrame(); 64 | virtual int WriteFrame(AVFrame* frame); 65 | }; 66 | #endif // _H264_READ_SCREEN_ENCODER_H_ -------------------------------------------------------------------------------- /src/MediaBasicUsageEnvironment.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
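A small structural point about the two encoder headers above: H264ReadCameraEncoder.h and H264ReadScreenEncoder.h both re-declare `thread_id`, `thread_exit`, `videoindex` and `fps` even though FFMpegEncoder.h already declares the same private members, so the derived constructors initialize their own copies and shadow the ones the base class holds. A sketch of the usual fix keeps the worker-thread state once, in the base class:

```
// In FFMpegEncoder.h: expose the shared state to the derived encoders ...
class FFMpegEncoder : public FFMpeg {
    // ... existing public interface unchanged ...
protected:                 // was private
    pthread_t thread_id;
    int thread_exit;
    int videoindex;
    int fps;
};

// ... and drop the duplicated private members from H264ReadCameraEncoder.h
// and H264ReadScreenEncoder.h, so both levels operate on the same variables.
```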
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include "MediaBasicUsageEnvironment.h" 35 | #include "log_utils.h" 36 | 37 | #include 38 | 39 | ////////// AmbaBasicUsageEnvironment ////////// 40 | 41 | #if defined(__WIN32__) || defined(_WIN32) 42 | extern "C" int initializeWinsockIfNecessary(); 43 | #endif 44 | 45 | MediaBasicUsageEnvironment* 46 | MediaBasicUsageEnvironment::createNew(TaskScheduler& taskScheduler) { 47 | return new MediaBasicUsageEnvironment(taskScheduler); 48 | } 49 | 50 | MediaBasicUsageEnvironment::MediaBasicUsageEnvironment(TaskScheduler& taskScheduler) 51 | : BasicUsageEnvironment0(taskScheduler) { 52 | #if defined(__WIN32__) || defined(_WIN32) 53 | if (!initializeWinsockIfNecessary()) { 54 | setResultErrMsg("Failed to initialize 'winsock': "); 55 | reportBackgroundError(); 56 | internalError(); 57 | } 58 | #endif 59 | } 60 | 61 | MediaBasicUsageEnvironment::~MediaBasicUsageEnvironment() { 62 | } 63 | 64 | int MediaBasicUsageEnvironment::getErrno() const { 65 | #if defined(__WIN32__) || defined(_WIN32) || defined(_WIN32_WCE) 66 | return WSAGetLastError(); 67 | #else 68 | return errno; 69 | #endif 70 | } 71 | 72 | UsageEnvironment& MediaBasicUsageEnvironment::operator<<(char const* str) { 73 | if (str == NULL) str = "(NULL)"; // sanity check 74 | #ifdef _CONSOLE 75 | fprintf(stderr, "%s", str); 76 | #else 77 | log_rtsp("%s", str); 78 | #endif 79 | return *this; 80 | } 81 | 82 | UsageEnvironment& MediaBasicUsageEnvironment::operator<<(int i) { 83 | #ifdef _CONSOLE 84 | fprintf(stderr, "%d", i); 85 | #else 86 | log_rtsp("%d", i); 87 | #endif 88 | return *this; 89 | } 90 | 91 | UsageEnvironment& MediaBasicUsageEnvironment::operator<<(unsigned u) { 92 | #ifdef _CONSOLE 93 | fprintf(stderr, "%u", u); 94 | #else 95 | log_rtsp("%u", u); 96 | #endif 97 | return *this; 98 | } 99 | 100 | UsageEnvironment& MediaBasicUsageEnvironment::operator<<(double d) { 101 | #ifdef _CONSOLE 102 | fprintf(stderr, "%f", d); 103 | #else 104 | log_rtsp("%f", d); 105 | #endif 106 | return *this; 107 | } 108 | 109 | UsageEnvironment& MediaBasicUsageEnvironment::operator<<(void* p) { 110 | #ifdef _CONSOLE 111 | fprintf(stderr, "%p", p); 112 | #else 113 | log_rtsp("%p", p); 114 | #endif 115 | return *this; 116 | } 117 | -------------------------------------------------------------------------------- /src/MediaBasicUsageEnvironment.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
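MediaBasicUsageEnvironment above only overrides the logging operators so that live555 output goes through `log_rtsp()` (or stderr when `_CONSOLE` is defined); it is created and used exactly like the stock BasicUsageEnvironment. A minimal sketch of wiring it up:

```
// Standard live555 setup, but with the logging-aware environment.
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = MediaBasicUsageEnvironment::createNew(*scheduler);

*env << "environment ready\n";      // goes through the overridden operator<<
env->taskScheduler().doEventLoop(); // never returns unless a watch variable is used
```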
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef MEDIA_BASICUSAGEENVIRONMENT_H_ 35 | #define MEDIA_BASICUSAGEENVIRONMENT_H_ 36 | 37 | #ifndef _BASIC_USAGE_ENVIRONMENT0_HH 38 | #include 39 | #endif 40 | 41 | class MediaBasicUsageEnvironment : public BasicUsageEnvironment0 { 42 | protected: 43 | MediaBasicUsageEnvironment(TaskScheduler& taskScheduler); 44 | // called only by "createNew()" (or subclass constructors) 45 | virtual ~MediaBasicUsageEnvironment(); 46 | 47 | public: 48 | static MediaBasicUsageEnvironment* createNew(TaskScheduler& taskScheduler); 49 | 50 | // redefined virtual functions: 51 | virtual int getErrno() const; 52 | 53 | virtual UsageEnvironment& operator<<(char const* str); 54 | virtual UsageEnvironment& operator<<(int i); 55 | virtual UsageEnvironment& operator<<(unsigned u); 56 | virtual UsageEnvironment& operator<<(double d); 57 | virtual UsageEnvironment& operator<<(void* p); 58 | }; 59 | #endif /* MEDIA_AMBABASICUSAGEENVIRONMENT_H_ */ 60 | -------------------------------------------------------------------------------- /src/MediaH264MediaSink.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #pragma once 35 | #ifndef _MEDIA_H264_MEDIASINK_H_ 36 | #define _MEDIA_H264_MEDIASINK_H_ 37 | 38 | #include "liveMedia.hh" 39 | 40 | #if 0 41 | class FFmpegDecoder; 42 | #else 43 | class H264Decoder; 44 | #endif 45 | // Define a data sink (a subclass of "MediaSink") to receive the data for each subsession (i.e., each audio or video 'substream'). 46 | // In practice, this might be a class (or a chain of classes) that decodes and then renders the incoming audio or video. 
47 | // Or it might be a "FileSink", for outputting the received data into a file (as is done by the "openRTSP" application). 48 | // In this example code, however, we define a simple 'dummy' sink that receives incoming data, but does nothing with it. 49 | class MediaH264MediaSink : public MediaSink { 50 | public: 51 | static MediaH264MediaSink* createNew(UsageEnvironment& env, 52 | RTSPClient* client, 53 | MediaSubsession& subsession, // identifies the kind of data that's being received 54 | char const* streamId = NULL); // identifies the stream itself (optional) 55 | #if 0 56 | FFmpegDecoder* getDecoder() { return video_decoder; } 57 | #else 58 | H264Decoder* getDecoder() { return video_decoder; } 59 | #endif 60 | long getFrameCount() { return m_nFrameCount; } 61 | 62 | private: 63 | MediaH264MediaSink(UsageEnvironment& env, RTSPClient* client, MediaSubsession& subsession, 64 | char const* streamId); 65 | // called only by "createNew()" 66 | virtual ~MediaH264MediaSink(); 67 | 68 | static void afterGettingFrame(void* clientData, unsigned frameSize, 69 | unsigned numTruncatedBytes, struct timeval presentationTime, 70 | unsigned durationInMicroseconds); 71 | void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, 72 | struct timeval presentationTime, unsigned durationInMicroseconds); 73 | bool isH264iFrame(u_int8_t* packet); 74 | bool FindStartCode3(unsigned char *Buf); 75 | bool FindStartCode4(unsigned char *Buf); 76 | private: 77 | // redefined virtual functions: 78 | virtual Boolean continuePlaying(); 79 | 80 | private: 81 | u_int8_t* fReceiveBuffer; 82 | MediaSubsession& fSubsession; 83 | char* fStreamId; 84 | unsigned m_nFrameSize; 85 | unsigned m_nNalHeaderStartCodeOffset; 86 | #if 0 87 | FFmpegDecoder* video_decoder; 88 | #else 89 | H264Decoder* video_decoder; 90 | #endif 91 | long m_nFrameCount; 92 | RTSPClient* pClient; 93 | }; 94 | 95 | #endif /* _MEDIA_H264_MEDIASINK_H_ */ 96 | -------------------------------------------------------------------------------- /src/MediaH264VideoRTPSink.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include "MediaH264VideoRTPSink.h" 35 | 36 | #define DUMMY_SINK_RECEIVE_BUFFER_SIZE 2764800 37 | 38 | MediaH264VideoRTPSink* MediaH264VideoRTPSink::createNew(UsageEnvironment& env, MediaSubsession* subsession, 39 | u_int8_t const* sps, unsigned spsSize, u_int8_t const* pps, unsigned ppsSize, 40 | char const* streamId) { 41 | return new MediaH264VideoRTPSink(env, subsession, sps, spsSize, pps, ppsSize, streamId); 42 | } 43 | 44 | MediaH264VideoRTPSink::MediaH264VideoRTPSink(UsageEnvironment& env, MediaSubsession* subsession, 45 | u_int8_t const* sps, unsigned spsSize, u_int8_t const* pps, unsigned ppsSize, 46 | char const* streamId) 47 | : H264VideoRTPSink(env, subsession->rtpSource()->RTPgs(), subsession->rtpPayloadFormat(), 48 | sps, spsSize, pps, ppsSize) 49 | , fSubsession(subsession) 50 | { 51 | fStreamId = strDup(streamId); 52 | fReceiveBuffer = new u_int8_t[DUMMY_SINK_RECEIVE_BUFFER_SIZE]; 53 | memset(fReceiveBuffer, 0x0, DUMMY_SINK_RECEIVE_BUFFER_SIZE); 54 | } 55 | 56 | MediaH264VideoRTPSink::~MediaH264VideoRTPSink() 57 | { 58 | delete[] fReceiveBuffer; 59 | delete[] fStreamId; 60 | } 61 | 62 | void MediaH264VideoRTPSink::afterGettingFrame(void* clientData, unsigned frameSize, unsigned numTruncatedBytes, 63 | struct timeval presentationTime, unsigned durationInMicroseconds) { 64 | MediaH264VideoRTPSink* sink = (MediaH264VideoRTPSink*)clientData; 65 | sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds); 66 | } 67 | 68 | // If you don't want to see debugging output for each received frame, then comment out the following line: 69 | #define DEBUG_PRINT_EACH_RECEIVED_FRAME 1 70 | #define DEBUG_PRINT_NPT 1 71 | 72 | void MediaH264VideoRTPSink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, 73 | struct timeval presentationTime, unsigned /*durationInMicroseconds*/) { 74 | // We've just received a frame of data. (Optionally) print out information about it: 75 | #ifdef DEBUG_PRINT_EACH_RECEIVED_FRAME 76 | if (fStreamId != NULL) envir() << "Stream \"" << fStreamId << "\"; "; 77 | envir() << fSubsession->mediumName() << "/" << fSubsession->codecName() << ":\tReceived " << frameSize << " bytes"; 78 | if (numTruncatedBytes > 0) envir() << " (with " << numTruncatedBytes << " bytes truncated)"; 79 | char uSecsStr[6 + 1]; // used to output the 'microseconds' part of the presentation time 80 | sprintf(uSecsStr, "%06u", (unsigned)presentationTime.tv_usec); 81 | envir() << ".\tPresentation time: " << (unsigned)presentationTime.tv_sec << "." 
<< uSecsStr; 82 | if (fSubsession->rtpSource() != NULL && !fSubsession->rtpSource()->hasBeenSynchronizedUsingRTCP()) { 83 | envir() << "!"; // mark the debugging output to indicate that this presentation time is not RTCP-synchronized 84 | } 85 | #ifdef DEBUG_PRINT_NPT 86 | envir() << "\tNPT: " << fSubsession->getNormalPlayTime(presentationTime); 87 | #endif 88 | envir() << "\n"; 89 | #endif 90 | 91 | // Then continue, to request the next frame of data: 92 | continuePlaying(); 93 | } 94 | 95 | Boolean MediaH264VideoRTPSink::continuePlaying() { 96 | if (fSource == NULL) return False; // sanity check (should not happen) 97 | 98 | // Request the next frame of data from our input source. "afterGettingFrame()" will get called later, when it arrives: 99 | fSource->getNextFrame(fReceiveBuffer, DUMMY_SINK_RECEIVE_BUFFER_SIZE, 100 | afterGettingFrame, this, 101 | onSourceClosure, this); 102 | 103 | return True; 104 | } 105 | 106 | void MediaH264VideoRTPSink::doSpecialFrameHandling(unsigned /*fragmentationOffset*/, 107 | unsigned char* /*frameStart*/, 108 | unsigned /*numBytesInFrame*/, 109 | struct timeval framePresentationTime, 110 | unsigned /*numRemainingBytes*/) { 111 | // Set the RTP 'M' (marker) bit iff 112 | // 1/ The most recently delivered fragment was the end of (or the only fragment of) an NAL unit, and 113 | // 2/ This NAL unit was the last NAL unit of an 'access unit' (i.e. video frame). 114 | if (fOurFragmenter != NULL) { 115 | H264or5VideoStreamFramer* framerSource 116 | = (H264or5VideoStreamFramer*)(fOurFragmenter->inputSource()); 117 | 118 | // This relies on our fragmenter's source being a "H264or5VideoStreamFramer". 119 | // if (((H264or5Fragmenter*)fOurFragmenter)->lastFragmentCompletedNALUnit() 120 | // && framerSource != NULL && framerSource->pictureEndMarker()) { 121 | // setMarkerBit(); 122 | // framerSource->pictureEndMarker() = False; 123 | // } 124 | } 125 | 126 | setTimestamp(framePresentationTime); 127 | } -------------------------------------------------------------------------------- /src/MediaH264VideoRTPSink.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
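doSpecialFrameHandling() above describes the rule for the RTP marker bit but leaves the corresponding live555 logic commented out, so the override currently only re-stamps the timestamp. Re-enabled, the commented block would read as below; as the in-code comment notes, it relies on the fragmenter's input source actually being an H264or5VideoStreamFramer:

```
if (fOurFragmenter != NULL) {
    H264or5VideoStreamFramer* framerSource
        = (H264or5VideoStreamFramer*)(fOurFragmenter->inputSource());

    // Marker bit only on the final fragment of the last NAL unit of an access unit.
    if (((H264or5Fragmenter*)fOurFragmenter)->lastFragmentCompletedNALUnit()
        && framerSource != NULL && framerSource->pictureEndMarker()) {
        setMarkerBit();
        framerSource->pictureEndMarker() = False;
    }
}
setTimestamp(framePresentationTime);
```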
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #pragma once 35 | 36 | #ifndef _MEDIA_H264_VIDEO_RTP_SINK_H_ 37 | #define _MEDIA_H264_VIDEO_RTP_SINK_H_ 38 | 39 | #include "liveMedia.hh" 40 | 41 | class MediaH264VideoRTPSink : public H264VideoRTPSink 42 | { 43 | public: 44 | static MediaH264VideoRTPSink* createNew(UsageEnvironment& env, 45 | MediaSubsession* subsession, // identifies the kind of data that's being received 46 | u_int8_t const* sps, unsigned spsSize, u_int8_t const* pps, unsigned ppsSize, 47 | char const* streamId = NULL); // identifies the stream itself (optional) 48 | 49 | protected: 50 | MediaH264VideoRTPSink(UsageEnvironment& env, MediaSubsession* subsession, 51 | u_int8_t const* sps, unsigned spsSize, u_int8_t const* pps, unsigned ppsSize, 52 | char const* streamId); 53 | virtual ~MediaH264VideoRTPSink(); 54 | 55 | private: 56 | static void afterGettingFrame(void* clientData, unsigned frameSize, 57 | unsigned numTruncatedBytes, struct timeval presentationTime, 58 | unsigned durationInMicroseconds); 59 | void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, 60 | struct timeval presentationTime, unsigned durationInMicroseconds); 61 | 62 | // redefined virtual functions: 63 | virtual void doSpecialFrameHandling(unsigned fragmentationOffset, 64 | unsigned char* frameStart, 65 | unsigned numBytesInFrame, 66 | struct timeval framePresentationTime, 67 | unsigned numRemainingBytes); 68 | virtual Boolean continuePlaying(); 69 | 70 | private: 71 | u_int8_t * fReceiveBuffer; 72 | MediaSubsession* fSubsession; 73 | char* fStreamId; 74 | }; 75 | 76 | #endif // _MEDIA_H264_VIDEO_RTP_SINK_H_ -------------------------------------------------------------------------------- /src/MediaRTSPClient.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include "MediaRTSPClient.h" 35 | #include "log_utils.h" 36 | 37 | MediaRTSPClient* MediaRTSPClient::createNew(UsageEnvironment& env, char const* rtspURL, 38 | int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum, 39 | const char* username, const char* password) { 40 | return new MediaRTSPClient(env, NULL, rtspURL, verbosityLevel, applicationName, tunnelOverHTTPPortNum, 41 | username, password); 42 | } 43 | 44 | MediaRTSPClient* MediaRTSPClient::createNew(UsageEnvironment& env, MediaRTSPSession* rtspSession, 45 | char const* rtspURL, int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum, 46 | const char* username, const char* password) { 47 | return new MediaRTSPClient(env, rtspSession, rtspURL, verbosityLevel, applicationName, tunnelOverHTTPPortNum, 48 | username, password); 49 | } 50 | 51 | MediaRTSPClient::MediaRTSPClient(UsageEnvironment& env, MediaRTSPSession* rtspSession, char const* rtspURL, 52 | int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum, 53 | const char* username, const char* password) 54 | : RTSPClient(env, rtspURL, verbosityLevel, applicationName, tunnelOverHTTPPortNum, -1) 55 | , fVidRTPSink(NULL) 56 | , fAudRTPSink(NULL) 57 | , bUpTransport(false) 58 | , mediaRTSPSession(rtspSession) 59 | { 60 | if (username && password) 61 | ourAuthenticator = new Authenticator(username, password); 62 | } 63 | 64 | MediaRTSPClient::~MediaRTSPClient() { 65 | delete ourAuthenticator; 66 | delete sdpDescription; 67 | } 68 | 69 | char* MediaRTSPClient::getSDPDescription() 70 | { 71 | char temp[2048] = ""; 72 | 73 | strcat(temp, "v=0\n"); 74 | strcat(temp, "o=- 0 0 IN IP4 null\n"); 75 | strcat(temp, "s=Media Presentation\n"); 76 | strcat(temp, "i=samsung\n"); 77 | strcat(temp, "c=IN IP4 ::0\n"); 78 | strcat(temp, "t=0 0\n"); 79 | strcat(temp, "a=recvonly\n"); 80 | 81 | strcat(temp, "m=video 40048 RTP/AVP 97\n"); 82 | strcat(temp, "b=AS:2000\n"); 83 | strcat(temp, "a=rtpmap:97 H264/90000\n"); 84 | strcat(temp, "a=fmtp:97 packetization-mode=1;profile-level-id=4D001F;sprop-parameter-sets=Z00AH5pkAoAt/4C3AQEBQAAA+gAAHUw6GADa0AA2tC7y40MAG1oABtaF3lwo,aO48gA==\n"); 85 | strcat(temp, "a=cliprect:0,0,720,1280\n"); 86 | strcat(temp, "a=framesize:97 1280-720\n"); 87 | strcat(temp, "a=framerate:15.0\n"); 88 | strcat(temp, "a=control:trackID=1\n"); 89 | 90 | strcat(temp, "m=audio 40052 RTP/AVP 0\n"); 91 | strcat(temp, "a=rtpmap:0 PCMU/8000\n"); 92 | strcat(temp, "a=control:trackID=2\n"); 93 | 94 | 95 | sdpDescription = new char[2048]; 96 | strcpy(this->sdpDescription, &temp[0]); 97 | 98 | return sdpDescription; 99 | } 100 | 101 | 102 | FramedSource* MediaRTSPClient::createNewVideoStreamSource() 103 | { 104 | /* 105 | // estBitrate = 2000; // kbps, estimate 106 | 107 | // Create the video source: 108 | AmbaVideoStreamSource *pAmbaSource = NULL; 109 | 110 | fStreamTag = 0; 111 | 112 | pAmbaSource = (fStream_type == STREAM_TYPE_LIVE) 113 | ? 
AmbaVideoStreamSource::createNew(envir(), fStream_reader, NULL, (u_int32_t)fStreamTag) 114 | : AmbaVideoStreamSource::createNew(envir(), fStream_reader, fFilename, (u_int32_t)fStreamTag); 115 | 116 | if (pAmbaSource == NULL) 117 | return NULL; 118 | 119 | fSourceCodec_ID = pAmbaSource->getSourceCodecID(); 120 | 121 | // get sps and pps from amba stream 122 | if (pAmbaSource) 123 | { 124 | int result = 0; 125 | 126 | log_debug("fSourceCodec_ID=0x%x\n", fSourceCodec_ID); 127 | 128 | if (fStream_type == STREAM_TYPE_PLAYBACK) 129 | { 130 | stream_reader_ctrl_box_t ctrl_box; 131 | 132 | memset(&ctrl_box, 0x0, sizeof(stream_reader_ctrl_box_t)); 133 | 134 | ctrl_box.cmd = STREAM_READER_CMD_PLAYBACK_GET_DURATION; 135 | ctrl_box.stream_tag = fStreamTag; 136 | ctrl_box.stream_type = fStream_type; 137 | StreamReader_Control(fStream_reader, NULL, &ctrl_box); 138 | 139 | fDuration = (float)ctrl_box.out.get_duration.duration / 1000.0; 140 | } 141 | 142 | switch (pAmbaSource->getSourceCodecID()) 143 | { 144 | case AMP_FORMAT_MID_AVC: 145 | case AMP_FORMAT_MID_H264: 146 | result = pAmbaSource->getSpsPps(&fSPS_payload, &fSPS_length, &fPPS_payload, &fPPS_length); 147 | if (result) 148 | { 149 | log_debug("No SPS/PPS !\n"); 150 | } 151 | return pAmbaSource; 152 | break; 153 | default: 154 | break; 155 | } 156 | } 157 | */ 158 | return NULL; 159 | } 160 | 161 | FramedSource* MediaRTSPClient::createNewAudioStreamSource() 162 | { 163 | /* 164 | // estBitrate = 128; // kbps, estimate 165 | fStreamTag = 0; 166 | 167 | // Create the audio source: 168 | AmbaAudioStreamSource *pAmbaSource = NULL; 169 | 170 | pAmbaSource = (fStream_type == STREAM_TYPE_LIVE) 171 | ? AmbaAudioStreamSource::createNew(envir(), fStream_reader, NULL, fStreamTag) 172 | : AmbaAudioStreamSource::createNew(envir(), fStream_reader, fFilename, fStreamTag); 173 | 174 | fSourceCodec_ID = pAmbaSource->getSourceCodecID(); 175 | 176 | if (pAmbaSource) 177 | { 178 | fSourceCodec_ID = pAmbaSource->getSourceCodecID(); 179 | 180 | log_debug("fSourceCodec_ID=0x%x", fSourceCodec_ID); 181 | 182 | if (fStream_type == STREAM_TYPE_PLAYBACK) 183 | { 184 | stream_reader_ctrl_box_t ctrl_box; 185 | 186 | memset(&ctrl_box, 0x0, sizeof(stream_reader_ctrl_box_t)); 187 | 188 | ctrl_box.cmd = STREAM_READER_CMD_PLAYBACK_GET_DURATION; 189 | ctrl_box.stream_tag = fStreamTag; 190 | ctrl_box.stream_type = fStream_type; 191 | StreamReader_Control(fStream_reader, NULL, &ctrl_box); 192 | 193 | fDuration = (float)ctrl_box.out.get_duration.duration / 1000.0; 194 | } 195 | 196 | switch (fSourceCodec_ID) 197 | { 198 | case AMP_FORMAT_MID_AAC: 199 | return pAmbaSource; 200 | break; 201 | 202 | case AMP_FORMAT_MID_PCM: 203 | case AMP_FORMAT_MID_ADPCM: 204 | case AMP_FORMAT_MID_MP3: 205 | case AMP_FORMAT_MID_AC3: 206 | case AMP_FORMAT_MID_WMA: 207 | case AMP_FORMAT_MID_OPUS: 208 | break; 209 | default: 210 | break; 211 | } 212 | } 213 | */ 214 | return NULL; 215 | } 216 | 217 | int MediaRTSPClient::CheckMediaConfiguration(char const *streamName, 218 | Boolean *pHave_amba_audio, Boolean *pHave_amba_video) { 219 | int result = 0; 220 | /* 221 | AMBA_NETFIFO_MOVIE_INFO_CFG_s movie_info = { 0 }; 222 | 223 | *pHave_amba_audio = False; 224 | *pHave_amba_video = False; 225 | 226 | do { 227 | if (streamName == NULL) { 228 | // live 229 | AMBA_NETFIFO_MEDIA_STREAMID_LIST_s stream_list = { 0 }; 230 | 231 | result = AmbaNetFifo_GetMediaStreamIDList(&stream_list); 232 | if (result < 0) { 233 | log_error("Fail to do AmbaNetFifo_GetMediaStreamIDList()\n"); 234 | break; 235 | } 236 | 237 | if 
(stream_list.Amount < 1) { 238 | log_error( 239 | "There is no valid stream. Maybe video record does not started yet.\n"); 240 | result = 1; 241 | break; 242 | } 243 | 244 | result = AmbaNetFifo_GetMediaInfo( 245 | stream_list.StreamID_List[stream_list.Amount - 1], 246 | &movie_info); 247 | if (result < 0) { 248 | log_error("Fail to do AmbaNetFifo_GetMediaInfo()\n"); 249 | break; 250 | } 251 | } 252 | else { 253 | // palyback 254 | AMBA_NETFIFO_PLAYBACK_OP_PARAM_s param_in = { 0,{ 0 } }; 255 | AMBA_NETFIFO_PLAYBACK_OP_PARAM_s param_out = { 0,{ 0 } }; 256 | 257 | param_in.OP = STREAM_READER_CMD_PLAYBACK_OPEN; 258 | snprintf((char*)param_in.Param, 128, "%s", streamName); 259 | result = AmbaNetFifo_PlayBack_OP(¶m_in, ¶m_out); 260 | if (result < 0) { 261 | log_error("fail to do AmbaNetFifo_PlayBack_OP(0x%08x), %d\n", 262 | param_in.OP, result); 263 | break; 264 | } 265 | 266 | result = AmbaNetFifo_GetMediaInfo(param_out.OP, &movie_info); 267 | if (result < 0) { 268 | log_error("Fail to do AmbaNetFifo_GetMediaInfo()\n"); 269 | break; 270 | } 271 | } 272 | 273 | if (movie_info.nTrack) { 274 | int i; 275 | for (i = 0; i < movie_info.nTrack; i++) { 276 | switch (movie_info.Track[i].nTrackType) { 277 | case AMBA_NETFIFO_MEDIA_TRACK_TYPE_VIDEO: 278 | *pHave_amba_video = True; 279 | break; 280 | case AMBA_NETFIFO_MEDIA_TRACK_TYPE_AUDIO: 281 | *pHave_amba_audio = True; 282 | break; 283 | default: 284 | break; 285 | } 286 | } 287 | } 288 | } while (0); 289 | */ 290 | return 0; 291 | } 292 | 293 | RTPSink* MediaRTSPClient::getVideoRTPSink() 294 | { 295 | return fVidRTPSink; 296 | } 297 | 298 | RTPSink* MediaRTSPClient::getAudioRTPSink() 299 | { 300 | return fAudRTPSink; 301 | } 302 | 303 | RTPSink* MediaRTSPClient::createNewRTPSink(Groupsock *rtpGroupsock, 304 | unsigned char rtpPayloadTypeIfDynamic, 305 | FramedSource * inputSource) 306 | { 307 | RTPSink* rtpSink = NULL; 308 | /* 309 | AmbaAudioStreamSource *pAmbaSource = NULL; 310 | 311 | switch (fSourceCodec_ID) 312 | { 313 | case AMP_FORMAT_MID_AVC: 314 | case AMP_FORMAT_MID_H264: 315 | rtpSink = AmbaVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, 316 | fSPS_payload, fSPS_length, fPPS_payload, fPPS_length); 317 | ((AmbaVideoRTPSink*)rtpSink)->setPacketSizes(1200, 1200); 318 | 319 | fVidRTPSink = rtpSink; 320 | log_debug("fVidRTPSink = %p\n", fVidRTPSink); 321 | break; 322 | 323 | case AMP_FORMAT_MID_AAC: 324 | pAmbaSource = (AmbaAudioStreamSource*)inputSource; 325 | rtpSink = MPEG4GenericRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, 326 | pAmbaSource->samplingFrequency(), "audio", "AAC-hbr", pAmbaSource->configStr(), 327 | pAmbaSource->numChannels()); 328 | 329 | fAudRTPSink = rtpSink; 330 | log_debug("fAudRTPSink = %p\n", fAudRTPSink); 331 | break; 332 | 333 | default: 334 | break; 335 | } 336 | */ 337 | log_debug("rtpSink = %p\n", rtpSink); 338 | return rtpSink; 339 | } -------------------------------------------------------------------------------- /src/MediaRTSPClient.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _MEDIA_RTSP_CLIENT_H_ 35 | #define _MEDIA_RTSP_CLIENT_H_ 36 | 37 | #pragma once 38 | 39 | #include "liveMedia.hh" 40 | #include "StreamClientState.h" 41 | #include "MediaBasicUsageEnvironment.h" 42 | 43 | class MediaRTSPSession; 44 | // If you're streaming just a single stream (i.e., just from a single URL, once), then you can define and use just a single 45 | // "StreamClientState" structure, as a global variable in your application. However, because - in this demo application - we're 46 | // showing how to play multiple streams, concurrently, we can't do that. Instead, we have to have a separate "StreamClientState" 47 | // structure for each "RTSPClient". To do this, we subclass "RTSPClient", and add a "StreamClientState" field to the subclass: 48 | class MediaRTSPClient : public RTSPClient { 49 | public: 50 | static MediaRTSPClient* createNew(UsageEnvironment& env, char const* rtspURL, 51 | int verbosityLevel = 0, 52 | char const* applicationName = NULL, 53 | portNumBits tunnelOverHTTPPortNum = 0, 54 | const char* username = NULL, 55 | const char* password = NULL); 56 | 57 | static MediaRTSPClient* createNew(UsageEnvironment& env, 58 | MediaRTSPSession* rtspSession, 59 | char const* rtspURL, 60 | int verbosityLevel = 0, 61 | char const* applicationName = NULL, 62 | portNumBits tunnelOverHTTPPortNum = 0, 63 | const char* username = NULL, 64 | const char* password = NULL); 65 | 66 | protected: 67 | MediaRTSPClient(UsageEnvironment& env, MediaRTSPSession* rtspSession, char const* rtspURL, 68 | int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum, 69 | const char* username = NULL, const char* password = NULL); 70 | // called only by createNew(); 71 | virtual ~MediaRTSPClient(); 72 | 73 | public: 74 | StreamClientState scs; 75 | protected: 76 | Authenticator* ourAuthenticator; 77 | char* sdpDescription; 78 | bool bUpTransport; 79 | bool bInterleaved; 80 | MediaRTSPSession* mediaRTSPSession; 81 | /* 82 | stream_reader_t *fStream_reader; 83 | // Boolean fHave_amba_audio; 84 | // Boolean fHave_amba_video; 85 | char* fFilename; 86 | stream_type_t fStream_type; 87 | u_int32_t fStreamTag; 88 | float fDuration; 89 | 90 | u_int8_t *fSPS_payload; 91 | u_int32_t fSPS_length; 92 | u_int8_t *fPPS_payload; 93 | u_int32_t fPPS_length; 94 | 95 | AMP_FORMAT_MID_e fSourceCodec_ID; 96 | */ 97 | RTPSink *fVidRTPSink; // ditto 98 | RTPSink *fAudRTPSink; // ditto 99 | 100 | public: 101 | Authenticator * getAuth() { return ourAuthenticator; } 102 | MediaRTSPSession* getRTSPSession() { return 
mediaRTSPSession; } 103 | char* getSDPDescription(); 104 | 105 | /* 106 | stream_reader_t* getStreamReader() { return fStream_reader; } 107 | // Boolean getHaveAmbaVideo() { return fHave_amba_video; } 108 | // Boolean getHaveAmbaAudio() { return fHave_amba_audio; } 109 | stream_type_t* getStreamType() { return &fStream_type; } 110 | 111 | u_int8_t* getSPSPayload() { return fSPS_payload; } 112 | u_int32_t getSPSLength() { return fSPS_length; } 113 | u_int8_t* getPPSPayload() { return fPPS_payload; } 114 | u_int32_t getPPSLength() { return fPPS_length; } 115 | */ 116 | FramedSource* createNewVideoStreamSource(); 117 | FramedSource* createNewAudioStreamSource(); 118 | static int CheckMediaConfiguration(char const *streamName, Boolean *pHave_amba_audio, Boolean *pHave_amba_video); 119 | RTPSink* createNewRTPSink(Groupsock *rtpGroupsock, 120 | unsigned char rtpPayloadTypeIfDynamic, 121 | FramedSource * inputSource); 122 | 123 | RTPSink* getVideoRTPSink(); 124 | RTPSink* getAudioRTPSink(); 125 | 126 | bool isUpTransportStream() { return bUpTransport; } 127 | void setUpTransportStream(bool flag) { bUpTransport = flag; } 128 | bool isInterleavedMode() { return bInterleaved; } 129 | void setInterleavedMode(bool flag) { bInterleaved = flag; } 130 | 131 | }; 132 | 133 | #endif // _MEDIA_RTSP_CLIENT_H_ -------------------------------------------------------------------------------- /src/MediaRTSPServer.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _MEDIA_RTSP_SERVER_H_ 35 | #define _MEDIA_RTSP_SERVER_H_ 36 | 37 | #include 38 | #include 39 | 40 | #ifndef _RTSP_SERVER_SUPPORTING_HTTP_STREAMING_HH 41 | #include "RTSPServer.hh" 42 | #endif 43 | 44 | #define LIVE_VEIW_NAME "live" 45 | 46 | #include "log_utils.h" 47 | 48 | /** 49 | * Our RTSP server class is derived from the liveMedia RTSP server. It extends the live555 RTSP server 50 | * to stream live media sessions. 51 | * 52 | * It also adds the capability to set the maximum number of connected clients. 53 | * It adds the ability to kick clients off the server. 
54 | */ 55 | class MediaRTSPServer: public RTSPServer 56 | { 57 | public: 58 | static MediaRTSPServer* createNew(UsageEnvironment& env, Port ourPort = 554, 59 | UserAuthenticationDatabase* authDatabase = NULL, 60 | unsigned reclamationSeconds = 65); 61 | 62 | protected: 63 | MediaRTSPServer(UsageEnvironment& env, 64 | int ourSocketIPv4, int ourSocketIPv6, Port ourPort, 65 | UserAuthenticationDatabase* authDatabase, 66 | unsigned reclamationSeconds); 67 | // called only by createNew(); 68 | virtual ~MediaRTSPServer(); 69 | 70 | private: // redefined virtual functions 71 | //virtual ServerMediaSession* lookupServerMediaSession(char const* streamName, Boolean isFirstLookupInSession); 72 | virtual void lookupServerMediaSession(char const* streamName, 73 | lookupServerMediaSessionCompletionFunc* completionFunc, 74 | void* completionClientData, 75 | Boolean isFirstLookupInSession); 76 | 77 | protected: 78 | // reference url: 79 | // https://github.com/miseri/LiveMediaExt/blob/master/include/LiveMediaExt/LiveRtspServer.h 80 | class MediaRTSPClientSession; 81 | /** 82 | * @brief Subclassing this to make the client address acessible and add handleCmd_notEnoughBandwidth. 83 | */ 84 | class MediaRTSPClientConnection : public RTSPClientConnection { 85 | friend class MediaRTSPServer; 86 | friend class MediaRTSPClientSession; 87 | public: 88 | /** 89 | * @brief Constructor 90 | */ 91 | MediaRTSPClientConnection(RTSPServer& ourServer, int clientSocket, struct sockaddr_storage clientAddr); 92 | /** 93 | * @brief Destructor 94 | */ 95 | virtual ~MediaRTSPClientConnection(); 96 | protected: 97 | /** 98 | * @brief Getter for client address 99 | */ 100 | struct sockaddr_storage getClientAddr() const { return fClientAddr; } 101 | 102 | virtual void handleRequestBytes(int newBytesRead); 103 | 104 | /** 105 | * @brief This method can be called to respond to requests where there is insufficient bandwidth 106 | * to handle them. 107 | */ 108 | virtual void handleCmd_notEnoughBandwidth() 109 | { 110 | setRTSPResponse("453 Not Enough Bandwidth"); 111 | } 112 | 113 | virtual void handleCmd_OPTIONS(); 114 | virtual void handleCmd_DESCRIBE(char const* urlPreSuffix, char const* urlSuffix, char const* fullRequestStr); 115 | }; 116 | 117 | class MediaRTSPClientSession : public RTSPServer::RTSPClientSession 118 | { 119 | public: 120 | MediaRTSPClientSession(MediaRTSPServer& ourServer, unsigned sessionId) 121 | : RTSPClientSession(ourServer, sessionId), 122 | m_pParent(&ourServer), 123 | m_uiSessionId(sessionId) 124 | { 125 | log_rtsp("(CREATE)MediaRTSPClientSession:MediaRTSPClientSession"); 126 | } 127 | 128 | virtual ~MediaRTSPClientSession() 129 | { 130 | log_rtsp("(DELETE)MediaRTSPClientSession:~MediaRTSPClientSession"); 131 | // We need to check if the parent is still valid 132 | // in the case where the client session outlives the 133 | // RTSPServer child class implementation! In that case 134 | // the RTSPServer destructor deletes all the client 135 | // sessions, but at this point m_pParent is not valid 136 | // anymore. This is the reason for the orphan method. 137 | if (m_pParent) 138 | m_pParent->removeClientSession(m_uiSessionId); 139 | } 140 | /** 141 | * @brief invalidates the pointer to the DynamicRTSPServer object. 
142 | */ 143 | void orphan() 144 | { 145 | m_pParent = NULL; 146 | } 147 | friend class MediaRTSPServer; 148 | friend class DynamicRTSPClientConnection; 149 | 150 | protected: 151 | virtual void handleCmd_SETUP( 152 | RTSPServer::RTSPClientConnection* ourClientConnection, 153 | char const* urlPreSuffix, char const* urlSuffix, 154 | char const* fullRequestStr); 155 | 156 | virtual void handleCmd_withinSession( 157 | RTSPClientConnection* ourClientConnection, char const* cmdName, 158 | char const* urlPreSuffix, char const* urlSuffix, 159 | char const* fullRequestStr); 160 | 161 | virtual void handleCmd_PLAY(RTSPClientConnection* ourClientConnection, 162 | ServerMediaSubsession* subsession, char const* fullRequestStr); 163 | 164 | virtual void handleCmd_PAUSE(RTSPClientConnection* ourClientConnection, 165 | ServerMediaSubsession* subsession); 166 | 167 | virtual void handleCmd_TEARDOWN( 168 | RTSPClientConnection* ourClientConnection, 169 | ServerMediaSubsession* subsession); 170 | 171 | virtual void handleCmd_GET_PARAMETER( 172 | RTSPClientConnection* ourClientConnection, 173 | ServerMediaSubsession* subsession, char const* fullRequestStr); 174 | 175 | virtual void handleCmd_SET_PARAMETER( 176 | RTSPClientConnection* ourClientConnection, 177 | ServerMediaSubsession* subsession, char const* fullRequestStr); 178 | 179 | 180 | MediaRTSPServer* m_pParent; 181 | unsigned m_uiSessionId; 182 | }; 183 | 184 | // If you subclass "RTSPClientConnection", then you must also redefine this virtual function in order 185 | // to create new objects of your subclass: 186 | virtual ClientConnection* createNewClientConnection(int clientSocket, struct sockaddr_storage const& clientAddr); 187 | 188 | // If you subclass "RTSPClientSession", then you must also redefine this virtual function in order 189 | // to create new objects of your subclass: 190 | virtual ClientSession* createNewClientSession(u_int32_t sessionId); 191 | 192 | void removeClientSession(unsigned sessionId); 193 | private: 194 | typedef std::map MediaClientSessionMap_t; 195 | /// map to store a pointer to client sessions on creation 196 | MediaClientSessionMap_t m_mRtspClientSessions; 197 | 198 | #if 0 199 | // To remove the media session, store the stream name 200 | char *m_mMediaSessName; 201 | #endif 202 | 203 | private: 204 | Boolean fHave_amba_audio; 205 | Boolean fHave_amba_video; 206 | Boolean fHave_amba_text; 207 | unsigned int fRTSPClientConnection; 208 | }; 209 | 210 | #endif /* _MEDIA_RTSP_SERVER_H_ */ 211 | -------------------------------------------------------------------------------- /src/MediaRTSPSession.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _MEDIA_RTSP_SESSION_H_ 35 | #define _MEDIA_RTSP_SESSION_H_ 36 | 37 | #include 38 | #include 39 | #include // for std::vector 40 | 41 | #if 0 42 | #include "FFMpegDecoder.h" 43 | #else 44 | #include "H264Decoder.h" 45 | #endif 46 | 47 | // reference 48 | // https://en.wikipedia.org/wiki/Graphics_display_resolution 49 | typedef enum screen_ratio { 50 | nHD, 51 | qHD, 52 | HD, 53 | HD_PLUS, 54 | FHD, 55 | WQHD, 56 | QHD_PLUS, 57 | UHD_4K, 58 | UHD_5K_PLUS, 59 | UHD_8K 60 | } SCREEN_RATIO; 61 | 62 | #define WINDOWS_WIDTH 320 63 | #define WINDOWS_HEIGHT 160 64 | 65 | #if defined(USE_GLFW_LIB) 66 | // reference 67 | // https://medium.com/@Plimsky/how-to-install-a-opengl-environment-on-ubuntu-e3918cf5ab6c 68 | #ifdef __APPLE__ 69 | #include 70 | #else 71 | #include 72 | #endif 73 | 74 | #include 75 | #elif defined(USE_SDL2_LIB) 76 | #include 77 | #undef main 78 | #endif 79 | 80 | class TaskScheduler; 81 | class UsageEnvironment; 82 | class UserAuthenticationDatabase; 83 | class RTSPClient; 84 | class RTSPServer; 85 | #if 0 86 | class MediaRTSPSession : public CDecodeCB 87 | #else 88 | class MediaRTSPSession : public DecodeListener 89 | #endif 90 | { 91 | public: 92 | MediaRTSPSession(); 93 | virtual ~MediaRTSPSession(); 94 | int startRTSPClient(const char* progName, const char* rtspURL, 95 | const char* username = NULL, const char* password = NULL, 96 | bool bInterleaved = false, bool bTransportStream = false); 97 | 98 | int startRTSPServer(const int portnum = 554, 99 | const char* username = NULL, const char* password = NULL); 100 | 101 | int stopRTSPClient(); 102 | int stopRTSPServer(); 103 | int openURL(UsageEnvironment& env); 104 | bool isTransportStream() { return bTransportStream; } 105 | void setTransportStream(bool flag) { bTransportStream = flag; } 106 | bool isInterleaved() { return bInterleaved; } 107 | void setInterleaved(bool flag) { bInterleaved = flag; } 108 | void setUsername(const char* username) { m_username = username; } 109 | void setPassword(const char* password) { m_password = password; } 110 | void setPort(const int port) { m_port = port; } 111 | void setDebugLevel(int level) { m_debugLevel = level; } 112 | 113 | private: 114 | RTSPClient* m_rtspClient; 115 | RTSPServer* m_rtspServer; 116 | TaskScheduler* m_taskScheduler; 117 | UsageEnvironment* m_usageEnvironment; 118 | UserAuthenticationDatabase* m_authDB; 119 | 120 | char eventLoopWatchVariable; 121 | bool bTransportStream; 122 | bool bInterleaved; 123 | 124 | pthread_t tid; 125 | bool m_running; 126 | std::string m_rtspUrl; 127 | std::string m_progName; 128 | std::string m_username; 129 | std::string m_password; 130 | int m_debugLevel; 131 | int m_port; 132 | 133 | SCREEN_RATIO screen_radio; 134 | 135 | typedef struct buffer { 136 | int width; 137 | int height; 138 | int length; 139 | uint8_t* data; 140 | int pitch; 141 | } rgb_buffer; 142 | 143 | std::vector myvector; 144 | 145 | #if defined(USE_GLFW_LIB) 146 | GLuint camera_texture; 147 | // variable declarations for glfw 
148 | GLFWwindow* window; 149 | 150 | #elif defined(USE_SDL2_LIB) 151 | // variable declarations for sdl2 152 | SDL_Window* window; 153 | SDL_Renderer* renderer; 154 | SDL_Texture* texture; 155 | #endif 156 | 157 | static void *rtsp_thread_fun(void *param); 158 | static void *rtsp_server_thread_fun(void *param); 159 | 160 | void rtsp_fun(); 161 | void rtspserver_fun(); 162 | #if defined(USE_GLFW_LIB) 163 | static void *glfw3_thread_fun(void *param); 164 | void glfw3_fun(); 165 | #elif defined(USE_SDL2_LIB) 166 | static void *sdl2_thread_fun(void *param); 167 | void sdl2_fun(); 168 | #endif 169 | protected: 170 | #if 0 171 | virtual void videoCB(int width, int height, uint8_t* buff, int len, int pitch, RTSPClient* client); 172 | #else 173 | virtual void onDecoded(void* frame); 174 | #endif 175 | }; 176 | 177 | #endif // _MEDIA_RTSP_SESSION_H_ -------------------------------------------------------------------------------- /src/MediaVideoFragmenter.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 2013 Samsung Techwin Co., Ltd. 3 | * 4 | * Licensed to the Samsung Techwin Software Foundation under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * 8 | * Unless required by applicable law or agreed to in writing, software 9 | * distributed under the License is distributed on an "AS IS" BASIS, 10 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | * See the License for the specific language governing permissions and 12 | * limitations under the License. 13 | * 14 | * Samsung SmartCam - SmartCam Camera Proejct 15 | * http://www.samsungsmartcam.com 16 | * 17 | * Security Solution Division / B2C Camera S/W Development Team 18 | * 19 | * $Author$ 20 | * $Date$ 21 | * $Revision$ 22 | * $Id$ 23 | * $HeadURL$ 24 | * 25 | * History 26 | * AmbaVideoFragmenter.cpp 27 | * 28 | * Created on: Oct 21, 2016 29 | * Author: Youngho Kim 30 | *******************************************************************************/ 31 | 32 | #include "MediaVideoFragmenter.h" 33 | 34 | MediaVideoFragmenter::MediaVideoFragmenter(int hNumber, 35 | UsageEnvironment& env, FramedSource* inputSource, 36 | unsigned inputBufferMax, unsigned maxOutputPacketSize) 37 | : FramedFilter(env, inputSource), 38 | fHNumber(hNumber), 39 | fInputBufferSize(inputBufferMax+1), fMaxOutputPacketSize(maxOutputPacketSize) { 40 | fInputBuffer = new unsigned char[fInputBufferSize]; 41 | reset(); 42 | } 43 | 44 | MediaVideoFragmenter::~MediaVideoFragmenter() { 45 | delete[] fInputBuffer; 46 | detachInputSource(); // so that the subsequent ~FramedFilter() doesn't delete it 47 | } 48 | 49 | void MediaVideoFragmenter::doGetNextFrame() { 50 | if (fNumValidDataBytes == 1) { 51 | // We have no NAL unit data currently in the buffer. Read a new one: 52 | fInputSource->getNextFrame(&fInputBuffer[1], fInputBufferSize - 1, 53 | afterGettingFrame, this, 54 | FramedSource::handleClosure, this); 55 | } else { 56 | // We have NAL unit data in the buffer. There are three cases to consider: 57 | // 1. There is a new NAL unit in the buffer, and it's small enough to deliver 58 | // to the RTP sink (as is). 59 | // 2. There is a new NAL unit in the buffer, but it's too large to deliver to 60 | // the RTP sink in its entirety. 
Deliver the first fragment of this data, 61 | // as a FU packet, with one extra preceding header byte (for the "FU header"). 62 | // 3. There is a NAL unit in the buffer, and we've already delivered some 63 | // fragment(s) of this. Deliver the next fragment of this data, 64 | // as a FU packet, with two (H.264) or three (H.265) extra preceding header bytes 65 | // (for the "NAL header" and the "FU header"). 66 | 67 | if (fMaxSize < fMaxOutputPacketSize) { // shouldn't happen 68 | envir() << "H264or5Fragmenter::doGetNextFrame(): fMaxSize (" 69 | << fMaxSize << ") is smaller than expected\n"; 70 | } else { 71 | fMaxSize = fMaxOutputPacketSize; 72 | } 73 | 74 | fLastFragmentCompletedNALUnit = True; // by default 75 | if (fCurDataOffset == 1) { // case 1 or 2 76 | if (fNumValidDataBytes - 1 <= fMaxSize) { // case 1 77 | memmove(fTo, &fInputBuffer[1], fNumValidDataBytes - 1); 78 | fFrameSize = fNumValidDataBytes - 1; 79 | fCurDataOffset = fNumValidDataBytes; 80 | } else { // case 2 81 | // We need to send the NAL unit data as FU packets. Deliver the first 82 | // packet now. Note that we add "NAL header" and "FU header" bytes to the front 83 | // of the packet (overwriting the existing "NAL header"). 84 | if (fHNumber == 264) { 85 | fInputBuffer[0] = (fInputBuffer[1] & 0xE0) | 28; // FU indicator 86 | fInputBuffer[1] = 0x80 | (fInputBuffer[1] & 0x1F); // FU header (with S bit) 87 | } else { // 265 88 | u_int8_t nal_unit_type = (fInputBuffer[1]&0x7E)>>1; 89 | fInputBuffer[0] = (fInputBuffer[1] & 0x81) | (49<<1); // Payload header (1st byte) 90 | fInputBuffer[1] = fInputBuffer[2]; // Payload header (2nd byte) 91 | fInputBuffer[2] = 0x80 | nal_unit_type; // FU header (with S bit) 92 | } 93 | memmove(fTo, fInputBuffer, fMaxSize); 94 | fFrameSize = fMaxSize; 95 | fCurDataOffset += fMaxSize - 1; 96 | fLastFragmentCompletedNALUnit = False; 97 | } 98 | } else { // case 3 99 | // We are sending this NAL unit data as FU packets. We've already sent the 100 | // first packet (fragment). Now, send the next fragment. Note that we add 101 | // "NAL header" and "FU header" bytes to the front. (We reuse these bytes that 102 | // we already sent for the first fragment, but clear the S bit, and add the E 103 | // bit if this is the last fragment.) 
104 | unsigned numExtraHeaderBytes; 105 | if (fHNumber == 264) { 106 | fInputBuffer[fCurDataOffset-2] = fInputBuffer[0]; // FU indicator 107 | fInputBuffer[fCurDataOffset-1] = fInputBuffer[1]&~0x80; // FU header (no S bit) 108 | numExtraHeaderBytes = 2; 109 | } else { // 265 110 | fInputBuffer[fCurDataOffset-3] = fInputBuffer[0]; // Payload header (1st byte) 111 | fInputBuffer[fCurDataOffset-2] = fInputBuffer[1]; // Payload header (2nd byte) 112 | fInputBuffer[fCurDataOffset-1] = fInputBuffer[2]&~0x80; // FU header (no S bit) 113 | numExtraHeaderBytes = 3; 114 | } 115 | unsigned numBytesToSend = numExtraHeaderBytes + (fNumValidDataBytes - fCurDataOffset); 116 | if (numBytesToSend > fMaxSize) { 117 | // We can't send all of the remaining data this time: 118 | numBytesToSend = fMaxSize; 119 | fLastFragmentCompletedNALUnit = False; 120 | } else { 121 | // This is the last fragment: 122 | fInputBuffer[fCurDataOffset-1] |= 0x40; // set the E bit in the FU header 123 | fNumTruncatedBytes = fSaveNumTruncatedBytes; 124 | } 125 | memmove(fTo, &fInputBuffer[fCurDataOffset-numExtraHeaderBytes], numBytesToSend); 126 | fFrameSize = numBytesToSend; 127 | fCurDataOffset += numBytesToSend - numExtraHeaderBytes; 128 | } 129 | 130 | if (fCurDataOffset >= fNumValidDataBytes) { 131 | // We're done with this data. Reset the pointers for receiving new data: 132 | fNumValidDataBytes = fCurDataOffset = 1; 133 | } 134 | 135 | // Complete delivery to the client: 136 | FramedSource::afterGetting(this); 137 | } 138 | } 139 | 140 | void MediaVideoFragmenter::doStopGettingFrames() { 141 | // Make sure that we don't have any stale data fragments lying around, should we later resume: 142 | reset(); 143 | FramedFilter::doStopGettingFrames(); 144 | } 145 | 146 | void MediaVideoFragmenter::afterGettingFrame(void* clientData, unsigned frameSize, 147 | unsigned numTruncatedBytes, 148 | struct timeval presentationTime, 149 | unsigned durationInMicroseconds) { 150 | MediaVideoFragmenter* fragmenter = (MediaVideoFragmenter*)clientData; 151 | fragmenter->afterGettingFrame1(frameSize, numTruncatedBytes, presentationTime, 152 | durationInMicroseconds); 153 | } 154 | 155 | void MediaVideoFragmenter::afterGettingFrame1(unsigned frameSize, 156 | unsigned numTruncatedBytes, 157 | struct timeval presentationTime, 158 | unsigned durationInMicroseconds) { 159 | fNumValidDataBytes += frameSize; 160 | fSaveNumTruncatedBytes = numTruncatedBytes; 161 | fPresentationTime = presentationTime; 162 | fDurationInMicroseconds = durationInMicroseconds; 163 | 164 | // Deliver data to the client: 165 | doGetNextFrame(); 166 | } 167 | 168 | void MediaVideoFragmenter::reset() { 169 | fNumValidDataBytes = fCurDataOffset = 1; 170 | fSaveNumTruncatedBytes = 0; 171 | fLastFragmentCompletedNALUnit = True; 172 | } 173 | 174 | //end of 175 | -------------------------------------------------------------------------------- /src/MediaVideoFragmenter.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _MEDIA_VIDEO_FRAGMENTER_H_ 35 | #define _MEDIA_VIDEO_FRAGMENTER_H_ 36 | 37 | #include 38 | 39 | /* 40 | * 41 | */ 42 | class MediaVideoFragmenter : public FramedFilter { 43 | public: 44 | MediaVideoFragmenter(int hNumber, UsageEnvironment& env, FramedSource* inputSource, 45 | unsigned inputBufferMax, unsigned maxOutputPacketSize); 46 | virtual ~MediaVideoFragmenter(); 47 | 48 | Boolean lastFragmentCompletedNALUnit() const { return fLastFragmentCompletedNALUnit; } 49 | 50 | private: // redefined virtual functions: 51 | virtual void doGetNextFrame(); 52 | virtual void doStopGettingFrames(); 53 | 54 | private: 55 | static void afterGettingFrame(void* clientData, unsigned frameSize, 56 | unsigned numTruncatedBytes, 57 | struct timeval presentationTime, 58 | unsigned durationInMicroseconds); 59 | void afterGettingFrame1(unsigned frameSize, 60 | unsigned numTruncatedBytes, 61 | struct timeval presentationTime, 62 | unsigned durationInMicroseconds); 63 | void reset(); 64 | 65 | private: 66 | int fHNumber; 67 | unsigned fInputBufferSize; 68 | unsigned fMaxOutputPacketSize; 69 | unsigned char* fInputBuffer; 70 | unsigned fNumValidDataBytes; 71 | unsigned fCurDataOffset; 72 | unsigned fSaveNumTruncatedBytes; 73 | Boolean fLastFragmentCompletedNALUnit; 74 | }; 75 | 76 | //end of 77 | 78 | #endif /* _MEDIA_VIDEO_FRAGMENTER_H_ */ 79 | -------------------------------------------------------------------------------- /src/MediaVideoRTPSink.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include "MediaVideoRTPSink.h" 35 | #include "Base64.hh" 36 | #include "MediaVideoFragmenter.h" 37 | 38 | 39 | //#include "stream_reader.h" 40 | 41 | #ifdef TECHWIN_GET_PRE_SPS_PPS 42 | extern stream_reader_frm_info_t frm_sz; 43 | #endif 44 | 45 | ////////// H264VideoRTPSink implementation ////////// 46 | 47 | MediaVideoRTPSink 48 | ::MediaVideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, 49 | u_int8_t const* sps, unsigned spsSize, u_int8_t const* pps, unsigned ppsSize) 50 | : H264or5VideoRTPSink(264, env, RTPgs, rtpPayloadFormat, 51 | NULL, 0, sps, spsSize, pps, ppsSize) { 52 | // log_debug("Create: AmbaVideoRTPSink"); 53 | } 54 | 55 | MediaVideoRTPSink::~MediaVideoRTPSink() { 56 | //log_debug("Delete: ~AmbaVideoRTPSink"); 57 | } 58 | 59 | MediaVideoRTPSink* MediaVideoRTPSink 60 | ::createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat) { 61 | return new MediaVideoRTPSink(env, RTPgs, rtpPayloadFormat); 62 | } 63 | 64 | MediaVideoRTPSink* MediaVideoRTPSink 65 | ::createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, 66 | u_int8_t const* sps, unsigned spsSize, u_int8_t const* pps, unsigned ppsSize) { 67 | 68 | // Hard coded [SPS, PPS] value used for testing ... Hence commenting. 
69 | #ifdef TECHWIN_GET_PRE_SPS_PPS 70 | //base for 720p 71 | u_int8_t SPS_temp[44] = { 72 | 0x27,0x4D,0x00,0x1F,0x9A,0x64,0x02,0x80,0x2D,0xD3, 73 | 0x50,0x10,0x10,0x14,0x00,0x00,0x0F,0xA4,0x00,0x03, 74 | 0xA9,0x83,0xA1,0x80,0x13,0xD8,0x00,0x13,0xD6,0xEE, 75 | 0xF2,0xE3,0x43,0x00,0x27,0xB0,0x00,0x27,0xAD,0xDD, 76 | 0xE5,0xC2,0x80,0x00 77 | }; 78 | u_int8_t PPS_temp[4] = {0x28,0xEE,0x3C,0x80}; 79 | log_debug(">>>>>>>>>> frm_sz.wid:%d frm_sz.hgh:%d spsSize:%d ppsSize:%d\n",frm_sz.wid,frm_sz.hgh,spsSize,ppsSize); 80 | 81 | if((frm_sz.wid == 640) && (frm_sz.hgh == 480)) //VGA, the other are all HD(720p) 82 | { 83 | SPS_temp[3] = 0x1e; 84 | } 85 | 86 | sps = (u_int8_t const*)&SPS_temp[0]; 87 | pps = (u_int8_t const*)&PPS_temp[0]; 88 | spsSize = 44; 89 | ppsSize = 4; 90 | #endif 91 | 92 | return new MediaVideoRTPSink(env, RTPgs, rtpPayloadFormat, sps, spsSize, pps, ppsSize); 93 | } 94 | 95 | MediaVideoRTPSink* MediaVideoRTPSink 96 | ::createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, 97 | char const* sPropParameterSetsStr) { 98 | u_int8_t* sps = NULL; unsigned spsSize = 0; 99 | u_int8_t* pps = NULL; unsigned ppsSize = 0; 100 | 101 | unsigned numSPropRecords; 102 | SPropRecord* sPropRecords = parseSPropParameterSets(sPropParameterSetsStr, numSPropRecords); 103 | for (unsigned i = 0; i < numSPropRecords; ++i) { 104 | if (sPropRecords[i].sPropLength == 0) continue; // bad data 105 | u_int8_t nal_unit_type = (sPropRecords[i].sPropBytes[0])&0x1F; 106 | if (nal_unit_type == 7/*SPS*/) { 107 | sps = sPropRecords[i].sPropBytes; 108 | spsSize = sPropRecords[i].sPropLength; 109 | } else if (nal_unit_type == 8/*PPS*/) { 110 | pps = sPropRecords[i].sPropBytes; 111 | ppsSize = sPropRecords[i].sPropLength; 112 | } 113 | } 114 | 115 | MediaVideoRTPSink* result 116 | = new MediaVideoRTPSink(env, RTPgs, rtpPayloadFormat, sps, spsSize, pps, ppsSize); 117 | delete[] sPropRecords; 118 | 119 | return result; 120 | } 121 | 122 | Boolean MediaVideoRTPSink::sourceIsCompatibleWithUs(MediaSource& source) { 123 | // Our source must be an appropriate framer: 124 | return source.isH264VideoStreamFramer(); 125 | } 126 | 127 | char const* MediaVideoRTPSink::auxSDPLine() { 128 | // Generate a new "a=fmtp:" line each time, using our SPS and PPS (if we have them), 129 | // otherwise parameters from our framer source (in case they've changed since the last time that 130 | // we were called): 131 | H264or5VideoStreamFramer* framerSource = NULL; 132 | u_int8_t* vpsDummy = NULL; unsigned vpsDummySize = 0; 133 | u_int8_t* sps = fSPS; unsigned spsSize = fSPSSize; 134 | u_int8_t* pps = fPPS; unsigned ppsSize = fPPSSize; 135 | 136 | //log_debug("%s", __func__); 137 | 138 | if (sps == NULL || pps == NULL) { 139 | // We need to get SPS and PPS from our framer source: 140 | //log_debug("SPS and PPS are NULL"); 141 | if (fOurFragmenter == NULL) return NULL; // we don't yet have a fragmenter (and therefore not a source) 142 | framerSource = (H264or5VideoStreamFramer*)(fOurFragmenter->inputSource()); 143 | if (framerSource == NULL) return NULL; // we don't yet have a source 144 | 145 | framerSource->getVPSandSPSandPPS(vpsDummy, vpsDummySize, sps, spsSize, pps, ppsSize); 146 | if (sps == NULL || pps == NULL) return NULL; // our source isn't ready 147 | } 148 | 149 | // Set up the "a=fmtp:" SDP line for this stream: 150 | u_int8_t* spsWEB = new u_int8_t[spsSize]; // "WEB" means "Without Emulation Bytes" 151 | unsigned spsWEBSize = removeH264or5EmulationBytes(spsWEB, spsSize, sps, spsSize); 152 | if (spsWEBSize < 
4) { // Bad SPS size => assume our source isn't ready 153 | delete[] spsWEB; 154 | return NULL; 155 | } 156 | u_int32_t profileLevelId = (spsWEB[1]<<16) | (spsWEB[2]<<8) | spsWEB[3]; 157 | delete[] spsWEB; 158 | 159 | char* sps_base64 = base64Encode((char*)sps, spsSize); 160 | char* pps_base64 = base64Encode((char*)pps, ppsSize); 161 | 162 | char const* fmtpFmt = 163 | "a=fmtp:%d packetization-mode=1" 164 | ";profile-level-id=%06X" 165 | ";sprop-parameter-sets=%s,%s\r\n"; 166 | unsigned fmtpFmtSize = strlen(fmtpFmt) 167 | + 3 /* max char len */ 168 | + 6 /* 3 bytes in hex */ 169 | + strlen(sps_base64) + strlen(pps_base64); 170 | char* fmtp = new char[fmtpFmtSize]; 171 | sprintf(fmtp, fmtpFmt, 172 | rtpPayloadType(), 173 | profileLevelId, 174 | sps_base64, pps_base64); 175 | 176 | delete[] sps_base64; 177 | delete[] pps_base64; 178 | delete[] fFmtpSDPLine; 179 | fFmtpSDPLine = fmtp; 180 | 181 | //log_info("SDP = %s", fFmtpSDPLine); 182 | 183 | return fFmtpSDPLine; 184 | } 185 | 186 | 187 | Boolean MediaVideoRTPSink::continuePlaying() 188 | { 189 | // First, check whether we have a 'fragmenter' class set up yet. 190 | // If not, create it now: 191 | if (fOurFragmenter == NULL) { 192 | fOurFragmenter = new MediaVideoFragmenter(fHNumber, envir(), fSource, OutPacketBuffer::maxSize, 193 | ourMaxPacketSize() - 12/*RTP hdr size*/); 194 | } else { 195 | fOurFragmenter->reassignInputSource(fSource); 196 | } 197 | fSource = fOurFragmenter; 198 | 199 | // Then call the parent class's implementation: 200 | return MultiFramedRTPSink::continuePlaying(); 201 | } 202 | 203 | void MediaVideoRTPSink::doSpecialFrameHandling(unsigned fragmentationOffset, 204 | unsigned char* frameStart, 205 | unsigned numBytesInFrame, 206 | struct timeval framePresentationTime, 207 | unsigned numRemainingBytes) 208 | { 209 | // Set the RTP 'M' (marker) bit iff 210 | // 1/ The most recently delivered fragment was the end of (or the only fragment of) an NAL unit, and 211 | // 2/ This NAL unit was the last NAL unit of an 'access unit' (i.e. video frame). 212 | if (fOurFragmenter != NULL) { 213 | H264or5VideoStreamFramer* framerSource 214 | = (H264or5VideoStreamFramer*)(fOurFragmenter->inputSource()); 215 | // This relies on our fragmenter's source being a "H264or5VideoStreamFramer". 216 | if (((MediaVideoFragmenter*)fOurFragmenter)->lastFragmentCompletedNALUnit() 217 | && framerSource != NULL && framerSource->pictureEndMarker()) { 218 | setMarkerBit(); 219 | // framerSource->pictureEndMarker() = False; 220 | } 221 | } 222 | 223 | setTimestamp(framePresentationTime); 224 | } 225 | 226 | Boolean MediaVideoRTPSink 227 | ::frameCanAppearAfterPacketStart(unsigned char const* frameStart, 228 | unsigned numBytesInFrame) const { 229 | return False; 230 | } 231 | //end of 232 | -------------------------------------------------------------------------------- /src/MediaVideoRTPSink.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _MEDIA_VIDEO_RTPSINK_H_ 35 | #define _MEDIA_VIDEO_RTPSINK_H_ 36 | 37 | #include "liveMedia.hh" 38 | 39 | /* 40 | * 41 | */ 42 | class MediaVideoRTPSink : public H264or5VideoRTPSink { 43 | public: 44 | static MediaVideoRTPSink* 45 | createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat); 46 | static MediaVideoRTPSink* 47 | createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, 48 | u_int8_t const* sps, unsigned spsSize, u_int8_t const* pps, unsigned ppsSize); 49 | // an optional variant of "createNew()", useful if we know, in advance, 50 | // the stream's SPS and PPS NAL units. 51 | // This avoids us having to 'pre-read' from the input source in order to get these values. 52 | static MediaVideoRTPSink* 53 | createNew(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, 54 | char const* sPropParameterSetsStr); 55 | // an optional variant of "createNew()", useful if we know, in advance, 56 | // the stream's SPS and PPS NAL units. 57 | // This avoids us having to 'pre-read' from the input source in order to get these values. 58 | 59 | protected: 60 | MediaVideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, unsigned char rtpPayloadFormat, 61 | u_int8_t const* sps = NULL, unsigned spsSize = 0, 62 | u_int8_t const* pps = NULL, unsigned ppsSize = 0); 63 | // called only by createNew() 64 | virtual ~MediaVideoRTPSink(); 65 | 66 | protected: // redefined virtual functions: 67 | virtual char const* auxSDPLine(); 68 | 69 | private: // redefined virtual functions: 70 | virtual Boolean sourceIsCompatibleWithUs(MediaSource& source); 71 | virtual Boolean continuePlaying(); 72 | virtual void doSpecialFrameHandling(unsigned fragmentationOffset, 73 | unsigned char* frameStart, 74 | unsigned numBytesInFrame, 75 | struct timeval framePresentationTime, 76 | unsigned numRemainingBytes); 77 | virtual Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart, 78 | unsigned numBytesInFrame) const; 79 | }; 80 | 81 | //end of 82 | 83 | #endif /* _MEDIA_VIDEO_RTPSINK_H_ */ 84 | -------------------------------------------------------------------------------- /src/MediaVideoServerMediaSubsession.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include "MediaVideoServerMediaSubsession.h" 35 | #include "MediaVideoStreamSource.h" 36 | #include "MediaVideoRTPSink.h" 37 | 38 | #include "H264ReadCameraEncoder.h" 39 | #include "H264ReadScreenEncoder.h" 40 | 41 | MediaVideoServerMediaSubsession * MediaVideoServerMediaSubsession::createNew(UsageEnvironment& env, VIDEO_SOURCE_TYPE type) 42 | { 43 | return new MediaVideoServerMediaSubsession(env, type); 44 | } 45 | 46 | void MediaVideoServerMediaSubsession::rtcpRRQos(void* clientData) 47 | { 48 | // TODO: add process for rtcp RR QOS 49 | } 50 | 51 | RTCPInstance* MediaVideoServerMediaSubsession 52 | ::createRTCP(Groupsock* RTCPgs, unsigned totSessionBW, /* in kbps */ 53 | unsigned char const* cname, RTPSink* sink) 54 | { 55 | // Default implementation: 56 | RTCPInstance *pRTCPInstance = NULL; 57 | 58 | pRTCPInstance = RTCPInstance::createNew(envir(), RTCPgs, totSessionBW, 59 | cname, sink, NULL/*we're a server*/); 60 | 61 | if (pRTCPInstance) 62 | pRTCPInstance->setRRHandler(rtcpRRQos, this); 63 | 64 | return pRTCPInstance; 65 | } 66 | 67 | 68 | FramedSource* MediaVideoServerMediaSubsession 69 | ::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate) 70 | { 71 | FFMpegEncoder* encoder = NULL; 72 | 73 | // create new encoder with video source type 74 | switch(video_source_type) { 75 | case VIDEO_SOURCE_TYPE_CAMERA: 76 | break; 77 | case VIDEO_SOURCE_TYPE_SCREEN: 78 | break; 79 | default: 80 | break; 81 | 82 | } 83 | // generate video stream source 84 | MediaVideoStreamSource *pVideoSource = MediaVideoStreamSource::createNew(envir(), encoder); 85 | 86 | return H264VideoStreamDiscreteFramer::createNew(envir(), pVideoSource); 87 | } 88 | 89 | void MediaVideoServerMediaSubsession::afterPlaying(void* clientData) 90 | { 91 | // TODO: add process after playing 92 | } 93 | 94 | RTPSink* MediaVideoServerMediaSubsession 95 | ::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource) 96 | { 97 | fVideoRTPSink = MediaVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic); 98 | MediaVideoStreamSource* pVideoSource = (MediaVideoStreamSource*)inputSource; 99 | //log_rtsp("SDP = %s", getAuxSDPLine(fVidRTPSink, pAmbaSource)); 100 | envir() << "Amba Video SDP: " << getAuxSDPLine(fVideoRTPSink, pVideoSource); 101 | // Finally, start playing: 102 | //envir() << "Beginning to read from UPP...\n"; 103 | fVideoRTPSink->startPlaying(*pVideoSource, MediaVideoServerMediaSubsession::afterPlaying, fVideoRTPSink); 104 | 105 | return fVideoRTPSink; 106 | } 107 | 
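// Usage sketch (illustrative only; nothing below is called from this project):
// an OnDemandServerMediaSubsession like this one is normally wrapped in a
// ServerMediaSession and registered with the RTSP server. "env" and "rtspServer"
// are assumed caller-side names, and the "live" stream name simply mirrors the
// LIVE_VEIW_NAME macro in MediaRTSPServer.h; only the live555 calls themselves
// (ServerMediaSession::createNew, addSubsession, addServerMediaSession) are real API.
#if 0
ServerMediaSession* sms = ServerMediaSession::createNew(env, "live", "live",
	"session streamed by MediaRTSPServer");
sms->addSubsession(MediaVideoServerMediaSubsession::createNew(env, VIDEO_SOURCE_TYPE_CAMERA));
rtspServer->addServerMediaSession(sms);
#endif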
108 | void MediaVideoServerMediaSubsession 109 | ::seekStreamSource(FramedSource* inputSource, double& seekNPT, double streamDuration, 110 | u_int64_t& /*numBytes*/) 111 | { 112 | MediaVideoStreamSource *pVideoSource = (MediaVideoStreamSource*)inputSource; 113 | 114 | // if ((float)seekNPT < fDuration) 115 | // pVideoSource->seekStream((int32_t)seekNPT); 116 | } -------------------------------------------------------------------------------- /src/MediaVideoServerMediaSubsession.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _MEDIA_VIDEO_SERVER_MEDIA_SUBSESSION_H 35 | #define _MEDIA_VIDEO_SERVER_MEDIA_SUBSESSION_H 36 | 37 | #include "liveMedia.hh" 38 | 39 | typedef enum source_yype { 40 | VIDEO_SOURCE_TYPE_CAMERA, 41 | VIDEO_SOURCE_TYPE_SCREEN, 42 | } VIDEO_SOURCE_TYPE; 43 | 44 | class MediaVideoServerMediaSubsession: public OnDemandServerMediaSubsession 45 | { 46 | public: 47 | static MediaVideoServerMediaSubsession* createNew(UsageEnvironment& env, VIDEO_SOURCE_TYPE type); 48 | 49 | protected: 50 | MediaVideoServerMediaSubsession(UsageEnvironment& env, VIDEO_SOURCE_TYPE type) 51 | : OnDemandServerMediaSubsession(env, False), video_source_type(type){}; 52 | 53 | // redefined virtual functions 54 | virtual RTCPInstance* createRTCP(Groupsock* RTCPgs, unsigned totSessionBW, /* in kbps */ 55 | unsigned char const* cname, RTPSink* sink); 56 | 57 | virtual FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate); 58 | virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource); 59 | 60 | virtual void seekStreamSource(FramedSource* inputSource, double& seekNPT, double streamDuration, u_int64_t& numBytes); 61 | 62 | static void afterPlaying(void* clientData); 63 | static void rtcpRRQos(void* clientData); 64 | 65 | private: 66 | VIDEO_SOURCE_TYPE video_source_type; 67 | RTPSink *fVideoRTPSink; // ditto 68 | }; 69 | 70 | #endif /* _MEDIA_VIDEO_SERVER_MEDIA_SUBSESSION_H */ 71 | -------------------------------------------------------------------------------- /src/MediaVideoStreamSource.cpp: -------------------------------------------------------------------------------- 1 | 
/******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include "MediaVideoStreamSource.h" 35 | #include "GroupsockHelper.hh" 36 | #include "FFMpegEncoder.h" 37 | 38 | MediaVideoStreamSource * MediaVideoStreamSource::createNew(UsageEnvironment& env, FFMpegEncoder * enc) { 39 | return new MediaVideoStreamSource(env, enc); 40 | } 41 | 42 | MediaVideoStreamSource::MediaVideoStreamSource(UsageEnvironment & env, FFMpegEncoder * enc) 43 | : FramedSource(env), encoder(enc) // store the encoder so deliverFrame() below reads from a valid source 44 | { 45 | m_eventTriggerId = envir().taskScheduler().createEventTrigger(MediaVideoStreamSource::deliverFrameStub); 46 | // std::function callback1 = std::bind(&MediaH264VideoSource::onEncoded, this); 47 | // encoder->setCallbackFunctionFrameIsReady(callback1); 48 | } 49 | 50 | MediaVideoStreamSource::~MediaVideoStreamSource(void) 51 | { 52 | } 53 | 54 | void MediaVideoStreamSource::doStopGettingFrames() 55 | { 56 | FramedSource::doStopGettingFrames(); 57 | } 58 | 59 | void MediaVideoStreamSource::onEncoded() 60 | { 61 | envir().taskScheduler().triggerEvent(m_eventTriggerId, this); 62 | } 63 | 64 | void MediaVideoStreamSource::doGetNextFrame() 65 | { 66 | deliverFrame(); 67 | } 68 | 69 | void MediaVideoStreamSource::deliverFrame() 70 | { 71 | if (!isCurrentlyAwaitingData()) return; // we're not ready for the data yet 72 | 73 | static uint8_t* newFrameDataStart; 74 | static unsigned newFrameSize = 0; 75 | 76 | /* get the data frame from the Encoding thread.. */ 77 | if (encoder->GetFrame(&newFrameDataStart, &newFrameSize)) { 78 | if (newFrameDataStart != NULL) { 79 | /* This should never happen, but check anyway.. 
*/ 80 | if (newFrameSize > fMaxSize) { 81 | fFrameSize = fMaxSize; 82 | fNumTruncatedBytes = newFrameSize - fMaxSize; 83 | } 84 | else { 85 | fFrameSize = newFrameSize; 86 | } 87 | 88 | gettimeofday(&fPresentationTime, NULL); 89 | memcpy(fTo, newFrameDataStart, fFrameSize); 90 | 91 | //delete newFrameDataStart; 92 | //newFrameSize = 0; 93 | 94 | encoder->ReleaseFrame(); 95 | } 96 | else { 97 | fFrameSize = 0; 98 | fTo = NULL; 99 | handleClosure(this); 100 | } 101 | } 102 | else 103 | { 104 | fFrameSize = 0; 105 | } 106 | 107 | if (fFrameSize>0) 108 | FramedSource::afterGetting(this); 109 | 110 | } -------------------------------------------------------------------------------- /src/MediaVideoStreamSource.h: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melchi45/rtspclient_with_opengl/593eb17f7549279995a870d740ee2daed23c94a4/src/MediaVideoStreamSource.h -------------------------------------------------------------------------------- /src/StreamClientState.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | // Implementation of "StreamClientState": 35 | #include "StreamClientState.h" 36 | 37 | StreamClientState::StreamClientState() 38 | : iter(NULL), session(NULL), subsession(NULL), streamTimerTask(NULL), duration(0.0) { 39 | } 40 | 41 | StreamClientState::~StreamClientState() { 42 | delete iter; 43 | if (session != NULL) { 44 | // We also need to delete "session", and unschedule "streamTimerTask" (if set) 45 | UsageEnvironment& env = session->envir(); // alias 46 | 47 | env.taskScheduler().unscheduleDelayedTask(streamTimerTask); 48 | Medium::close(session); 49 | } 50 | } -------------------------------------------------------------------------------- /src/StreamClientState.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _STREAM_CLIENT_STATE_HH 35 | #define _STREAM_CLIENT_STATE_HH 36 | 37 | #include "liveMedia.hh" 38 | 39 | class StreamClientState { 40 | public: 41 | StreamClientState(); 42 | virtual ~StreamClientState(); 43 | 44 | public: 45 | MediaSubsessionIterator * iter; 46 | MediaSession* session; 47 | MediaSubsession* subsession; 48 | TaskToken streamTimerTask; 49 | double duration; 50 | }; 51 | 52 | #endif // _STREAM_CLIENT_STATE_HH -------------------------------------------------------------------------------- /src/YUV420P_Player.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include "YUV420P_Player.h" 35 | 36 | #if defined(USE_GLFW_LIB) 37 | 38 | YUV420P_Player::YUV420P_Player() 39 | :vid_w(0) 40 | ,vid_h(0) 41 | ,win_w(0) 42 | ,win_h(0) 43 | ,vao(0) 44 | ,y_tex(0) 45 | ,u_tex(0) 46 | ,v_tex(0) 47 | ,vert(0) 48 | ,frag(0) 49 | ,prog(0) 50 | ,u_pos(-1) 51 | ,textures_created(false) 52 | ,shader_created(false) 53 | ,y_pixels(NULL) 54 | ,u_pixels(NULL) 55 | ,v_pixels(NULL) 56 | { 57 | } 58 | 59 | bool YUV420P_Player::setup(int vidW, int vidH) { 60 | 61 | vid_w = vidW; 62 | vid_h = vidH; 63 | 64 | if(!vid_w || !vid_h) { 65 | printf("Invalid texture size.\n"); 66 | return false; 67 | } 68 | 69 | if(y_pixels || u_pixels || v_pixels) { 70 | printf("Already setup the YUV420P_Player.\n"); 71 | return false; 72 | } 73 | 74 | y_pixels = new uint8_t[vid_w * vid_h]; 75 | u_pixels = new uint8_t[int((vid_w * 0.5) * (vid_h * 0.5))]; 76 | v_pixels = new uint8_t[int((vid_w * 0.5) * (vid_h * 0.5))]; 77 | 78 | if(!setupTextures()) { 79 | return false; 80 | } 81 | 82 | if(!setupShader()) { 83 | return false; 84 | } 85 | 86 | glGenVertexArrays(1, &vao); 87 | 88 | return true; 89 | } 90 | 91 | bool YUV420P_Player::setupShader() { 92 | 93 | if(shader_created) { 94 | printf("Already creatd the shader.\n"); 95 | return false; 96 | } 97 | 98 | vert = rx_create_shader(GL_VERTEX_SHADER, YUV420P_VS); 99 | frag = rx_create_shader(GL_FRAGMENT_SHADER, YUV420P_FS); 100 | prog = rx_create_program(vert, frag); 101 | 102 | glLinkProgram(prog); 103 | rx_print_shader_link_info(prog); 104 | 105 | glUseProgram(prog); 106 | glUniform1i(glGetUniformLocation(prog, "y_tex"), 0); 107 | glUniform1i(glGetUniformLocation(prog, "u_tex"), 1); 108 | glUniform1i(glGetUniformLocation(prog, "v_tex"), 2); 109 | 110 | u_pos = glGetUniformLocation(prog, "draw_pos"); 111 | 112 | GLint viewport[4]; 113 | glGetIntegerv(GL_VIEWPORT, viewport); 114 | resize(viewport[2], viewport[3]); 115 | 116 | return true; 117 | } 118 | 119 | bool YUV420P_Player::setupTextures() { 120 | 121 | if(textures_created) { 122 | printf("Textures already created.\n"); 123 | return false; 124 | } 125 | 126 | glGenTextures(1, &y_tex); 127 | glBindTexture(GL_TEXTURE_2D, y_tex); 128 | glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, vid_w, vid_h, 0, GL_RED, GL_UNSIGNED_BYTE, NULL); // y_pixels); 129 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 130 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 131 | 132 | glGenTextures(1, &u_tex); 133 | glBindTexture(GL_TEXTURE_2D, u_tex); 134 | glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, vid_w/2, vid_h/2, 0, GL_RED, GL_UNSIGNED_BYTE, NULL); 135 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 136 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 137 | 138 | glGenTextures(1, &v_tex); 139 | glBindTexture(GL_TEXTURE_2D, v_tex); 140 | glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, vid_w/2, vid_h/2, 0, GL_RED, GL_UNSIGNED_BYTE, NULL); 141 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 142 | 
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 143 | 144 | textures_created = true; 145 | return true; 146 | } 147 | 148 | void YUV420P_Player::draw(int x, int y, int w, int h) { 149 | // assert(textures_created == true); 150 | 151 | if(w == 0) { 152 | w = vid_w; 153 | } 154 | 155 | if(h == 0) { 156 | h = vid_h; 157 | } 158 | 159 | glBindVertexArray(vao); 160 | glUseProgram(prog); 161 | 162 | glUniform4f(u_pos, x, y, w, h); 163 | 164 | glActiveTexture(GL_TEXTURE0); 165 | glBindTexture(GL_TEXTURE_2D, y_tex); 166 | 167 | glActiveTexture(GL_TEXTURE1); 168 | glBindTexture(GL_TEXTURE_2D, u_tex); 169 | 170 | glActiveTexture(GL_TEXTURE2); 171 | glBindTexture(GL_TEXTURE_2D, v_tex); 172 | 173 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); 174 | } 175 | 176 | void YUV420P_Player::resize(int winW, int winH) { 177 | assert(winW > 0 && winH > 0); 178 | 179 | win_w = winW; 180 | win_h = winH; 181 | 182 | pm.identity(); 183 | pm.ortho(0, win_w, win_h, 0, 0.0, 100.0f); 184 | 185 | glUseProgram(prog); 186 | glUniformMatrix4fv(glGetUniformLocation(prog, "u_pm"), 1, GL_FALSE, pm.ptr()); 187 | } 188 | 189 | void YUV420P_Player::setYPixels(uint8_t* pixels, int stride) { 190 | // assert(textures_created == true); 191 | 192 | glBindTexture(GL_TEXTURE_2D, y_tex); 193 | glPixelStorei(GL_UNPACK_ROW_LENGTH, stride); 194 | glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, vid_w, vid_h, GL_RED, GL_UNSIGNED_BYTE, pixels); 195 | } 196 | 197 | void YUV420P_Player::setUPixels(uint8_t* pixels, int stride) { 198 | // assert(textures_created == true); 199 | 200 | glBindTexture(GL_TEXTURE_2D, u_tex); 201 | glPixelStorei(GL_UNPACK_ROW_LENGTH, stride); 202 | glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, vid_w/2, vid_h/2, GL_RED, GL_UNSIGNED_BYTE, pixels); 203 | } 204 | 205 | void YUV420P_Player::setVPixels(uint8_t* pixels, int stride) { 206 | // assert(textures_created == true); 207 | 208 | glBindTexture(GL_TEXTURE_2D, v_tex); 209 | glPixelStorei(GL_UNPACK_ROW_LENGTH, stride); 210 | glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, vid_w/2, vid_h/2, GL_RED, GL_UNSIGNED_BYTE, pixels); 211 | } 212 | 213 | #endif // USE_GLFW_LIB -------------------------------------------------------------------------------- /src/YUV420P_Player.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef ROXLU_YUV420P_PLAYER_H 35 | #define ROXLU_YUV420P_PLAYER_H 36 | 37 | #if defined(USE_GLFW_LIB) 38 | 39 | // reference 40 | // https://medium.com/@Plimsky/how-to-install-a-opengl-environment-on-ubuntu-e3918cf5ab6c 41 | // https://nervous.io/ffmpeg/opengl/2017/01/31/ffmpeg-opengl/ 42 | #ifdef __APPLE__ 43 | #include 44 | #else 45 | #include 46 | #endif 47 | 48 | #include 49 | 50 | static const char* YUV420P_VS = "" 51 | "#version 330\n" 52 | "" 53 | "uniform mat4 u_pm;" 54 | "uniform vec4 draw_pos;" 55 | "" 56 | "const vec2 verts[4] = vec2[] (" 57 | " vec2(-0.5, 0.5), " 58 | " vec2(-0.5, -0.5), " 59 | " vec2( 0.5, 0.5), " 60 | " vec2( 0.5, -0.5) " 61 | ");" 62 | "" 63 | "const vec2 texcoords[4] = vec2[] (" 64 | " vec2(0.0, 1.0), " 65 | " vec2(0.0, 0.0), " 66 | " vec2(1.0, 1.0), " 67 | " vec2(1.0, 0.0) " 68 | "); " 69 | "" 70 | "out vec2 v_coord; " 71 | "" 72 | "void main() {" 73 | " vec2 vert = verts[gl_VertexID];" 74 | " vec4 p = vec4((0.5 * draw_pos.z) + draw_pos.x + (vert.x * draw_pos.z), " 75 | " (0.5 * draw_pos.w) + draw_pos.y + (vert.y * draw_pos.w), " 76 | " 0, 1);" 77 | " gl_Position = u_pm * p;" 78 | " v_coord = texcoords[gl_VertexID];" 79 | "}" 80 | ""; 81 | 82 | static const char* YUV420P_FS = "" 83 | "#version 330\n" 84 | "uniform sampler2D y_tex;" 85 | "uniform sampler2D u_tex;" 86 | "uniform sampler2D v_tex;" 87 | "in vec2 v_coord;" 88 | "layout( location = 0 ) out vec4 fragcolor;" 89 | "" 90 | "const vec3 R_cf = vec3(1.164383, 0.000000, 1.596027);" 91 | "const vec3 G_cf = vec3(1.164383, -0.391762, -0.812968);" 92 | "const vec3 B_cf = vec3(1.164383, 2.017232, 0.000000);" 93 | "const vec3 offset = vec3(-0.0625, -0.5, -0.5);" 94 | "" 95 | "void main() {" 96 | " float y = texture(y_tex, v_coord).r;" 97 | " float u = texture(u_tex, v_coord).r;" 98 | " float v = texture(v_tex, v_coord).r;" 99 | " vec3 yuv = vec3(y,u,v);" 100 | " yuv += offset;" 101 | " fragcolor = vec4(0.0, 0.0, 0.0, 1.0);" 102 | " fragcolor.r = dot(yuv, R_cf);" 103 | " fragcolor.g = dot(yuv, G_cf);" 104 | " fragcolor.b = dot(yuv, B_cf);" 105 | "}" 106 | ""; 107 | 108 | class YUV420P_Player { 109 | 110 | public: 111 | YUV420P_Player(); 112 | bool setup(int w, int h); 113 | void setYPixels(uint8_t* pixels, int stride); 114 | void setUPixels(uint8_t* pixels, int stride); 115 | void setVPixels(uint8_t* pixels, int stride); 116 | void draw(int x, int y, int w = 0, int h = 0); 117 | void resize(int winW, int winH); 118 | 119 | private: 120 | bool setupTextures(); 121 | bool setupShader(); 122 | 123 | public: 124 | int vid_w; 125 | int vid_h; 126 | int win_w; 127 | int win_h; 128 | GLuint vao; 129 | GLuint y_tex; 130 | GLuint u_tex; 131 | GLuint v_tex; 132 | GLuint vert; 133 | GLuint frag; 134 | GLuint prog; 135 | GLint u_pos; 136 | bool textures_created; 137 | bool shader_created; 138 | uint8_t* y_pixels; 139 | uint8_t* u_pixels; 140 | uint8_t* v_pixels; 141 | mat4 pm; 142 | }; 143 | #endif // USE_GLFW_LIB 144 | #endif 
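For reference, `YUV420P_Player` is fed one plane at a time, and the `stride` argument maps directly onto FFmpeg's `AVFrame::linesize`, so padded decoder output can be uploaded without repacking. A minimal usage sketch, assuming a current OpenGL 3.3 context (e.g. created with GLFW) and a decoded `AVFrame* frame` in YUV420P; the variable names are illustrative, not taken from this repository:

```cpp
// Sketch only: pushing one decoded FFmpeg frame through YUV420P_Player.
YUV420P_Player player;
player.setup(frame->width, frame->height);              // allocates the three GL_R8 textures and the shader

player.setYPixels(frame->data[0], frame->linesize[0]);  // full-resolution luma plane
player.setUPixels(frame->data[1], frame->linesize[1]);  // chroma planes are width/2 x height/2
player.setVPixels(frame->data[2], frame->linesize[2]);

player.draw(0, 0);                                      // w == 0 / h == 0 fall back to the video size
```

The fragment shader above then performs the YUV-to-RGB conversion (BT.601 video-range coefficients) on the GPU, so no per-frame color conversion is needed on the CPU.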
-------------------------------------------------------------------------------- /src/bs.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _H264_BS_H 35 | #define _H264_BS_H 1 36 | 37 | #include 38 | #include 39 | #include 40 | 41 | #ifdef __cplusplus 42 | extern "C" { 43 | #endif 44 | 45 | typedef struct 46 | { 47 | uint8_t* start; 48 | uint8_t* p; 49 | uint8_t* end; 50 | int bits_left; 51 | } bs_t; 52 | 53 | #define _OPTIMIZE_BS_ 1 54 | 55 | #if ( _OPTIMIZE_BS_ > 0 ) 56 | #ifndef FAST_U8 57 | #define FAST_U8 58 | #endif 59 | #endif 60 | 61 | 62 | static bs_t* bs_new(uint8_t* buf, size_t size); 63 | static void bs_free(bs_t* b); 64 | static bs_t* bs_clone( bs_t* dest, const bs_t* src ); 65 | static bs_t* bs_init(bs_t* b, uint8_t* buf, size_t size); 66 | static uint32_t bs_byte_aligned(bs_t* b); 67 | static int bs_eof(bs_t* b); 68 | static int bs_overrun(bs_t* b); 69 | static int bs_pos(bs_t* b); 70 | 71 | static uint32_t bs_peek_u1(bs_t* b); 72 | static uint32_t bs_read_u1(bs_t* b); 73 | static uint32_t bs_read_u(bs_t* b, int n); 74 | static uint32_t bs_read_f(bs_t* b, int n); 75 | static uint32_t bs_read_u8(bs_t* b); 76 | static uint32_t bs_read_ue(bs_t* b); 77 | static int32_t bs_read_se(bs_t* b); 78 | 79 | static void bs_write_u1(bs_t* b, uint32_t v); 80 | static void bs_write_u(bs_t* b, int n, uint32_t v); 81 | static void bs_write_f(bs_t* b, int n, uint32_t v); 82 | static void bs_write_u8(bs_t* b, uint32_t v); 83 | static void bs_write_ue(bs_t* b, uint32_t v); 84 | static void bs_write_se(bs_t* b, int32_t v); 85 | 86 | static int bs_read_bytes(bs_t* b, uint8_t* buf, int len); 87 | static int bs_write_bytes(bs_t* b, uint8_t* buf, int len); 88 | static int bs_skip_bytes(bs_t* b, int len); 89 | static uint32_t bs_next_bits(bs_t* b, int nbits); 90 | // IMPLEMENTATION 91 | 92 | static inline bs_t* bs_init(bs_t* b, uint8_t* buf, size_t size) 93 | { 94 | b->start = buf; 95 | b->p = buf; 96 | b->end = buf + size; 97 | b->bits_left = 8; 98 | return b; 99 | } 100 | 101 | static inline bs_t* bs_new(uint8_t* buf, size_t size) 102 | { 103 | bs_t* b = (bs_t*)malloc(sizeof(bs_t)); 104 | bs_init(b, buf, size); 105 | return b; 106 | } 107 | 108 | static inline void bs_free(bs_t* b) 109 | { 110 | free(b); 111 | } 112 | 113 
| static inline bs_t* bs_clone(bs_t* dest, const bs_t* src) 114 | { 115 | dest->start = src->p; 116 | dest->p = src->p; 117 | dest->end = src->end; 118 | dest->bits_left = src->bits_left; 119 | return dest; 120 | } 121 | 122 | static inline uint32_t bs_byte_aligned(bs_t* b) 123 | { 124 | return (b->bits_left == 8); 125 | } 126 | 127 | static inline int bs_eof(bs_t* b) { if (b->p >= b->end) { return 1; } else { return 0; } } 128 | 129 | static inline int bs_overrun(bs_t* b) { if (b->p > b->end) { return 1; } else { return 0; } } 130 | 131 | static inline int bs_pos(bs_t* b) { if (b->p > b->end) { return (b->end - b->start); } else { return (b->p - b->start); } } 132 | 133 | static inline int bs_bytes_left(bs_t* b) { return (b->end - b->p); } 134 | 135 | static inline uint32_t bs_read_u1(bs_t* b) 136 | { 137 | uint32_t r = 0; 138 | 139 | b->bits_left--; 140 | 141 | if (! bs_eof(b)) 142 | { 143 | r = ((*(b->p)) >> b->bits_left) & 0x01; 144 | } 145 | 146 | if (b->bits_left == 0) { b->p ++; b->bits_left = 8; } 147 | 148 | return r; 149 | } 150 | 151 | static inline void bs_skip_u1(bs_t* b) 152 | { 153 | b->bits_left--; 154 | if (b->bits_left == 0) { b->p ++; b->bits_left = 8; } 155 | } 156 | 157 | static inline uint32_t bs_peek_u1(bs_t* b) 158 | { 159 | uint32_t r = 0; 160 | 161 | if (! bs_eof(b)) 162 | { 163 | r = ((*(b->p)) >> ( b->bits_left - 1 )) & 0x01; 164 | } 165 | return r; 166 | } 167 | 168 | 169 | static inline uint32_t bs_read_u(bs_t* b, int n) 170 | { 171 | uint32_t r = 0; 172 | int i; 173 | for (i = 0; i < n; i++) 174 | { 175 | r |= ( bs_read_u1(b) << ( n - i - 1 ) ); 176 | } 177 | return r; 178 | } 179 | 180 | static inline void bs_skip_u(bs_t* b, int n) 181 | { 182 | int i; 183 | for ( i = 0; i < n; i++ ) 184 | { 185 | bs_skip_u1( b ); 186 | } 187 | } 188 | 189 | static inline uint32_t bs_read_f(bs_t* b, int n) { return bs_read_u(b, n); } 190 | 191 | static inline uint32_t bs_read_u8(bs_t* b) 192 | { 193 | #ifdef FAST_U8 194 | if (b->bits_left == 8 && ! bs_eof(b)) // can do fast read 195 | { 196 | uint32_t r = b->p[0]; 197 | b->p++; 198 | return r; 199 | } 200 | #endif 201 | return bs_read_u(b, 8); 202 | } 203 | 204 | static inline uint32_t bs_read_ue(bs_t* b) 205 | { 206 | int32_t r = 0; 207 | int i = 0; 208 | 209 | while( (bs_read_u1(b) == 0) && (i < 32) && (!bs_eof(b)) ) 210 | { 211 | i++; 212 | } 213 | r = bs_read_u(b, i); 214 | r += (1 << i) - 1; 215 | return r; 216 | } 217 | 218 | static inline int32_t bs_read_se(bs_t* b) 219 | { 220 | int32_t r = bs_read_ue(b); 221 | if (r & 0x01) 222 | { 223 | r = (r+1)/2; 224 | } 225 | else 226 | { 227 | r = -(r/2); 228 | } 229 | return r; 230 | } 231 | 232 | 233 | static inline void bs_write_u1(bs_t* b, uint32_t v) 234 | { 235 | b->bits_left--; 236 | 237 | if (! bs_eof(b)) 238 | { 239 | // FIXME this is slow, but we must clear bit first 240 | // is it better to memset(0) the whole buffer during bs_init() instead? 
241 | // if we don't do either, we introduce pretty nasty bugs 242 | (*(b->p)) &= ~(0x01 << b->bits_left); 243 | (*(b->p)) |= ((v & 0x01) << b->bits_left); 244 | } 245 | 246 | if (b->bits_left == 0) { b->p ++; b->bits_left = 8; } 247 | } 248 | 249 | static inline void bs_write_u(bs_t* b, int n, uint32_t v) 250 | { 251 | int i; 252 | for (i = 0; i < n; i++) 253 | { 254 | bs_write_u1(b, (v >> ( n - i - 1 ))&0x01 ); 255 | } 256 | } 257 | 258 | static inline void bs_write_f(bs_t* b, int n, uint32_t v) { bs_write_u(b, n, v); } 259 | 260 | static inline void bs_write_u8(bs_t* b, uint32_t v) 261 | { 262 | #ifdef FAST_U8 263 | if (b->bits_left == 8 && ! bs_eof(b)) // can do fast write 264 | { 265 | b->p[0] = v; 266 | b->p++; 267 | return; 268 | } 269 | #endif 270 | bs_write_u(b, 8, v); 271 | } 272 | 273 | static inline void bs_write_ue(bs_t* b, uint32_t v) 274 | { 275 | static const int len_table[256] = 276 | { 277 | 1, 278 | 1, 279 | 2,2, 280 | 3,3,3,3, 281 | 4,4,4,4,4,4,4,4, 282 | 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5, 283 | 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6, 284 | 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6, 285 | 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 286 | 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 287 | 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 288 | 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 289 | 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, 290 | 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, 291 | 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, 292 | 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, 293 | 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, 294 | 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, 295 | 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, 296 | 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, 297 | }; 298 | 299 | int len; 300 | 301 | if (v == 0) 302 | { 303 | bs_write_u1(b, 1); 304 | } 305 | else 306 | { 307 | v++; 308 | 309 | if (v >= 0x01000000) 310 | { 311 | len = 24 + len_table[ v >> 24 ]; 312 | } 313 | else if(v >= 0x00010000) 314 | { 315 | len = 16 + len_table[ v >> 16 ]; 316 | } 317 | else if(v >= 0x00000100) 318 | { 319 | len = 8 + len_table[ v >> 8 ]; 320 | } 321 | else 322 | { 323 | len = len_table[ v ]; 324 | } 325 | 326 | bs_write_u(b, 2*len-1, v); 327 | } 328 | } 329 | 330 | static inline void bs_write_se(bs_t* b, int32_t v) 331 | { 332 | if (v <= 0) 333 | { 334 | bs_write_ue(b, -v*2); 335 | } 336 | else 337 | { 338 | bs_write_ue(b, v*2 - 1); 339 | } 340 | } 341 | 342 | static inline int bs_read_bytes(bs_t* b, uint8_t* buf, int len) 343 | { 344 | int actual_len = len; 345 | if (b->end - b->p < actual_len) { actual_len = b->end - b->p; } 346 | if (actual_len < 0) { actual_len = 0; } 347 | memcpy(buf, b->p, actual_len); 348 | if (len < 0) { len = 0; } 349 | b->p += len; 350 | return actual_len; 351 | } 352 | 353 | static inline int bs_write_bytes(bs_t* b, uint8_t* buf, int len) 354 | { 355 | int actual_len = len; 356 | if (b->end - b->p < actual_len) { actual_len = b->end - b->p; } 357 | if (actual_len < 0) { actual_len = 0; } 358 | memcpy(b->p, buf, actual_len); 359 | if (len < 0) { len = 0; } 360 | b->p += len; 361 | return actual_len; 362 | } 363 | 364 | static inline int bs_skip_bytes(bs_t* b, int len) 365 | { 366 | int actual_len = len; 367 | if (b->end - b->p < actual_len) { actual_len = b->end - b->p; } 368 | if (actual_len < 0) { actual_len = 0; } 369 | if (len < 0) { len = 0; } 370 | b->p += len; 371 | return actual_len; 372 | } 373 | 374 | static inline uint32_t bs_next_bits(bs_t* bs, int nbits) 375 | { 376 | bs_t b; 377 | bs_clone(&b,bs); 378 | return bs_read_u(&b, nbits); 379 | } 380 | 381 | static inline uint64_t bs_next_bytes(bs_t* bs, int nbytes) 382 | { 383 | int i = 0; 384 | uint64_t val = 0; 385 | 386 | if ( 
(nbytes > 8) || (nbytes < 1) ) { return 0; } 387 | if (bs->p + nbytes > bs->end) { return 0; } 388 | 389 | for ( i = 0; i < nbytes; i++ ) { val = ( val << 8 ) | bs->p[i]; } 390 | return val; 391 | } 392 | 393 | #define bs_print_state(b) fprintf( stderr, "%s:%d@%s: b->p=0x%02hhX, b->left = %d\n", __FILE__, __LINE__, __FUNCTION__, *b->p, b->bits_left ) 394 | 395 | #ifdef __cplusplus 396 | } 397 | #endif 398 | 399 | #endif 400 | -------------------------------------------------------------------------------- /src/h264_avcc.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include 35 | #include 36 | #include 37 | #include 38 | 39 | #include "h264_avcc.h" 40 | #include "bs.h" 41 | #include "h264_stream.h" 42 | 43 | avcc_t* avcc_new() 44 | { 45 | avcc_t* avcc = (avcc_t*)calloc(1, sizeof(avcc_t)); 46 | avcc->sps_table = NULL; 47 | avcc->pps_table = NULL; 48 | return avcc; 49 | } 50 | 51 | void avcc_free(avcc_t* avcc) 52 | { 53 | if (avcc->sps_table != NULL) { free(avcc->sps_table); } 54 | if (avcc->pps_table != NULL) { free(avcc->pps_table); } 55 | free(avcc); 56 | } 57 | 58 | int read_avcc(avcc_t* avcc, h264_stream_t* h, bs_t* b) 59 | { 60 | avcc->configurationVersion = bs_read_u8(b); 61 | avcc->AVCProfileIndication = bs_read_u8(b); 62 | avcc->profile_compatibility = bs_read_u8(b); 63 | avcc->AVCLevelIndication = bs_read_u8(b); 64 | /* int reserved = */ bs_read_u(b, 6); // '111111'b; 65 | avcc->lengthSizeMinusOne = bs_read_u(b, 2); 66 | /* int reserved = */ bs_read_u(b, 3); // '111'b; 67 | 68 | avcc->numOfSequenceParameterSets = bs_read_u(b, 5); 69 | avcc->sps_table = (sps_t**)calloc(avcc->numOfSequenceParameterSets, sizeof(sps_t*)); 70 | for (int i = 0; i < avcc->numOfSequenceParameterSets; i++) 71 | { 72 | int sequenceParameterSetLength = bs_read_u(b, 16); 73 | int len = sequenceParameterSetLength; 74 | uint8_t* buf = (uint8_t*)malloc(len); 75 | len = bs_read_bytes(b, buf, len); 76 | int rc = read_nal_unit(h, buf, len); 77 | free(buf); 78 | if (h->nal->nal_unit_type != NAL_UNIT_TYPE_SPS) { continue; } // TODO report errors 79 | if (rc < 0) { continue; } 80 | avcc->sps_table[i] = h->sps; // TODO copy data? 
81 | } 82 | 83 | avcc->numOfPictureParameterSets = bs_read_u(b, 8); 84 | avcc->pps_table = (pps_t**)calloc(avcc->numOfPictureParameterSets, sizeof(pps_t*)); // sized by the PPS count 85 | for (int i = 0; i < avcc->numOfPictureParameterSets; i++) 86 | { 87 | int pictureParameterSetLength = bs_read_u(b, 16); 88 | int len = pictureParameterSetLength; 89 | uint8_t* buf = (uint8_t*)malloc(len); 90 | len = bs_read_bytes(b, buf, len); 91 | int rc = read_nal_unit(h, buf, len); 92 | free(buf); 93 | if (h->nal->nal_unit_type != NAL_UNIT_TYPE_PPS) { continue; } // TODO report errors 94 | if (rc < 0) { continue; } 95 | avcc->pps_table[i] = h->pps; // TODO copy data? 96 | } 97 | 98 | if (bs_overrun(b)) { return -1; } 99 | return bs_pos(b); 100 | } 101 | 102 | 103 | int write_avcc(avcc_t* avcc, h264_stream_t* h, bs_t* b) 104 | { 105 | bs_write_u8(b, 1); // configurationVersion = 1; 106 | bs_write_u8(b, avcc->AVCProfileIndication); 107 | bs_write_u8(b, avcc->profile_compatibility); 108 | bs_write_u8(b, avcc->AVCLevelIndication); 109 | bs_write_u(b, 6, 0x3F); // reserved = '111111'b; 110 | bs_write_u(b, 2, avcc->lengthSizeMinusOne); 111 | bs_write_u(b, 3, 0x07); // reserved = '111'b; 112 | 113 | bs_write_u(b, 5, avcc->numOfSequenceParameterSets); 114 | for (int i = 0; i < avcc->numOfSequenceParameterSets; i++) 115 | { 116 | int max_len = 1024; // FIXME 117 | uint8_t* buf = (uint8_t*)malloc(max_len); 118 | h->nal->nal_ref_idc = 3; // NAL_REF_IDC_PRIORITY_HIGHEST; 119 | h->nal->nal_unit_type = NAL_UNIT_TYPE_SPS; 120 | h->sps = avcc->sps_table[i]; 121 | int len = write_nal_unit(h, buf, max_len); 122 | if (len < 0) { free(buf); continue; } // TODO report errors 123 | int sequenceParameterSetLength = len; 124 | bs_write_u(b, 16, sequenceParameterSetLength); 125 | bs_write_bytes(b, buf, len); 126 | free(buf); 127 | } 128 | 129 | bs_write_u(b, 8, avcc->numOfPictureParameterSets); 130 | for (int i = 0; i < avcc->numOfPictureParameterSets; i++) 131 | { 132 | int max_len = 1024; // FIXME 133 | uint8_t* buf = (uint8_t*)malloc(max_len); 134 | h->nal->nal_ref_idc = 3; // NAL_REF_IDC_PRIORITY_HIGHEST; 135 | h->nal->nal_unit_type = NAL_UNIT_TYPE_PPS; 136 | h->pps = avcc->pps_table[i]; 137 | int len = write_nal_unit(h, buf, max_len); 138 | if (len < 0) { free(buf); continue; } // TODO report errors 139 | int pictureParameterSetLength = len; 140 | bs_write_u(b, 16, pictureParameterSetLength); 141 | bs_write_bytes(b, buf, len); 142 | free(buf); 143 | } 144 | 145 | if (bs_overrun(b)) { return -1; } 146 | return bs_pos(b); 147 | } 148 | 149 | void debug_avcc(avcc_t* avcc) 150 | { 151 | printf("======= AVC Decoder Configuration Record =======\n"); 152 | printf(" configurationVersion: %d\n", avcc->configurationVersion ); 153 | printf(" AVCProfileIndication: %d\n", avcc->AVCProfileIndication ); 154 | printf(" profile_compatibility: %d\n", avcc->profile_compatibility ); 155 | printf(" AVCLevelIndication: %d\n", avcc->AVCLevelIndication ); 156 | printf(" lengthSizeMinusOne: %d\n", avcc->lengthSizeMinusOne ); 157 | 158 | printf("\n"); 159 | printf(" numOfSequenceParameterSets: %d\n", avcc->numOfSequenceParameterSets ); 160 | for (int i = 0; i < avcc->numOfSequenceParameterSets; i++) 161 | { 162 | //printf(" sequenceParameterSetLength\n", avcc->sequenceParameterSetLength ); 163 | if (avcc->sps_table[i] == NULL) { printf(" null sps\n"); continue; } 164 | debug_sps(avcc->sps_table[i]); 165 | } 166 | 167 | printf("\n"); 168 | printf(" numOfPictureParameterSets: %d\n", avcc->numOfPictureParameterSets ); 169 | for (int i = 0; i < 
avcc->numOfPictureParameterSets; i++) 170 | { 171 | //printf(" pictureParameterSetLength\n", avcc->pictureParameterSetLength ); 172 | if (avcc->pps_table[i] == NULL) { printf(" null pps\n"); continue; } 173 | debug_pps(avcc->pps_table[i]); 174 | } 175 | } 176 | -------------------------------------------------------------------------------- /src/h264_avcc.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef _H264_AVCC_H 35 | #define _H264_AVCC_H 1 36 | 37 | #include 38 | #include 39 | 40 | #include "bs.h" 41 | #include "h264_stream.h" 42 | 43 | #ifdef __cplusplus 44 | extern "C" { 45 | #endif 46 | 47 | /** 48 | AVC decoder configuration record, ISO/IEC 14496-15:2004(E), Section 5.2.4.1 49 | Seen in seen in mp4 files as 'avcC' atom 50 | Seen in flv files as AVCVIDEOPACKET with AVCPacketType == 0 51 | */ 52 | typedef struct 53 | { 54 | int configurationVersion; // = 1 55 | int AVCProfileIndication; 56 | int profile_compatibility; 57 | int AVCLevelIndication; 58 | // bit(6) reserved = '111111'b; 59 | int lengthSizeMinusOne; 60 | // bit(3) reserved = '111'b; 61 | int numOfSequenceParameterSets; 62 | sps_t** sps_table; 63 | int numOfPictureParameterSets; 64 | pps_t** pps_table; 65 | } avcc_t; 66 | 67 | avcc_t* avcc_new(); 68 | void avcc_free(avcc_t* avcc); 69 | int read_avcc(avcc_t* avcc, h264_stream_t* h, bs_t* b); 70 | int write_avcc(avcc_t* avcc, h264_stream_t* h, bs_t* b); 71 | void debug_avcc(avcc_t* avcc); 72 | 73 | #ifdef __cplusplus 74 | } 75 | #endif 76 | 77 | #endif 78 | -------------------------------------------------------------------------------- /src/h264_sei.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include "bs.h" 35 | #include "h264_stream.h" 36 | #include "h264_sei.h" 37 | 38 | #include 39 | #include // malloc 40 | #include // memset 41 | 42 | sei_t* sei_new() 43 | { 44 | sei_t* s = (sei_t*)malloc(sizeof(sei_t)); 45 | memset(s, 0, sizeof(sei_t)); 46 | s->payload = NULL; 47 | return s; 48 | } 49 | 50 | void sei_free(sei_t* s) 51 | { 52 | if ( s->payload != NULL ) free(s->payload); 53 | free(s); 54 | } 55 | 56 | void read_sei_end_bits(h264_stream_t* h, bs_t* b ) 57 | { 58 | // if the message doesn't end at a byte border 59 | if ( !bs_byte_aligned( b ) ) 60 | { 61 | if ( !bs_read_u1( b ) ) fprintf(stderr, "WARNING: bit_equal_to_one is 0!!!!\n"); 62 | while ( ! bs_byte_aligned( b ) ) 63 | { 64 | if ( bs_read_u1( b ) ) fprintf(stderr, "WARNING: bit_equal_to_zero is 1!!!!\n"); 65 | } 66 | } 67 | 68 | read_rbsp_trailing_bits(h, b); 69 | } 70 | 71 | // D.1 SEI payload syntax 72 | void read_sei_payload(h264_stream_t* h, bs_t* b, int payloadType, int payloadSize) 73 | { 74 | sei_t* s = h->sei; 75 | 76 | s->payload = (uint8_t*)malloc(payloadSize); 77 | 78 | int i; 79 | 80 | for ( i = 0; i < payloadSize; i++ ) 81 | s->payload[i] = bs_read_u(b, 8); 82 | 83 | read_sei_end_bits(h, b); 84 | } 85 | 86 | // D.1 SEI payload syntax 87 | void write_sei_payload(h264_stream_t* h, bs_t* b, int payloadType, int payloadSize) 88 | { 89 | sei_t* s = h->sei; 90 | 91 | int i; 92 | for ( i = 0; i < s->payloadSize; i++ ) 93 | bs_write_u(b, 8, s->payload[i]); 94 | } 95 | 96 | 97 | 98 | -------------------------------------------------------------------------------- /src/h264_sei.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | 35 | #include 36 | 37 | #ifndef _H264_SEI_H 38 | #define _H264_SEI_H 1 39 | 40 | #include 41 | 42 | #include "bs.h" 43 | 44 | #ifdef __cplusplus 45 | extern "C" { 46 | #endif 47 | 48 | typedef struct 49 | { 50 | int payloadType; 51 | int payloadSize; 52 | uint8_t* payload; 53 | } sei_t; 54 | 55 | sei_t* sei_new(); 56 | void sei_free(sei_t* s); 57 | 58 | //D.1 SEI payload syntax 59 | #define SEI_TYPE_BUFFERING_PERIOD 0 60 | #define SEI_TYPE_PIC_TIMING 1 61 | #define SEI_TYPE_PAN_SCAN_RECT 2 62 | #define SEI_TYPE_FILLER_PAYLOAD 3 63 | #define SEI_TYPE_USER_DATA_REGISTERED_ITU_T_T35 4 64 | #define SEI_TYPE_USER_DATA_UNREGISTERED 5 65 | #define SEI_TYPE_RECOVERY_POINT 6 66 | #define SEI_TYPE_DEC_REF_PIC_MARKING_REPETITION 7 67 | #define SEI_TYPE_SPARE_PIC 8 68 | #define SEI_TYPE_SCENE_INFO 9 69 | #define SEI_TYPE_SUB_SEQ_INFO 10 70 | #define SEI_TYPE_SUB_SEQ_LAYER_CHARACTERISTICS 11 71 | #define SEI_TYPE_SUB_SEQ_CHARACTERISTICS 12 72 | #define SEI_TYPE_FULL_FRAME_FREEZE 13 73 | #define SEI_TYPE_FULL_FRAME_FREEZE_RELEASE 14 74 | #define SEI_TYPE_FULL_FRAME_SNAPSHOT 15 75 | #define SEI_TYPE_PROGRESSIVE_REFINEMENT_SEGMENT_START 16 76 | #define SEI_TYPE_PROGRESSIVE_REFINEMENT_SEGMENT_END 17 77 | #define SEI_TYPE_MOTION_CONSTRAINED_SLICE_GROUP_SET 18 78 | #define SEI_TYPE_FILM_GRAIN_CHARACTERISTICS 19 79 | #define SEI_TYPE_DEBLOCKING_FILTER_DISPLAY_PREFERENCE 20 80 | #define SEI_TYPE_STEREO_VIDEO_INFO 21 81 | 82 | #ifdef __cplusplus 83 | } 84 | #endif 85 | 86 | #endif 87 | -------------------------------------------------------------------------------- /src/log_utils.h: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #ifndef LOG_UTILS_H_ 35 | #define LOG_UTILS_H_ 36 | 37 | #ifdef __cplusplus 38 | extern "C" { 39 | #endif 40 | 41 | #define ANSI_COLOR_RED "\x1b[31m" 42 | #define ANSI_COLOR_GREEN "\x1b[32m" 43 | #define ANSI_COLOR_YELLOW "\x1b[33m" 44 | #define ANSI_COLOR_BLUE "\x1b[34m" 45 | #define ANSI_COLOR_MAGENTA "\x1b[35m" 46 | #define ANSI_COLOR_CYAN "\x1b[36m" 47 | #define ANSI_COLOR_RESET "\x1b[0m" 48 | 49 | /** 50 | * This is a prototype of log handler function. 51 | * @param fmt Message format (used in the same way as with @a printf()). 52 | */ 53 | typedef void (*log_function)(char* fmt, ...); 54 | 55 | /**@name Log handlers 56 | * Contain links to the current log handlers. Normally these links should not 57 | * be used directly. 58 | * @{ 59 | */ 60 | /** 61 | * Contains link to the current handler of @a info log messages. Normally 62 | * #log_info() should be used instead. 63 | */ 64 | extern log_function log_rtspHandler; 65 | /** 66 | * Contains link to the current handler of @a info log messages. Normally 67 | * #log_info() should be used instead. 68 | */ 69 | extern log_function log_infoHandler; 70 | /** 71 | * Contains link to the current handler of @a debug log messages. Normally 72 | * #log_debug() should be used instead. 73 | */ 74 | extern log_function log_debugHandler; 75 | /** 76 | * Contains link to the current handler of @a warning log messages. Normally 77 | * #log_warning() should be used instead. 78 | */ 79 | extern log_function log_warningHandler; 80 | /** 81 | * Contains link to the current handler of @a error log messages. Normally 82 | * #log_error() should be used instead. 83 | */ 84 | extern log_function log_errorHandler; 85 | /** @} */ 86 | 87 | /**@name Logging utilities 88 | * @{*/ 89 | // A trick with define is done in order to auto-add filename and line number 90 | // into the log message. 91 | 92 | /** 93 | * Prints @a info message to the configured output. 94 | * Automatically adds filename and string number of the place from where the log 95 | * was written. 96 | * 97 | * @param fmt Message format (used in the same way as with @a printf()). 98 | */ 99 | #define log_rtsp(fmt, ...) (*log_rtspHandler)((char*)"%s" fmt, ## __VA_ARGS__) 100 | 101 | /** 102 | * Prints @a info message to the configured output. 103 | * Automatically adds filename and string number of the place from where the log 104 | * was written. 105 | * 106 | * @param fmt Message format (used in the same way as with @a printf()). 107 | */ 108 | #define log_info(fmt, ...) (*log_infoHandler)((char*)"%s(%d): " fmt, __FILE__, \ 109 | __LINE__, ## __VA_ARGS__) 110 | 111 | /** 112 | * Prints @a debug message to the configured output. 113 | * Automatically adds filename and string number of the place from where the log 114 | * was written. 115 | * 116 | * @param fmt Message format (used in the same way as with @a printf()). 117 | */ 118 | #define log_debug(fmt, ...) 
(*log_debugHandler)((char*)"%s(%d): " fmt, __FILE__, \ 119 | __LINE__, ## __VA_ARGS__) 120 | 121 | /** 122 | * Prints @a warning message to the configured output. 123 | * Automatically adds filename and string number of the place from where the log 124 | * was written. 125 | * 126 | * @param fmt Message format (used in the same way as with @a printf()). 127 | */ 128 | #define log_warning(fmt, ...) (*log_warningHandler)((char*)"%s(%d): " fmt, __FILE__, \ 129 | __LINE__, ## __VA_ARGS__) 130 | 131 | /** 132 | * Prints @a error message to the configured output. 133 | * Automatically adds filename and string number of the place from where the log 134 | * was written. 135 | * 136 | * @param fmt Message format (used in the same way as with @a printf()). 137 | */ 138 | #define log_error(fmt, ...) (*log_errorHandler)((char*)"%s(%d): " fmt, __FILE__, \ 139 | __LINE__, ## __VA_ARGS__) 140 | /**@}*/ 141 | 142 | /** 143 | * Defines possible log levels. 144 | */ 145 | typedef enum 146 | { 147 | /** Info log level. */ 148 | LOG_LEVEL_INFO, 149 | /** Debug log level. */ 150 | LOG_LEVEL_DEBUG, 151 | /** Warning log level. */ 152 | LOG_LEVEL_WARNING, 153 | /** Error log level. */ 154 | LOG_LEVEL_ERROR, 155 | /** Info log level. */ 156 | LOG_LEVEL_RTSP, 157 | /** 'No' log level. */ 158 | LOG_LEVEL_NO 159 | } LOG_LEVEL; 160 | 161 | /** 162 | * Switches library to use @a syslog for handling messages. 163 | * 164 | * @param facility Facility tells syslog who issued the message. See 165 | * documentation of @a syslog for more information. 166 | */ 167 | void log_useSyslog(int facility); 168 | 169 | /** 170 | * Switches library to use @a printf for handling messages. 171 | */ 172 | void log_usePrintf(); 173 | 174 | void log_closeSyslog(); 175 | 176 | /** 177 | * Sets the minimum priority level of the messages which will be processed. 178 | * 179 | * @param level Priority level: 180 | * - #LOG_LEVEL_DEBUG - All messages will be printed; 181 | * - #LOG_LEVEL_WARNING - Only warning and error messages will be 182 | * printed; 183 | * - #LOG_LEVEL_ERROR - Only error messages are printed; 184 | * - #LOG_LEVEL_NO - Nothing is printed at all. 185 | */ 186 | void log_setLevel(LOG_LEVEL level); 187 | 188 | #ifdef __cplusplus 189 | } 190 | #endif 191 | 192 | #endif /* LOG_UTILS_H_ */ 193 | -------------------------------------------------------------------------------- /src/rtspclient_with_opengl.cpp: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 1998 MFC Forum 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | * 16 | * Module Name: 17 | * 18 | * Revision History: 19 | * 20 | * Date Ver Name Description 21 | * ---------- --- --------------------- ----------------------------------------- 22 | * 07-Jun-2016 0.1 Youngho Kim Created 23 | * ---------- --- --------------------- ----------------------------------------- 24 | * 25 | * DESCRIPTION: 26 | * 27 | * $Author: 28 | * $LastChangedBy: 29 | * $Date: 30 | * $Revision: 2949 $ 31 | * $Id: 32 | * $HeadURL: 33 | *******************************************************************************/ 34 | #include 35 | #include // for signal 36 | #ifdef WIN32 37 | #include // for using the function Sleep 38 | #else 39 | #include // for using the function sleep 40 | #include // for sleep 41 | #endif 42 | 43 | #include "MediaRTSPSession.h" 44 | #include "log_utils.h" 45 | #include "H264ReadCameraEncoder.h" 46 | #include "H264ReadScreenEncoder.h" 47 | 48 | //char eventLoopWatchVariable = 0; 49 | char* username = NULL; 50 | char* password = NULL; 51 | char* rtsp_url = NULL; 52 | char* filename = NULL; 53 | bool bUpStream = false; 54 | bool bInterleaved = false; 55 | MediaRTSPSession* pRtsp = NULL; 56 | 57 | /* Allow ourselves to be shut down gracefully by a signal */ 58 | void _signalHandlerShutdown(int sig); 59 | 60 | void usage(char const* progName) { 61 | std::cout << "Usage: " << progName << " ... \n"; 62 | std::cout << "\t(where each is a \"rtsp://\" URL)\n"; 63 | } 64 | 65 | int main(int argc, char** argv) { 66 | pRtsp = new MediaRTSPSession(); 67 | bool option_param = false; 68 | bool isServer = false; 69 | // We need at least one "rtsp://" URL argument: 70 | if (argc < 2) { 71 | usage(argv[0]); 72 | return 1; 73 | } 74 | 75 | while (argc > 2) { 76 | char* const opt = argv[1]; 77 | 78 | if (opt[0] != '-' && !option_param) { 79 | usage(argv[0]); 80 | option_param = false; 81 | } 82 | 83 | switch (opt[1]) { 84 | case 'r': 85 | rtsp_url = argv[2]; 86 | 87 | std::cout << "RTSP Url: " << rtsp_url << "\n"; 88 | 89 | option_param = true; 90 | break; 91 | case 'U': 92 | // specify start port number 93 | bUpStream = true; 94 | option_param = true; 95 | break; 96 | case 'u': 97 | // specify start port number 98 | username = argv[2]; 99 | password = argv[3]; 100 | 101 | std::cout << "Username: " << username << "\n"; 102 | std::cout << "Password: " << password << "\n"; 103 | 104 | option_param = true; 105 | break; 106 | case 'f': 107 | filename = argv[2]; 108 | std::cout << "File Name: " << filename << "\n"; 109 | option_param = true; 110 | break; 111 | case 'i': 112 | bInterleaved = true; 113 | option_param = true; 114 | break; 115 | case 's': 116 | isServer = true; 117 | break; 118 | default: 119 | break; 120 | } 121 | ++argv; 122 | --argc; 123 | } 124 | 125 | /* Allow ourselves to be shut down gracefully by a signal */ 126 | signal(SIGTERM, _signalHandlerShutdown); 127 | signal(SIGINT, _signalHandlerShutdown); 128 | signal(SIGSEGV, _signalHandlerShutdown); 129 | 130 | if (!isServer) { 131 | pRtsp->setDebugLevel(1); 132 | if (pRtsp->startRTSPClient("MyClient", rtsp_url, username, password, bInterleaved, bUpStream)) 133 | { 134 | delete pRtsp; 135 | pRtsp = NULL; 136 | 137 | return -1; 138 | } 139 | } else { 140 | //H264ReadCameraEncoder* enc = H264ReadCameraEncoder::createNew(env); 141 | //H264ReadScreenEncoder enc; 142 | pRtsp->setDebugLevel(1); 143 | if (pRtsp->startRTSPServer()) 144 | { 145 | delete pRtsp; 146 | pRtsp = NULL; 147 | 148 | return -1; 149 | } 150 | 151 | } 152 | while (true) { 153 | #ifdef WIN32 154 | Sleep(5000); // wait for 5 secondes before 
closing 155 | #else 156 | sleep(5); // wait for 5 seconds before closing (POSIX sleep() takes seconds, not milliseconds) 157 | #endif 158 | } 159 | 160 | return 0; 161 | } 162 | 163 | void _signalHandlerShutdown(int sig) 164 | { 165 | log_error("%s: Got signal %d, program exits!", __FUNCTION__, sig); 166 | 167 | pRtsp->stopRTSPClient(); 168 | delete pRtsp; 169 | pRtsp = NULL; 170 | } -------------------------------------------------------------------------------- /test.264: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melchi45/rtspclient_with_opengl/593eb17f7549279995a870d740ee2daed23c94a4/test.264 -------------------------------------------------------------------------------- /test/testDecoder.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | 3 | BASIC GLFW + GLXW WINDOW AND OPENGL SETUP 4 | ------------------------------------------ 5 | See https://gist.github.com/roxlu/6698180 for the latest version of the example. 6 | 7 | */ 8 | #include 9 | #include 10 | 11 | #include "H264Decoder.h" 12 | H264_Decoder* decoder_ptr = NULL; 13 | bool playback_initialized = false; 14 | 15 | void frame_callback(AVCodecContext *pCodecCtx, AVFrame* frame, AVPacket* pkt, int framecount, void* user); 16 | void yuv420p_save(AVFrame *pFrame, int width, int height, int iFrame); 17 | 18 | int main() { 19 | // ---------------------------------------------------------------- 20 | // THIS IS WHERE YOU START CALLING OPENGL FUNCTIONS, NOT EARLIER!! 21 | // ---------------------------------------------------------------- 22 | H264_Decoder decoder(frame_callback, NULL); 23 | decoder_ptr = &decoder; 24 | 25 | if (!decoder.load("test.264", 30.0f)) { 26 | ::exit(EXIT_FAILURE); 27 | } 28 | 29 | while (!decoder.readFrame()) { 30 | 31 | } 32 | 33 | return EXIT_SUCCESS; 34 | } 35 | 36 | void frame_callback(AVCodecContext *pCodecCtx, AVFrame* frame, AVPacket* pkt, int framecount, void* user) { 37 | yuv420p_save(frame, frame->width, frame->height, framecount); 38 | } 39 | 40 | void yuv420p_save(AVFrame *pFrame, int width, int height, int iFrame) 41 | { 42 | int i = 0; 43 | FILE *pFile; 44 | char szFilename[32]; 45 | 46 | int height_half = height / 2, width_half = width / 2; 47 | int y_wrap = pFrame->linesize[0]; 48 | int u_wrap = pFrame->linesize[1]; 49 | int v_wrap = pFrame->linesize[2]; 50 | 51 | unsigned char *y_buf = pFrame->data[0]; 52 | unsigned char *u_buf = pFrame->data[1]; 53 | unsigned char *v_buf = pFrame->data[2]; 54 | sprintf(szFilename, "frame%d.jpg", iFrame); 55 | pFile = fopen(szFilename, "wb"); 56 | 57 | //save y (loop bodies reconstructed: assumed row-wise fwrite of each plane, using its linesize as the stride) 58 | for (i = 0; i < height; i++) 59 | fwrite(y_buf + i * y_wrap, 1, width, pFile); 60 | printf("save Y success\n"); 61 | 62 | //save u 63 | for (i = 0; i < height_half; i++) 64 | fwrite(u_buf + i * u_wrap, 1, width_half, pFile); 65 | printf("save U success\n"); 66 | 67 | //save v 68 | for (i = 0; i < height_half; i++) 69 | fwrite(v_buf + i * v_wrap, 1, width_half, pFile); 70 | printf("save V success\n"); 71 | 72 | fflush(pFile); 73 | fclose(pFile); 74 | } --------------------------------------------------------------------------------
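As a closing note, the same decoder callback can feed the OpenGL path instead of dumping planes to disk. A rough sketch using the repository's own types (`H264_Decoder` from H264Decoder.h, `YUV420P_Player` from YUV420P_Player.h); window creation, the render loop and buffer swapping are assumed to exist around it, and the global names below are illustrative only:

```cpp
// Sketch only: render decoded frames with YUV420P_Player instead of yuv420p_save().
static YUV420P_Player g_player;          // assumes a current GL context before first use
static bool g_player_ready = false;

void frame_callback(AVCodecContext* pCodecCtx, AVFrame* frame, AVPacket* pkt,
                    int framecount, void* user) {
    if (!g_player_ready) {
        g_player_ready = g_player.setup(frame->width, frame->height);
        if (!g_player_ready) return;     // setup failed; skip rendering
    }
    g_player.setYPixels(frame->data[0], frame->linesize[0]);
    g_player.setUPixels(frame->data[1], frame->linesize[1]);
    g_player.setVPixels(frame->data[2], frame->linesize[2]);
    g_player.draw(0, 0);                 // the surrounding GLFW loop swaps buffers
}
```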