├── .gitignore ├── README.md ├── TestPlayWithFFMPEGAndSDL.xcodeproj └── project.pbxproj └── TestPlayWithFFMPEGAndSDL ├── FSFFPLAYVideoPlayAppDelegate.h ├── FSFFPLAYVideoPlayAppDelegate.m ├── FSFFPLAYViewController.h ├── FSFFPLAYViewController.m ├── FSFFPLAYViewController.xib ├── FSVideoPlayViewController.h ├── FSVideoPlayViewController.m ├── KxMovieDecoder.h ├── KxMovieDecoder.m ├── KxMovieGLView.h ├── KxMovieGLView.m ├── TestPlayWithFFMPEGAndSDL-Info.plist ├── TestPlayWithFFMPEGAndSDL-Prefix.pch ├── en.lproj ├── FSVideoPlayViewController.xib └── InfoPlist.strings ├── ffmpegclasses ├── cmdutils.c └── cmdutils.h ├── libs ├── libSDL.a ├── libavcodec.a ├── libavdevice.a ├── libavfilter.a ├── libavformat.a ├── libavutil.a ├── libswresample.a └── libswscale.a └── main.m /.gitignore: -------------------------------------------------------------------------------- 1 | # Xcode 2 | .DS_Store 3 | build/ 4 | *.pbxuser 5 | !default.pbxuser 6 | *.mode1v3 7 | !default.mode1v3 8 | *.mode2v3 9 | !default.mode2v3 10 | *.perspectivev3 11 | !default.perspectivev3 12 | *.xcworkspace 13 | !default.xcworkspace 14 | xcuserdata 15 | profile 16 | *.moved-aside 17 | DerivedData 18 | .idea/ 19 | includes/ 20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | player 2 | ====== 3 | 4 | Uses ffmpeg for decoding, OpenAL for audio playback, and OpenGL or QuartzCore for video playback. 5 | 6 | Step 1: Port ffplay to iOS (done. Because the ffmpeg and SDL directories were too large, they have been removed from the repository; please download the latest ffmpeg and SDL, build them, and put the results in the includes directory. Use SDL 1.3.0.) 7 | SDL: http://www.sdltutorials.com/sdl-13 8 | ffmpeg: http://ffmpeg.org 9 | 10 | Step 2: Replace the SDL video output in ffplay with OpenGL ES or frame animation (done) 11 | 12 | 13 | Step 3: Optimization (currently working on HD video playback issues) 14 | 15 | 16 | Step 4: Rebuild a complete player 17 | 18 | Development discussion QQ group: 181515226 -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$!
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 46; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 6229DFFC1726698B000A7E19 /* 2.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = 6229DFFB1726698B000A7E19 /* 2.mp4 */; }; 11 | 62301AE9171E79540017AE20 /* 1.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = 62301AE8171E79540017AE20 /* 1.mp4 */; }; 12 | 62301AEB171E7FF30017AE20 /* libavcodec.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 62301AEA171E7FF30017AE20 /* libavcodec.a */; }; 13 | 62301AED171E7FFE0017AE20 /* libavdevice.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 62301AEC171E7FFE0017AE20 /* libavdevice.a */; }; 14 | 62301AEF171E802D0017AE20 /* libavformat.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 62301AEE171E802D0017AE20 /* libavformat.a */; }; 15 | 62301AF1171E80600017AE20 /* libavutil.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 62301AF0171E80600017AE20 /* libavutil.a */; }; 16 | 62301AF3171E806C0017AE20 /* libswresample.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 62301AF2171E806C0017AE20 /* libswresample.a */; }; 17 | 62301AF5171E80780017AE20 /* libswscale.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 62301AF4171E80780017AE20 /* libswscale.a */; }; 18 | 62301AF7171E80D60017AE20 /* libavfilter.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 62301AF6171E80D60017AE20 /* libavfilter.a */; }; 19 | 6251FF491715296D00AED4EA /* KxMovieGLView.m in Sources */ = {isa = PBXBuildFile; fileRef = 6251FF481715296D00AED4EA /* KxMovieGLView.m */; }; 20 | 6251FF4C17152D8800AED4EA /* KxMovieDecoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 6251FF4B17152D8800AED4EA /* KxMovieDecoder.m */; }; 21 | 62B259D2167AD2AD00594873 /* FSFFPLAYViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 62B259D0167AD2AD00594873 /* FSFFPLAYViewController.m */; }; 22 | 62B25A11167B1D6B00594873 /* FSFFPLAYVideoPlayAppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 62B25A0E167B1D6B00594873 /* FSFFPLAYVideoPlayAppDelegate.m */; }; 23 | 62C3EF6216795CCD00F416E6 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 62C3EF6116795CCD00F416E6 /* UIKit.framework */; }; 24 | 62C3EF6416795CCD00F416E6 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 62C3EF6316795CCD00F416E6 /* Foundation.framework */; }; 25 | 62C3EF6616795CCD00F416E6 /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 62C3EF6516795CCD00F416E6 /* CoreGraphics.framework */; }; 26 | 62C3EF6C16795CCD00F416E6 /* InfoPlist.strings in Resources */ = {isa = PBXBuildFile; fileRef = 62C3EF6A16795CCD00F416E6 /* InfoPlist.strings */; }; 27 | 62C3EF6E16795CCD00F416E6 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 62C3EF6D16795CCD00F416E6 /* main.m */; }; 28 | 62C3EF8C16795DC200F416E6 /* libSDL.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 62C3EF8416795DC200F416E6 /* libSDL.a */; }; 29 | 62C3EF991679601D00F416E6 /* CoreAudio.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 62C3EF971679600800F416E6 /* CoreAudio.framework */; }; 30 | 62C3EF9A1679601D00F416E6 /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 62C3EF9516795FF500F416E6 /* AudioToolbox.framework */; }; 31 | 62C3EF9B1679601D00F416E6 /* OpenGLES.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 62C3EF9316795FEA00F416E6 /* OpenGLES.framework */; }; 32 | 62C3EF9C1679601D00F416E6 /* libbz2.1.0.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 62C3EF9116795FE000F416E6 /* 
libbz2.1.0.dylib */; }; 33 | 62C3EF9D1679601D00F416E6 /* libz.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 62C3EF8F16795FD800F416E6 /* libz.dylib */; }; 34 | 62C3EFA0167969EF00F416E6 /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 62C3EF9E167969E800F416E6 /* QuartzCore.framework */; }; 35 | 62CC2A9A1682A2B100AC043A /* cmdutils.c in Sources */ = {isa = PBXBuildFile; fileRef = 62CC2A981682A2B100AC043A /* cmdutils.c */; }; 36 | 62CC2A9C1682AF4D00AC043A /* FSFFPLAYViewController.xib in Resources */ = {isa = PBXBuildFile; fileRef = 62CC2A9B1682AF4D00AC043A /* FSFFPLAYViewController.xib */; }; 37 | /* End PBXBuildFile section */ 38 | 39 | /* Begin PBXFileReference section */ 40 | 6229DFFB1726698B000A7E19 /* 2.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; name = 2.mp4; path = ../../../../Resource/presolveVideos/2.mp4; sourceTree = ""; }; 41 | 62301AE8171E79540017AE20 /* 1.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; name = 1.mp4; path = ../../../../Resource/vedios/1.mp4; sourceTree = ""; }; 42 | 62301AEA171E7FF30017AE20 /* libavcodec.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libavcodec.a; path = TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavcodec/libavcodec.a; sourceTree = SOURCE_ROOT; }; 43 | 62301AEC171E7FFE0017AE20 /* libavdevice.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libavdevice.a; path = TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavdevice/libavdevice.a; sourceTree = SOURCE_ROOT; }; 44 | 62301AEE171E802D0017AE20 /* libavformat.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libavformat.a; path = TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavformat/libavformat.a; sourceTree = SOURCE_ROOT; }; 45 | 62301AF0171E80600017AE20 /* libavutil.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libavutil.a; path = TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavutil/libavutil.a; sourceTree = SOURCE_ROOT; }; 46 | 62301AF2171E806C0017AE20 /* libswresample.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libswresample.a; path = TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libswresample/libswresample.a; sourceTree = SOURCE_ROOT; }; 47 | 62301AF4171E80780017AE20 /* libswscale.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libswscale.a; path = TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libswscale/libswscale.a; sourceTree = SOURCE_ROOT; }; 48 | 62301AF6171E80D60017AE20 /* libavfilter.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libavfilter.a; path = TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavfilter/libavfilter.a; sourceTree = SOURCE_ROOT; }; 49 | 6251FF471715296D00AED4EA /* KxMovieGLView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = KxMovieGLView.h; sourceTree = ""; }; 50 | 6251FF481715296D00AED4EA /* KxMovieGLView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = KxMovieGLView.m; sourceTree = ""; }; 51 | 6251FF4A17152D8800AED4EA /* KxMovieDecoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = KxMovieDecoder.h; sourceTree = ""; }; 52 | 6251FF4B17152D8800AED4EA /* KxMovieDecoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = KxMovieDecoder.m; sourceTree = ""; }; 53 | 62B259CF167AD2AD00594873 /* FSFFPLAYViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = 
sourcecode.c.h; path = FSFFPLAYViewController.h; sourceTree = ""; }; 54 | 62B259D0167AD2AD00594873 /* FSFFPLAYViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = FSFFPLAYViewController.m; sourceTree = ""; }; 55 | 62B25A0D167B1D6B00594873 /* FSFFPLAYVideoPlayAppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FSFFPLAYVideoPlayAppDelegate.h; sourceTree = ""; }; 56 | 62B25A0E167B1D6B00594873 /* FSFFPLAYVideoPlayAppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = FSFFPLAYVideoPlayAppDelegate.m; sourceTree = ""; }; 57 | 62C3EF5D16795CCD00F416E6 /* TestPlayWithFFMPEGAndSDL.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = TestPlayWithFFMPEGAndSDL.app; sourceTree = BUILT_PRODUCTS_DIR; }; 58 | 62C3EF6116795CCD00F416E6 /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; }; 59 | 62C3EF6316795CCD00F416E6 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; }; 60 | 62C3EF6516795CCD00F416E6 /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; }; 61 | 62C3EF6916795CCD00F416E6 /* TestPlayWithFFMPEGAndSDL-Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = "TestPlayWithFFMPEGAndSDL-Info.plist"; sourceTree = ""; }; 62 | 62C3EF6B16795CCD00F416E6 /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/InfoPlist.strings; sourceTree = ""; }; 63 | 62C3EF6D16795CCD00F416E6 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 64 | 62C3EF6F16795CCD00F416E6 /* TestPlayWithFFMPEGAndSDL-Prefix.pch */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "TestPlayWithFFMPEGAndSDL-Prefix.pch"; sourceTree = ""; }; 65 | 62C3EF8416795DC200F416E6 /* libSDL.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; path = libSDL.a; sourceTree = ""; }; 66 | 62C3EF8F16795FD800F416E6 /* libz.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libz.dylib; path = usr/lib/libz.dylib; sourceTree = SDKROOT; }; 67 | 62C3EF9116795FE000F416E6 /* libbz2.1.0.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libbz2.1.0.dylib; path = usr/lib/libbz2.1.0.dylib; sourceTree = SDKROOT; }; 68 | 62C3EF9316795FEA00F416E6 /* OpenGLES.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = OpenGLES.framework; path = System/Library/Frameworks/OpenGLES.framework; sourceTree = SDKROOT; }; 69 | 62C3EF9516795FF500F416E6 /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = System/Library/Frameworks/AudioToolbox.framework; sourceTree = SDKROOT; }; 70 | 62C3EF971679600800F416E6 /* CoreAudio.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreAudio.framework; path = System/Library/Frameworks/CoreAudio.framework; sourceTree = SDKROOT; }; 71 | 
62C3EF9E167969E800F416E6 /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = System/Library/Frameworks/QuartzCore.framework; sourceTree = SDKROOT; }; 72 | 62CC2A981682A2B100AC043A /* cmdutils.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = cmdutils.c; sourceTree = ""; }; 73 | 62CC2A991682A2B100AC043A /* cmdutils.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = cmdutils.h; sourceTree = ""; }; 74 | 62CC2A9B1682AF4D00AC043A /* FSFFPLAYViewController.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = FSFFPLAYViewController.xib; sourceTree = ""; }; 75 | /* End PBXFileReference section */ 76 | 77 | /* Begin PBXFrameworksBuildPhase section */ 78 | 62C3EF5A16795CCD00F416E6 /* Frameworks */ = { 79 | isa = PBXFrameworksBuildPhase; 80 | buildActionMask = 2147483647; 81 | files = ( 82 | 62C3EF8C16795DC200F416E6 /* libSDL.a in Frameworks */, 83 | 62C3EF991679601D00F416E6 /* CoreAudio.framework in Frameworks */, 84 | 62C3EF9A1679601D00F416E6 /* AudioToolbox.framework in Frameworks */, 85 | 62C3EF9B1679601D00F416E6 /* OpenGLES.framework in Frameworks */, 86 | 62C3EF9C1679601D00F416E6 /* libbz2.1.0.dylib in Frameworks */, 87 | 62C3EF9D1679601D00F416E6 /* libz.dylib in Frameworks */, 88 | 62C3EF6216795CCD00F416E6 /* UIKit.framework in Frameworks */, 89 | 62C3EF6416795CCD00F416E6 /* Foundation.framework in Frameworks */, 90 | 62C3EF6616795CCD00F416E6 /* CoreGraphics.framework in Frameworks */, 91 | 62C3EFA0167969EF00F416E6 /* QuartzCore.framework in Frameworks */, 92 | 62301AEB171E7FF30017AE20 /* libavcodec.a in Frameworks */, 93 | 62301AED171E7FFE0017AE20 /* libavdevice.a in Frameworks */, 94 | 62301AEF171E802D0017AE20 /* libavformat.a in Frameworks */, 95 | 62301AF1171E80600017AE20 /* libavutil.a in Frameworks */, 96 | 62301AF3171E806C0017AE20 /* libswresample.a in Frameworks */, 97 | 62301AF5171E80780017AE20 /* libswscale.a in Frameworks */, 98 | 62301AF7171E80D60017AE20 /* libavfilter.a in Frameworks */, 99 | ); 100 | runOnlyForDeploymentPostprocessing = 0; 101 | }; 102 | /* End PBXFrameworksBuildPhase section */ 103 | 104 | /* Begin PBXGroup section */ 105 | 62B259D5167B002300594873 /* ffmpegclasses */ = { 106 | isa = PBXGroup; 107 | children = ( 108 | 62CC2A981682A2B100AC043A /* cmdutils.c */, 109 | 62CC2A991682A2B100AC043A /* cmdutils.h */, 110 | ); 111 | path = ffmpegclasses; 112 | sourceTree = ""; 113 | }; 114 | 62C3EF5216795CCC00F416E6 = { 115 | isa = PBXGroup; 116 | children = ( 117 | 62C3EF6716795CCD00F416E6 /* TestPlayWithFFMPEGAndSDL */, 118 | 62C3EF6016795CCD00F416E6 /* Frameworks */, 119 | 62C3EF5E16795CCD00F416E6 /* Products */, 120 | ); 121 | sourceTree = ""; 122 | }; 123 | 62C3EF5E16795CCD00F416E6 /* Products */ = { 124 | isa = PBXGroup; 125 | children = ( 126 | 62C3EF5D16795CCD00F416E6 /* TestPlayWithFFMPEGAndSDL.app */, 127 | ); 128 | name = Products; 129 | sourceTree = ""; 130 | }; 131 | 62C3EF6016795CCD00F416E6 /* Frameworks */ = { 132 | isa = PBXGroup; 133 | children = ( 134 | 62C3EF9E167969E800F416E6 /* QuartzCore.framework */, 135 | 62C3EF971679600800F416E6 /* CoreAudio.framework */, 136 | 62C3EF9516795FF500F416E6 /* AudioToolbox.framework */, 137 | 62C3EF9316795FEA00F416E6 /* OpenGLES.framework */, 138 | 62C3EF9116795FE000F416E6 /* libbz2.1.0.dylib */, 139 | 62C3EF8F16795FD800F416E6 /* libz.dylib */, 140 | 62C3EF6116795CCD00F416E6 /* UIKit.framework */, 141 | 
62C3EF6316795CCD00F416E6 /* Foundation.framework */, 142 | 62C3EF6516795CCD00F416E6 /* CoreGraphics.framework */, 143 | ); 144 | name = Frameworks; 145 | sourceTree = ""; 146 | }; 147 | 62C3EF6716795CCD00F416E6 /* TestPlayWithFFMPEGAndSDL */ = { 148 | isa = PBXGroup; 149 | children = ( 150 | 62B25A0D167B1D6B00594873 /* FSFFPLAYVideoPlayAppDelegate.h */, 151 | 62B25A0E167B1D6B00594873 /* FSFFPLAYVideoPlayAppDelegate.m */, 152 | 62B259CF167AD2AD00594873 /* FSFFPLAYViewController.h */, 153 | 62B259D0167AD2AD00594873 /* FSFFPLAYViewController.m */, 154 | 62CC2A9B1682AF4D00AC043A /* FSFFPLAYViewController.xib */, 155 | 6251FF471715296D00AED4EA /* KxMovieGLView.h */, 156 | 6251FF481715296D00AED4EA /* KxMovieGLView.m */, 157 | 6251FF4A17152D8800AED4EA /* KxMovieDecoder.h */, 158 | 6251FF4B17152D8800AED4EA /* KxMovieDecoder.m */, 159 | 62B259D5167B002300594873 /* ffmpegclasses */, 160 | 62C3EF7E16795DC200F416E6 /* libs */, 161 | 62C3EF6816795CCD00F416E6 /* Supporting Files */, 162 | ); 163 | path = TestPlayWithFFMPEGAndSDL; 164 | sourceTree = ""; 165 | }; 166 | 62C3EF6816795CCD00F416E6 /* Supporting Files */ = { 167 | isa = PBXGroup; 168 | children = ( 169 | 6229DFFB1726698B000A7E19 /* 2.mp4 */, 170 | 62301AE8171E79540017AE20 /* 1.mp4 */, 171 | 62C3EF6916795CCD00F416E6 /* TestPlayWithFFMPEGAndSDL-Info.plist */, 172 | 62C3EF6A16795CCD00F416E6 /* InfoPlist.strings */, 173 | 62C3EF6D16795CCD00F416E6 /* main.m */, 174 | 62C3EF6F16795CCD00F416E6 /* TestPlayWithFFMPEGAndSDL-Prefix.pch */, 175 | ); 176 | name = "Supporting Files"; 177 | sourceTree = ""; 178 | }; 179 | 62C3EF7E16795DC200F416E6 /* libs */ = { 180 | isa = PBXGroup; 181 | children = ( 182 | 62301AEC171E7FFE0017AE20 /* libavdevice.a */, 183 | 62301AEA171E7FF30017AE20 /* libavcodec.a */, 184 | 62301AEE171E802D0017AE20 /* libavformat.a */, 185 | 62301AF0171E80600017AE20 /* libavutil.a */, 186 | 62301AF2171E806C0017AE20 /* libswresample.a */, 187 | 62301AF4171E80780017AE20 /* libswscale.a */, 188 | 62301AF6171E80D60017AE20 /* libavfilter.a */, 189 | 62C3EF8416795DC200F416E6 /* libSDL.a */, 190 | ); 191 | path = libs; 192 | sourceTree = ""; 193 | }; 194 | /* End PBXGroup section */ 195 | 196 | /* Begin PBXNativeTarget section */ 197 | 62C3EF5C16795CCD00F416E6 /* TestPlayWithFFMPEGAndSDL */ = { 198 | isa = PBXNativeTarget; 199 | buildConfigurationList = 62C3EF7B16795CCD00F416E6 /* Build configuration list for PBXNativeTarget "TestPlayWithFFMPEGAndSDL" */; 200 | buildPhases = ( 201 | 62C3EF5916795CCD00F416E6 /* Sources */, 202 | 62C3EF5A16795CCD00F416E6 /* Frameworks */, 203 | 62C3EF5B16795CCD00F416E6 /* Resources */, 204 | ); 205 | buildRules = ( 206 | ); 207 | dependencies = ( 208 | ); 209 | name = TestPlayWithFFMPEGAndSDL; 210 | productName = TestPlayWithFFMPEGAndSDL; 211 | productReference = 62C3EF5D16795CCD00F416E6 /* TestPlayWithFFMPEGAndSDL.app */; 212 | productType = "com.apple.product-type.application"; 213 | }; 214 | /* End PBXNativeTarget section */ 215 | 216 | /* Begin PBXProject section */ 217 | 62C3EF5416795CCC00F416E6 /* Project object */ = { 218 | isa = PBXProject; 219 | attributes = { 220 | LastUpgradeCheck = 0420; 221 | }; 222 | buildConfigurationList = 62C3EF5716795CCC00F416E6 /* Build configuration list for PBXProject "TestPlayWithFFMPEGAndSDL" */; 223 | compatibilityVersion = "Xcode 3.2"; 224 | developmentRegion = English; 225 | hasScannedForEncodings = 0; 226 | knownRegions = ( 227 | en, 228 | ); 229 | mainGroup = 62C3EF5216795CCC00F416E6; 230 | productRefGroup = 62C3EF5E16795CCD00F416E6 /* Products */; 231 | projectDirPath = 
""; 232 | projectRoot = ""; 233 | targets = ( 234 | 62C3EF5C16795CCD00F416E6 /* TestPlayWithFFMPEGAndSDL */, 235 | ); 236 | }; 237 | /* End PBXProject section */ 238 | 239 | /* Begin PBXResourcesBuildPhase section */ 240 | 62C3EF5B16795CCD00F416E6 /* Resources */ = { 241 | isa = PBXResourcesBuildPhase; 242 | buildActionMask = 2147483647; 243 | files = ( 244 | 62C3EF6C16795CCD00F416E6 /* InfoPlist.strings in Resources */, 245 | 62CC2A9C1682AF4D00AC043A /* FSFFPLAYViewController.xib in Resources */, 246 | 62301AE9171E79540017AE20 /* 1.mp4 in Resources */, 247 | 6229DFFC1726698B000A7E19 /* 2.mp4 in Resources */, 248 | ); 249 | runOnlyForDeploymentPostprocessing = 0; 250 | }; 251 | /* End PBXResourcesBuildPhase section */ 252 | 253 | /* Begin PBXSourcesBuildPhase section */ 254 | 62C3EF5916795CCD00F416E6 /* Sources */ = { 255 | isa = PBXSourcesBuildPhase; 256 | buildActionMask = 2147483647; 257 | files = ( 258 | 62C3EF6E16795CCD00F416E6 /* main.m in Sources */, 259 | 62B259D2167AD2AD00594873 /* FSFFPLAYViewController.m in Sources */, 260 | 62B25A11167B1D6B00594873 /* FSFFPLAYVideoPlayAppDelegate.m in Sources */, 261 | 62CC2A9A1682A2B100AC043A /* cmdutils.c in Sources */, 262 | 6251FF491715296D00AED4EA /* KxMovieGLView.m in Sources */, 263 | 6251FF4C17152D8800AED4EA /* KxMovieDecoder.m in Sources */, 264 | ); 265 | runOnlyForDeploymentPostprocessing = 0; 266 | }; 267 | /* End PBXSourcesBuildPhase section */ 268 | 269 | /* Begin PBXVariantGroup section */ 270 | 62C3EF6A16795CCD00F416E6 /* InfoPlist.strings */ = { 271 | isa = PBXVariantGroup; 272 | children = ( 273 | 62C3EF6B16795CCD00F416E6 /* en */, 274 | ); 275 | name = InfoPlist.strings; 276 | sourceTree = ""; 277 | }; 278 | /* End PBXVariantGroup section */ 279 | 280 | /* Begin XCBuildConfiguration section */ 281 | 62C3EF7916795CCD00F416E6 /* Debug */ = { 282 | isa = XCBuildConfiguration; 283 | buildSettings = { 284 | ALWAYS_SEARCH_USER_PATHS = NO; 285 | ARCHS = "$(ARCHS_STANDARD_32_BIT)"; 286 | CODE_SIGN_IDENTITY = ""; 287 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = ""; 288 | COPY_PHASE_STRIP = NO; 289 | GCC_C_LANGUAGE_STANDARD = gnu99; 290 | GCC_DYNAMIC_NO_PIC = NO; 291 | GCC_OPTIMIZATION_LEVEL = 0; 292 | GCC_PREPROCESSOR_DEFINITIONS = ( 293 | "DEBUG=1", 294 | "$(inherited)", 295 | ); 296 | GCC_SYMBOLS_PRIVATE_EXTERN = NO; 297 | GCC_VERSION = com.apple.compilers.llvm.clang.1_0; 298 | GCC_WARN_ABOUT_MISSING_PROTOTYPES = YES; 299 | GCC_WARN_ABOUT_RETURN_TYPE = YES; 300 | GCC_WARN_UNUSED_VARIABLE = YES; 301 | IPHONEOS_DEPLOYMENT_TARGET = 5.0; 302 | SDKROOT = iphoneos; 303 | }; 304 | name = Debug; 305 | }; 306 | 62C3EF7A16795CCD00F416E6 /* Release */ = { 307 | isa = XCBuildConfiguration; 308 | buildSettings = { 309 | ALWAYS_SEARCH_USER_PATHS = NO; 310 | ARCHS = "$(ARCHS_STANDARD_32_BIT)"; 311 | CODE_SIGN_IDENTITY = ""; 312 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = ""; 313 | COPY_PHASE_STRIP = YES; 314 | GCC_C_LANGUAGE_STANDARD = gnu99; 315 | GCC_VERSION = com.apple.compilers.llvm.clang.1_0; 316 | GCC_WARN_ABOUT_MISSING_PROTOTYPES = YES; 317 | GCC_WARN_ABOUT_RETURN_TYPE = YES; 318 | GCC_WARN_UNUSED_VARIABLE = YES; 319 | IPHONEOS_DEPLOYMENT_TARGET = 5.0; 320 | OTHER_CFLAGS = "-DNS_BLOCK_ASSERTIONS=1"; 321 | SDKROOT = iphoneos; 322 | VALIDATE_PRODUCT = YES; 323 | }; 324 | name = Release; 325 | }; 326 | 62C3EF7C16795CCD00F416E6 /* Debug */ = { 327 | isa = XCBuildConfiguration; 328 | buildSettings = { 329 | ARCHS = armv7; 330 | CODE_SIGN_IDENTITY = ""; 331 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = ""; 332 | GCC_PRECOMPILE_PREFIX_HEADER = YES; 333 | 
GCC_PREFIX_HEADER = "TestPlayWithFFMPEGAndSDL/TestPlayWithFFMPEGAndSDL-Prefix.pch"; 334 | GCC_VERSION = ""; 335 | HEADER_SEARCH_PATHS = ( 336 | "$(inherited)", 337 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg\"", 338 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/SDL/src\"", 339 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/SDL/include\"", 340 | ); 341 | INFOPLIST_FILE = "TestPlayWithFFMPEGAndSDL/TestPlayWithFFMPEGAndSDL-Info.plist"; 342 | IPHONEOS_DEPLOYMENT_TARGET = 5.0; 343 | LIBRARY_SEARCH_PATHS = ( 344 | "$(inherited)", 345 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/libs\"", 346 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavcodec\"", 347 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavdevice\"", 348 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavformat\"", 349 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavutil\"", 350 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libswresample\"", 351 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libswscale\"", 352 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavfilter\"", 353 | ); 354 | PRODUCT_NAME = "$(TARGET_NAME)"; 355 | TARGETED_DEVICE_FAMILY = "1,2"; 356 | VALID_ARCHS = armv7; 357 | WRAPPER_EXTENSION = app; 358 | }; 359 | name = Debug; 360 | }; 361 | 62C3EF7D16795CCD00F416E6 /* Release */ = { 362 | isa = XCBuildConfiguration; 363 | buildSettings = { 364 | ARCHS = armv7; 365 | CODE_SIGN_IDENTITY = ""; 366 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = ""; 367 | GCC_PRECOMPILE_PREFIX_HEADER = YES; 368 | GCC_PREFIX_HEADER = "TestPlayWithFFMPEGAndSDL/TestPlayWithFFMPEGAndSDL-Prefix.pch"; 369 | GCC_VERSION = ""; 370 | HEADER_SEARCH_PATHS = ( 371 | "$(inherited)", 372 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg\"", 373 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/SDL/src\"", 374 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/SDL/include\"", 375 | ); 376 | INFOPLIST_FILE = "TestPlayWithFFMPEGAndSDL/TestPlayWithFFMPEGAndSDL-Info.plist"; 377 | IPHONEOS_DEPLOYMENT_TARGET = 5.0; 378 | LIBRARY_SEARCH_PATHS = ( 379 | "$(inherited)", 380 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/libs\"", 381 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavcodec\"", 382 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavdevice\"", 383 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavformat\"", 384 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavutil\"", 385 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libswresample\"", 386 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libswscale\"", 387 | "\"$(SRCROOT)/TestPlayWithFFMPEGAndSDL/includes/ffmpeg/libavfilter\"", 388 | ); 389 | PRODUCT_NAME = "$(TARGET_NAME)"; 390 | TARGETED_DEVICE_FAMILY = "1,2"; 391 | VALID_ARCHS = armv7; 392 | WRAPPER_EXTENSION = app; 393 | }; 394 | name = Release; 395 | }; 396 | /* End XCBuildConfiguration section */ 397 | 398 | /* Begin XCConfigurationList section */ 399 | 62C3EF5716795CCC00F416E6 /* Build configuration list for PBXProject "TestPlayWithFFMPEGAndSDL" */ = { 400 | isa = XCConfigurationList; 401 | buildConfigurations = ( 402 | 62C3EF7916795CCD00F416E6 /* Debug */, 403 | 62C3EF7A16795CCD00F416E6 /* Release */, 404 | ); 405 | defaultConfigurationIsVisible = 0; 406 | defaultConfigurationName = Release; 407 | }; 408 | 62C3EF7B16795CCD00F416E6 /* Build configuration list for PBXNativeTarget "TestPlayWithFFMPEGAndSDL" */ = { 409 | isa = XCConfigurationList; 410 | buildConfigurations = ( 411 | 
62C3EF7C16795CCD00F416E6 /* Debug */, 412 | 62C3EF7D16795CCD00F416E6 /* Release */, 413 | ); 414 | defaultConfigurationIsVisible = 0; 415 | defaultConfigurationName = Release; 416 | }; 417 | /* End XCConfigurationList section */ 418 | }; 419 | rootObject = 62C3EF5416795CCC00F416E6 /* Project object */; 420 | } 421 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/FSFFPLAYVideoPlayAppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // FSVideoPlayAppDelegate.h 3 | // TestPlayWithFFMPEGAndSDL 4 | // 5 | // Created by on 12-12-13. 6 | // Copyright (c) 2012年 __MyCompanyName__. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | //@class FSVideoPlayViewController; 12 | @class FSFFPLAYViewController; 13 | 14 | @interface FSFFPLAYVideoPlayAppDelegate : UIResponder { 15 | 16 | } 17 | 18 | @property (retain, nonatomic) UIWindow *window; 19 | 20 | //@property (strong, nonatomic) FSVideoPlayViewController *viewController; 21 | @property (retain, nonatomic) FSFFPLAYViewController *fsFFPLAYViewController; 22 | 23 | @end 24 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/FSFFPLAYVideoPlayAppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // FSVideoPlayAppDelegate.m 3 | // TestPlayWithFFMPEGAndSDL 4 | // 5 | // Created by on 12-12-13. 6 | // Copyright (c) 2012年 __MyCompanyName__. All rights reserved. 7 | // 8 | 9 | #import "FSFFPLAYVideoPlayAppDelegate.h" 10 | 11 | //#import "FSVideoPlayViewController.h" 12 | #import "FSFFPLAYViewController.h" 13 | 14 | @implementation FSFFPLAYVideoPlayAppDelegate 15 | 16 | @synthesize window = _window; 17 | //@synthesize viewController = _viewController; 18 | @synthesize fsFFPLAYViewController; 19 | - (void)dealloc 20 | { 21 | [_window release]; 22 | // [_viewController release]; 23 | [fsFFPLAYViewController release]; 24 | [super dealloc]; 25 | } 26 | 27 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions 28 | { 29 | self.window = [[[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]] autorelease]; 30 | // Override point for customization after application launch. 31 | // self.viewController = [[[FSVideoPlayViewController alloc] initWithNibName:@"FSVideoPlayViewController" bundle:nil] autorelease]; 32 | self.fsFFPLAYViewController = [[[FSFFPLAYViewController alloc] initWithNibName:@"FSFFPLAYViewController" bundle:nil] autorelease]; 33 | // self.window.rootViewController = self.viewController; 34 | self.window.rootViewController = self.fsFFPLAYViewController; 35 | 36 | [self.window makeKeyAndVisible]; 37 | return YES; 38 | } 39 | 40 | - (void)applicationWillResignActive:(UIApplication *)application 41 | { 42 | /* 43 | Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 44 | Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game. 
45 | */ 46 | } 47 | 48 | - (void)applicationDidEnterBackground:(UIApplication *)application 49 | { 50 | /* 51 | Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 52 | If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 53 | */ 54 | } 55 | 56 | - (void)applicationWillEnterForeground:(UIApplication *)application 57 | { 58 | /* 59 | Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background. 60 | */ 61 | } 62 | 63 | - (void)applicationDidBecomeActive:(UIApplication *)application 64 | { 65 | /* 66 | Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 67 | */ 68 | } 69 | 70 | - (void)applicationWillTerminate:(UIApplication *)application 71 | { 72 | /* 73 | Called when the application is about to terminate. 74 | Save data if appropriate. 75 | See also applicationDidEnterBackground:. 76 | */ 77 | } 78 | 79 | @end 80 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/FSFFPLAYViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // FSFFPLAYViewController.h 3 | // TestPlayWithFFMPEGAndSDL 4 | // 参考ffplay.c文件改写 5 | // Created by on 12-12-14. 6 | // Copyright (c) 2012年 __MyCompanyName__. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | #include "config.h" 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include "libavutil/avstring.h" 17 | #include "libavutil/colorspace.h" 18 | #include "libavutil/mathematics.h" 19 | #include "libavutil/pixdesc.h" 20 | #include "libavutil/imgutils.h" 21 | #include "libavutil/dict.h" 22 | #include "libavutil/parseutils.h" 23 | #include "libavutil/samplefmt.h" 24 | #include "libavutil/avassert.h" 25 | #include "libavutil/time.h" 26 | #include "libavformat/avformat.h" 27 | #include "libavdevice/avdevice.h" 28 | #include "libswscale/swscale.h" 29 | #include "libavutil/opt.h" 30 | #include "libavcodec/avfft.h" 31 | #include "libswresample/swresample.h" 32 | 33 | #include 34 | #include 35 | 36 | #include "cmdutils.h" 37 | 38 | #include 39 | 40 | #import "KxMovieGLView.h" 41 | #import "KxMovieDecoder.h" 42 | 43 | #import 44 | 45 | 46 | #define MAX_QUEUE_SIZE (15 * 1024 * 1024) 47 | #define MIN_FRAMES 5 48 | 49 | /* SDL audio buffer size, in samples. Should be small to have precise 50 | A/V sync as SDL does not have hardware buffer fullness info. 
*/ 51 | #define SDL_AUDIO_BUFFER_SIZE 1024 52 | 53 | /* no AV sync correction is done if below the AV sync threshold */ 54 | #define AV_SYNC_THRESHOLD 0.01 55 | /* no AV correction is done if too big error */ 56 | #define AV_NOSYNC_THRESHOLD 10.0 57 | 58 | /* maximum audio speed change to get correct sync */ 59 | #define SAMPLE_CORRECTION_PERCENT_MAX 10 60 | 61 | /* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */ 62 | #define AUDIO_DIFF_AVG_NB 20 63 | 64 | /* NOTE: the size must be big enough to compensate the hardware audio buffersize size */ 65 | /* TODO: We assume that a decoded and resampled frame fits into this buffer */ 66 | #define SAMPLE_ARRAY_SIZE (8 * 65536) 67 | 68 | typedef struct PacketQueue { 69 | AVPacketList *first_pkt, *last_pkt; 70 | int nb_packets; 71 | int size; 72 | int abort_request; 73 | SDL_mutex *mutex; 74 | SDL_cond *cond; 75 | } PacketQueue; 76 | 77 | #define VIDEO_PICTURE_QUEUE_SIZE 4 78 | #define SUBPICTURE_QUEUE_SIZE 4 79 | 80 | typedef struct VideoPicture { 81 | double pts; ///< presentation time stamp for this picture 82 | int64_t pos; ///< byte position in file 83 | int skip; 84 | SDL_Overlay *bmp; 85 | int width, height; /* source height & width */ 86 | AVRational sample_aspect_ratio; 87 | int allocated; 88 | int reallocate; 89 | 90 | #if CONFIG_AVFILTER 91 | AVFilterBufferRef *picref; 92 | #endif 93 | } VideoPicture; 94 | 95 | typedef struct SubPicture { 96 | double pts; /* presentation time stamp for this picture */ 97 | AVSubtitle sub; 98 | } SubPicture; 99 | 100 | typedef struct AudioParams { 101 | int freq; 102 | int channels; 103 | int channel_layout; 104 | enum AVSampleFormat fmt; 105 | } AudioParams; 106 | 107 | enum { 108 | AV_SYNC_AUDIO_MASTER, /* default choice */ 109 | AV_SYNC_VIDEO_MASTER, 110 | AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */ 111 | }; 112 | 113 | typedef struct VideoState { 114 | SDL_Thread *read_tid; 115 | SDL_Thread *video_tid; 116 | SDL_Thread *refresh_tid; 117 | AVInputFormat *iformat; 118 | int no_background; 119 | int abort_request; 120 | int force_refresh; 121 | int paused; 122 | int last_paused; 123 | int que_attachments_req; 124 | int seek_req; 125 | int seek_flags; 126 | int64_t seek_pos; 127 | int64_t seek_rel; 128 | int read_pause_return; 129 | AVFormatContext *ic; 130 | 131 | int audio_stream; 132 | 133 | int av_sync_type; 134 | double external_clock; /* external clock base */ 135 | int64_t external_clock_time; 136 | 137 | double audio_clock; 138 | double audio_diff_cum; /* used for AV difference average computation */ 139 | double audio_diff_avg_coef; 140 | double audio_diff_threshold; 141 | int audio_diff_avg_count; 142 | AVStream *audio_st; 143 | PacketQueue audioq; 144 | int audio_hw_buf_size; 145 | DECLARE_ALIGNED(16,uint8_t,audio_buf2)[AVCODEC_MAX_AUDIO_FRAME_SIZE * 4]; 146 | uint8_t silence_buf[SDL_AUDIO_BUFFER_SIZE]; 147 | uint8_t *audio_buf; 148 | uint8_t *audio_buf1; 149 | unsigned int audio_buf_size; /* in bytes */ 150 | int audio_buf_index; /* in bytes */ 151 | int audio_write_buf_size; 152 | AVPacket audio_pkt_temp; 153 | AVPacket audio_pkt; 154 | struct AudioParams audio_src; 155 | struct AudioParams audio_tgt; 156 | struct SwrContext *swr_ctx; 157 | double audio_current_pts; 158 | double audio_current_pts_drift; 159 | int frame_drops_early; 160 | int frame_drops_late; 161 | AVFrame *frame; 162 | AVFrame *videoFrame;//视频frame 163 | 164 | enum ShowMode { 165 | SHOW_MODE_NONE = -1, SHOW_MODE_VIDEO = 0, SHOW_MODE_WAVES, SHOW_MODE_RDFT, SHOW_MODE_NB 166 | } show_mode; 
167 | int16_t sample_array[SAMPLE_ARRAY_SIZE]; 168 | int sample_array_index; 169 | int last_i_start; 170 | RDFTContext *rdft; 171 | int rdft_bits; 172 | FFTSample *rdft_data; 173 | int xpos; 174 | 175 | SDL_Thread *subtitle_tid; 176 | int subtitle_stream; 177 | int subtitle_stream_changed; 178 | AVStream *subtitle_st; 179 | PacketQueue subtitleq; 180 | SubPicture subpq[SUBPICTURE_QUEUE_SIZE]; 181 | int subpq_size, subpq_rindex, subpq_windex; 182 | SDL_mutex *subpq_mutex; 183 | SDL_cond *subpq_cond; 184 | 185 | double frame_timer; 186 | double frame_last_pts; 187 | double frame_last_duration; 188 | double frame_last_dropped_pts; 189 | double frame_last_returned_time; 190 | double frame_last_filter_delay; 191 | int64_t frame_last_dropped_pos; 192 | double video_clock; ///< pts of last decoded frame / predicted pts of next decoded frame 193 | int video_stream; 194 | AVStream *video_st; 195 | PacketQueue videoq; 196 | double video_current_pts; ///< current displayed pts (different from video_clock if frame fifos are used) 197 | double video_current_pts_drift; ///< video_current_pts - time (av_gettime) at which we updated video_current_pts - used to have running video pts 198 | int64_t video_current_pos; ///< current displayed file pos 199 | VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE]; 200 | int pictq_size, pictq_rindex, pictq_windex; 201 | SDL_mutex *pictq_mutex; 202 | SDL_cond *pictq_cond; 203 | #if !CONFIG_AVFILTER 204 | struct SwsContext *img_convert_ctx; 205 | #endif 206 | 207 | char filename[1024]; 208 | int width, height, xleft, ytop; 209 | int step; 210 | 211 | #if CONFIG_AVFILTER 212 | AVFilterContext *in_video_filter; ///< the first filter in the video chain 213 | AVFilterContext *out_video_filter; ///< the last filter in the video chain 214 | int use_dr1; 215 | FrameBuffer *buffer_pool; 216 | #endif 217 | 218 | int refresh; 219 | int last_video_stream, last_audio_stream, last_subtitle_stream; 220 | 221 | SDL_cond *continue_read_thread; 222 | } VideoState; 223 | 224 | //playback error types 225 | typedef enum { 226 | VideoPlayErrorTypeInput, 227 | VideoPlayErrorTypeSDLError, 228 | VideoPlayErrorTypeInitError, 229 | VideoPlayErrorTypeHDError 230 | } VideoPlayErrorType; 231 | 232 | //playback state 233 | typedef enum { 234 | VideoPlayStatePlaying = 10000, 235 | VideoPlayStatePause, 236 | VideoPlayStateStop 237 | } VideoPlayState; 238 | 239 | //HD (high-definition) threshold 240 | #define HDWIDTHHEIGHTTOTAL 600000 // This is roughly between 480p and 720p 241 | 242 | //the player 243 | @interface FSFFPLAYViewController : UIViewController { 244 | //view that displays the video 245 | KxMovieGLView *_glView; 246 | 247 | IBOutlet UIView *showVideoView; 248 | IBOutlet UIView *controlView; 249 | 250 | VideoPlayState videoPlayState; 251 | } 252 | 253 | 254 | @property (nonatomic, assign) VideoPlayState videoPlayState; 255 | 256 | //start playback 257 | - (void)startPlayWithURLString:(NSString *)playURLString; 258 | 259 | - (UIView *) frameView; 260 | 261 | - (void)showVideo; 262 | 263 | //playback controls 264 | - (void)start; 265 | 266 | - (void)pause; 267 | 268 | - (void)stop; 269 | 270 | //stop playback with an error reason 271 | - (void)stopWithError:(VideoPlayErrorType)errotType andError:(NSError *)error; 272 | 273 | - (void)seekWithTime:(int)time; 274 | 275 | //UI control actions 276 | - (IBAction)playAction:(id)sender; 277 | 278 | - (IBAction)pausePlayAction:(id)sender; 279 | 280 | - (IBAction)stopPlayAction:(id)sender; 281 | 282 | #pragma mark - 283 | #pragma mark 284 | 285 | void av_noreturn exit_program(int ret); 286 | static int packet_queue_put_private(PacketQueue *q, AVPacket *pkt); 287 | static int
packet_queue_put(PacketQueue *q, AVPacket *pkt); 288 | static void packet_queue_init(PacketQueue *q); 289 | static void packet_queue_flush(PacketQueue *q); 290 | static void packet_queue_destroy(PacketQueue *q); 291 | static void packet_queue_abort(PacketQueue *q); 292 | static void packet_queue_start(PacketQueue *q); 293 | static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block); 294 | 295 | static void free_subpicture(SubPicture *sp); 296 | 297 | static void stream_close(VideoState *is); 298 | static void do_exit(VideoState *is); 299 | static void sigterm_handler(int sig); 300 | static void video_display(VideoState *is); 301 | static int refresh_thread(void *opaque); 302 | static double get_audio_clock(VideoState *is); 303 | static double get_video_clock(VideoState *is); 304 | static double get_external_clock(VideoState *is); 305 | static double get_master_clock(VideoState *is); 306 | static void stream_seek(VideoState *is, int64_t pos, int64_t rel, int seek_by_bytes); 307 | static void stream_toggle_pause(VideoState *is); 308 | static double compute_target_delay(double delay, VideoState *is); 309 | static void pictq_next_picture(VideoState *is); 310 | static void pictq_prev_picture(VideoState *is); 311 | static void update_video_pts(VideoState *is, double pts, int64_t pos); 312 | static void video_refresh(void *opaque); 313 | static void alloc_picture(VideoState *is); 314 | static int queue_picture(VideoState *is, AVFrame *src_frame, double pts1, int64_t pos); 315 | static int get_video_frame(VideoState *is, AVFrame *frame, int64_t *pts, AVPacket *pkt); 316 | static int video_thread(void *arg); 317 | static int subtitle_thread(void *arg); 318 | static void update_sample_display(VideoState *is, short *samples, int samples_size); 319 | static int synchronize_audio(VideoState *is, int nb_samples); 320 | static int audio_decode_frame(VideoState *is, double *pts_ptr); 321 | static void sdl_audio_callback(void *opaque, Uint8 *stream, int len); 322 | static int audio_open(void *opaque, int64_t wanted_channel_layout, int wanted_nb_channels, int wanted_sample_rate, struct AudioParams *audio_hw_params); 323 | static int stream_component_open(VideoState *is, int stream_index); 324 | static void stream_component_close(VideoState *is, int stream_index); 325 | static int decode_interrupt_cb(void *ctx); 326 | static int read_thread(void *arg) 327 | ; 328 | static VideoState *stream_open(const char *filename, AVInputFormat *iformat); 329 | static void stream_cycle_channel(VideoState *is, int codec_type); 330 | //static void toggle_full_screen(VideoState *is); 331 | static void toggle_pause(VideoState *is); 332 | static void step_to_next_frame(VideoState *is); 333 | static void toggle_audio_display(VideoState *is); 334 | //static void event_loop(VideoState *cur_stream); 335 | static int opt_frame_size(void *optctx, const char *opt, const char *arg); 336 | static int opt_width(void *optctx, const char *opt, const char *arg); 337 | static int opt_height(void *optctx, const char *opt, const char *arg); 338 | static int opt_format(void *optctx, const char *opt, const char *arg); 339 | static int opt_frame_pix_fmt(void *optctx, const char *opt, const char *arg); 340 | static int opt_sync(void *optctx, const char *opt, const char *arg); 341 | static int opt_seek(void *optctx, const char *opt, const char *arg); 342 | static int opt_duration(void *optctx, const char *opt, const char *arg); 343 | static int opt_show_mode(void *optctx, const char *opt, const char *arg); 344 | static void 
opt_input_file(void *optctx, const char *filename); 345 | static int opt_codec(void *o, const char *opt, const char *arg); 346 | static void show_usage(void); 347 | void show_help_default(const char *opt, const char *arg); 348 | static int lockmgr(void **mtx, enum AVLockOp op); 349 | 350 | @end 351 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/FSFFPLAYViewController.xib: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 1552 5 | 12D78 6 | 3084 7 | 1187.37 8 | 626.00 9 | 10 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 11 | 2083 12 | 13 | 14 | IBProxyObject 15 | IBUIButton 16 | IBUIView 17 | 18 | 19 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 20 | 21 | 22 | PluginDependencyRecalculationVersion 23 | 24 | 25 | 26 | 27 | IBFilesOwner 28 | IBCocoaTouchFramework 29 | 30 | 31 | IBFirstResponder 32 | IBCocoaTouchFramework 33 | 34 | 35 | 36 | 274 37 | 38 | 39 | 40 | 274 41 | {1024, 748} 42 | 43 | 44 | 45 | _NS:9 46 | 47 | 3 48 | MCAwAA 49 | 50 | IBCocoaTouchFramework 51 | 52 | 53 | 54 | 274 55 | 56 | 57 | 58 | 269 59 | {{115, 685}, {73, 44}} 60 | 61 | 62 | 63 | _NS:9 64 | NO 65 | IBCocoaTouchFramework 66 | 0 67 | 0 68 | 1 69 | Pause 70 | 71 | 3 72 | MQA 73 | 74 | 75 | 1 76 | MC4xOTYwNzg0MzQ2IDAuMzA5ODAzOTMyOSAwLjUyMTU2ODY1NgA 77 | 78 | 79 | 3 80 | MC41AA 81 | 82 | 83 | 2 84 | 15 85 | 86 | 87 | Helvetica-Bold 88 | 15 89 | 16 90 | 91 | 92 | 93 | 94 | 269 95 | {{835, 685}, {73, 44}} 96 | 97 | 98 | _NS:9 99 | NO 100 | IBCocoaTouchFramework 101 | 0 102 | 0 103 | 1 104 | Stop 105 | 106 | 107 | 1 108 | MC4xOTYwNzg0MzQ2IDAuMzA5ODAzOTMyOSAwLjUyMTU2ODY1NgA 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 269 117 | {{486, 685}, {55, 44}} 118 | 119 | 120 | 121 | _NS:9 122 | NO 123 | IBCocoaTouchFramework 124 | 0 125 | 0 126 | 1 127 | Play 128 | 129 | 130 | 1 131 | MC4xOTYwNzg0MzQ2IDAuMzA5ODAzOTMyOSAwLjUyMTU2ODY1NgA 132 | 133 | 134 | 135 | 136 | 137 | 138 | {1024, 748} 139 | 140 | 141 | 142 | _NS:9 143 | 144 | IBCocoaTouchFramework 145 | 146 | 147 | {1024, 748} 148 | 149 | 150 | 151 | 152 | 3 153 | MAA 154 | 155 | IBCocoaTouchFramework 156 | 157 | 158 | 159 | 160 | 161 | 162 | view 163 | 164 | 165 | 166 | 3 167 | 168 | 169 | 170 | showVideoView 171 | 172 | 173 | 174 | 8 175 | 176 | 177 | 178 | controlView 179 | 180 | 181 | 182 | 9 183 | 184 | 185 | 186 | pausePlayAction: 187 | 188 | 189 | 7 190 | 191 | 11 192 | 193 | 194 | 195 | stopPlayAction: 196 | 197 | 198 | 7 199 | 200 | 13 201 | 202 | 203 | 204 | playAction: 205 | 206 | 207 | 7 208 | 209 | 15 210 | 211 | 212 | 213 | 214 | 215 | 0 216 | 217 | 218 | 219 | 220 | 221 | -1 222 | 223 | 224 | File's Owner 225 | 226 | 227 | -2 228 | 229 | 230 | 231 | 232 | 2 233 | 234 | 235 | 236 | 237 | 238 | 239 | 240 | 241 | 6 242 | 243 | 244 | 245 | 246 | 7 247 | 248 | 249 | 250 | 251 | 252 | 253 | 254 | 255 | 256 | 10 257 | 258 | 259 | 260 | 261 | 12 262 | 263 | 264 | 265 | 266 | 14 267 | 268 | 269 | 270 | 271 | 272 | 273 | FSFFPLAYViewController 274 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 275 | UIResponder 276 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 277 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 278 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 279 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 280 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 281 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 282 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 283 | 284 | 285 | 286 | 287 | 288 | 15 289 | 290 | 291 | 292 | 293 | FSFFPLAYViewController 
294 | UIViewController 295 | 296 | id 297 | id 298 | id 299 | 300 | 301 | 302 | pausePlayAction: 303 | id 304 | 305 | 306 | playAction: 307 | id 308 | 309 | 310 | stopPlayAction: 311 | id 312 | 313 | 314 | 315 | UIView 316 | UIView 317 | 318 | 319 | 320 | controlView 321 | UIView 322 | 323 | 324 | showVideoView 325 | UIView 326 | 327 | 328 | 329 | IBProjectSource 330 | ./Classes/FSFFPLAYViewController.h 331 | 332 | 333 | 334 | 335 | 0 336 | IBCocoaTouchFramework 337 | YES 338 | 3 339 | 2083 340 | 341 | 342 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/FSVideoPlayViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // FSVideoPlayViewController.h 3 | // TestPlayWithFFMPEGAndSDL 4 | // 5 | // Created by on 12-12-13. 6 | // Copyright (c) 2012年 __MyCompanyName__. All rights reserved. 7 | // 8 | 9 | #import 10 | #import 11 | #import 12 | #import 13 | #include "SDL.h" 14 | #include 15 | #import 16 | #import 17 | 18 | #define SDL_AUDIO_BUFFER_SIZE 1024 19 | #define MAX_AUDIOQ_SIZE (5 * 16 * 1024) 20 | #define MAX_VIDEOQ_SIZE (5 * 256 * 1024) 21 | #define AV_SYNC_THRESHOLD 0.01 22 | #define AV_NOSYNC_THRESHOLD 10.0 23 | #define SAMPLE_CORRECTION_PERCENT_MAX 10 24 | #define AUDIO_DIFF_AVG_NB 20 25 | #define FF_ALLOC_EVENT (SDL_USEREVENT) 26 | #define FF_REFRESH_EVENT (SDL_USEREVENT + 1) 27 | #define FF_QUIT_EVENT (SDL_USEREVENT + 2) 28 | #define VIDEO_PICTURE_QUEUE_SIZE 1 29 | #define DEFAULT_AV_SYNC_TYPE AV_SYNC_VIDEO_MASTER 30 | 31 | typedef struct PacketQueue { 32 | AVPacketList *first_pkt, *last_pkt; 33 | int nb_packets; 34 | int size; 35 | SDL_mutex *mutex; 36 | SDL_cond *cond; 37 | } PacketQueue; 38 | typedef struct VideoPicture { 39 | SDL_Overlay *bmp; 40 | int width, height; /* source height & width */ 41 | int allocated; 42 | double pts; 43 | } VideoPicture; 44 | typedef struct VideoState { 45 | 46 | AVFormatContext *pFormatCtx; 47 | int videoStream, audioStream; 48 | 49 | int av_sync_type; 50 | double external_clock; /* external clock base */ 51 | int64_t external_clock_time; 52 | int seek_req; 53 | int seek_flags; 54 | int64_t seek_pos; 55 | double audio_clock; 56 | AVStream *audio_st; 57 | PacketQueue audioq; 58 | DECLARE_ALIGNED(16, uint8_t, audio_buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2]); 59 | unsigned int audio_buf_size; 60 | unsigned int audio_buf_index; 61 | AVPacket audio_pkt; 62 | uint8_t *audio_pkt_data; 63 | int audio_pkt_size; 64 | int audio_hw_buf_size; 65 | double audio_diff_cum; /* used for AV difference average computation */ 66 | double audio_diff_avg_coef; 67 | double audio_diff_threshold; 68 | int audio_diff_avg_count; 69 | double frame_timer; 70 | double frame_last_pts; 71 | double frame_last_delay; 72 | double video_clock; ///>>>>>>>>>>>SDL FFMPEG 22 | 23 | - (void)didReceiveMemoryWarning 24 | { 25 | [super didReceiveMemoryWarning]; 26 | // Release any cached data, images, etc that aren't in use. 27 | } 28 | 29 | #pragma mark - View lifecycle 30 | 31 | - (void)viewDidLoad 32 | { 33 | [super viewDidLoad]; 34 | // Do any additional setup after loading the view, typically from a nib. 35 | 36 | 37 | 38 | } 39 | 40 | - (void)viewDidUnload 41 | { 42 | [super viewDidUnload]; 43 | // Release any retained subviews of the main view. 44 | // e.g. 
self.myOutlet = nil; 45 | } 46 | 47 | - (void)viewWillAppear:(BOOL)animated 48 | { 49 | [super viewWillAppear:animated]; 50 | } 51 | 52 | - (void)viewDidAppear:(BOOL)animated 53 | { 54 | [super viewDidAppear:animated]; 55 | [self startPlayVideo]; 56 | } 57 | 58 | - (void)viewWillDisappear:(BOOL)animated 59 | { 60 | [super viewWillDisappear:animated]; 61 | } 62 | 63 | - (void)viewDidDisappear:(BOOL)animated 64 | { 65 | [super viewDidDisappear:animated]; 66 | } 67 | 68 | - (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation 69 | { 70 | // Return YES for supported orientations 71 | return (interfaceOrientation != UIInterfaceOrientationPortraitUpsideDown); 72 | } 73 | 74 | - (void)startPlayVideo { 75 | NSString *playUrlStr = @"udp://@192.168.1.3:8905?fifo_size=1000000&overrun_nonfatal=1&buffer_size=102400&pkt_size=102400"; 76 | // NSString *playUrlStr = @"udp://@192.168.1.3:8905"; 77 | // NSString *playUrlStr = [[NSBundle mainBundle] pathForResource:@"1" ofType:@"mp4"]; 78 | 79 | 80 | 81 | SDL_Event event; 82 | double pos; 83 | VideoState *is; 84 | NSLog(@"FUNCTION:%s LINE:%d", __FUNCTION__, __LINE__); 85 | is = av_mallocz(sizeof(VideoState)); 86 | 87 | // if(argc < 2) { 88 | // fprintf(stderr, "Usage: test \n"); 89 | // exit(1); 90 | // } 91 | // Register all formats and codecs 92 | avformat_network_init(); 93 | av_register_all(); 94 | avcodec_register_all(); 95 | 96 | if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) { 97 | fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError()); 98 | exit(1); 99 | } 100 | NSLog(@"FUNCTION:%s LINE:%d", __FUNCTION__, __LINE__); 101 | // Make a screen to put our video 102 | #ifndef __DARWIN__ 103 | screen = SDL_SetVideoMode(640, 480, 0, 0); 104 | #else 105 | screen = SDL_SetVideoMode(640, 480, 0, 0); 106 | #endif 107 | if(!screen) { 108 | fprintf(stderr, "SDL: could not set video mode - exiting\n"); 109 | exit(1); 110 | } 111 | 112 | // pstrcpy(is->filename, sizeof(is->filename), argv[1]); 113 | av_strlcpy(is->filename, [playUrlStr UTF8String], sizeof(is->filename)); 114 | 115 | NSLog(@"FUNCTION:%s LINE:%d", __FUNCTION__, __LINE__); 116 | is->pictq_mutex = SDL_CreateMutex(); 117 | is->pictq_cond = SDL_CreateCond(); 118 | 119 | schedule_refresh(is, 40); 120 | NSLog(@"FUNCTION:%s LINE:%d", __FUNCTION__, __LINE__); 121 | is->av_sync_type = DEFAULT_AV_SYNC_TYPE; 122 | NSLog(@"FUNCTION:%s LINE:%d", __FUNCTION__, __LINE__); 123 | is->parse_tid = SDL_CreateThread(decode_thread, is->filename, is); 124 | NSLog(@"FUNCTION:%s LINE:%d", __FUNCTION__, __LINE__); 125 | if(!is->parse_tid) { 126 | av_free(is); 127 | return; 128 | } 129 | 130 | av_init_packet(&flush_pkt); 131 | flush_pkt.data = (uint8_t *)"FLUSH"; 132 | 133 | 134 | for(;;) { 135 | double incr, pos; 136 | 137 | SDL_WaitEvent(&event); 138 | switch(event.type) { 139 | case SDL_KEYDOWN: 140 | switch(event.key.keysym.sym) { 141 | case SDLK_LEFT: 142 | incr = -10.0; 143 | goto do_seek; 144 | case SDLK_RIGHT: 145 | incr = 10.0; 146 | goto do_seek; 147 | case SDLK_UP: 148 | incr = 60.0; 149 | goto do_seek; 150 | case SDLK_DOWN: 151 | incr = -60.0; 152 | goto do_seek; 153 | do_seek: 154 | if(global_video_state) { 155 | pos = get_master_clock(global_video_state); 156 | pos += incr; 157 | stream_seek(global_video_state, (int64_t)(pos * AV_TIME_BASE), incr); 158 | } 159 | break; 160 | default: 161 | break; 162 | } 163 | break; 164 | case FF_QUIT_EVENT: 165 | case SDL_QUIT: 166 | is->quit = 1; 167 | SDL_Quit(); 168 | exit(0); 169 | break; 170 | case 
FF_ALLOC_EVENT: 171 | alloc_picture(event.user.data1); 172 | break; 173 | case FF_REFRESH_EVENT: 174 | video_refresh_timer(event.user.data1); 175 | break; 176 | default: 177 | break; 178 | } 179 | } 180 | 181 | NSLog(@"FUNCTION:%s LINE:%d", __FUNCTION__, __LINE__); 182 | } 183 | 184 | #pragma mark - 185 | #pragma mark SDL FFMPEG 186 | //SDL_Surface *screen; 187 | // 188 | ///* Since we only have one decoding thread, the Big Struct 189 | // can be global in case we need it. */ 190 | //VideoState *global_video_state; 191 | //AVPacket flush_pkt; 192 | //First, note that nb_packets is not the same as size -- size is the number of bytes we get from packet->size. Notice that the struct contains a mutex and a condition variable cond. That is because SDL handles audio in a separate thread; if we do not lock the queue correctly, we could corrupt our data. We will see how the queue works as we go. Every programmer should know how to build a queue, but we walk through it here so we can also learn the SDL functions along the way. We start with a function to initialize the queue 193 | void packet_queue_init(PacketQueue *q) { 194 | memset(q, 0, sizeof(PacketQueue)); 195 | q->mutex = SDL_CreateMutex();//mutex 196 | q->cond = SDL_CreateCond();//condition variable cond 197 | } 198 | //Next, a function to put things into the queue 199 | int packet_queue_put(PacketQueue *q, AVPacket *pkt) { 200 | AVPacketList *pkt1; 201 | if(pkt != &flush_pkt && av_dup_packet(pkt) < 0) { 202 | return -1; 203 | } 204 | pkt1 = av_malloc(sizeof(AVPacketList)); 205 | if (!pkt1) 206 | return -1; 207 | pkt1->pkt = *pkt; 208 | pkt1->next = NULL; 209 | 210 | SDL_LockMutex(q->mutex);//SDL_LockMutex() locks the queue's mutex so we can safely add something to the queue 211 | 212 | if (!q->last_pkt) 213 | q->first_pkt = pkt1; 214 | else 215 | q->last_pkt->next = pkt1; 216 | q->last_pkt = pkt1; 217 | q->nb_packets++; 218 | q->size += pkt1->pkt.size; 219 | SDL_CondSignal(q->cond);//then SDL_CondSignal() signals through our condition variable to a receiving function (if one is waiting) that data is now available, after which we unlock the mutex so the queue can be accessed freely. 220 | SDL_UnlockMutex(q->mutex); 221 | return 0; 222 | } 223 | //Below is the corresponding receive function. Note how SDL_CondWait() makes the function block as we ask it to (for example, waiting until the queue has data 224 | //As you can see, we wrap the function in an infinite loop so we can get data in a blocking way when we want to. We avoid spinning forever by using SDL's SDL_CondWait(). Basically, CondWait just waits for a signal from SDL_CondSignal() (or SDL_CondBroadcast()) and then continues. It may look as if we are trapped inside our own mutex -- if we kept holding the lock, the put function could never add anything to the queue! However, SDL_CondWait() also unlocks the mutex for us, and only tries to re-lock it after it receives the signal. 225 | static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block) { 226 | AVPacketList *pkt1; 227 | int ret; 228 | 229 | SDL_LockMutex(q->mutex); 230 | 231 | for(;;) { 232 | 233 | if(global_video_state->quit) { 234 | ret = -1; 235 | break; 236 | } 237 | 238 | pkt1 = q->first_pkt; 239 | if (pkt1) { 240 | q->first_pkt = pkt1->next; 241 | if (!q->first_pkt) 242 | q->last_pkt = NULL; 243 | q->nb_packets--; 244 | q->size -= pkt1->pkt.size; 245 | *pkt = pkt1->pkt; 246 | av_free(pkt1); 247 | ret = 1; 248 | break; 249 | } else if (!block) { 250 | ret = 0; 251 | break; 252 | } else { 253 | SDL_CondWait(q->cond, q->mutex); 254 | } 255 | } 256 | SDL_UnlockMutex(q->mutex); 257 | return ret; 258 | } 259 | static void packet_queue_flush(PacketQueue *q) { 260 | AVPacketList *pkt, *pkt1; 261 | 262 | SDL_LockMutex(q->mutex); 263 | for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1) { 264 | pkt1 = pkt->next; 265 | av_free_packet(&pkt->pkt); 266 | av_freep(&pkt); 267 | } 268 | q->last_pkt = NULL; 269 | q->first_pkt = NULL; 270 | q->nb_packets = 0; 271 | q->size = 0; 272 | SDL_UnlockMutex(q->mutex); 273 | } 274 | double get_audio_clock(VideoState *is) { 275 | double pts; 276 | int hw_buf_size, bytes_per_sec, n; 277 | 278 | pts = is->audio_clock; /* maintained in the audio thread */ 279 | hw_buf_size = is->audio_buf_size - is->audio_buf_index; 280 | bytes_per_sec = 0; 281 | n = is->audio_st->codec->channels * 2;
282 | if(is->audio_st) { 283 | bytes_per_sec = is->audio_st->codec->sample_rate * n; 284 | } 285 | if(bytes_per_sec) { 286 | pts -= (double)hw_buf_size / bytes_per_sec; 287 | } 288 | return pts; 289 | } 290 | double get_video_clock(VideoState *is) { 291 | double delta; 292 | 293 | delta = (av_gettime() - is->video_current_pts_time) / 1000000.0; 294 | return is->video_current_pts + delta; 295 | } 296 | double get_external_clock(VideoState *is) { 297 | return av_gettime() / 1000000.0; 298 | } 299 | double get_master_clock(VideoState *is) { 300 | if(is->av_sync_type == AV_SYNC_VIDEO_MASTER) { 301 | return get_video_clock(is); 302 | } else if(is->av_sync_type == AV_SYNC_AUDIO_MASTER) { 303 | return get_audio_clock(is); 304 | } else { 305 | return get_external_clock(is); 306 | } 307 | } 308 | /* Add or subtract samples to get a better sync, return new 309 | audio buffer size */ 310 | int synchronize_audio(VideoState *is, short *samples, 311 | int samples_size, double pts) { 312 | int n; 313 | double ref_clock; 314 | 315 | n = 2 * is->audio_st->codec->channels; 316 | 317 | if(is->av_sync_type != AV_SYNC_AUDIO_MASTER) { 318 | double diff, avg_diff; 319 | int wanted_size, min_size, max_size, nb_samples; 320 | 321 | ref_clock = get_master_clock(is); 322 | diff = get_audio_clock(is) - ref_clock; 323 | if(diff < AV_NOSYNC_THRESHOLD) { 324 | // accumulate the diffs 325 | is->audio_diff_cum = diff + is->audio_diff_avg_coef 326 | * is->audio_diff_cum; 327 | if(is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) { 328 | is->audio_diff_avg_count++; 329 | } else { 330 | avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef); 331 | if(fabs(avg_diff) >= is->audio_diff_threshold) { 332 | wanted_size = samples_size + ((int)(diff * is->audio_st->codec->sample_rate) * n); 333 | min_size = samples_size * ((100 - SAMPLE_CORRECTION_PERCENT_MAX) / 100); 334 | max_size = samples_size * ((100 + SAMPLE_CORRECTION_PERCENT_MAX) / 100); 335 | if(wanted_size < min_size) { 336 | wanted_size = min_size; 337 | } else if (wanted_size > max_size) { 338 | wanted_size = max_size; 339 | } 340 | if(wanted_size < samples_size) { 341 | /* remove samples */ 342 | samples_size = wanted_size; 343 | } else if(wanted_size > samples_size) { 344 | uint8_t *samples_end, *q; 345 | int nb; 346 | /* add samples by copying final sample*/ 347 | nb = (samples_size - wanted_size); 348 | samples_end = (uint8_t *)samples + samples_size - n; 349 | q = samples_end + n; 350 | while(nb > 0) { 351 | memcpy(q, samples_end, n); 352 | q += n; 353 | nb -= n; 354 | } 355 | samples_size = wanted_size; 356 | } 357 | } 358 | } 359 | } else { 360 | /* difference is TOO big; reset diff stuff */ 361 | is->audio_diff_avg_count = 0; 362 | is->audio_diff_cum = 0; 363 | } 364 | } 365 | return samples_size; 366 | } 367 | int audio_decode_frame(VideoState *is, uint8_t *audio_buf, int buf_size, double *pts_ptr) { 368 | int len1, data_size, n; 369 | AVPacket *pkt = &is->audio_pkt; 370 | double pts; 371 | 372 | for(;;) { 373 | while(is->audio_pkt_size > 0) { 374 | data_size = buf_size; 375 | // len1 = avcodec_decode_audio2(is->audio_st->codec, 376 | // (int16_t *)audio_buf, &data_size, 377 | // is->audio_pkt_data, is->audio_pkt_size); 378 | len1 = avcodec_decode_audio3(is->audio_st->codec, (int16_t *)audio_buf, &data_size, &is->audio_pkt); 379 | is->audio_pkt_size = is->audio_pkt.size; 380 | 381 | if(len1 < 0) { 382 | /* if error, skip frame */ 383 | is->audio_pkt_size = 0; 384 | break; 385 | } 386 | is->audio_pkt_data += len1; 387 | is->audio_pkt_size -= len1; 388 | 
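/* audio_pkt_data / audio_pkt_size walk through the current packet: one compressed packet can take several passes of this while loop to drain, so we step past the len1 bytes the decoder just consumed and keep the remainder for the next pass. */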
if(data_size <= 0) { 389 | /* No data yet, get more frames */ 390 | continue; 391 | } 392 | pts = is->audio_clock; 393 | *pts_ptr = pts; 394 | n = 2 * is->audio_st->codec->channels; 395 | is->audio_clock += (double)data_size / 396 | (double)(n * is->audio_st->codec->sample_rate); 397 | 398 | /* We have data, return it and come back for more later */ 399 | return data_size; 400 | } 401 | if(pkt->data) 402 | av_free_packet(pkt); 403 | 404 | if(is->quit) { 405 | return -1; 406 | } 407 | /* next packet */ 408 | if(packet_queue_get(&is->audioq, pkt, 1) < 0) { 409 | return -1; 410 | } 411 | if(pkt->data == flush_pkt.data) { 412 | avcodec_flush_buffers(is->audio_st->codec); 413 | continue; 414 | } 415 | is->audio_pkt_data = pkt->data; 416 | is->audio_pkt_size = pkt->size; 417 | /* if update, update the audio clock w/pts */ 418 | if(pkt->pts != AV_NOPTS_VALUE) { 419 | is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts; 420 | } 421 | } 422 | } 423 | 424 | void audio_callback(void *userdata, Uint8 *stream, int len) { 425 | VideoState *is = (VideoState *)userdata; 426 | int len1, audio_size; 427 | double pts; 428 | 429 | while(len > 0) { 430 | if(is->audio_buf_index >= is->audio_buf_size) { 431 | /* We have already sent all our data; get more */ 432 | audio_size = audio_decode_frame(is, is->audio_buf, sizeof(is->audio_buf), &pts); 433 | if(audio_size < 0) { 434 | /* If error, output silence */ 435 | is->audio_buf_size = 1024; 436 | memset(is->audio_buf, 0, is->audio_buf_size); 437 | } else { 438 | audio_size = synchronize_audio(is, (int16_t *)is->audio_buf, 439 | audio_size, pts); 440 | is->audio_buf_size = audio_size; 441 | } 442 | is->audio_buf_index = 0; 443 | } 444 | len1 = is->audio_buf_size - is->audio_buf_index; 445 | if(len1 > len) 446 | len1 = len; 447 | memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1); 448 | len -= len1; 449 | stream += len1; 450 | is->audio_buf_index += len1; 451 | } 452 | } 453 | 454 | static Uint32 sdl_refresh_timer_cb(Uint32 interval, void *opaque) { 455 | SDL_Event event; 456 | event.type = FF_REFRESH_EVENT; 457 | event.user.data1 = opaque; 458 | SDL_PushEvent(&event); 459 | return 0; /* 0 means stop timer */ 460 | } 461 | /* schedule a video refresh in 'delay' ms */ 462 | static void schedule_refresh(VideoState *is, int delay) { 463 | SDL_AddTimer(delay, sdl_refresh_timer_cb, is); 464 | } 465 | void video_display(VideoState *is) { 466 | SDL_Rect rect; 467 | VideoPicture *vp; 468 | AVPicture pict; 469 | float aspect_ratio; 470 | int w, h, x, y; 471 | int i; 472 | 473 | vp = &is->pictq[is->pictq_rindex]; 474 | if(vp->bmp) { 475 | if(is->video_st->codec->sample_aspect_ratio.num == 0) { 476 | aspect_ratio = 0; 477 | } else { 478 | aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio) * 479 | is->video_st->codec->width / is->video_st->codec->height; 480 | } 481 | if(aspect_ratio <= 0.0) { 482 | aspect_ratio = (float)is->video_st->codec->width / 483 | (float)is->video_st->codec->height; 484 | } 485 | // apparently this assumption is bad 486 | h = screen->h; 487 | w = ((int)rint(h * aspect_ratio)) & -3; 488 | if(w > screen->w) { 489 | w = screen->w; 490 | h = ((int)rint(w / aspect_ratio)) & -3; 491 | } 492 | x = (screen->w - w) / 2; 493 | y = (screen->h - h) / 2; 494 | rect.x = x; 495 | rect.y = y; 496 | rect.w = w; 497 | rect.h = h; 498 | SDL_DisplayYUVOverlay(vp->bmp, &rect); 499 | } 500 | } 501 | 502 | void video_refresh_timer(void *userdata) { 503 | 504 | VideoState *is = (VideoState *)userdata; 505 | VideoPicture *vp; 506 | double 
actual_delay, delay, sync_threshold, ref_clock, diff; 507 | 508 | if(is->video_st) { 509 | if(is->pictq_size == 0) { 510 | schedule_refresh(is, 1); 511 | } else { 512 | vp = &is->pictq[is->pictq_rindex]; 513 | 514 | is->video_current_pts = vp->pts; 515 | is->video_current_pts_time = av_gettime(); 516 | 517 | delay = vp->pts - is->frame_last_pts; /* the pts from last time */ 518 | if(delay <= 0 || delay >= 1.0) { 519 | /* if incorrect delay, use previous one */ 520 | delay = is->frame_last_delay; 521 | } 522 | /* save for next time */ 523 | is->frame_last_delay = delay; 524 | is->frame_last_pts = vp->pts; 525 | 526 | /* update delay to sync to audio if not master source */ 527 | if(is->av_sync_type != AV_SYNC_VIDEO_MASTER) { 528 | ref_clock = get_master_clock(is); 529 | diff = vp->pts - ref_clock; 530 | 531 | /* Skip or repeat the frame. Take delay into account 532 | FFPlay still doesn't "know if this is the best guess." */ 533 | sync_threshold = (delay > AV_SYNC_THRESHOLD) ? delay : AV_SYNC_THRESHOLD; 534 | if(fabs(diff) < AV_NOSYNC_THRESHOLD) { 535 | if(diff <= -sync_threshold) { 536 | delay = 0; 537 | } else if(diff >= sync_threshold) { 538 | delay = 2 * delay; 539 | } 540 | } 541 | } 542 | 543 | is->frame_timer += delay; 544 | /* computer the REAL delay */ 545 | actual_delay = is->frame_timer - (av_gettime() / 1000000.0); 546 | if(actual_delay < 0.010) { 547 | /* Really it should skip the picture instead */ 548 | actual_delay = 0.010; 549 | } 550 | schedule_refresh(is, (int)(actual_delay * 1000 + 0.5)); 551 | 552 | /* show the picture! */ 553 | video_display(is); 554 | 555 | /* update queue for next picture! */ 556 | if(++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE) { 557 | is->pictq_rindex = 0; 558 | } 559 | SDL_LockMutex(is->pictq_mutex); 560 | is->pictq_size--; 561 | SDL_CondSignal(is->pictq_cond); 562 | SDL_UnlockMutex(is->pictq_mutex); 563 | } 564 | } else { 565 | schedule_refresh(is, 100); 566 | } 567 | } 568 | 569 | void alloc_picture(void *userdata) { 570 | 571 | VideoState *is = (VideoState *)userdata; 572 | VideoPicture *vp; 573 | 574 | vp = &is->pictq[is->pictq_windex]; 575 | if(vp->bmp) { 576 | // we already have one make another, bigger/smaller 577 | SDL_FreeYUVOverlay(vp->bmp); 578 | } 579 | // Allocate a place to put our YUV image on that screen 580 | vp->bmp = SDL_CreateYUVOverlay(is->video_st->codec->width, 581 | is->video_st->codec->height, 582 | SDL_YV12_OVERLAY, 583 | screen); 584 | vp->width = is->video_st->codec->width; 585 | vp->height = is->video_st->codec->height; 586 | 587 | SDL_LockMutex(is->pictq_mutex); 588 | vp->allocated = 1; 589 | SDL_CondSignal(is->pictq_cond); 590 | SDL_UnlockMutex(is->pictq_mutex); 591 | 592 | } 593 | 594 | int queue_picture(VideoState *is, AVFrame *pFrame, double pts) { 595 | 596 | VideoPicture *vp; 597 | int dst_pix_fmt; 598 | AVPicture pict; 599 | static struct SwsContext *img_convert_ctx; 600 | 601 | /* wait until we have space for a new pic */ 602 | SDL_LockMutex(is->pictq_mutex); 603 | while(is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE && 604 | !is->quit) { 605 | SDL_CondWait(is->pictq_cond, is->pictq_mutex); 606 | } 607 | SDL_UnlockMutex(is->pictq_mutex); 608 | 609 | if(is->quit) 610 | return -1; 611 | 612 | // windex is set to 0 initially 613 | vp = &is->pictq[is->pictq_windex]; 614 | 615 | /* allocate or resize the buffer! 
*/ 616 | if(!vp->bmp || 617 | vp->width != is->video_st->codec->width || 618 | vp->height != is->video_st->codec->height) { 619 | SDL_Event event; 620 | 621 | vp->allocated = 0; 622 | /* we have to do it in the main thread */ 623 | event.type = FF_ALLOC_EVENT; 624 | event.user.data1 = is; 625 | SDL_PushEvent(&event); 626 | 627 | /* wait until we have a picture allocated */ 628 | SDL_LockMutex(is->pictq_mutex); 629 | while(!vp->allocated && !is->quit) { 630 | SDL_CondWait(is->pictq_cond, is->pictq_mutex); 631 | } 632 | SDL_UnlockMutex(is->pictq_mutex); 633 | if(is->quit) { 634 | return -1; 635 | } 636 | } 637 | /* We have a place to put our picture on the queue */ 638 | /* If we are skipping a frame, do we set this to null 639 | but still return vp->allocated = 1? */ 640 | 641 | 642 | if(vp->bmp) { 643 | 644 | SDL_LockYUVOverlay(vp->bmp); 645 | 646 | dst_pix_fmt = PIX_FMT_YUV420P; 647 | /* point pict at the queue */ 648 | 649 | pict.data[0] = vp->bmp->pixels[0]; 650 | pict.data[1] = vp->bmp->pixels[2]; 651 | pict.data[2] = vp->bmp->pixels[1]; 652 | 653 | pict.linesize[0] = vp->bmp->pitches[0]; 654 | pict.linesize[1] = vp->bmp->pitches[2]; 655 | pict.linesize[2] = vp->bmp->pitches[1]; 656 | 657 | // Convert the image into YUV format that SDL uses 658 | if(img_convert_ctx == NULL) { 659 | int w = is->video_st->codec->width; 660 | int h = is->video_st->codec->height; 661 | img_convert_ctx = sws_getContext(w, h, 662 | is->video_st->codec->pix_fmt, w, h, 663 | dst_pix_fmt, SWS_BICUBIC, NULL, NULL, NULL); 664 | if(img_convert_ctx == NULL) { 665 | fprintf(stderr, "Cannot initialize the conversion context!\n"); 666 | exit(1); 667 | } 668 | } 669 | sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 670 | 0, is->video_st->codec->height, pict.data, pict.linesize); 671 | 672 | SDL_UnlockYUVOverlay(vp->bmp); 673 | vp->pts = pts; 674 | 675 | /* now we inform our display thread that we have a pic ready */ 676 | if(++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE) { 677 | is->pictq_windex = 0; 678 | } 679 | SDL_LockMutex(is->pictq_mutex); 680 | is->pictq_size++; 681 | SDL_UnlockMutex(is->pictq_mutex); 682 | } 683 | return 0; 684 | } 685 | 686 | double synchronize_video(VideoState *is, AVFrame *src_frame, double pts) { 687 | 688 | double frame_delay; 689 | 690 | if(pts != 0) { 691 | /* if we have pts, set video clock to it */ 692 | is->video_clock = pts; 693 | } else { 694 | /* if we aren't given a pts, set it to the clock */ 695 | pts = is->video_clock; 696 | } 697 | /* update the video clock */ 698 | frame_delay = av_q2d(is->video_st->codec->time_base); 699 | /* if we are repeating a frame, adjust clock accordingly */ 700 | frame_delay += src_frame->repeat_pict * (frame_delay * 0.5); 701 | is->video_clock += frame_delay; 702 | return pts; 703 | } 704 | 705 | uint64_t global_video_pkt_pts = AV_NOPTS_VALUE; 706 | 707 | /* These are called whenever we allocate a frame 708 | * buffer. We use this to store the global_pts in 709 | * a frame at the time it is allocated. 
710 | */ 711 | int our_get_buffer(struct AVCodecContext *c, AVFrame *pic) { 712 | int ret = avcodec_default_get_buffer(c, pic); 713 | uint64_t *pts = av_malloc(sizeof(uint64_t)); 714 | *pts = global_video_pkt_pts; 715 | pic->opaque = pts; 716 | return ret; 717 | } 718 | void our_release_buffer(struct AVCodecContext *c, AVFrame *pic) { 719 | if(pic) av_freep(&pic->opaque); 720 | avcodec_default_release_buffer(c, pic); 721 | } 722 | 723 | int video_thread(void *arg) { 724 | VideoState *is = (VideoState *)arg; 725 | AVPacket pkt1, *packet = &pkt1; 726 | int len1, frameFinished; 727 | AVFrame *pFrame; 728 | double pts; 729 | 730 | pFrame = avcodec_alloc_frame(); 731 | 732 | for(;;) { 733 | if(packet_queue_get(&is->videoq, packet, 1) < 0) { 734 | // means we quit getting packets 735 | break; 736 | } 737 | if(packet->data == flush_pkt.data) { 738 | avcodec_flush_buffers(is->video_st->codec); 739 | continue; 740 | } 741 | pts = 0; 742 | 743 | // Save global pts to be stored in pFrame 744 | global_video_pkt_pts = packet->pts; 745 | // Decode video frame 746 | // len1 = avcodec_decode_video(is->video_st->codec, pFrame, &frameFinished, 747 | // packet->data, packet->size); 748 | //int avcodec_decode_video2(AVCodecContext *avctx, AVFrame *picture, 749 | // int *got_picture_ptr, 750 | // const AVPacket *avpkt); 751 | len1 = avcodec_decode_video2(is->video_st->codec, pFrame, &frameFinished, 752 | packet); 753 | 754 | if(packet->dts == AV_NOPTS_VALUE 755 | && pFrame->opaque && *(uint64_t*)pFrame->opaque != AV_NOPTS_VALUE) { 756 | pts = *(uint64_t *)pFrame->opaque; 757 | } else if(packet->dts != AV_NOPTS_VALUE) { 758 | pts = packet->dts; 759 | } else { 760 | pts = 0; 761 | } 762 | pts *= av_q2d(is->video_st->time_base); 763 | 764 | 765 | // Did we get a video frame? 
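// (frameFinished is only set once the decoder has assembled a complete picture; with codecs that reorder frames, several packets may go in before the first frame comes out, so a zero here is not an error.)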
766 | if(frameFinished) { 767 | pts = synchronize_video(is, pFrame, pts); 768 | if(queue_picture(is, pFrame, pts) < 0) { 769 | break; 770 | } 771 | } 772 | av_free_packet(packet); 773 | } 774 | av_free(pFrame); 775 | return 0; 776 | } 777 | //函数 SDL_PauseAudio()让音频设备最终开始工作。如果没有立即供给足够的数 据,它会播放静音。 778 | int stream_component_open(VideoState *is, int stream_index) { 779 | 780 | AVFormatContext *pFormatCtx = is->pFormatCtx; 781 | AVCodecContext *codecCtx; 782 | AVCodec *codec; 783 | SDL_AudioSpec wanted_spec, spec; 784 | 785 | if(stream_index < 0 || stream_index >= pFormatCtx->nb_streams) { 786 | return -1; 787 | } 788 | 789 | // Get a pointer to the codec context for the video stream 790 | codecCtx = pFormatCtx->streams[stream_index]->codec; 791 | 792 | if(codecCtx->codec_type == AVMEDIA_TYPE_AUDIO) { 793 | // Set audio settings from codec info 794 | wanted_spec.freq = codecCtx->sample_rate; 795 | wanted_spec.format = AUDIO_S16SYS; 796 | wanted_spec.channels = codecCtx->channels; 797 | wanted_spec.silence = 0; 798 | wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE; 799 | wanted_spec.callback = audio_callback; 800 | wanted_spec.userdata = is; 801 | 802 | if(SDL_OpenAudio(&wanted_spec, &spec) < 0) { 803 | fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError()); 804 | return -1; 805 | } 806 | is->audio_hw_buf_size = spec.size; 807 | } 808 | codec = avcodec_find_decoder(codecCtx->codec_id); 809 | if(!codec || (avcodec_open(codecCtx, codec) < 0)) { 810 | fprintf(stderr, "Unsupported codec!\n"); 811 | return -1; 812 | } 813 | 814 | switch(codecCtx->codec_type) { 815 | case AVMEDIA_TYPE_AUDIO: 816 | is->audioStream = stream_index; 817 | is->audio_st = pFormatCtx->streams[stream_index]; 818 | is->audio_buf_size = 0; 819 | is->audio_buf_index = 0; 820 | 821 | /* averaging filter for audio sync */ 822 | is->audio_diff_avg_coef = exp(log(0.01 / AUDIO_DIFF_AVG_NB)); 823 | is->audio_diff_avg_count = 0; 824 | /* Correct audio only if larger error than this */ 825 | is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / codecCtx->sample_rate; 826 | 827 | memset(&is->audio_pkt, 0, sizeof(is->audio_pkt)); 828 | packet_queue_init(&is->audioq); 829 | SDL_PauseAudio(0); 830 | break; 831 | case AVMEDIA_TYPE_VIDEO: 832 | is->videoStream = stream_index; 833 | is->video_st = pFormatCtx->streams[stream_index]; 834 | 835 | is->frame_timer = (double)av_gettime() / 1000000.0; 836 | is->frame_last_delay = 40e-3; 837 | is->video_current_pts_time = av_gettime(); 838 | 839 | packet_queue_init(&is->videoq); 840 | is->video_tid = SDL_CreateThread(video_thread, is->filename, is); 841 | codecCtx->get_buffer = our_get_buffer; 842 | codecCtx->release_buffer = our_release_buffer; 843 | 844 | break; 845 | default: 846 | break; 847 | } 848 | 849 | 850 | } 851 | 852 | //int decode_interrupt_cb(void) { 853 | // return (global_video_state && global_video_state->quit); 854 | //} 855 | int decode_interrupt_cb(void) { 856 | return (global_video_state && global_video_state->quit); 857 | // AVFormatContext* formatContext = (AVFormatContext*)(ctx); 858 | // // do something 859 | // return 0; 860 | } 861 | 862 | int decode_thread(void *arg) { 863 | 864 | VideoState *is = (VideoState *)arg; 865 | AVFormatContext *pFormatCtx; 866 | AVPacket pkt1, *packet = &pkt1; 867 | 868 | int video_index = -1; 869 | int audio_index = -1; 870 | int i; 871 | 872 | is->videoStream=-1; 873 | is->audioStream=-1; 874 | 875 | global_video_state = is; 876 | // will interrupt blocking functions if we quit! 
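/* Sketch (an assumption, not part of the original code): on FFmpeg builds that have removed url_set_interrupt_cb(), the quit flag can be wired in through AVIOInterruptCB instead, which is what the commented-out lines below are reaching for. The callback must then take a void * argument, so decode_interrupt_cb would need that signature, and pFormatCtx has to exist before avformat_open_input():
    pFormatCtx = avformat_alloc_context();
    pFormatCtx->interrupt_callback.callback = decode_interrupt_cb;
    pFormatCtx->interrupt_callback.opaque   = is;
*/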
877 | // url_set_interrupt_cb(decode_interrupt_cb); 878 | // avio_set_interrupt_cb(decode_interrupt_cb); 879 | 880 | // static const AVIOInterruptCB int_cb={decode_interrupt_cb, NULL}; 881 | // pFormatCtx->interrupt_callback=int_cb; 882 | 883 | 884 | // AVIOInterruptCB int_cb; 885 | // 886 | // int_cb.callback = decode_interrupt_cb; 887 | // int_cb.opaque = NULL; 888 | 889 | // pFormatCtx->interrupt_callback = (AVIOInterruptCB){decode_interrupt_cb, NULL}; 890 | 891 | // Open video file 892 | // if(av_open_input_file(&pFormatCtx, is->filename, NULL, 0, NULL)!=0) 893 | // return -1; // Couldn't open file 894 | if(avformat_open_input(&pFormatCtx, is->filename, NULL, NULL)!=0) 895 | return -1; // Couldn't open file 896 | 897 | is->pFormatCtx = pFormatCtx; 898 | 899 | // Retrieve stream information 900 | // if(av_find_stream_info(pFormatCtx)<0) 901 | // return -1; // Couldn't find stream information 902 | if(avformat_find_stream_info(pFormatCtx, NULL)<0) 903 | return -1; // Couldn't find stream information 904 | 905 | // Dump information about file onto standard error 906 | // dump_format(pFormatCtx, 0, is->filename, 0); 907 | 908 | #if DEBUG 909 | av_dump_format(pFormatCtx, -1, is->filename, 0); 910 | #endif 911 | 912 | // Find the first video stream 913 | 914 | for(i=0; i<pFormatCtx->nb_streams; i++) { 915 | if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO && 916 | video_index < 0) { 917 | video_index=i; 918 | } 919 | if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO && 920 | audio_index < 0) { 921 | audio_index=i; 922 | } 923 | } 924 | if(audio_index >= 0) { 925 | stream_component_open(is, audio_index); 926 | } 927 | if(video_index >= 0) { 928 | stream_component_open(is, video_index); 929 | } 930 | 931 | if(is->videoStream < 0 || is->audioStream < 0) { 932 | fprintf(stderr, "%s: could not open codecs\n", is->filename); 933 | goto fail; 934 | } 935 | 936 | // main decode loop 937 | 938 | for(;;) { 939 | if(is->quit) { 940 | break; 941 | } 942 | // seek stuff goes here 943 | if(is->seek_req) { 944 | int stream_index= -1; 945 | int64_t seek_target = is->seek_pos; 946 | 947 | if (is->videoStream >= 0) stream_index = is->videoStream; 948 | else if(is->audioStream >= 0) stream_index = is->audioStream; 949 | 950 | if(stream_index>=0){ 951 | seek_target= av_rescale_q(seek_target, AV_TIME_BASE_Q, pFormatCtx->streams[stream_index]->time_base); 952 | } 953 | if(av_seek_frame(is->pFormatCtx, stream_index, seek_target, is->seek_flags) < 0) { 954 | 955 | 956 | if (is->pFormatCtx->iformat->read_seek) { 957 | printf("format specific\n"); 958 | } else if(is->pFormatCtx->iformat->read_timestamp) { 959 | printf("frame_binary\n"); 960 | } else { 961 | printf("generic\n"); 962 | } 963 | 964 | fprintf(stderr, "%s: error while seeking. 
target: %d, stream_index: %d\n", is->pFormatCtx->filename, seek_target, stream_index); 965 | } else { 966 | if(is->audioStream >= 0) { 967 | packet_queue_flush(&is->audioq); 968 | packet_queue_put(&is->audioq, &flush_pkt); 969 | } 970 | if(is->videoStream >= 0) { 971 | packet_queue_flush(&is->videoq); 972 | packet_queue_put(&is->videoq, &flush_pkt); 973 | } 974 | } 975 | is->seek_req = 0; 976 | } 977 | if(is->audioq.size > MAX_AUDIOQ_SIZE || 978 | is->videoq.size > MAX_VIDEOQ_SIZE) { 979 | SDL_Delay(10); 980 | continue; 981 | } 982 | if(av_read_frame(is->pFormatCtx, packet) < 0) { 983 | // if(url_ferror(&pFormatCtx->pb) == 0) { 984 | 985 | if(pFormatCtx->pb&&pFormatCtx->pb->error) { 986 | SDL_Delay(100); /* no error; wait for user input */ 987 | continue; 988 | } else { 989 | break; 990 | } 991 | 992 | } 993 | // Is this a packet from the video stream? 994 | if(packet->stream_index == is->videoStream) { 995 | packet_queue_put(&is->videoq, packet); 996 | } else if(packet->stream_index == is->audioStream) { 997 | packet_queue_put(&is->audioq, packet); 998 | } else { 999 | av_free_packet(packet); 1000 | } 1001 | } 1002 | /* all done - wait for it */ 1003 | while(!is->quit) { 1004 | SDL_Delay(100); 1005 | } 1006 | 1007 | fail: 1008 | { 1009 | SDL_Event event; 1010 | event.type = FF_QUIT_EVENT; 1011 | event.user.data1 = is; 1012 | SDL_PushEvent(&event); 1013 | } 1014 | return 0; 1015 | } 1016 | 1017 | void stream_seek(VideoState *is, int64_t pos, int rel) { 1018 | 1019 | if(!is->seek_req) { 1020 | is->seek_pos = pos; 1021 | is->seek_flags = rel < 0 ? AVSEEK_FLAG_BACKWARD : 0; 1022 | is->seek_req = 1; 1023 | } 1024 | } 1025 | /* 1026 | int main(int argc, char *argv[]) { 1027 | 1028 | SDL_Event event; 1029 | double pos; 1030 | VideoState *is; 1031 | 1032 | is = av_mallocz(sizeof(VideoState)); 1033 | 1034 | if(argc < 2) { 1035 | fprintf(stderr, "Usage: test \n"); 1036 | exit(1); 1037 | } 1038 | // Register all formats and codecs 1039 | av_register_all(); 1040 | 1041 | if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) { 1042 | fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError()); 1043 | exit(1); 1044 | } 1045 | 1046 | // Make a screen to put our video 1047 | #ifndef __DARWIN__ 1048 | screen = SDL_SetVideoMode(640, 480, 0, 0); 1049 | #else 1050 | screen = SDL_SetVideoMode(640, 480, 24, 0); 1051 | #endif 1052 | if(!screen) { 1053 | fprintf(stderr, "SDL: could not set video mode - exiting\n"); 1054 | exit(1); 1055 | } 1056 | 1057 | pstrcpy(is->filename, sizeof(is->filename), argv[1]); 1058 | 1059 | is->pictq_mutex = SDL_CreateMutex(); 1060 | is->pictq_cond = SDL_CreateCond(); 1061 | 1062 | schedule_refresh(is, 40); 1063 | 1064 | is->av_sync_type = DEFAULT_AV_SYNC_TYPE; 1065 | is->parse_tid = SDL_CreateThread(decode_thread, is); 1066 | if(!is->parse_tid) { 1067 | av_free(is); 1068 | return -1; 1069 | } 1070 | 1071 | av_init_packet(&flush_pkt); 1072 | flush_pkt.data = "FLUSH"; 1073 | 1074 | for(;;) { 1075 | double incr, pos; 1076 | 1077 | SDL_WaitEvent(&event); 1078 | switch(event.type) { 1079 | case SDL_KEYDOWN: 1080 | switch(event.key.keysym.sym) { 1081 | case SDLK_LEFT: 1082 | incr = -10.0; 1083 | goto do_seek; 1084 | case SDLK_RIGHT: 1085 | incr = 10.0; 1086 | goto do_seek; 1087 | case SDLK_UP: 1088 | incr = 60.0; 1089 | goto do_seek; 1090 | case SDLK_DOWN: 1091 | incr = -60.0; 1092 | goto do_seek; 1093 | do_seek: 1094 | if(global_video_state) { 1095 | pos = get_master_clock(global_video_state); 1096 | pos += incr; 1097 | stream_seek(global_video_state, (int64_t)(pos * 
AV_TIME_BASE), incr); 1098 | } 1099 | break; 1100 | default: 1101 | break; 1102 | } 1103 | break; 1104 | case FF_QUIT_EVENT: 1105 | case SDL_QUIT: 1106 | is->quit = 1; 1107 | SDL_Quit(); 1108 | exit(0); 1109 | break; 1110 | case FF_ALLOC_EVENT: 1111 | alloc_picture(event.user.data1); 1112 | break; 1113 | case FF_REFRESH_EVENT: 1114 | video_refresh_timer(event.user.data1); 1115 | break; 1116 | default: 1117 | break; 1118 | } 1119 | } 1120 | return 0; 1121 | } 1122 | */ 1123 | 1124 | 1125 | @end 1126 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/KxMovieDecoder.h: -------------------------------------------------------------------------------- 1 | // 2 | // KxMovieDecoder.h 3 | // kxmovie 4 | // 5 | // Created by Kolyvan on 15.10.12. 6 | // Copyright (c) 2012 Konstantin Boukreev . All rights reserved. 7 | // 8 | // https://github.com/kolyvan/kxmovie 9 | // this file is part of KxMovie 10 | // KxMovie is licenced under the LGPL v3, see lgpl-3.0.txt 11 | 12 | #import 13 | #import 14 | #include "libavcodec/avcodec.h" 15 | 16 | extern NSString * kxmovieErrorDomain; 17 | 18 | typedef enum { 19 | 20 | kxMovieErrorNone, 21 | kxMovieErrorOpenFile, 22 | kxMovieErrorStreamInfoNotFound, 23 | kxMovieErrorStreamNotFound, 24 | kxMovieErrorCodecNotFound, 25 | kxMovieErrorOpenCodec, 26 | kxMovieErrorAllocateFrame, 27 | kxMovieErroSetupScaler, 28 | kxMovieErroReSampler, 29 | kxMovieErroUnsupported, 30 | 31 | } kxMovieError; 32 | 33 | typedef enum { 34 | 35 | KxMovieFrameTypeAudio, 36 | KxMovieFrameTypeVideo, 37 | KxMovieFrameTypeArtwork, 38 | KxMovieFrameTypeSubtitle, 39 | 40 | } KxMovieFrameType; 41 | 42 | typedef enum { 43 | 44 | KxVideoFrameFormatRGB, 45 | KxVideoFrameFormatYUV, 46 | 47 | } KxVideoFrameFormat; 48 | 49 | @interface KxMovieFrame : NSObject 50 | @property (readonly, nonatomic) KxMovieFrameType type; 51 | @property (readonly, nonatomic) CGFloat position; 52 | @property (readonly, nonatomic) CGFloat duration; 53 | @end 54 | 55 | @interface KxAudioFrame : KxMovieFrame 56 | @property (readonly, nonatomic, strong) NSData *samples; 57 | @end 58 | 59 | @interface KxVideoFrame : KxMovieFrame 60 | @property (readonly, nonatomic) KxVideoFrameFormat format; 61 | @property (readonly, nonatomic) NSUInteger width; 62 | @property (readonly, nonatomic) NSUInteger height; 63 | @end 64 | 65 | @interface KxVideoFrameRGB : KxVideoFrame 66 | @property (readonly, nonatomic) NSUInteger linesize; 67 | @property (readonly, nonatomic, strong) NSData *rgb; 68 | - (UIImage *) asImage; 69 | @end 70 | 71 | @interface KxVideoFrameYUV : KxVideoFrame { 72 | NSData *luma; 73 | NSData *chromaB; 74 | NSData *chromaR; 75 | } 76 | @property (nonatomic, retain) NSData *luma; 77 | @property (nonatomic, retain) NSData *chromaB; 78 | @property (nonatomic, retain) NSData *chromaR; 79 | @end 80 | 81 | @interface KxArtworkFrame : KxMovieFrame 82 | @property (readonly, nonatomic, strong) NSData *picture; 83 | - (UIImage *) asImage; 84 | @end 85 | 86 | @interface KxSubtitleFrame : KxMovieFrame 87 | @property (readonly, nonatomic, strong) NSString *text; 88 | @end 89 | 90 | @interface KxMovieDecoder : NSObject 91 | 92 | @property (readonly, nonatomic) NSUInteger frameWidth; 93 | @property (readonly, nonatomic) NSUInteger frameHeight; 94 | 95 | - (void)handleVieoFrameWithFrame:(AVFrame *)avframe andvideoCodecCtx:(AVCodecContext *)videoCodecCtx andKxVideoFrameYUV:(KxVideoFrameYUV *)vFrameYUV; 96 | 97 | @end 98 | 99 | 
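// Usage sketch (illustrative only; avframe, codecCtx and glView are assumed to exist in the caller and are not declared in this header):
//   KxMovieDecoder *decoder = [[KxMovieDecoder alloc] init];
//   KxVideoFrameYUV *yuv = [[KxVideoFrameYUV alloc] init];
//   [decoder handleVieoFrameWithFrame:avframe andvideoCodecCtx:codecCtx andKxVideoFrameYUV:yuv]; // copies the Y/Cb/Cr planes out of the AVFrame
//   [glView render:yuv]; // KxMovieGLView uploads the three planes as GL_LUMINANCE textures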
-------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/KxMovieDecoder.m: -------------------------------------------------------------------------------- 1 | // 2 | // KxMovieDecoder.m 3 | // kxmovie 4 | // 5 | // Created by Kolyvan on 15.10.12. 6 | // Copyright (c) 2012 Konstantin Boukreev . All rights reserved. 7 | // 8 | // https://github.com/kolyvan/kxmovie 9 | // this file is part of KxMovie 10 | // KxMovie is licenced under the LGPL v3, see lgpl-3.0.txt 11 | 12 | #import "KxMovieDecoder.h" 13 | #import 14 | #include "libavformat/avformat.h" 15 | #include "libswscale/swscale.h" 16 | #include "libswresample/swresample.h" 17 | #include "libavutil/pixdesc.h" 18 | //#import "KxAudioManager.h" 19 | 20 | //////////////////////////////////////////////////////////////////////////////// 21 | NSString * kxmovieErrorDomain = @"ru.kolyvan.kxmovie"; 22 | 23 | static NSError * kxmovieError (NSInteger code, id info) 24 | { 25 | NSDictionary *userInfo = nil; 26 | 27 | if ([info isKindOfClass: [NSDictionary class]]) { 28 | 29 | userInfo = info; 30 | 31 | } else if ([info isKindOfClass: [NSString class]]) { 32 | 33 | userInfo = @{ NSLocalizedDescriptionKey : info }; 34 | } 35 | 36 | return [NSError errorWithDomain:kxmovieErrorDomain 37 | code:code 38 | userInfo:userInfo]; 39 | } 40 | 41 | static NSString * errorMessage (kxMovieError errorCode) 42 | { 43 | switch (errorCode) { 44 | case kxMovieErrorNone: 45 | return @""; 46 | 47 | case kxMovieErrorOpenFile: 48 | return NSLocalizedString(@"Unable to open file", nil); 49 | 50 | case kxMovieErrorStreamInfoNotFound: 51 | return NSLocalizedString(@"Unable to find stream information", nil); 52 | 53 | case kxMovieErrorStreamNotFound: 54 | return NSLocalizedString(@"Unable to find stream", nil); 55 | 56 | case kxMovieErrorCodecNotFound: 57 | return NSLocalizedString(@"Unable to find codec", nil); 58 | 59 | case kxMovieErrorOpenCodec: 60 | return NSLocalizedString(@"Unable to open codec", nil); 61 | 62 | case kxMovieErrorAllocateFrame: 63 | return NSLocalizedString(@"Unable to allocate frame", nil); 64 | 65 | case kxMovieErroSetupScaler: 66 | return NSLocalizedString(@"Unable to setup scaler", nil); 67 | 68 | case kxMovieErroReSampler: 69 | return NSLocalizedString(@"Unable to setup resampler", nil); 70 | 71 | case kxMovieErroUnsupported: 72 | return NSLocalizedString(@"The ability is not supported", nil); 73 | } 74 | } 75 | 76 | static void avStreamFPSTimeBase(AVStream *st, CGFloat defaultTimeBase, CGFloat *pFPS, CGFloat *pTimeBase) 77 | { 78 | CGFloat fps, timebase; 79 | 80 | if (st->time_base.den && st->time_base.num) 81 | timebase = av_q2d(st->time_base); 82 | else if(st->codec->time_base.den && st->codec->time_base.num) 83 | timebase = av_q2d(st->codec->time_base); 84 | else 85 | timebase = defaultTimeBase; 86 | 87 | if (st->codec->ticks_per_frame != 1) { 88 | NSLog(@"WARNING: st.codec.ticks_per_frame=%d", st->codec->ticks_per_frame); 89 | //timebase *= st->codec->ticks_per_frame; 90 | } 91 | 92 | if (st->avg_frame_rate.den && st->avg_frame_rate.num) 93 | fps = av_q2d(st->avg_frame_rate); 94 | else if (st->r_frame_rate.den && st->r_frame_rate.num) 95 | fps = av_q2d(st->r_frame_rate); 96 | else 97 | fps = 1.0 / timebase; 98 | 99 | if (pFPS) 100 | *pFPS = fps; 101 | if (pTimeBase) 102 | *pTimeBase = timebase; 103 | } 104 | 105 | static NSArray *collectStreams(AVFormatContext *formatCtx, enum AVMediaType codecType) 106 | { 107 | NSMutableArray *ma = [NSMutableArray array]; 108 | for (NSInteger i = 0; i < 
formatCtx->nb_streams; ++i) 109 | if (codecType == formatCtx->streams[i]->codec->codec_type) 110 | [ma addObject: [NSNumber numberWithInteger: i]]; 111 | return [ma copy]; 112 | } 113 | 114 | static NSData * copyFrameData(UInt8 *src, int linesize, int width, int height) 115 | { 116 | width = MIN(linesize, width); 117 | NSMutableData *md = [NSMutableData dataWithLength: width * height]; 118 | Byte *dst = md.mutableBytes; 119 | for (NSUInteger i = 0; i < height; ++i) { 120 | memcpy(dst, src, width); 121 | dst += width; 122 | src += linesize; 123 | } 124 | return md; 125 | } 126 | 127 | static BOOL isNetworkPath (NSString *path) 128 | { 129 | NSRange r = [path rangeOfString:@":"]; 130 | if (r.location == NSNotFound) 131 | return NO; 132 | NSString *scheme = [path substringToIndex:r.length]; 133 | if ([scheme isEqualToString:@"file"]) 134 | return NO; 135 | return YES; 136 | } 137 | 138 | static int interrupt_callback(void *ctx); 139 | 140 | //////////////////////////////////////////////////////////////////////////////// 141 | 142 | @interface KxMovieFrame() 143 | @property (readwrite, nonatomic) CGFloat position; 144 | @property (readwrite, nonatomic) CGFloat duration; 145 | @end 146 | 147 | @implementation KxMovieFrame 148 | @end 149 | 150 | @interface KxAudioFrame() 151 | @property (readwrite, nonatomic, strong) NSData *samples; 152 | @end 153 | 154 | @implementation KxAudioFrame 155 | - (KxMovieFrameType) type { return KxMovieFrameTypeAudio; } 156 | @end 157 | 158 | @interface KxVideoFrame() 159 | @property (readwrite, nonatomic) NSUInteger width; 160 | @property (readwrite, nonatomic) NSUInteger height; 161 | @end 162 | 163 | @implementation KxVideoFrame 164 | - (KxMovieFrameType) type { return KxMovieFrameTypeVideo; } 165 | @end 166 | 167 | @interface KxVideoFrameRGB () 168 | @property (readwrite, nonatomic) NSUInteger linesize; 169 | @property (readwrite, nonatomic, strong) NSData *rgb; 170 | @end 171 | 172 | @implementation KxVideoFrameRGB 173 | - (KxVideoFrameFormat) format { return KxVideoFrameFormatRGB; } 174 | - (UIImage *) asImage 175 | { 176 | UIImage *image = nil; 177 | 178 | CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)(_rgb)); 179 | if (provider) { 180 | CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 181 | if (colorSpace) { 182 | CGImageRef imageRef = CGImageCreate(self.width, 183 | self.height, 184 | 8, 185 | 24, 186 | self.linesize, 187 | colorSpace, 188 | kCGBitmapByteOrderDefault, 189 | provider, 190 | NULL, 191 | YES, // NO 192 | kCGRenderingIntentDefault); 193 | 194 | if (imageRef) { 195 | image = [UIImage imageWithCGImage:imageRef]; 196 | CGImageRelease(imageRef); 197 | } 198 | CGColorSpaceRelease(colorSpace); 199 | } 200 | CGDataProviderRelease(provider); 201 | } 202 | 203 | return image; 204 | } 205 | @end 206 | 207 | @interface KxVideoFrameYUV() 208 | 209 | 210 | @end 211 | 212 | @implementation KxVideoFrameYUV 213 | 214 | @synthesize luma; 215 | @synthesize chromaB; 216 | @synthesize chromaR; 217 | 218 | 219 | - (KxVideoFrameFormat) format { return KxVideoFrameFormatYUV; } 220 | 221 | - (void)dealloc { 222 | [luma release]; 223 | [chromaB release]; 224 | [chromaR release]; 225 | [super dealloc]; 226 | } 227 | 228 | @end 229 | 230 | @interface KxArtworkFrame() 231 | @property (readwrite, nonatomic, strong) NSData *picture; 232 | @end 233 | 234 | @implementation KxArtworkFrame 235 | - (KxMovieFrameType) type { return KxMovieFrameTypeArtwork; } 236 | - (UIImage *) asImage 237 | { 238 | UIImage *image = nil; 239 | 240 | 
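// The attached-picture data is assumed to be a complete JPEG, so it is handed straight to CGImageCreateWithJPEGDataProvider below with no FFmpeg decoding.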
CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)(_picture)); 241 | if (provider) { 242 | 243 | CGImageRef imageRef = CGImageCreateWithJPEGDataProvider(provider, 244 | NULL, 245 | YES, 246 | kCGRenderingIntentDefault); 247 | if (imageRef) { 248 | 249 | image = [UIImage imageWithCGImage:imageRef]; 250 | CGImageRelease(imageRef); 251 | } 252 | CGDataProviderRelease(provider); 253 | } 254 | 255 | return image; 256 | 257 | } 258 | @end 259 | 260 | @interface KxSubtitleFrame() 261 | @property (readwrite, nonatomic, strong) NSString *text; 262 | @end 263 | 264 | @implementation KxSubtitleFrame 265 | - (KxMovieFrameType) type { return KxMovieFrameTypeSubtitle; } 266 | @end 267 | 268 | //////////////////////////////////////////////////////////////////////////////// 269 | 270 | @interface KxMovieDecoder () { 271 | 272 | AVCodecContext *_videoCodecCtx; 273 | 274 | } 275 | @end 276 | 277 | @implementation KxMovieDecoder 278 | 279 | @dynamic frameWidth; 280 | @dynamic frameHeight; 281 | 282 | - (NSUInteger) frameWidth 283 | { 284 | return _videoCodecCtx ? _videoCodecCtx->width : 0; 285 | } 286 | 287 | - (NSUInteger) frameHeight 288 | { 289 | return _videoCodecCtx ? _videoCodecCtx->height : 0; 290 | } 291 | - (void) dealloc 292 | { 293 | NSLog(@"%@ dealloc", self); 294 | // [self closeFile]; 295 | [super dealloc]; 296 | } 297 | 298 | - (void)handleVieoFrameWithFrame:(AVFrame *)avframe andvideoCodecCtx:(AVCodecContext *)videoCodecCtx andKxVideoFrameYUV:(KxVideoFrameYUV *)vFrameYUV{ 299 | if (!avframe->data[0] || !avframe->data[1] || !avframe->data[2]) 300 | return; 301 | 302 | [vFrameYUV.luma release]; 303 | vFrameYUV.luma = nil; 304 | [vFrameYUV.chromaB release]; 305 | vFrameYUV.chromaB = nil; 306 | [vFrameYUV.chromaR release]; 307 | vFrameYUV.chromaR = nil; 308 | 309 | vFrameYUV.luma = copyFrameData(avframe->data[0], 310 | avframe->linesize[0], 311 | videoCodecCtx->width, 312 | videoCodecCtx->height); 313 | 314 | vFrameYUV.chromaB = copyFrameData(avframe->data[1], 315 | avframe->linesize[1], 316 | videoCodecCtx->width / 2, 317 | videoCodecCtx->height / 2); 318 | 319 | vFrameYUV.chromaR = copyFrameData(avframe->data[2], 320 | avframe->linesize[2], 321 | videoCodecCtx->width / 2, 322 | videoCodecCtx->height / 2); 323 | 324 | 325 | vFrameYUV.width = videoCodecCtx->width; 326 | vFrameYUV.height = videoCodecCtx->height; 327 | 328 | _videoCodecCtx = videoCodecCtx; 329 | 330 | } 331 | 332 | @end 333 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/KxMovieGLView.h: -------------------------------------------------------------------------------- 1 | // 2 | // ESGLView.h 3 | // kxmovie 4 | // 5 | // Created by Kolyvan on 22.10.12. 6 | // Copyright (c) 2012 Konstantin Boukreev . All rights reserved. 7 | // 8 | // https://github.com/kolyvan/kxmovie 9 | // this file is part of KxMovie 10 | // KxMovie is licenced under the LGPL v3, see lgpl-3.0.txt 11 | 12 | #import 13 | 14 | @class KxVideoFrame; 15 | @class KxMovieDecoder; 16 | 17 | @interface KxMovieGLView : UIView 18 | 19 | - (id) initWithFrame:(CGRect)frame 20 | decoder: (KxMovieDecoder *) decoder; 21 | 22 | - (void) render: (KxVideoFrame *) frame; 23 | 24 | @end 25 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/KxMovieGLView.m: -------------------------------------------------------------------------------- 1 | // 2 | // KxMovieGLView.m 3 | // kxmovie 4 | // 5 | // Created by Kolyvan on 22.10.12. 
6 | // Copyright (c) 2012 Konstantin Boukreev . All rights reserved. 7 | // 8 | // https://github.com/kolyvan/kxmovie 9 | // this file is part of KxMovie 10 | // KxMovie is licenced under the LGPL v3, see lgpl-3.0.txt 11 | 12 | #import "KxMovieGLView.h" 13 | #import 14 | #import 15 | #import 16 | #import 17 | #import 18 | #import "KxMovieDecoder.h" 19 | 20 | ////////////////////////////////////////////////////////// 21 | 22 | #pragma mark - shaders 23 | 24 | #define STRINGIZE(x) #x 25 | #define STRINGIZE2(x) STRINGIZE(x) 26 | #define SHADER_STRING(text) @ STRINGIZE2(text) 27 | 28 | NSString *const vertexShaderString = SHADER_STRING 29 | ( 30 | attribute vec4 position; 31 | attribute vec2 texcoord; 32 | uniform mat4 modelViewProjectionMatrix; 33 | varying vec2 v_texcoord; 34 | 35 | void main() 36 | { 37 | gl_Position = modelViewProjectionMatrix * position; 38 | v_texcoord = texcoord.xy; 39 | } 40 | ); 41 | 42 | NSString *const rgbFragmentShaderString = SHADER_STRING 43 | ( 44 | varying highp vec2 v_texcoord; 45 | uniform sampler2D s_texture; 46 | 47 | void main() 48 | { 49 | gl_FragColor = texture2D(s_texture, v_texcoord); 50 | } 51 | ); 52 | 53 | NSString *const yuvFragmentShaderString = SHADER_STRING 54 | ( 55 | varying highp vec2 v_texcoord; 56 | uniform sampler2D s_texture_y; 57 | uniform sampler2D s_texture_u; 58 | uniform sampler2D s_texture_v; 59 | 60 | void main() 61 | { 62 | highp float y = texture2D(s_texture_y, v_texcoord).r; 63 | highp float u = texture2D(s_texture_u, v_texcoord).r - 0.5; 64 | highp float v = texture2D(s_texture_v, v_texcoord).r - 0.5; 65 | 66 | highp float r = y + 1.402 * v; 67 | highp float g = y - 0.344 * u - 0.714 * v; 68 | highp float b = y + 1.772 * u; 69 | 70 | gl_FragColor = vec4(r,g,b,1.0); 71 | } 72 | ); 73 | 74 | static BOOL validateProgram(GLuint prog) 75 | { 76 | GLint status; 77 | 78 | glValidateProgram(prog); 79 | 80 | #ifdef DEBUG 81 | GLint logLength; 82 | glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength); 83 | if (logLength > 0) 84 | { 85 | GLchar *log = (GLchar *)malloc(logLength); 86 | glGetProgramInfoLog(prog, logLength, &logLength, log); 87 | NSLog(@"Program validate log:\n%s", log); 88 | free(log); 89 | } 90 | #endif 91 | 92 | glGetProgramiv(prog, GL_VALIDATE_STATUS, &status); 93 | if (status == GL_FALSE) { 94 | NSLog(@"Failed to validate program %d", prog); 95 | return NO; 96 | } 97 | 98 | return YES; 99 | } 100 | 101 | static GLuint compileShader(GLenum type, NSString *shaderString) 102 | { 103 | GLint status; 104 | const GLchar *sources = (GLchar *)shaderString.UTF8String; 105 | 106 | GLuint shader = glCreateShader(type); 107 | if (shader == 0 || shader == GL_INVALID_ENUM) { 108 | NSLog(@"Failed to create shader %d", type); 109 | return 0; 110 | } 111 | 112 | glShaderSource(shader, 1, &sources, NULL); 113 | glCompileShader(shader); 114 | 115 | #ifdef DEBUG 116 | GLint logLength; 117 | glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength); 118 | if (logLength > 0) 119 | { 120 | GLchar *log = (GLchar *)malloc(logLength); 121 | glGetShaderInfoLog(shader, logLength, &logLength, log); 122 | NSLog(@"Shader compile log:\n%s", log); 123 | free(log); 124 | } 125 | #endif 126 | 127 | glGetShaderiv(shader, GL_COMPILE_STATUS, &status); 128 | if (status == GL_FALSE) { 129 | glDeleteShader(shader); 130 | NSLog(@"Failed to compile shader:\n"); 131 | return 0; 132 | } 133 | 134 | return shader; 135 | } 136 | 137 | static void mat4f_LoadOrtho(float left, float right, float bottom, float top, float near, float far, float* mout) 138 | { 139 | float r_l 
= right - left; 140 | float t_b = top - bottom; 141 | float f_n = far - near; 142 | float tx = - (right + left) / (right - left); 143 | float ty = - (top + bottom) / (top - bottom); 144 | float tz = - (far + near) / (far - near); 145 | 146 | mout[0] = 2.0f / r_l; 147 | mout[1] = 0.0f; 148 | mout[2] = 0.0f; 149 | mout[3] = 0.0f; 150 | 151 | mout[4] = 0.0f; 152 | mout[5] = 2.0f / t_b; 153 | mout[6] = 0.0f; 154 | mout[7] = 0.0f; 155 | 156 | mout[8] = 0.0f; 157 | mout[9] = 0.0f; 158 | mout[10] = -2.0f / f_n; 159 | mout[11] = 0.0f; 160 | 161 | mout[12] = tx; 162 | mout[13] = ty; 163 | mout[14] = tz; 164 | mout[15] = 1.0f; 165 | } 166 | 167 | ////////////////////////////////////////////////////////// 168 | 169 | #pragma mark - frame renderers 170 | 171 | @protocol KxMovieGLRenderer 172 | - (BOOL) isValid; 173 | - (NSString *) fragmentShader; 174 | - (void) resolveUniforms: (GLuint) program; 175 | - (void) setFrame: (KxVideoFrame *) frame; 176 | - (BOOL) prepareRender; 177 | @end 178 | 179 | @interface KxMovieGLRenderer_RGB : NSObject { 180 | 181 | GLint _uniformSampler; 182 | GLuint _texture; 183 | } 184 | @end 185 | 186 | @implementation KxMovieGLRenderer_RGB 187 | 188 | - (BOOL) isValid 189 | { 190 | return (_texture != 0); 191 | } 192 | 193 | - (NSString *) fragmentShader 194 | { 195 | return rgbFragmentShaderString; 196 | } 197 | 198 | - (void) resolveUniforms: (GLuint) program 199 | { 200 | _uniformSampler = glGetUniformLocation(program, "s_texture"); 201 | } 202 | 203 | - (void) setFrame: (KxVideoFrame *) frame 204 | { 205 | KxVideoFrameRGB *rgbFrame = (KxVideoFrameRGB *)frame; 206 | 207 | assert(rgbFrame.rgb.length == rgbFrame.width * rgbFrame.height * 3); 208 | 209 | glPixelStorei(GL_UNPACK_ALIGNMENT, 1); 210 | 211 | if (0 == _texture) 212 | glGenTextures(1, &_texture); 213 | 214 | glBindTexture(GL_TEXTURE_2D, _texture); 215 | 216 | glTexImage2D(GL_TEXTURE_2D, 217 | 0, 218 | GL_RGB, 219 | frame.width, 220 | frame.height, 221 | 0, 222 | GL_RGB, 223 | GL_UNSIGNED_BYTE, 224 | rgbFrame.rgb.bytes); 225 | 226 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 227 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 228 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 229 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 230 | } 231 | 232 | - (BOOL) prepareRender 233 | { 234 | if (_texture == 0) 235 | return NO; 236 | 237 | glActiveTexture(GL_TEXTURE0); 238 | glBindTexture(GL_TEXTURE_2D, _texture); 239 | glUniform1i(_uniformSampler, 0); 240 | 241 | return YES; 242 | } 243 | 244 | - (void) dealloc 245 | { 246 | if (_texture) { 247 | glDeleteTextures(1, &_texture); 248 | _texture = 0; 249 | } 250 | [super dealloc]; 251 | } 252 | 253 | @end 254 | 255 | @interface KxMovieGLRenderer_YUV : NSObject { 256 | 257 | GLint _uniformSamplers[3]; 258 | GLuint _textures[3]; 259 | } 260 | @end 261 | 262 | @implementation KxMovieGLRenderer_YUV 263 | 264 | - (BOOL) isValid 265 | { 266 | return (_textures[0] != 0); 267 | } 268 | 269 | - (NSString *) fragmentShader 270 | { 271 | return yuvFragmentShaderString; 272 | } 273 | 274 | - (void) resolveUniforms: (GLuint) program 275 | { 276 | _uniformSamplers[0] = glGetUniformLocation(program, "s_texture_y"); 277 | _uniformSamplers[1] = glGetUniformLocation(program, "s_texture_u"); 278 | _uniformSamplers[2] = glGetUniformLocation(program, "s_texture_v"); 279 | } 280 | 281 | - (void) setFrame: (KxVideoFrame *) frame 282 | { 283 | KxVideoFrameYUV *yuvFrame = (KxVideoFrameYUV *)frame; 284 | 285 | // 
assert(yuvFrame.luma.length == yuvFrame.width * yuvFrame.height); 286 | // assert(yuvFrame.chromaB.length == (yuvFrame.width * yuvFrame.height) / 4); 287 | // assert(yuvFrame.chromaR.length == (yuvFrame.width * yuvFrame.height) / 4); 288 | 289 | const NSUInteger frameWidth = frame.width; 290 | const NSUInteger frameHeight = frame.height; 291 | 292 | glPixelStorei(GL_UNPACK_ALIGNMENT, 1); 293 | 294 | if (0 == _textures[0]) 295 | glGenTextures(3, _textures); 296 | 297 | const UInt8 *pixels[3] = { yuvFrame.luma.bytes, yuvFrame.chromaB.bytes, yuvFrame.chromaR.bytes }; 298 | const NSUInteger widths[3] = { frameWidth, frameWidth / 2, frameWidth / 2 }; 299 | const NSUInteger heights[3] = { frameHeight, frameHeight / 2, frameHeight / 2 }; 300 | 301 | for (int i = 0; i < 3; ++i) { 302 | 303 | glBindTexture(GL_TEXTURE_2D, _textures[i]); 304 | 305 | glTexImage2D(GL_TEXTURE_2D, 306 | 0, 307 | GL_LUMINANCE, 308 | widths[i], 309 | heights[i], 310 | 0, 311 | GL_LUMINANCE, 312 | GL_UNSIGNED_BYTE, 313 | pixels[i]); 314 | 315 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 316 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 317 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 318 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 319 | } 320 | } 321 | 322 | - (BOOL) prepareRender 323 | { 324 | if (_textures[0] == 0) 325 | return NO; 326 | 327 | for (int i = 0; i < 3; ++i) { 328 | glActiveTexture(GL_TEXTURE0 + i); 329 | glBindTexture(GL_TEXTURE_2D, _textures[i]); 330 | glUniform1i(_uniformSamplers[i], i); 331 | } 332 | 333 | return YES; 334 | } 335 | 336 | - (void) dealloc 337 | { 338 | if (_textures[0]) 339 | glDeleteTextures(3, _textures); 340 | 341 | [super dealloc]; 342 | } 343 | 344 | @end 345 | 346 | ////////////////////////////////////////////////////////// 347 | 348 | #pragma mark - gl view 349 | 350 | enum { 351 | ATTRIBUTE_VERTEX, 352 | ATTRIBUTE_TEXCOORD, 353 | }; 354 | 355 | @implementation KxMovieGLView { 356 | 357 | KxMovieDecoder *_decoder; 358 | EAGLContext *_context; 359 | GLuint _framebuffer; 360 | GLuint _renderbuffer; 361 | GLint _backingWidth; 362 | GLint _backingHeight; 363 | GLuint _program; 364 | GLint _uniformMatrix; 365 | GLfloat _vertices[8]; 366 | 367 | id _renderer; 368 | } 369 | 370 | + (Class) layerClass 371 | { 372 | return [CAEAGLLayer class]; 373 | } 374 | 375 | - (id) initWithFrame:(CGRect)frame 376 | decoder: (KxMovieDecoder *) decoder 377 | { 378 | self = [super initWithFrame:frame]; 379 | if (self) { 380 | 381 | _decoder = decoder; 382 | 383 | _renderer = [[KxMovieGLRenderer_YUV alloc] init]; 384 | 385 | CAEAGLLayer *eaglLayer = (CAEAGLLayer*) self.layer; 386 | eaglLayer.opaque = YES; 387 | eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys: 388 | [NSNumber numberWithBool:FALSE], kEAGLDrawablePropertyRetainedBacking, 389 | kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, 390 | nil]; 391 | 392 | _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; 393 | 394 | if (!_context || 395 | ![EAGLContext setCurrentContext:_context]) { 396 | 397 | NSLog(@"failed to setup EAGLContext"); 398 | self = nil; 399 | return nil; 400 | } 401 | 402 | glGenFramebuffers(1, &_framebuffer); 403 | glGenRenderbuffers(1, &_renderbuffer); 404 | glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer); 405 | glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer); 406 | [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)self.layer]; 407 | 
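// renderbufferStorage:fromDrawable: sizes the renderbuffer from the CAEAGLLayer, so the two queries below read back the actual backing width and height in pixels.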
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth); 408 | glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight); 409 | glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _renderbuffer); 410 | 411 | GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); 412 | if (status != GL_FRAMEBUFFER_COMPLETE) { 413 | 414 | NSLog(@"failed to make complete framebuffer object %x", status); 415 | self = nil; 416 | return nil; 417 | } 418 | 419 | GLenum glError = glGetError(); 420 | if (GL_NO_ERROR != glError) { 421 | 422 | NSLog(@"failed to setup GL %x", glError); 423 | self = nil; 424 | return nil; 425 | } 426 | 427 | if (![self loadShaders]) { 428 | 429 | self = nil; 430 | return nil; 431 | } 432 | 433 | _vertices[0] = -1.0f; // x0 434 | _vertices[1] = -1.0f; // y0 435 | _vertices[2] = 1.0f; // .. 436 | _vertices[3] = -1.0f; 437 | _vertices[4] = -1.0f; 438 | _vertices[5] = 1.0f; 439 | _vertices[6] = 1.0f; // x3 440 | _vertices[7] = 1.0f; // y3 441 | 442 | NSLog(@"OK setup GL"); 443 | } 444 | 445 | return self; 446 | } 447 | 448 | - (void)dealloc 449 | { 450 | _renderer = nil; 451 | 452 | if (_framebuffer) { 453 | glDeleteFramebuffers(1, &_framebuffer); 454 | _framebuffer = 0; 455 | } 456 | 457 | if (_renderbuffer) { 458 | glDeleteRenderbuffers(1, &_renderbuffer); 459 | _renderbuffer = 0; 460 | } 461 | 462 | if (_program) { 463 | glDeleteProgram(_program); 464 | _program = 0; 465 | } 466 | 467 | if ([EAGLContext currentContext] == _context) { 468 | [EAGLContext setCurrentContext:nil]; 469 | } 470 | 471 | _context = nil; 472 | 473 | [super dealloc]; 474 | } 475 | 476 | - (void)layoutSubviews 477 | { 478 | glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer); 479 | [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)self.layer]; 480 | glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth); 481 | glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight); 482 | 483 | GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); 484 | if (status != GL_FRAMEBUFFER_COMPLETE) { 485 | 486 | NSLog(@"failed to make complete framebuffer object %x", status); 487 | 488 | } else { 489 | 490 | NSLog(@"OK setup GL framebuffer %d:%d", _backingWidth, _backingHeight); 491 | } 492 | 493 | [self updateVertices]; 494 | [self render: nil]; 495 | } 496 | 497 | - (void)setContentMode:(UIViewContentMode)contentMode 498 | { 499 | [super setContentMode:contentMode]; 500 | [self updateVertices]; 501 | if (_renderer.isValid) 502 | [self render:nil]; 503 | } 504 | 505 | - (BOOL)loadShaders 506 | { 507 | BOOL result = NO; 508 | GLuint vertShader = 0, fragShader = 0; 509 | 510 | _program = glCreateProgram(); 511 | 512 | vertShader = compileShader(GL_VERTEX_SHADER, vertexShaderString); 513 | if (!vertShader) 514 | goto exit; 515 | 516 | fragShader = compileShader(GL_FRAGMENT_SHADER, _renderer.fragmentShader); 517 | if (!fragShader) 518 | goto exit; 519 | 520 | glAttachShader(_program, vertShader); 521 | glAttachShader(_program, fragShader); 522 | glBindAttribLocation(_program, ATTRIBUTE_VERTEX, "position"); 523 | glBindAttribLocation(_program, ATTRIBUTE_TEXCOORD, "texcoord"); 524 | 525 | glLinkProgram(_program); 526 | 527 | GLint status; 528 | glGetProgramiv(_program, GL_LINK_STATUS, &status); 529 | if (status == GL_FALSE) { 530 | NSLog(@"Failed to link program %d", _program); 531 | goto exit; 532 | } 533 | 534 | result = validateProgram(_program); 535 | 536 | 
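// With the program linked, cache the model-view-projection uniform and let the active renderer (RGB or YUV) resolve its sampler uniforms; render: uses both on every frame.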
_uniformMatrix = glGetUniformLocation(_program, "modelViewProjectionMatrix"); 537 | [_renderer resolveUniforms:_program]; 538 | 539 | exit: 540 | 541 | if (vertShader) 542 | glDeleteShader(vertShader); 543 | if (fragShader) 544 | glDeleteShader(fragShader); 545 | 546 | if (result) { 547 | 548 | NSLog(@"OK setup GL programm"); 549 | 550 | } else { 551 | 552 | glDeleteProgram(_program); 553 | _program = 0; 554 | } 555 | 556 | return result; 557 | } 558 | 559 | - (void)updateVertices 560 | { 561 | const BOOL fit = (self.contentMode == UIViewContentModeScaleAspectFit); 562 | const float width = _decoder.frameWidth; 563 | const float height = _decoder.frameHeight; 564 | const float dH = (float)_backingHeight / height; 565 | const float dW = (float)_backingWidth / width; 566 | const float dd = fit ? MIN(dH, dW) : MAX(dH, dW); 567 | const float h = (height * dd / (float)_backingHeight); 568 | const float w = (width * dd / (float)_backingWidth ); 569 | 570 | _vertices[0] = - w; 571 | _vertices[1] = - h; 572 | _vertices[2] = w; 573 | _vertices[3] = - h; 574 | _vertices[4] = - w; 575 | _vertices[5] = h; 576 | _vertices[6] = w; 577 | _vertices[7] = h; 578 | } 579 | 580 | - (void)render: (KxVideoFrame *) frame 581 | { 582 | [self updateVertices]; 583 | static const GLfloat texCoords[] = { 584 | 0.0f, 1.0f, 585 | 1.0f, 1.0f, 586 | 0.0f, 0.0f, 587 | 1.0f, 0.0f, 588 | }; 589 | 590 | [EAGLContext setCurrentContext:_context]; 591 | 592 | glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer); 593 | glViewport(0, 0, _backingWidth, _backingHeight); 594 | glClearColor(0.0f, 0.0f, 0.0f, 1.0f); 595 | glClear(GL_COLOR_BUFFER_BIT); 596 | glUseProgram(_program); 597 | 598 | if (frame) { 599 | [_renderer setFrame:frame]; 600 | } 601 | 602 | if ([_renderer prepareRender]) { 603 | 604 | GLfloat modelviewProj[16]; 605 | mat4f_LoadOrtho(-1.0f, 1.0f, -1.0f, 1.0f, -1.0f, 1.0f, modelviewProj); 606 | glUniformMatrix4fv(_uniformMatrix, 1, GL_FALSE, modelviewProj); 607 | 608 | glVertexAttribPointer(ATTRIBUTE_VERTEX, 2, GL_FLOAT, 0, 0, _vertices); 609 | glEnableVertexAttribArray(ATTRIBUTE_VERTEX); 610 | glVertexAttribPointer(ATTRIBUTE_TEXCOORD, 2, GL_FLOAT, 0, 0, texCoords); 611 | glEnableVertexAttribArray(ATTRIBUTE_TEXCOORD); 612 | 613 | #if 0 614 | if (!validateProgram(_program)) 615 | { 616 | NSLog(@"Failed to validate program"); 617 | return; 618 | } 619 | #endif 620 | 621 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); 622 | } 623 | 624 | glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer); 625 | [_context presentRenderbuffer:GL_RENDERBUFFER]; 626 | } 627 | 628 | @end 629 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/TestPlayWithFFMPEGAndSDL-Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleDisplayName 8 | ffplay播放器 9 | CFBundleExecutable 10 | ${EXECUTABLE_NAME} 11 | CFBundleIconFiles 12 | 13 | CFBundleIdentifier 14 | 15 | CFBundleInfoDictionaryVersion 16 | 6.0 17 | CFBundleName 18 | FFMPEG播放器 19 | CFBundlePackageType 20 | APPL 21 | CFBundleShortVersionString 22 | 1.0 23 | CFBundleSignature 24 | ???? 
25 | CFBundleVersion 26 | 1.0 27 | LSRequiresIPhoneOS 28 | 29 | UIRequiredDeviceCapabilities 30 | 31 | armv7 32 | 33 | UISupportedInterfaceOrientations 34 | 35 | UIInterfaceOrientationPortrait 36 | UIInterfaceOrientationLandscapeLeft 37 | UIInterfaceOrientationLandscapeRight 38 | UIInterfaceOrientationPortraitUpsideDown 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/TestPlayWithFFMPEGAndSDL-Prefix.pch: -------------------------------------------------------------------------------- 1 | // 2 | // Prefix header for all source files of the 'TestPlayWithFFMPEGAndSDL' target in the 'TestPlayWithFFMPEGAndSDL' project 3 | // 4 | 5 | #import 6 | 7 | #ifndef __IPHONE_4_0 8 | #warning "This project uses features only available in iOS SDK 4.0 and later." 9 | #endif 10 | 11 | #ifdef __OBJC__ 12 | #import 13 | #import 14 | #endif 15 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/en.lproj/FSVideoPlayViewController.xib: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 1280 5 | 11C25 6 | 1919 7 | 1138.11 8 | 566.00 9 | 10 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 11 | 916 12 | 13 | 14 | IBProxyObject 15 | IBUIView 16 | 17 | 18 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 19 | 20 | 21 | PluginDependencyRecalculationVersion 22 | 23 | 24 | 25 | 26 | IBFilesOwner 27 | IBCocoaTouchFramework 28 | 29 | 30 | IBFirstResponder 31 | IBCocoaTouchFramework 32 | 33 | 34 | 35 | 274 36 | {{0, 20}, {320, 460}} 37 | 38 | 39 | 40 | 3 41 | MC43NQA 42 | 43 | 2 44 | 45 | 46 | NO 47 | 48 | IBCocoaTouchFramework 49 | 50 | 51 | 52 | 53 | 54 | 55 | view 56 | 57 | 58 | 59 | 7 60 | 61 | 62 | 63 | 64 | 65 | 0 66 | 67 | 68 | 69 | 70 | 71 | -1 72 | 73 | 74 | File's Owner 75 | 76 | 77 | -2 78 | 79 | 80 | 81 | 82 | 6 83 | 84 | 85 | 86 | 87 | 88 | 89 | FSVideoPlayViewController 90 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 91 | UIResponder 92 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 93 | com.apple.InterfaceBuilder.IBCocoaTouchPlugin 94 | 95 | 96 | 97 | 98 | 99 | 7 100 | 101 | 102 | 103 | 104 | FSVideoPlayViewController 105 | UIViewController 106 | 107 | IBProjectSource 108 | ./Classes/FSVideoPlayViewController.h 109 | 110 | 111 | 112 | 113 | 0 114 | IBCocoaTouchFramework 115 | YES 116 | 3 117 | 916 118 | 119 | 120 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/en.lproj/InfoPlist.strings: -------------------------------------------------------------------------------- 1 | /* Localized versions of Info.plist keys */ 2 | 3 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/ffmpegclasses/cmdutils.c: -------------------------------------------------------------------------------- 1 | /* 2 | * Various utilities for command line tools 3 | * Copyright (c) 2000-2003 Fabrice Bellard 4 | * 5 | * This file is part of FFmpeg. 6 | * 7 | * FFmpeg is free software; you can redistribute it and/or 8 | * modify it under the terms of the GNU Lesser General Public 9 | * License as published by the Free Software Foundation; either 10 | * version 2.1 of the License, or (at your option) any later version. 11 | * 12 | * FFmpeg is distributed in the hope that it will be useful, 13 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU 15 | * Lesser General Public License for more details. 16 | * 17 | * You should have received a copy of the GNU Lesser General Public 18 | * License along with FFmpeg; if not, write to the Free Software 19 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 20 | */ 21 | 22 | #include 23 | #include 24 | #include 25 | #include 26 | 27 | /* Include only the enabled headers since some compilers (namely, Sun 28 | Studio) will not omit unused inline functions and create undefined 29 | references to libraries that are not being built. */ 30 | 31 | #include "config.h" 32 | #include "compat/va_copy.h" 33 | #include "libavformat/avformat.h" 34 | #include "libavfilter/avfilter.h" 35 | #include "libavdevice/avdevice.h" 36 | #include "libavresample/avresample.h" 37 | #include "libswscale/swscale.h" 38 | #include "libswresample/swresample.h" 39 | #if CONFIG_POSTPROC 40 | #include "libpostproc/postprocess.h" 41 | #endif 42 | #include "libavutil/avassert.h" 43 | #include "libavutil/avstring.h" 44 | #include "libavutil/bprint.h" 45 | #include "libavutil/mathematics.h" 46 | #include "libavutil/imgutils.h" 47 | #include "libavutil/parseutils.h" 48 | #include "libavutil/pixdesc.h" 49 | #include "libavutil/eval.h" 50 | #include "libavutil/dict.h" 51 | #include "libavutil/opt.h" 52 | #include "cmdutils.h" 53 | #include "version.h" 54 | #if CONFIG_NETWORK 55 | #include "libavformat/network.h" 56 | #endif 57 | #if HAVE_SYS_RESOURCE_H 58 | #include 59 | #include 60 | #endif 61 | 62 | static int init_report(const char *env); 63 | 64 | struct SwsContext *sws_opts; 65 | SwrContext *swr_opts; 66 | AVDictionary *format_opts, *codec_opts; 67 | 68 | const int this_year = 2012; 69 | 70 | static FILE *report_file; 71 | 72 | void init_opts(void) 73 | { 74 | 75 | if(CONFIG_SWSCALE) 76 | sws_opts = sws_getContext(16, 16, 0, 16, 16, 0, SWS_BICUBIC, 77 | NULL, NULL, NULL); 78 | 79 | if(CONFIG_SWRESAMPLE) 80 | swr_opts = swr_alloc(); 81 | } 82 | 83 | void uninit_opts(void) 84 | { 85 | #if CONFIG_SWSCALE 86 | sws_freeContext(sws_opts); 87 | sws_opts = NULL; 88 | #endif 89 | 90 | if(CONFIG_SWRESAMPLE) 91 | swr_free(&swr_opts); 92 | 93 | av_dict_free(&format_opts); 94 | av_dict_free(&codec_opts); 95 | } 96 | 97 | void log_callback_help(void *ptr, int level, const char *fmt, va_list vl) 98 | { 99 | vfprintf(stdout, fmt, vl); 100 | } 101 | 102 | static void log_callback_report(void *ptr, int level, const char *fmt, va_list vl) 103 | { 104 | va_list vl2; 105 | char line[1024]; 106 | static int print_prefix = 1; 107 | 108 | va_copy(vl2, vl); 109 | av_log_default_callback(ptr, level, fmt, vl); 110 | av_log_format_line(ptr, level, fmt, vl2, line, sizeof(line), &print_prefix); 111 | va_end(vl2); 112 | fputs(line, report_file); 113 | fflush(report_file); 114 | } 115 | 116 | double parse_number_or_die(const char *context, const char *numstr, int type, 117 | double min, double max) 118 | { 119 | char *tail; 120 | const char *error; 121 | double d = av_strtod(numstr, &tail); 122 | if (*tail) 123 | error = "Expected number for %s but found: %s\n"; 124 | else if (d < min || d > max) 125 | error = "The value for %s was %s which is not within %f - %f\n"; 126 | else if (type == OPT_INT64 && (int64_t)d != d) 127 | error = "Expected int64 for %s but found %s\n"; 128 | else if (type == OPT_INT && (int)d != d) 129 | error = "Expected int for %s but found %s\n"; 130 | else 131 | return d; 132 | av_log(NULL, AV_LOG_FATAL, error, context, numstr, min, max); 133 | exit(1); 134 | return 0; 135 | } 136 | 137 | 
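/*
 * Editor's note -- illustrative sketch only, not part of the original
 * cmdutils.c. It shows how parse_number_or_die()/parse_time_or_die() are
 * typically used from an OptionDef callback; the callback name and the
 * "-ss"-style option it stands in for are hypothetical.
 */
#if 0   /* example only, never compiled */
static int example_opt_start_time(void *optctx, const char *opt, const char *arg)
{
    /* Accepts a duration such as "01:23:45.5" or plain seconds and returns
     * microseconds; on malformed input it logs a fatal message and exits,
     * so the caller needs no error handling.  parse_number_or_die() follows
     * the same pattern for plain numeric options, adding a range check. */
    int64_t start_us = parse_time_or_die(opt, arg, 1);
    (void)optctx;
    (void)start_us;
    return 0;
}
#endif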
int64_t parse_time_or_die(const char *context, const char *timestr, 138 | int is_duration) 139 | { 140 | int64_t us; 141 | if (av_parse_time(&us, timestr, is_duration) < 0) { 142 | av_log(NULL, AV_LOG_FATAL, "Invalid %s specification for %s: %s\n", 143 | is_duration ? "duration" : "date", context, timestr); 144 | exit(1); 145 | } 146 | return us; 147 | } 148 | 149 | void show_help_options(const OptionDef *options, const char *msg, int req_flags, 150 | int rej_flags, int alt_flags) 151 | { 152 | const OptionDef *po; 153 | int first; 154 | 155 | first = 1; 156 | for (po = options; po->name != NULL; po++) { 157 | char buf[64]; 158 | 159 | if (((po->flags & req_flags) != req_flags) || 160 | (alt_flags && !(po->flags & alt_flags)) || 161 | (po->flags & rej_flags)) 162 | continue; 163 | 164 | if (first) { 165 | printf("%s\n", msg); 166 | first = 0; 167 | } 168 | av_strlcpy(buf, po->name, sizeof(buf)); 169 | if (po->argname) { 170 | av_strlcat(buf, " ", sizeof(buf)); 171 | av_strlcat(buf, po->argname, sizeof(buf)); 172 | } 173 | printf("-%-17s %s\n", buf, po->help); 174 | } 175 | printf("\n"); 176 | } 177 | 178 | void show_help_children(const AVClass *class, int flags) 179 | { 180 | const AVClass *child = NULL; 181 | if (class->option) { 182 | av_opt_show2(&class, NULL, flags, 0); 183 | printf("\n"); 184 | } 185 | 186 | while (child = av_opt_child_class_next(class, child)) 187 | show_help_children(child, flags); 188 | } 189 | 190 | static const OptionDef *find_option(const OptionDef *po, const char *name) 191 | { 192 | const char *p = strchr(name, ':'); 193 | int len = p ? p - name : strlen(name); 194 | 195 | while (po->name != NULL) { 196 | if (!strncmp(name, po->name, len) && strlen(po->name) == len) 197 | break; 198 | po++; 199 | } 200 | return po; 201 | } 202 | 203 | #if defined(_WIN32) && !defined(__MINGW32CE__) 204 | #include 205 | #include 206 | /* Will be leaked on exit */ 207 | static char** win32_argv_utf8 = NULL; 208 | static int win32_argc = 0; 209 | 210 | /** 211 | * Prepare command line arguments for executable. 212 | * For Windows - perform wide-char to UTF-8 conversion. 213 | * Input arguments should be main() function arguments. 214 | * @param argc_ptr Arguments number (including executable) 215 | * @param argv_ptr Arguments list. 
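 * On platforms other than Windows this function is a no-op (see the inline
 * stub in the #else branch below).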
216 | */ 217 | static void prepare_app_arguments(int *argc_ptr, char ***argv_ptr) 218 | { 219 | char *argstr_flat; 220 | wchar_t **argv_w; 221 | int i, buffsize = 0, offset = 0; 222 | 223 | if (win32_argv_utf8) { 224 | *argc_ptr = win32_argc; 225 | *argv_ptr = win32_argv_utf8; 226 | return; 227 | } 228 | 229 | win32_argc = 0; 230 | argv_w = CommandLineToArgvW(GetCommandLineW(), &win32_argc); 231 | if (win32_argc <= 0 || !argv_w) 232 | return; 233 | 234 | /* determine the UTF-8 buffer size (including NULL-termination symbols) */ 235 | for (i = 0; i < win32_argc; i++) 236 | buffsize += WideCharToMultiByte(CP_UTF8, 0, argv_w[i], -1, 237 | NULL, 0, NULL, NULL); 238 | 239 | win32_argv_utf8 = av_mallocz(sizeof(char *) * (win32_argc + 1) + buffsize); 240 | argstr_flat = (char *)win32_argv_utf8 + sizeof(char *) * (win32_argc + 1); 241 | if (win32_argv_utf8 == NULL) { 242 | LocalFree(argv_w); 243 | return; 244 | } 245 | 246 | for (i = 0; i < win32_argc; i++) { 247 | win32_argv_utf8[i] = &argstr_flat[offset]; 248 | offset += WideCharToMultiByte(CP_UTF8, 0, argv_w[i], -1, 249 | &argstr_flat[offset], 250 | buffsize - offset, NULL, NULL); 251 | } 252 | win32_argv_utf8[i] = NULL; 253 | LocalFree(argv_w); 254 | 255 | *argc_ptr = win32_argc; 256 | *argv_ptr = win32_argv_utf8; 257 | } 258 | #else 259 | static inline void prepare_app_arguments(int *argc_ptr, char ***argv_ptr) 260 | { 261 | /* nothing to do */ 262 | } 263 | #endif /* WIN32 && !__MINGW32CE__ */ 264 | 265 | int parse_option(void *optctx, const char *opt, const char *arg, 266 | const OptionDef *options) 267 | { 268 | const OptionDef *po; 269 | int bool_val = 1; 270 | int *dstcount; 271 | void *dst; 272 | 273 | po = find_option(options, opt); 274 | if (!po->name && opt[0] == 'n' && opt[1] == 'o') { 275 | /* handle 'no' bool option */ 276 | po = find_option(options, opt + 2); 277 | if ((po->name && (po->flags & OPT_BOOL))) 278 | bool_val = 0; 279 | } 280 | if (!po->name) 281 | po = find_option(options, "default"); 282 | if (!po->name) { 283 | av_log(NULL, AV_LOG_ERROR, "Unrecognized option '%s'\n", opt); 284 | return AVERROR(EINVAL); 285 | } 286 | if (po->flags & HAS_ARG && !arg) { 287 | av_log(NULL, AV_LOG_ERROR, "Missing argument for option '%s'\n", opt); 288 | return AVERROR(EINVAL); 289 | } 290 | 291 | /* new-style options contain an offset into optctx, old-style address of 292 | * a global var*/ 293 | dst = po->flags & (OPT_OFFSET | OPT_SPEC) ? (uint8_t *)optctx + po->u.off 294 | : po->u.dst_ptr; 295 | 296 | if (po->flags & OPT_SPEC) { 297 | SpecifierOpt **so = dst; 298 | char *p = strchr(opt, ':'); 299 | 300 | dstcount = (int *)(so + 1); 301 | *so = grow_array(*so, sizeof(**so), dstcount, *dstcount + 1); 302 | (*so)[*dstcount - 1].specifier = av_strdup(p ? 
p + 1 : ""); 303 | dst = &(*so)[*dstcount - 1].u; 304 | } 305 | 306 | if (po->flags & OPT_STRING) { 307 | char *str; 308 | str = av_strdup(arg); 309 | // av_freep(dst); 310 | *(char **)dst = str; 311 | } else if (po->flags & OPT_BOOL) { 312 | *(int *)dst = bool_val; 313 | } else if (po->flags & OPT_INT) { 314 | *(int *)dst = parse_number_or_die(opt, arg, OPT_INT64, INT_MIN, INT_MAX); 315 | } else if (po->flags & OPT_INT64) { 316 | *(int64_t *)dst = parse_number_or_die(opt, arg, OPT_INT64, INT64_MIN, INT64_MAX); 317 | } else if (po->flags & OPT_TIME) { 318 | *(int64_t *)dst = parse_time_or_die(opt, arg, 1); 319 | } else if (po->flags & OPT_FLOAT) { 320 | *(float *)dst = parse_number_or_die(opt, arg, OPT_FLOAT, -INFINITY, INFINITY); 321 | } else if (po->flags & OPT_DOUBLE) { 322 | *(double *)dst = parse_number_or_die(opt, arg, OPT_DOUBLE, -INFINITY, INFINITY); 323 | } else if (po->u.func_arg) { 324 | int ret = po->u.func_arg(optctx, opt, arg); 325 | if (ret < 0) { 326 | av_log(NULL, AV_LOG_ERROR, 327 | "Failed to set value '%s' for option '%s'\n", arg, opt); 328 | return ret; 329 | } 330 | } 331 | if (po->flags & OPT_EXIT) 332 | exit(0); 333 | return !!(po->flags & HAS_ARG); 334 | } 335 | 336 | void parse_options(void *optctx, int argc, char **argv, const OptionDef *options, 337 | void (*parse_arg_function)(void *, const char*)) 338 | { 339 | const char *opt; 340 | int optindex, handleoptions = 1, ret; 341 | 342 | /* perform system-dependent conversions for arguments list */ 343 | prepare_app_arguments(&argc, &argv); 344 | 345 | /* parse options */ 346 | optindex = 1; 347 | while (optindex < argc) { 348 | opt = argv[optindex++]; 349 | 350 | if (handleoptions && opt[0] == '-' && opt[1] != '\0') { 351 | if (opt[1] == '-' && opt[2] == '\0') { 352 | handleoptions = 0; 353 | continue; 354 | } 355 | opt++; 356 | 357 | if ((ret = parse_option(optctx, opt, argv[optindex], options)) < 0) 358 | exit(1); 359 | optindex += ret; 360 | } else { 361 | if (parse_arg_function) 362 | parse_arg_function(optctx, opt); 363 | } 364 | } 365 | } 366 | 367 | int locate_option(int argc, char **argv, const OptionDef *options, 368 | const char *optname) 369 | { 370 | const OptionDef *po; 371 | int i; 372 | 373 | for (i = 1; i < argc; i++) { 374 | const char *cur_opt = argv[i]; 375 | 376 | if (*cur_opt++ != '-') 377 | continue; 378 | 379 | po = find_option(options, cur_opt); 380 | if (!po->name && cur_opt[0] == 'n' && cur_opt[1] == 'o') 381 | po = find_option(options, cur_opt + 2); 382 | 383 | if ((!po->name && !strcmp(cur_opt, optname)) || 384 | (po->name && !strcmp(optname, po->name))) 385 | return i; 386 | 387 | if (po->flags & HAS_ARG) 388 | i++; 389 | } 390 | return 0; 391 | } 392 | 393 | static void dump_argument(const char *a) 394 | { 395 | const unsigned char *p; 396 | 397 | for (p = a; *p; p++) 398 | if (!((*p >= '+' && *p <= ':') || (*p >= '@' && *p <= 'Z') || 399 | *p == '_' || (*p >= 'a' && *p <= 'z'))) 400 | break; 401 | if (!*p) { 402 | fputs(a, report_file); 403 | return; 404 | } 405 | fputc('"', report_file); 406 | for (p = a; *p; p++) { 407 | if (*p == '\\' || *p == '"' || *p == '$' || *p == '`') 408 | fprintf(report_file, "\\%c", *p); 409 | else if (*p < ' ' || *p > '~') 410 | fprintf(report_file, "\\x%02x", *p); 411 | else 412 | fputc(*p, report_file); 413 | } 414 | fputc('"', report_file); 415 | } 416 | 417 | void parse_loglevel(int argc, char **argv, const OptionDef *options) 418 | { 419 | int idx = locate_option(argc, argv, options, "loglevel"); 420 | const char *env; 421 | if (!idx) 422 | idx = 
locate_option(argc, argv, options, "v"); 423 | if (idx && argv[idx + 1]) 424 | opt_loglevel(NULL, "loglevel", argv[idx + 1]); 425 | idx = locate_option(argc, argv, options, "report"); 426 | if ((env = getenv("FFREPORT")) || idx) { 427 | init_report(env); 428 | if (report_file) { 429 | int i; 430 | fprintf(report_file, "Command line:\n"); 431 | for (i = 0; i < argc; i++) { 432 | dump_argument(argv[i]); 433 | fputc(i < argc - 1 ? ' ' : '\n', report_file); 434 | } 435 | fflush(report_file); 436 | } 437 | } 438 | } 439 | 440 | #define FLAGS (o->type == AV_OPT_TYPE_FLAGS) ? AV_DICT_APPEND : 0 441 | int opt_default(void *optctx, const char *opt, const char *arg) 442 | { 443 | const AVOption *o; 444 | int consumed = 0; 445 | char opt_stripped[128]; 446 | const char *p; 447 | const AVClass *cc = avcodec_get_class(), *fc = avformat_get_class(); 448 | const AVClass *sc, *swr_class; 449 | 450 | if (!(p = strchr(opt, ':'))) 451 | p = opt + strlen(opt); 452 | av_strlcpy(opt_stripped, opt, FFMIN(sizeof(opt_stripped), p - opt + 1)); 453 | 454 | if ((o = av_opt_find(&cc, opt_stripped, NULL, 0, 455 | AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ)) || 456 | ((opt[0] == 'v' || opt[0] == 'a' || opt[0] == 's') && 457 | (o = av_opt_find(&cc, opt + 1, NULL, 0, AV_OPT_SEARCH_FAKE_OBJ)))) { 458 | av_dict_set(&codec_opts, opt, arg, FLAGS); 459 | consumed = 1; 460 | } 461 | if ((o = av_opt_find(&fc, opt, NULL, 0, 462 | AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ))) { 463 | av_dict_set(&format_opts, opt, arg, FLAGS); 464 | if(consumed) 465 | av_log(NULL, AV_LOG_VERBOSE, "Routing %s to codec and muxer layer\n", opt); 466 | consumed = 1; 467 | } 468 | #if CONFIG_SWSCALE 469 | sc = sws_get_class(); 470 | if (!consumed && av_opt_find(&sc, opt, NULL, 0, 471 | AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ)) { 472 | // XXX we only support sws_flags, not arbitrary sws options 473 | int ret = av_opt_set(sws_opts, opt, arg, 0); 474 | if (ret < 0) { 475 | av_log(NULL, AV_LOG_ERROR, "Error setting option %s.\n", opt); 476 | return ret; 477 | } 478 | consumed = 1; 479 | } 480 | #endif 481 | #if CONFIG_SWRESAMPLE 482 | swr_class = swr_get_class(); 483 | if (!consumed && av_opt_find(&swr_class, opt, NULL, 0, 484 | AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ)) { 485 | int ret = av_opt_set(swr_opts, opt, arg, 0); 486 | if (ret < 0) { 487 | av_log(NULL, AV_LOG_ERROR, "Error setting option %s.\n", opt); 488 | return ret; 489 | } 490 | consumed = 1; 491 | } 492 | #endif 493 | 494 | if (consumed) 495 | return 0; 496 | av_log(NULL, AV_LOG_ERROR, "Unrecognized option '%s'\n", opt); 497 | return AVERROR_OPTION_NOT_FOUND; 498 | } 499 | 500 | int opt_loglevel(void *optctx, const char *opt, const char *arg) 501 | { 502 | const struct { const char *name; int level; } log_levels[] = { 503 | { "quiet" , AV_LOG_QUIET }, 504 | { "panic" , AV_LOG_PANIC }, 505 | { "fatal" , AV_LOG_FATAL }, 506 | { "error" , AV_LOG_ERROR }, 507 | { "warning", AV_LOG_WARNING }, 508 | { "info" , AV_LOG_INFO }, 509 | { "verbose", AV_LOG_VERBOSE }, 510 | { "debug" , AV_LOG_DEBUG }, 511 | }; 512 | char *tail; 513 | int level; 514 | int i; 515 | 516 | for (i = 0; i < FF_ARRAY_ELEMS(log_levels); i++) { 517 | if (!strcmp(log_levels[i].name, arg)) { 518 | av_log_set_level(log_levels[i].level); 519 | return 0; 520 | } 521 | } 522 | 523 | level = strtol(arg, &tail, 10); 524 | if (*tail) { 525 | av_log(NULL, AV_LOG_FATAL, "Invalid loglevel \"%s\". 
" 526 | "Possible levels are numbers or:\n", arg); 527 | for (i = 0; i < FF_ARRAY_ELEMS(log_levels); i++) 528 | av_log(NULL, AV_LOG_FATAL, "\"%s\"\n", log_levels[i].name); 529 | exit(1); 530 | } 531 | av_log_set_level(level); 532 | return 0; 533 | } 534 | 535 | static void expand_filename_template(AVBPrint *bp, const char *template, 536 | struct tm *tm) 537 | { 538 | int c; 539 | 540 | while ((c = *(template++))) { 541 | if (c == '%') { 542 | if (!(c = *(template++))) 543 | break; 544 | switch (c) { 545 | case 'p': 546 | av_bprintf(bp, "%s", program_name); 547 | break; 548 | case 't': 549 | av_bprintf(bp, "%04d%02d%02d-%02d%02d%02d", 550 | tm->tm_year + 1900, tm->tm_mon + 1, tm->tm_mday, 551 | tm->tm_hour, tm->tm_min, tm->tm_sec); 552 | break; 553 | case '%': 554 | av_bprint_chars(bp, c, 1); 555 | break; 556 | } 557 | } else { 558 | av_bprint_chars(bp, c, 1); 559 | } 560 | } 561 | } 562 | 563 | static int init_report(const char *env) 564 | { 565 | char *filename_template = NULL; 566 | char *key, *val; 567 | int ret, count = 0; 568 | time_t now; 569 | struct tm *tm; 570 | AVBPrint filename; 571 | 572 | if (report_file) /* already opened */ 573 | return 0; 574 | time(&now); 575 | tm = localtime(&now); 576 | 577 | while (env && *env) { 578 | if ((ret = av_opt_get_key_value(&env, "=", ":", 0, &key, &val)) < 0) { 579 | if (count) 580 | av_log(NULL, AV_LOG_ERROR, 581 | "Failed to parse FFREPORT environment variable: %s\n", 582 | av_err2str(ret)); 583 | break; 584 | } 585 | if (*env) 586 | env++; 587 | count++; 588 | if (!strcmp(key, "file")) { 589 | av_free(filename_template); 590 | filename_template = val; 591 | val = NULL; 592 | } else { 593 | av_log(NULL, AV_LOG_ERROR, "Unknown key '%s' in FFREPORT\n", key); 594 | } 595 | av_free(val); 596 | av_free(key); 597 | } 598 | 599 | av_bprint_init(&filename, 0, 1); 600 | expand_filename_template(&filename, 601 | av_x_if_null(filename_template, "%p-%t.log"), tm); 602 | av_free(filename_template); 603 | if (!av_bprint_is_complete(&filename)) { 604 | av_log(NULL, AV_LOG_ERROR, "Out of memory building report file name\n"); 605 | return AVERROR(ENOMEM); 606 | } 607 | 608 | report_file = fopen(filename.str, "w"); 609 | if (!report_file) { 610 | av_log(NULL, AV_LOG_ERROR, "Failed to open report \"%s\": %s\n", 611 | filename.str, strerror(errno)); 612 | return AVERROR(errno); 613 | } 614 | av_log_set_callback(log_callback_report); 615 | av_log(NULL, AV_LOG_INFO, 616 | "%s started on %04d-%02d-%02d at %02d:%02d:%02d\n" 617 | "Report written to \"%s\"\n", 618 | program_name, 619 | tm->tm_year + 1900, tm->tm_mon + 1, tm->tm_mday, 620 | tm->tm_hour, tm->tm_min, tm->tm_sec, 621 | filename.str); 622 | av_log_set_level(FFMAX(av_log_get_level(), AV_LOG_VERBOSE)); 623 | av_bprint_finalize(&filename, NULL); 624 | return 0; 625 | } 626 | 627 | int opt_report(const char *opt) 628 | { 629 | return init_report(NULL); 630 | } 631 | 632 | int opt_max_alloc(void *optctx, const char *opt, const char *arg) 633 | { 634 | char *tail; 635 | size_t max; 636 | 637 | max = strtol(arg, &tail, 10); 638 | if (*tail) { 639 | av_log(NULL, AV_LOG_FATAL, "Invalid max_alloc \"%s\".\n", arg); 640 | exit(1); 641 | } 642 | av_max_alloc(max); 643 | return 0; 644 | } 645 | 646 | int opt_cpuflags(void *optctx, const char *opt, const char *arg) 647 | { 648 | int ret; 649 | unsigned flags = av_get_cpu_flags(); 650 | 651 | if ((ret = av_parse_cpu_caps(&flags, arg)) < 0) 652 | return ret; 653 | 654 | av_force_cpu_flags(flags); 655 | return 0; 656 | } 657 | 658 | int opt_codec_debug(void *optctx, 
const char *opt, const char *arg) 659 | { 660 | av_log_set_level(AV_LOG_DEBUG); 661 | return opt_default(NULL, opt, arg); 662 | } 663 | 664 | int opt_timelimit(void *optctx, const char *opt, const char *arg) 665 | { 666 | #if HAVE_SETRLIMIT 667 | int lim = parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX); 668 | struct rlimit rl = { lim, lim + 1 }; 669 | if (setrlimit(RLIMIT_CPU, &rl)) 670 | perror("setrlimit"); 671 | #else 672 | av_log(NULL, AV_LOG_WARNING, "-%s not implemented on this OS\n", opt); 673 | #endif 674 | return 0; 675 | } 676 | 677 | void print_error(const char *filename, int err) 678 | { 679 | char errbuf[128]; 680 | const char *errbuf_ptr = errbuf; 681 | 682 | if (av_strerror(err, errbuf, sizeof(errbuf)) < 0) 683 | errbuf_ptr = strerror(AVUNERROR(err)); 684 | av_log(NULL, AV_LOG_ERROR, "%s: %s\n", filename, errbuf_ptr); 685 | } 686 | 687 | static int warned_cfg = 0; 688 | 689 | #define INDENT 1 690 | #define SHOW_VERSION 2 691 | #define SHOW_CONFIG 4 692 | #define SHOW_COPYRIGHT 8 693 | 694 | #define PRINT_LIB_INFO(libname, LIBNAME, flags, level) \ 695 | if (CONFIG_##LIBNAME) { \ 696 | const char *indent = flags & INDENT? " " : ""; \ 697 | if (flags & SHOW_VERSION) { \ 698 | unsigned int version = libname##_version(); \ 699 | av_log(NULL, level, \ 700 | "%slib%-11s %2d.%3d.%3d / %2d.%3d.%3d\n", \ 701 | indent, #libname, \ 702 | LIB##LIBNAME##_VERSION_MAJOR, \ 703 | LIB##LIBNAME##_VERSION_MINOR, \ 704 | LIB##LIBNAME##_VERSION_MICRO, \ 705 | version >> 16, version >> 8 & 0xff, version & 0xff); \ 706 | } \ 707 | if (flags & SHOW_CONFIG) { \ 708 | const char *cfg = libname##_configuration(); \ 709 | if (strcmp(FFMPEG_CONFIGURATION, cfg)) { \ 710 | if (!warned_cfg) { \ 711 | av_log(NULL, level, \ 712 | "%sWARNING: library configuration mismatch\n", \ 713 | indent); \ 714 | warned_cfg = 1; \ 715 | } \ 716 | av_log(NULL, level, "%s%-11s configuration: %s\n", \ 717 | indent, #libname, cfg); \ 718 | } \ 719 | } \ 720 | } \ 721 | 722 | static void print_all_libs_info(int flags, int level) 723 | { 724 | PRINT_LIB_INFO(avutil, AVUTIL, flags, level); 725 | PRINT_LIB_INFO(avcodec, AVCODEC, flags, level); 726 | PRINT_LIB_INFO(avformat, AVFORMAT, flags, level); 727 | PRINT_LIB_INFO(avdevice, AVDEVICE, flags, level); 728 | PRINT_LIB_INFO(avfilter, AVFILTER, flags, level); 729 | // PRINT_LIB_INFO(avresample, AVRESAMPLE, flags, level); 730 | PRINT_LIB_INFO(swscale, SWSCALE, flags, level); 731 | PRINT_LIB_INFO(swresample,SWRESAMPLE, flags, level); 732 | #if CONFIG_POSTPROC 733 | PRINT_LIB_INFO(postproc, POSTPROC, flags, level); 734 | #endif 735 | } 736 | 737 | static void print_program_info(int flags, int level) 738 | { 739 | const char *indent = flags & INDENT? 
" " : ""; 740 | 741 | av_log(NULL, level, "%s version " FFMPEG_VERSION, program_name); 742 | if (flags & SHOW_COPYRIGHT) 743 | av_log(NULL, level, " Copyright (c) %d-%d the FFmpeg developers", 744 | program_birth_year, this_year); 745 | av_log(NULL, level, "\n"); 746 | av_log(NULL, level, "%sbuilt on %s %s with %s\n", 747 | indent, __DATE__, __TIME__, CC_IDENT); 748 | 749 | av_log(NULL, level, "%sconfiguration: " FFMPEG_CONFIGURATION "\n", indent); 750 | } 751 | 752 | void show_banner(int argc, char **argv, const OptionDef *options) 753 | { 754 | int idx = locate_option(argc, argv, options, "version"); 755 | if (idx) 756 | return; 757 | 758 | print_program_info (INDENT|SHOW_COPYRIGHT, AV_LOG_INFO); 759 | print_all_libs_info(INDENT|SHOW_CONFIG, AV_LOG_INFO); 760 | print_all_libs_info(INDENT|SHOW_VERSION, AV_LOG_INFO); 761 | } 762 | 763 | int show_version(void *optctx, const char *opt, const char *arg) 764 | { 765 | av_log_set_callback(log_callback_help); 766 | print_program_info (0 , AV_LOG_INFO); 767 | print_all_libs_info(SHOW_VERSION, AV_LOG_INFO); 768 | 769 | return 0; 770 | } 771 | 772 | int show_license(void *optctx, const char *opt, const char *arg) 773 | { 774 | #if CONFIG_NONFREE 775 | printf( 776 | "This version of %s has nonfree parts compiled in.\n" 777 | "Therefore it is not legally redistributable.\n", 778 | program_name ); 779 | #elif CONFIG_GPLV3 780 | printf( 781 | "%s is free software; you can redistribute it and/or modify\n" 782 | "it under the terms of the GNU General Public License as published by\n" 783 | "the Free Software Foundation; either version 3 of the License, or\n" 784 | "(at your option) any later version.\n" 785 | "\n" 786 | "%s is distributed in the hope that it will be useful,\n" 787 | "but WITHOUT ANY WARRANTY; without even the implied warranty of\n" 788 | "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n" 789 | "GNU General Public License for more details.\n" 790 | "\n" 791 | "You should have received a copy of the GNU General Public License\n" 792 | "along with %s. If not, see .\n", 793 | program_name, program_name, program_name ); 794 | #elif CONFIG_GPL 795 | printf( 796 | "%s is free software; you can redistribute it and/or modify\n" 797 | "it under the terms of the GNU General Public License as published by\n" 798 | "the Free Software Foundation; either version 2 of the License, or\n" 799 | "(at your option) any later version.\n" 800 | "\n" 801 | "%s is distributed in the hope that it will be useful,\n" 802 | "but WITHOUT ANY WARRANTY; without even the implied warranty of\n" 803 | "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n" 804 | "GNU General Public License for more details.\n" 805 | "\n" 806 | "You should have received a copy of the GNU General Public License\n" 807 | "along with %s; if not, write to the Free Software\n" 808 | "Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n", 809 | program_name, program_name, program_name ); 810 | #elif CONFIG_LGPLV3 811 | printf( 812 | "%s is free software; you can redistribute it and/or modify\n" 813 | "it under the terms of the GNU Lesser General Public License as published by\n" 814 | "the Free Software Foundation; either version 3 of the License, or\n" 815 | "(at your option) any later version.\n" 816 | "\n" 817 | "%s is distributed in the hope that it will be useful,\n" 818 | "but WITHOUT ANY WARRANTY; without even the implied warranty of\n" 819 | "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the\n" 820 | "GNU Lesser General Public License for more details.\n" 821 | "\n" 822 | "You should have received a copy of the GNU Lesser General Public License\n" 823 | "along with %s. If not, see .\n", 824 | program_name, program_name, program_name ); 825 | #else 826 | printf( 827 | "%s is free software; you can redistribute it and/or\n" 828 | "modify it under the terms of the GNU Lesser General Public\n" 829 | "License as published by the Free Software Foundation; either\n" 830 | "version 2.1 of the License, or (at your option) any later version.\n" 831 | "\n" 832 | "%s is distributed in the hope that it will be useful,\n" 833 | "but WITHOUT ANY WARRANTY; without even the implied warranty of\n" 834 | "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n" 835 | "Lesser General Public License for more details.\n" 836 | "\n" 837 | "You should have received a copy of the GNU Lesser General Public\n" 838 | "License along with %s; if not, write to the Free Software\n" 839 | "Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n", 840 | program_name, program_name, program_name ); 841 | #endif 842 | 843 | return 0; 844 | } 845 | 846 | int show_formats(void *optctx, const char *opt, const char *arg) 847 | { 848 | AVInputFormat *ifmt = NULL; 849 | AVOutputFormat *ofmt = NULL; 850 | const char *last_name; 851 | 852 | printf("File formats:\n" 853 | " D. = Demuxing supported\n" 854 | " .E = Muxing supported\n" 855 | " --\n"); 856 | last_name = "000"; 857 | for (;;) { 858 | int decode = 0; 859 | int encode = 0; 860 | const char *name = NULL; 861 | const char *long_name = NULL; 862 | 863 | while ((ofmt = av_oformat_next(ofmt))) { 864 | if ((name == NULL || strcmp(ofmt->name, name) < 0) && 865 | strcmp(ofmt->name, last_name) > 0) { 866 | name = ofmt->name; 867 | long_name = ofmt->long_name; 868 | encode = 1; 869 | } 870 | } 871 | while ((ifmt = av_iformat_next(ifmt))) { 872 | if ((name == NULL || strcmp(ifmt->name, name) < 0) && 873 | strcmp(ifmt->name, last_name) > 0) { 874 | name = ifmt->name; 875 | long_name = ifmt->long_name; 876 | encode = 0; 877 | } 878 | if (name && strcmp(ifmt->name, name) == 0) 879 | decode = 1; 880 | } 881 | if (name == NULL) 882 | break; 883 | last_name = name; 884 | 885 | printf(" %s%s %-15s %s\n", 886 | decode ? "D" : " ", 887 | encode ? "E" : " ", 888 | name, 889 | long_name ? long_name:" "); 890 | } 891 | return 0; 892 | } 893 | 894 | #define PRINT_CODEC_SUPPORTED(codec, field, type, list_name, term, get_name) \ 895 | if (codec->field) { \ 896 | const type *p = codec->field; \ 897 | \ 898 | printf(" Supported " list_name ":"); \ 899 | while (*p != term) { \ 900 | get_name(*p); \ 901 | printf(" %s", name); \ 902 | p++; \ 903 | } \ 904 | printf("\n"); \ 905 | } \ 906 | 907 | static void print_codec(const AVCodec *c) 908 | { 909 | int encoder = av_codec_is_encoder(c); 910 | 911 | printf("%s %s [%s]:\n", encoder ? "Encoder" : "Decoder", c->name, 912 | c->long_name ? 
c->long_name : ""); 913 | 914 | if (c->type == AVMEDIA_TYPE_VIDEO) { 915 | printf(" Threading capabilities: "); 916 | switch (c->capabilities & (CODEC_CAP_FRAME_THREADS | 917 | CODEC_CAP_SLICE_THREADS)) { 918 | case CODEC_CAP_FRAME_THREADS | 919 | CODEC_CAP_SLICE_THREADS: printf("frame and slice"); break; 920 | case CODEC_CAP_FRAME_THREADS: printf("frame"); break; 921 | case CODEC_CAP_SLICE_THREADS: printf("slice"); break; 922 | default: printf("no"); break; 923 | } 924 | printf("\n"); 925 | } 926 | 927 | if (c->supported_framerates) { 928 | const AVRational *fps = c->supported_framerates; 929 | 930 | printf(" Supported framerates:"); 931 | while (fps->num) { 932 | printf(" %d/%d", fps->num, fps->den); 933 | fps++; 934 | } 935 | printf("\n"); 936 | } 937 | PRINT_CODEC_SUPPORTED(c, pix_fmts, enum AVPixelFormat, "pixel formats", 938 | AV_PIX_FMT_NONE, GET_PIX_FMT_NAME); 939 | PRINT_CODEC_SUPPORTED(c, supported_samplerates, int, "sample rates", 0, 940 | GET_SAMPLE_RATE_NAME); 941 | PRINT_CODEC_SUPPORTED(c, sample_fmts, enum AVSampleFormat, "sample formats", 942 | AV_SAMPLE_FMT_NONE, GET_SAMPLE_FMT_NAME); 943 | PRINT_CODEC_SUPPORTED(c, channel_layouts, uint64_t, "channel layouts", 944 | 0, GET_CH_LAYOUT_DESC); 945 | 946 | if (c->priv_class) { 947 | show_help_children(c->priv_class, 948 | AV_OPT_FLAG_ENCODING_PARAM | 949 | AV_OPT_FLAG_DECODING_PARAM); 950 | } 951 | } 952 | 953 | static char get_media_type_char(enum AVMediaType type) 954 | { 955 | switch (type) { 956 | case AVMEDIA_TYPE_VIDEO: return 'V'; 957 | case AVMEDIA_TYPE_AUDIO: return 'A'; 958 | case AVMEDIA_TYPE_DATA: return 'D'; 959 | case AVMEDIA_TYPE_SUBTITLE: return 'S'; 960 | case AVMEDIA_TYPE_ATTACHMENT:return 'T'; 961 | default: return '?'; 962 | } 963 | } 964 | 965 | static const AVCodec *next_codec_for_id(enum AVCodecID id, const AVCodec *prev, 966 | int encoder) 967 | { 968 | while ((prev = av_codec_next(prev))) { 969 | if (prev->id == id && 970 | (encoder ? av_codec_is_encoder(prev) : av_codec_is_decoder(prev))) 971 | return prev; 972 | } 973 | return NULL; 974 | } 975 | 976 | static int compare_codec_desc(const void *a, const void *b) 977 | { 978 | const AVCodecDescriptor * const *da = a; 979 | const AVCodecDescriptor * const *db = b; 980 | 981 | return (*da)->type != (*db)->type ? (*da)->type - (*db)->type : 982 | strcmp((*da)->name, (*db)->name); 983 | } 984 | 985 | static unsigned get_codecs_sorted(const AVCodecDescriptor ***rcodecs) 986 | { 987 | const AVCodecDescriptor *desc = NULL; 988 | const AVCodecDescriptor **codecs; 989 | unsigned nb_codecs = 0, i = 0; 990 | 991 | while ((desc = avcodec_descriptor_next(desc))) 992 | nb_codecs++; 993 | if (!(codecs = av_calloc(nb_codecs, sizeof(*codecs)))) { 994 | av_log(0, AV_LOG_ERROR, "Out of memory\n"); 995 | exit(1); 996 | } 997 | desc = NULL; 998 | while ((desc = avcodec_descriptor_next(desc))) 999 | codecs[i++] = desc; 1000 | av_assert0(i == nb_codecs); 1001 | qsort(codecs, nb_codecs, sizeof(*codecs), compare_codec_desc); 1002 | *rcodecs = codecs; 1003 | return nb_codecs; 1004 | } 1005 | 1006 | static void print_codecs_for_id(enum AVCodecID id, int encoder) 1007 | { 1008 | const AVCodec *codec = NULL; 1009 | 1010 | printf(" (%s: ", encoder ? 
"encoders" : "decoders"); 1011 | 1012 | while ((codec = next_codec_for_id(id, codec, encoder))) 1013 | printf("%s ", codec->name); 1014 | 1015 | printf(")"); 1016 | } 1017 | 1018 | int show_codecs(void *optctx, const char *opt, const char *arg) 1019 | { 1020 | const AVCodecDescriptor **codecs; 1021 | unsigned i, nb_codecs = get_codecs_sorted(&codecs); 1022 | 1023 | printf("Codecs:\n" 1024 | " D..... = Decoding supported\n" 1025 | " .E.... = Encoding supported\n" 1026 | " ..V... = Video codec\n" 1027 | " ..A... = Audio codec\n" 1028 | " ..S... = Subtitle codec\n" 1029 | " ...I.. = Intra frame-only codec\n" 1030 | " ....L. = Lossy compression\n" 1031 | " .....S = Lossless compression\n" 1032 | " -------\n"); 1033 | for (i = 0; i < nb_codecs; i++) { 1034 | const AVCodecDescriptor *desc = codecs[i]; 1035 | const AVCodec *codec = NULL; 1036 | 1037 | printf(" "); 1038 | printf(avcodec_find_decoder(desc->id) ? "D" : "."); 1039 | printf(avcodec_find_encoder(desc->id) ? "E" : "."); 1040 | 1041 | printf("%c", get_media_type_char(desc->type)); 1042 | printf((desc->props & AV_CODEC_PROP_INTRA_ONLY) ? "I" : "."); 1043 | printf((desc->props & AV_CODEC_PROP_LOSSY) ? "L" : "."); 1044 | printf((desc->props & AV_CODEC_PROP_LOSSLESS) ? "S" : "."); 1045 | 1046 | printf(" %-20s %s", desc->name, desc->long_name ? desc->long_name : ""); 1047 | 1048 | /* print decoders/encoders when there's more than one or their 1049 | * names are different from codec name */ 1050 | while ((codec = next_codec_for_id(desc->id, codec, 0))) { 1051 | if (strcmp(codec->name, desc->name)) { 1052 | print_codecs_for_id(desc->id, 0); 1053 | break; 1054 | } 1055 | } 1056 | codec = NULL; 1057 | while ((codec = next_codec_for_id(desc->id, codec, 1))) { 1058 | if (strcmp(codec->name, desc->name)) { 1059 | print_codecs_for_id(desc->id, 1); 1060 | break; 1061 | } 1062 | } 1063 | 1064 | printf("\n"); 1065 | } 1066 | av_free(codecs); 1067 | return 0; 1068 | } 1069 | 1070 | static void print_codecs(int encoder) 1071 | { 1072 | const AVCodecDescriptor **codecs; 1073 | unsigned i, nb_codecs = get_codecs_sorted(&codecs); 1074 | 1075 | printf("%s:\n" 1076 | " V..... = Video\n" 1077 | " A..... = Audio\n" 1078 | " S..... = Subtitle\n" 1079 | " .F.... = Frame-level multithreading\n" 1080 | " ..S... = Slice-level multithreading\n" 1081 | " ...X.. = Codec is experimental\n" 1082 | " ....B. = Supports draw_horiz_band\n" 1083 | " .....D = Supports direct rendering method 1\n" 1084 | " ------\n", 1085 | encoder ? "Encoders" : "Decoders"); 1086 | for (i = 0; i < nb_codecs; i++) { 1087 | const AVCodecDescriptor *desc = codecs[i]; 1088 | const AVCodec *codec = NULL; 1089 | 1090 | while ((codec = next_codec_for_id(desc->id, codec, encoder))) { 1091 | printf(" %c", get_media_type_char(desc->type)); 1092 | printf((codec->capabilities & CODEC_CAP_FRAME_THREADS) ? "F" : "."); 1093 | printf((codec->capabilities & CODEC_CAP_SLICE_THREADS) ? "S" : "."); 1094 | printf((codec->capabilities & CODEC_CAP_EXPERIMENTAL) ? "X" : "."); 1095 | printf((codec->capabilities & CODEC_CAP_DRAW_HORIZ_BAND)?"B" : "."); 1096 | printf((codec->capabilities & CODEC_CAP_DR1) ? "D" : "."); 1097 | 1098 | printf(" %-20s %s", codec->name, codec->long_name ? 
codec->long_name : ""); 1099 | if (strcmp(codec->name, desc->name)) 1100 | printf(" (codec %s)", desc->name); 1101 | 1102 | printf("\n"); 1103 | } 1104 | } 1105 | av_free(codecs); 1106 | } 1107 | 1108 | int show_decoders(void *optctx, const char *opt, const char *arg) 1109 | { 1110 | print_codecs(0); 1111 | return 0; 1112 | } 1113 | 1114 | int show_encoders(void *optctx, const char *opt, const char *arg) 1115 | { 1116 | print_codecs(1); 1117 | return 0; 1118 | } 1119 | 1120 | int show_bsfs(void *optctx, const char *opt, const char *arg) 1121 | { 1122 | AVBitStreamFilter *bsf = NULL; 1123 | 1124 | printf("Bitstream filters:\n"); 1125 | while ((bsf = av_bitstream_filter_next(bsf))) 1126 | printf("%s\n", bsf->name); 1127 | printf("\n"); 1128 | return 0; 1129 | } 1130 | 1131 | int show_protocols(void *optctx, const char *opt, const char *arg) 1132 | { 1133 | void *opaque = NULL; 1134 | const char *name; 1135 | 1136 | printf("Supported file protocols:\n" 1137 | "Input:\n"); 1138 | while ((name = avio_enum_protocols(&opaque, 0))) 1139 | printf("%s\n", name); 1140 | printf("Output:\n"); 1141 | while ((name = avio_enum_protocols(&opaque, 1))) 1142 | printf("%s\n", name); 1143 | return 0; 1144 | } 1145 | 1146 | int show_filters(void *optctx, const char *opt, const char *arg) 1147 | { 1148 | AVFilter av_unused(**filter) = NULL; 1149 | char descr[64], *descr_cur; 1150 | int i, j; 1151 | const AVFilterPad *pad; 1152 | 1153 | printf("Filters:\n"); 1154 | #if CONFIG_AVFILTER 1155 | while ((filter = av_filter_next(filter)) && *filter) { 1156 | descr_cur = descr; 1157 | for (i = 0; i < 2; i++) { 1158 | if (i) { 1159 | *(descr_cur++) = '-'; 1160 | *(descr_cur++) = '>'; 1161 | } 1162 | pad = i ? (*filter)->outputs : (*filter)->inputs; 1163 | for (j = 0; pad && pad[j].name; j++) { 1164 | if (descr_cur >= descr + sizeof(descr) - 4) 1165 | break; 1166 | *(descr_cur++) = get_media_type_char(pad[j].type); 1167 | } 1168 | if (!j) 1169 | *(descr_cur++) = '|'; 1170 | } 1171 | *descr_cur = 0; 1172 | printf("%-16s %-10s %s\n", (*filter)->name, descr, (*filter)->description); 1173 | } 1174 | #endif 1175 | return 0; 1176 | } 1177 | 1178 | int show_pix_fmts(void *optctx, const char *opt, const char *arg) 1179 | { 1180 | const AVPixFmtDescriptor *pix_desc = NULL; 1181 | 1182 | printf("Pixel formats:\n" 1183 | "I.... = Supported Input format for conversion\n" 1184 | ".O... = Supported Output format for conversion\n" 1185 | "..H.. = Hardware accelerated format\n" 1186 | "...P. = Paletted format\n" 1187 | "....B = Bitstream format\n" 1188 | "FLAGS NAME NB_COMPONENTS BITS_PER_PIXEL\n" 1189 | "-----\n"); 1190 | 1191 | #if !CONFIG_SWSCALE 1192 | # define sws_isSupportedInput(x) 0 1193 | # define sws_isSupportedOutput(x) 0 1194 | #endif 1195 | 1196 | while ((pix_desc = av_pix_fmt_desc_next(pix_desc))) { 1197 | enum AVPixelFormat pix_fmt = av_pix_fmt_desc_get_id(pix_desc); 1198 | if(!pix_desc->name) 1199 | continue; 1200 | printf("%c%c%c%c%c %-16s %d %2d\n", 1201 | sws_isSupportedInput (pix_fmt) ? 'I' : '.', 1202 | sws_isSupportedOutput(pix_fmt) ? 'O' : '.', 1203 | pix_desc->flags & PIX_FMT_HWACCEL ? 'H' : '.', 1204 | pix_desc->flags & PIX_FMT_PAL ? 'P' : '.', 1205 | pix_desc->flags & PIX_FMT_BITSTREAM ? 
'B' : '.', 1206 | pix_desc->name, 1207 | pix_desc->nb_components, 1208 | av_get_bits_per_pixel(pix_desc)); 1209 | } 1210 | return 0; 1211 | } 1212 | 1213 | int show_layouts(void *optctx, const char *opt, const char *arg) 1214 | { 1215 | int i = 0; 1216 | uint64_t layout, j; 1217 | const char *name, *descr; 1218 | 1219 | printf("Individual channels:\n" 1220 | "NAME DESCRIPTION\n"); 1221 | for (i = 0; i < 63; i++) { 1222 | name = av_get_channel_name((uint64_t)1 << i); 1223 | if (!name) 1224 | continue; 1225 | descr = av_get_channel_description((uint64_t)1 << i); 1226 | printf("%-12s%s\n", name, descr); 1227 | } 1228 | printf("\nStandard channel layouts:\n" 1229 | "NAME DECOMPOSITION\n"); 1230 | for (i = 0; !av_get_standard_channel_layout(i, &layout, &name); i++) { 1231 | if (name) { 1232 | printf("%-12s", name); 1233 | for (j = 1; j; j <<= 1) 1234 | if ((layout & j)) 1235 | printf("%s%s", (layout & (j - 1)) ? "+" : "", av_get_channel_name(j)); 1236 | printf("\n"); 1237 | } 1238 | } 1239 | return 0; 1240 | } 1241 | 1242 | int show_sample_fmts(void *optctx, const char *opt, const char *arg) 1243 | { 1244 | int i; 1245 | char fmt_str[128]; 1246 | for (i = -1; i < AV_SAMPLE_FMT_NB; i++) 1247 | printf("%s\n", av_get_sample_fmt_string(fmt_str, sizeof(fmt_str), i)); 1248 | return 0; 1249 | } 1250 | 1251 | static void show_help_codec(const char *name, int encoder) 1252 | { 1253 | const AVCodecDescriptor *desc; 1254 | const AVCodec *codec; 1255 | 1256 | if (!name) { 1257 | av_log(NULL, AV_LOG_ERROR, "No codec name specified.\n"); 1258 | return; 1259 | } 1260 | 1261 | codec = encoder ? avcodec_find_encoder_by_name(name) : 1262 | avcodec_find_decoder_by_name(name); 1263 | 1264 | if (codec) 1265 | print_codec(codec); 1266 | else if ((desc = avcodec_descriptor_get_by_name(name))) { 1267 | int printed = 0; 1268 | 1269 | while ((codec = next_codec_for_id(desc->id, codec, encoder))) { 1270 | printed = 1; 1271 | print_codec(codec); 1272 | } 1273 | 1274 | if (!printed) { 1275 | av_log(NULL, AV_LOG_ERROR, "Codec '%s' is known to FFmpeg, " 1276 | "but no %s for it are available. FFmpeg might need to be " 1277 | "recompiled with additional external libraries.\n", 1278 | name, encoder ? 
"encoders" : "decoders"); 1279 | } 1280 | } else { 1281 | av_log(NULL, AV_LOG_ERROR, "Codec '%s' is not recognized by FFmpeg.\n", 1282 | name); 1283 | } 1284 | } 1285 | 1286 | static void show_help_demuxer(const char *name) 1287 | { 1288 | const AVInputFormat *fmt = av_find_input_format(name); 1289 | 1290 | if (!fmt) { 1291 | av_log(NULL, AV_LOG_ERROR, "Unknown format '%s'.\n", name); 1292 | return; 1293 | } 1294 | 1295 | printf("Demuxer %s [%s]:\n", fmt->name, fmt->long_name); 1296 | 1297 | if (fmt->extensions) 1298 | printf(" Common extensions: %s.\n", fmt->extensions); 1299 | 1300 | if (fmt->priv_class) 1301 | show_help_children(fmt->priv_class, AV_OPT_FLAG_DECODING_PARAM); 1302 | } 1303 | 1304 | static void show_help_muxer(const char *name) 1305 | { 1306 | const AVCodecDescriptor *desc; 1307 | const AVOutputFormat *fmt = av_guess_format(name, NULL, NULL); 1308 | 1309 | if (!fmt) { 1310 | av_log(NULL, AV_LOG_ERROR, "Unknown format '%s'.\n", name); 1311 | return; 1312 | } 1313 | 1314 | printf("Muxer %s [%s]:\n", fmt->name, fmt->long_name); 1315 | 1316 | if (fmt->extensions) 1317 | printf(" Common extensions: %s.\n", fmt->extensions); 1318 | if (fmt->mime_type) 1319 | printf(" Mime type: %s.\n", fmt->mime_type); 1320 | if (fmt->video_codec != AV_CODEC_ID_NONE && 1321 | (desc = avcodec_descriptor_get(fmt->video_codec))) { 1322 | printf(" Default video codec: %s.\n", desc->name); 1323 | } 1324 | if (fmt->audio_codec != AV_CODEC_ID_NONE && 1325 | (desc = avcodec_descriptor_get(fmt->audio_codec))) { 1326 | printf(" Default audio codec: %s.\n", desc->name); 1327 | } 1328 | if (fmt->subtitle_codec != AV_CODEC_ID_NONE && 1329 | (desc = avcodec_descriptor_get(fmt->subtitle_codec))) { 1330 | printf(" Default subtitle codec: %s.\n", desc->name); 1331 | } 1332 | 1333 | if (fmt->priv_class) 1334 | show_help_children(fmt->priv_class, AV_OPT_FLAG_ENCODING_PARAM); 1335 | } 1336 | 1337 | int show_help(void *optctx, const char *opt, const char *arg) 1338 | { 1339 | char *topic, *par; 1340 | av_log_set_callback(log_callback_help); 1341 | 1342 | topic = av_strdup(arg ? 
arg : ""); 1343 | par = strchr(topic, '='); 1344 | if (par) 1345 | *par++ = 0; 1346 | 1347 | if (!*topic) { 1348 | show_help_default(topic, par); 1349 | } else if (!strcmp(topic, "decoder")) { 1350 | show_help_codec(par, 0); 1351 | } else if (!strcmp(topic, "encoder")) { 1352 | show_help_codec(par, 1); 1353 | } else if (!strcmp(topic, "demuxer")) { 1354 | show_help_demuxer(par); 1355 | } else if (!strcmp(topic, "muxer")) { 1356 | show_help_muxer(par); 1357 | } else { 1358 | show_help_default(topic, par); 1359 | } 1360 | 1361 | av_freep(&topic); 1362 | return 0; 1363 | } 1364 | 1365 | int read_yesno(void) 1366 | { 1367 | int c = getchar(); 1368 | int yesno = (toupper(c) == 'Y'); 1369 | 1370 | while (c != '\n' && c != EOF) 1371 | c = getchar(); 1372 | 1373 | return yesno; 1374 | } 1375 | 1376 | int cmdutils_read_file(const char *filename, char **bufptr, size_t *size) 1377 | { 1378 | int ret; 1379 | FILE *f = fopen(filename, "rb"); 1380 | 1381 | if (!f) { 1382 | av_log(NULL, AV_LOG_ERROR, "Cannot read file '%s': %s\n", filename, 1383 | strerror(errno)); 1384 | return AVERROR(errno); 1385 | } 1386 | fseek(f, 0, SEEK_END); 1387 | *size = ftell(f); 1388 | fseek(f, 0, SEEK_SET); 1389 | if (*size == (size_t)-1) { 1390 | av_log(NULL, AV_LOG_ERROR, "IO error: %s\n", strerror(errno)); 1391 | fclose(f); 1392 | return AVERROR(errno); 1393 | } 1394 | *bufptr = av_malloc(*size + 1); 1395 | if (!*bufptr) { 1396 | av_log(NULL, AV_LOG_ERROR, "Could not allocate file buffer\n"); 1397 | fclose(f); 1398 | return AVERROR(ENOMEM); 1399 | } 1400 | ret = fread(*bufptr, 1, *size, f); 1401 | if (ret < *size) { 1402 | av_free(*bufptr); 1403 | if (ferror(f)) { 1404 | av_log(NULL, AV_LOG_ERROR, "Error while reading file '%s': %s\n", 1405 | filename, strerror(errno)); 1406 | ret = AVERROR(errno); 1407 | } else 1408 | ret = AVERROR_EOF; 1409 | } else { 1410 | ret = 0; 1411 | (*bufptr)[(*size)++] = '\0'; 1412 | } 1413 | 1414 | fclose(f); 1415 | return ret; 1416 | } 1417 | 1418 | FILE *get_preset_file(char *filename, size_t filename_size, 1419 | const char *preset_name, int is_path, 1420 | const char *codec_name) 1421 | { 1422 | FILE *f = NULL; 1423 | int i; 1424 | const char *base[3] = { getenv("FFMPEG_DATADIR"), 1425 | getenv("HOME"), 1426 | FFMPEG_DATADIR, }; 1427 | 1428 | if (is_path) { 1429 | av_strlcpy(filename, preset_name, filename_size); 1430 | f = fopen(filename, "r"); 1431 | } else { 1432 | #ifdef _WIN32 1433 | char datadir[MAX_PATH], *ls; 1434 | base[2] = NULL; 1435 | 1436 | if (GetModuleFileNameA(GetModuleHandleA(NULL), datadir, sizeof(datadir) - 1)) 1437 | { 1438 | for (ls = datadir; ls < datadir + strlen(datadir); ls++) 1439 | if (*ls == '\\') *ls = '/'; 1440 | 1441 | if (ls = strrchr(datadir, '/')) 1442 | { 1443 | *ls = 0; 1444 | strncat(datadir, "/ffpresets", sizeof(datadir) - 1 - strlen(datadir)); 1445 | base[2] = datadir; 1446 | } 1447 | } 1448 | #endif 1449 | for (i = 0; i < 3 && !f; i++) { 1450 | if (!base[i]) 1451 | continue; 1452 | snprintf(filename, filename_size, "%s%s/%s.ffpreset", base[i], 1453 | i != 1 ? "" : "/.ffmpeg", preset_name); 1454 | f = fopen(filename, "r"); 1455 | if (!f && codec_name) { 1456 | snprintf(filename, filename_size, 1457 | "%s%s/%s-%s.ffpreset", 1458 | base[i], i != 1 ? 
"" : "/.ffmpeg", codec_name, 1459 | preset_name); 1460 | f = fopen(filename, "r"); 1461 | } 1462 | } 1463 | } 1464 | 1465 | return f; 1466 | } 1467 | 1468 | int check_stream_specifier(AVFormatContext *s, AVStream *st, const char *spec) 1469 | { 1470 | int ret = avformat_match_stream_specifier(s, st, spec); 1471 | if (ret < 0) 1472 | av_log(s, AV_LOG_ERROR, "Invalid stream specifier: %s.\n", spec); 1473 | return ret; 1474 | } 1475 | 1476 | AVDictionary *filter_codec_opts(AVDictionary *opts, enum AVCodecID codec_id, 1477 | AVFormatContext *s, AVStream *st, AVCodec *codec) 1478 | { 1479 | AVDictionary *ret = NULL; 1480 | AVDictionaryEntry *t = NULL; 1481 | int flags = s->oformat ? AV_OPT_FLAG_ENCODING_PARAM 1482 | : AV_OPT_FLAG_DECODING_PARAM; 1483 | char prefix = 0; 1484 | const AVClass *cc = avcodec_get_class(); 1485 | 1486 | if (!codec) 1487 | codec = s->oformat ? avcodec_find_encoder(codec_id) 1488 | : avcodec_find_decoder(codec_id); 1489 | if (!codec) 1490 | return NULL; 1491 | 1492 | switch (codec->type) { 1493 | case AVMEDIA_TYPE_VIDEO: 1494 | prefix = 'v'; 1495 | flags |= AV_OPT_FLAG_VIDEO_PARAM; 1496 | break; 1497 | case AVMEDIA_TYPE_AUDIO: 1498 | prefix = 'a'; 1499 | flags |= AV_OPT_FLAG_AUDIO_PARAM; 1500 | break; 1501 | case AVMEDIA_TYPE_SUBTITLE: 1502 | prefix = 's'; 1503 | flags |= AV_OPT_FLAG_SUBTITLE_PARAM; 1504 | break; 1505 | } 1506 | 1507 | while (t = av_dict_get(opts, "", t, AV_DICT_IGNORE_SUFFIX)) { 1508 | char *p = strchr(t->key, ':'); 1509 | 1510 | /* check stream specification in opt name */ 1511 | if (p) 1512 | switch (check_stream_specifier(s, st, p + 1)) { 1513 | case 1: *p = 0; break; 1514 | case 0: continue; 1515 | default: return NULL; 1516 | } 1517 | 1518 | if (av_opt_find(&cc, t->key, NULL, flags, AV_OPT_SEARCH_FAKE_OBJ) || 1519 | (codec && codec->priv_class && 1520 | av_opt_find(&codec->priv_class, t->key, NULL, flags, 1521 | AV_OPT_SEARCH_FAKE_OBJ))) 1522 | av_dict_set(&ret, t->key, t->value, 0); 1523 | else if (t->key[0] == prefix && 1524 | av_opt_find(&cc, t->key + 1, NULL, flags, 1525 | AV_OPT_SEARCH_FAKE_OBJ)) 1526 | av_dict_set(&ret, t->key + 1, t->value, 0); 1527 | 1528 | if (p) 1529 | *p = ':'; 1530 | } 1531 | return ret; 1532 | } 1533 | 1534 | AVDictionary **setup_find_stream_info_opts(AVFormatContext *s, 1535 | AVDictionary *codec_opts) 1536 | { 1537 | int i; 1538 | AVDictionary **opts; 1539 | 1540 | if (!s->nb_streams) 1541 | return NULL; 1542 | opts = av_mallocz(s->nb_streams * sizeof(*opts)); 1543 | if (!opts) { 1544 | av_log(NULL, AV_LOG_ERROR, 1545 | "Could not alloc memory for stream options.\n"); 1546 | return NULL; 1547 | } 1548 | for (i = 0; i < s->nb_streams; i++) 1549 | opts[i] = filter_codec_opts(codec_opts, s->streams[i]->codec->codec_id, 1550 | s, s->streams[i], NULL); 1551 | return opts; 1552 | } 1553 | 1554 | void *grow_array(void *array, int elem_size, int *size, int new_size) 1555 | { 1556 | if (new_size >= INT_MAX / elem_size) { 1557 | av_log(NULL, AV_LOG_ERROR, "Array too big.\n"); 1558 | exit(1); 1559 | } 1560 | if (*size < new_size) { 1561 | uint8_t *tmp = av_realloc(array, new_size*elem_size); 1562 | if (!tmp) { 1563 | av_log(NULL, AV_LOG_ERROR, "Could not alloc buffer.\n"); 1564 | exit(1); 1565 | } 1566 | memset(tmp + *size*elem_size, 0, (new_size-*size) * elem_size); 1567 | *size = new_size; 1568 | return tmp; 1569 | } 1570 | return array; 1571 | } 1572 | 1573 | static int alloc_buffer(FrameBuffer **pool, AVCodecContext *s, FrameBuffer **pbuf) 1574 | { 1575 | const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(s->pix_fmt); 
1576 | FrameBuffer *buf; 1577 | int i, ret; 1578 | int pixel_size; 1579 | int h_chroma_shift, v_chroma_shift; 1580 | int edge = 32; // XXX should be avcodec_get_edge_width(), but that fails on svq1 1581 | int w = s->width, h = s->height; 1582 | 1583 | if (!desc) 1584 | return AVERROR(EINVAL); 1585 | pixel_size = desc->comp[0].step_minus1 + 1; 1586 | 1587 | buf = av_mallocz(sizeof(*buf)); 1588 | if (!buf) 1589 | return AVERROR(ENOMEM); 1590 | 1591 | avcodec_align_dimensions(s, &w, &h); 1592 | 1593 | if (!(s->flags & CODEC_FLAG_EMU_EDGE)) { 1594 | w += 2*edge; 1595 | h += 2*edge; 1596 | } 1597 | 1598 | if ((ret = av_image_alloc(buf->base, buf->linesize, w, h, 1599 | s->pix_fmt, 32)) < 0) { 1600 | av_freep(&buf); 1601 | av_log(s, AV_LOG_ERROR, "alloc_buffer: av_image_alloc() failed\n"); 1602 | return ret; 1603 | } 1604 | /* XXX this shouldn't be needed, but some tests break without this line 1605 | * those decoders are buggy and need to be fixed. 1606 | * the following tests fail: 1607 | * cdgraphics, ansi, aasc, fraps-v1, qtrle-1bit 1608 | */ 1609 | memset(buf->base[0], 128, ret); 1610 | 1611 | avcodec_get_chroma_sub_sample(s->pix_fmt, &h_chroma_shift, &v_chroma_shift); 1612 | for (i = 0; i < FF_ARRAY_ELEMS(buf->data); i++) { 1613 | const int h_shift = i==0 ? 0 : h_chroma_shift; 1614 | const int v_shift = i==0 ? 0 : v_chroma_shift; 1615 | if ((s->flags & CODEC_FLAG_EMU_EDGE) || !buf->linesize[i] || !buf->base[i]) 1616 | buf->data[i] = buf->base[i]; 1617 | else 1618 | buf->data[i] = buf->base[i] + 1619 | FFALIGN((buf->linesize[i]*edge >> v_shift) + 1620 | (pixel_size*edge >> h_shift), 32); 1621 | } 1622 | buf->w = s->width; 1623 | buf->h = s->height; 1624 | buf->pix_fmt = s->pix_fmt; 1625 | buf->pool = pool; 1626 | 1627 | *pbuf = buf; 1628 | return 0; 1629 | } 1630 | 1631 | int codec_get_buffer(AVCodecContext *s, AVFrame *frame) 1632 | { 1633 | FrameBuffer **pool = s->opaque; 1634 | FrameBuffer *buf; 1635 | int ret, i; 1636 | 1637 | if(av_image_check_size(s->width, s->height, 0, s) || s->pix_fmt<0) { 1638 | av_log(s, AV_LOG_ERROR, "codec_get_buffer: image parameters invalid\n"); 1639 | return -1; 1640 | } 1641 | 1642 | if (!*pool && (ret = alloc_buffer(pool, s, pool)) < 0) 1643 | return ret; 1644 | 1645 | buf = *pool; 1646 | *pool = buf->next; 1647 | buf->next = NULL; 1648 | if (buf->w != s->width || buf->h != s->height || buf->pix_fmt != s->pix_fmt) { 1649 | av_freep(&buf->base[0]); 1650 | av_free(buf); 1651 | if ((ret = alloc_buffer(pool, s, &buf)) < 0) 1652 | return ret; 1653 | } 1654 | av_assert0(!buf->refcount); 1655 | buf->refcount++; 1656 | 1657 | frame->opaque = buf; 1658 | frame->type = FF_BUFFER_TYPE_USER; 1659 | frame->extended_data = frame->data; 1660 | frame->pkt_pts = s->pkt ? 
s->pkt->pts : AV_NOPTS_VALUE; 1661 | frame->width = buf->w; 1662 | frame->height = buf->h; 1663 | frame->format = buf->pix_fmt; 1664 | frame->sample_aspect_ratio = s->sample_aspect_ratio; 1665 | 1666 | for (i = 0; i < FF_ARRAY_ELEMS(buf->data); i++) { 1667 | frame->base[i] = buf->base[i]; // XXX h264.c uses base though it shouldn't 1668 | frame->data[i] = buf->data[i]; 1669 | frame->linesize[i] = buf->linesize[i]; 1670 | } 1671 | 1672 | return 0; 1673 | } 1674 | 1675 | static void unref_buffer(FrameBuffer *buf) 1676 | { 1677 | FrameBuffer **pool = buf->pool; 1678 | 1679 | av_assert0(buf->refcount > 0); 1680 | buf->refcount--; 1681 | if (!buf->refcount) { 1682 | FrameBuffer *tmp; 1683 | for(tmp= *pool; tmp; tmp= tmp->next) 1684 | av_assert1(tmp != buf); 1685 | 1686 | buf->next = *pool; 1687 | *pool = buf; 1688 | } 1689 | } 1690 | 1691 | void codec_release_buffer(AVCodecContext *s, AVFrame *frame) 1692 | { 1693 | FrameBuffer *buf = frame->opaque; 1694 | int i; 1695 | 1696 | if(frame->type!=FF_BUFFER_TYPE_USER) { 1697 | avcodec_default_release_buffer(s, frame); 1698 | return; 1699 | } 1700 | 1701 | for (i = 0; i < FF_ARRAY_ELEMS(frame->data); i++) 1702 | frame->data[i] = NULL; 1703 | 1704 | unref_buffer(buf); 1705 | } 1706 | 1707 | void filter_release_buffer(AVFilterBuffer *fb) 1708 | { 1709 | FrameBuffer *buf = fb->priv; 1710 | av_free(fb); 1711 | unref_buffer(buf); 1712 | } 1713 | 1714 | void free_buffer_pool(FrameBuffer **pool) 1715 | { 1716 | FrameBuffer *buf = *pool; 1717 | while (buf) { 1718 | *pool = buf->next; 1719 | av_freep(&buf->base[0]); 1720 | av_free(buf); 1721 | buf = *pool; 1722 | } 1723 | } 1724 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/ffmpegclasses/cmdutils.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Various utilities for command line tools 3 | * copyright (c) 2003 Fabrice Bellard 4 | * 5 | * This file is part of FFmpeg. 6 | * 7 | * FFmpeg is free software; you can redistribute it and/or 8 | * modify it under the terms of the GNU Lesser General Public 9 | * License as published by the Free Software Foundation; either 10 | * version 2.1 of the License, or (at your option) any later version. 11 | * 12 | * FFmpeg is distributed in the hope that it will be useful, 13 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 | * Lesser General Public License for more details. 16 | * 17 | * You should have received a copy of the GNU Lesser General Public 18 | * License along with FFmpeg; if not, write to the Free Software 19 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 20 | */ 21 | 22 | #ifndef FFMPEG_CMDUTILS_H 23 | #define FFMPEG_CMDUTILS_H 24 | 25 | #include 26 | 27 | #include "libavcodec/avcodec.h" 28 | #include "libavfilter/avfilter.h" 29 | #include "libavformat/avformat.h" 30 | #include "libswscale/swscale.h" 31 | 32 | #ifdef __MINGW32__ 33 | #undef main /* We don't want SDL to override our main() */ 34 | #endif 35 | 36 | /** 37 | * program name, defined by the program for show_version(). 
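 * (Each tool that reuses cmdutils provides its own definition; ffplay, for
 * instance, defines program_name as "ffplay".)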
38 | */ 39 | extern const char program_name[]; 40 | 41 | /** 42 | * program birth year, defined by the program for show_banner() 43 | */ 44 | extern const int program_birth_year; 45 | 46 | /** 47 | * this year, defined by the program for show_banner() 48 | */ 49 | extern const int this_year; 50 | 51 | extern AVCodecContext *avcodec_opts[AVMEDIA_TYPE_NB]; 52 | extern AVFormatContext *avformat_opts; 53 | extern struct SwsContext *sws_opts; 54 | extern struct SwrContext *swr_opts; 55 | extern AVDictionary *format_opts, *codec_opts; 56 | 57 | /** 58 | * Initialize the cmdutils option system, in particular 59 | * allocate the *_opts contexts. 60 | */ 61 | void init_opts(void); 62 | /** 63 | * Uninitialize the cmdutils option system, in particular 64 | * free the *_opts contexts and their contents. 65 | */ 66 | void uninit_opts(void); 67 | 68 | /** 69 | * Trivial log callback. 70 | * Only suitable for opt_help and similar since it lacks prefix handling. 71 | */ 72 | void log_callback_help(void* ptr, int level, const char* fmt, va_list vl); 73 | 74 | /** 75 | * Fallback for options that are not explicitly handled, these will be 76 | * parsed through AVOptions. 77 | */ 78 | int opt_default(void *optctx, const char *opt, const char *arg); 79 | 80 | /** 81 | * Set the libav* libraries log level. 82 | */ 83 | int opt_loglevel(void *optctx, const char *opt, const char *arg); 84 | 85 | int opt_report(const char *opt); 86 | 87 | int opt_max_alloc(void *optctx, const char *opt, const char *arg); 88 | 89 | int opt_cpuflags(void *optctx, const char *opt, const char *arg); 90 | 91 | int opt_codec_debug(void *optctx, const char *opt, const char *arg); 92 | 93 | /** 94 | * Limit the execution time. 95 | */ 96 | int opt_timelimit(void *optctx, const char *opt, const char *arg); 97 | 98 | /** 99 | * Parse a string and return its corresponding value as a double. 100 | * Exit from the application if the string cannot be correctly 101 | * parsed or the corresponding value is invalid. 102 | * 103 | * @param context the context of the value to be set (e.g. the 104 | * corresponding command line option name) 105 | * @param numstr the string to be parsed 106 | * @param type the type (OPT_INT64 or OPT_FLOAT) as which the 107 | * string should be parsed 108 | * @param min the minimum valid accepted value 109 | * @param max the maximum valid accepted value 110 | */ 111 | double parse_number_or_die(const char *context, const char *numstr, int type, 112 | double min, double max); 113 | 114 | /** 115 | * Parse a string specifying a time and return its corresponding 116 | * value as a number of microseconds. Exit from the application if 117 | * the string cannot be correctly parsed. 118 | * 119 | * @param context the context of the value to be set (e.g. the 120 | * corresponding command line option name) 121 | * @param timestr the string to be parsed 122 | * @param is_duration a flag which tells how to interpret timestr, if 123 | * not zero timestr is interpreted as a duration, otherwise as a 124 | * date 125 | * 126 | * @see parse_date() 127 | */ 128 | int64_t parse_time_or_die(const char *context, const char *timestr, 129 | int is_duration); 130 | 131 | typedef struct SpecifierOpt { 132 | char *specifier; /**< stream/chapter/program/... 
specifier */ 133 | union { 134 | uint8_t *str; 135 | int i; 136 | int64_t i64; 137 | float f; 138 | double dbl; 139 | } u; 140 | } SpecifierOpt; 141 | 142 | typedef struct OptionDef { 143 | const char *name; 144 | int flags; 145 | #define HAS_ARG 0x0001 146 | #define OPT_BOOL 0x0002 147 | #define OPT_EXPERT 0x0004 148 | #define OPT_STRING 0x0008 149 | #define OPT_VIDEO 0x0010 150 | #define OPT_AUDIO 0x0020 151 | #define OPT_INT 0x0080 152 | #define OPT_FLOAT 0x0100 153 | #define OPT_SUBTITLE 0x0200 154 | #define OPT_INT64 0x0400 155 | #define OPT_EXIT 0x0800 156 | #define OPT_DATA 0x1000 157 | #define OPT_PERFILE 0x2000 /* the option is per-file (currently ffmpeg-only). 158 | implied by OPT_OFFSET or OPT_SPEC */ 159 | #define OPT_OFFSET 0x4000 /* option is specified as an offset in a passed optctx */ 160 | #define OPT_SPEC 0x8000 /* option is to be stored in an array of SpecifierOpt. 161 | Implies OPT_OFFSET. Next element after the offset is 162 | an int containing element count in the array. */ 163 | #define OPT_TIME 0x10000 164 | #define OPT_DOUBLE 0x20000 165 | union { 166 | void *dst_ptr; 167 | int (*func_arg)(void *, const char *, const char *); 168 | size_t off; 169 | } u; 170 | const char *help; 171 | const char *argname; 172 | } OptionDef; 173 | 174 | /** 175 | * Print help for all options matching specified flags. 176 | * 177 | * @param options a list of options 178 | * @param msg title of this group. Only printed if at least one option matches. 179 | * @param req_flags print only options which have all those flags set. 180 | * @param rej_flags don't print options which have any of those flags set. 181 | * @param alt_flags print only options that have at least one of those flags set 182 | */ 183 | void show_help_options(const OptionDef *options, const char *msg, int req_flags, 184 | int rej_flags, int alt_flags); 185 | 186 | /** 187 | * Show help for all options with given flags in class and all its 188 | * children. 189 | */ 190 | void show_help_children(const AVClass *class, int flags); 191 | 192 | /** 193 | * Per-avtool specific help handler. Implemented in each 194 | * avtool, called by show_help(). 195 | */ 196 | void show_help_default(const char *opt, const char *arg); 197 | 198 | /** 199 | * Generic -h handler common to all avtools. 200 | */ 201 | int show_help(void *optctx, const char *opt, const char *arg); 202 | 203 | /** 204 | * Parse the command line arguments. 205 | * 206 | * @param optctx an opaque options context 207 | * @param argc number of command line arguments 208 | * @param argv values of command line arguments 209 | * @param options Array with the definitions required to interpret every 210 | * option of the form: -option_name [argument] 211 | * @param parse_arg_function Name of the function called to process every 212 | * argument without a leading option name flag. NULL if such arguments do 213 | * not have to be processed. 214 | */ 215 | void parse_options(void *optctx, int argc, char **argv, const OptionDef *options, 216 | void (* parse_arg_function)(void *optctx, const char*)); 217 | 218 | /** 219 | * Parse one given option. 220 | * 221 | * @return on success 1 if arg was consumed, 0 otherwise; negative number on error 222 | */ 223 | int parse_option(void *optctx, const char *opt, const char *arg, 224 | const OptionDef *options); 225 | 226 | /** 227 | * Find the '-loglevel' option in the command line args and apply it. 
228 | */ 229 | void parse_loglevel(int argc, char **argv, const OptionDef *options); 230 | 231 | /** 232 | * Return index of option opt in argv or 0 if not found. 233 | */ 234 | int locate_option(int argc, char **argv, const OptionDef *options, 235 | const char *optname); 236 | 237 | /** 238 | * Check if the given stream matches a stream specifier. 239 | * 240 | * @param s Corresponding format context. 241 | * @param st Stream from s to be checked. 242 | * @param spec A stream specifier of the [v|a|s|d]:[\] form. 243 | * 244 | * @return 1 if the stream matches, 0 if it doesn't, <0 on error 245 | */ 246 | int check_stream_specifier(AVFormatContext *s, AVStream *st, const char *spec); 247 | 248 | /** 249 | * Filter out options for given codec. 250 | * 251 | * Create a new options dictionary containing only the options from 252 | * opts which apply to the codec with ID codec_id. 253 | * 254 | * @param opts dictionary to place options in 255 | * @param codec_id ID of the codec that should be filtered for 256 | * @param s Corresponding format context. 257 | * @param st A stream from s for which the options should be filtered. 258 | * @param codec The particular codec for which the options should be filtered. 259 | * If null, the default one is looked up according to the codec id. 260 | * @return a pointer to the created dictionary 261 | */ 262 | AVDictionary *filter_codec_opts(AVDictionary *opts, enum AVCodecID codec_id, 263 | AVFormatContext *s, AVStream *st, AVCodec *codec); 264 | 265 | /** 266 | * Setup AVCodecContext options for avformat_find_stream_info(). 267 | * 268 | * Create an array of dictionaries, one dictionary for each stream 269 | * contained in s. 270 | * Each dictionary will contain the options from codec_opts which can 271 | * be applied to the corresponding stream codec context. 272 | * 273 | * @return pointer to the created array of dictionaries, NULL if it 274 | * cannot be created 275 | */ 276 | AVDictionary **setup_find_stream_info_opts(AVFormatContext *s, 277 | AVDictionary *codec_opts); 278 | 279 | /** 280 | * Print an error message to stderr, indicating filename and a human 281 | * readable description of the error code err. 282 | * 283 | * If strerror_r() is not available the use of this function in a 284 | * multithreaded application may be unsafe. 285 | * 286 | * @see av_strerror() 287 | */ 288 | void print_error(const char *filename, int err); 289 | 290 | /** 291 | * Print the program banner to stderr. The banner contents depend on the 292 | * current version of the repository and of the libav* libraries used by 293 | * the program. 294 | */ 295 | void show_banner(int argc, char **argv, const OptionDef *options); 296 | 297 | /** 298 | * Print the version of the program to stdout. The version message 299 | * depends on the current versions of the repository and of the libav* 300 | * libraries. 301 | * This option processing function does not utilize the arguments. 302 | */ 303 | int show_version(void *optctx, const char *opt, const char *arg); 304 | 305 | /** 306 | * Print the license of the program to stdout. The license depends on 307 | * the license of the libraries compiled into the program. 308 | * This option processing function does not utilize the arguments. 309 | */ 310 | int show_license(void *optctx, const char *opt, const char *arg); 311 | 312 | /** 313 | * Print a listing containing all the formats supported by the 314 | * program. 315 | * This option processing function does not utilize the arguments. 
316 | */ 317 | int show_formats(void *optctx, const char *opt, const char *arg); 318 | 319 | /** 320 | * Print a listing containing all the codecs supported by the 321 | * program. 322 | * This option processing function does not utilize the arguments. 323 | */ 324 | int show_codecs(void *optctx, const char *opt, const char *arg); 325 | 326 | /** 327 | * Print a listing containing all the decoders supported by the 328 | * program. 329 | */ 330 | int show_decoders(void *optctx, const char *opt, const char *arg); 331 | 332 | /** 333 | * Print a listing containing all the encoders supported by the 334 | * program. 335 | */ 336 | int show_encoders(void *optctx, const char *opt, const char *arg); 337 | 338 | /** 339 | * Print a listing containing all the filters supported by the 340 | * program. 341 | * This option processing function does not utilize the arguments. 342 | */ 343 | int show_filters(void *optctx, const char *opt, const char *arg); 344 | 345 | /** 346 | * Print a listing containing all the bit stream filters supported by the 347 | * program. 348 | * This option processing function does not utilize the arguments. 349 | */ 350 | int show_bsfs(void *optctx, const char *opt, const char *arg); 351 | 352 | /** 353 | * Print a listing containing all the protocols supported by the 354 | * program. 355 | * This option processing function does not utilize the arguments. 356 | */ 357 | int show_protocols(void *optctx, const char *opt, const char *arg); 358 | 359 | /** 360 | * Print a listing containing all the pixel formats supported by the 361 | * program. 362 | * This option processing function does not utilize the arguments. 363 | */ 364 | int show_pix_fmts(void *optctx, const char *opt, const char *arg); 365 | 366 | /** 367 | * Print a listing containing all the standard channel layouts supported by 368 | * the program. 369 | * This option processing function does not utilize the arguments. 370 | */ 371 | int show_layouts(void *optctx, const char *opt, const char *arg); 372 | 373 | /** 374 | * Print a listing containing all the sample formats supported by the 375 | * program. 376 | */ 377 | int show_sample_fmts(void *optctx, const char *opt, const char *arg); 378 | 379 | /** 380 | * Return a positive value if a line read from standard input 381 | * starts with [yY], otherwise return 0. 382 | */ 383 | int read_yesno(void); 384 | 385 | /** 386 | * Read the file with name filename, and put its content in a newly 387 | * allocated 0-terminated buffer. 388 | * 389 | * @param filename file to read from 390 | * @param bufptr location where pointer to buffer is returned 391 | * @param size location where size of buffer is returned 392 | * @return 0 in case of success, a negative value corresponding to an 393 | * AVERROR error code in case of failure. 394 | */ 395 | int cmdutils_read_file(const char *filename, char **bufptr, size_t *size); 396 | 397 | /** 398 | * Get a file corresponding to a preset file. 399 | * 400 | * If is_path is non-zero, look for the file in the path preset_name. 401 | * Otherwise search for a file named arg.ffpreset in the directories 402 | * $FFMPEG_DATADIR (if set), $HOME/.ffmpeg, and in the datadir defined 403 | * at configuration time or in a "ffpresets" folder along the executable 404 | * on win32, in that order. If no such file is found and 405 | * codec_name is defined, then search for a file named 406 | * codec_name-preset_name.avpreset in the above-mentioned directories. 
407 | * 408 | * @param filename buffer where the name of the found filename is written 409 | * @param filename_size size in bytes of the filename buffer 410 | * @param preset_name name of the preset to search 411 | * @param is_path tell if preset_name is a filename path 412 | * @param codec_name name of the codec for which to look for the 413 | * preset, may be NULL 414 | */ 415 | FILE *get_preset_file(char *filename, size_t filename_size, 416 | const char *preset_name, int is_path, const char *codec_name); 417 | 418 | /** 419 | * Realloc array to hold new_size elements of elem_size. 420 | * Calls exit() on failure. 421 | * 422 | * @param array array to reallocate 423 | * @param elem_size size in bytes of each element 424 | * @param size new element count will be written here 425 | * @param new_size number of elements to place in reallocated array 426 | * @return reallocated array 427 | */ 428 | void *grow_array(void *array, int elem_size, int *size, int new_size); 429 | 430 | typedef struct FrameBuffer { 431 | uint8_t *base[4]; 432 | uint8_t *data[4]; 433 | int linesize[4]; 434 | 435 | int h, w; 436 | enum AVPixelFormat pix_fmt; 437 | 438 | int refcount; 439 | struct FrameBuffer **pool; ///< head of the buffer pool 440 | struct FrameBuffer *next; 441 | } FrameBuffer; 442 | 443 | /** 444 | * Get a frame from the pool. This is intended to be used as a callback for 445 | * AVCodecContext.get_buffer. 446 | * 447 | * @param s codec context. s->opaque must be a pointer to the head of the 448 | * buffer pool. 449 | * @param frame frame->opaque will be set to point to the FrameBuffer 450 | * containing the frame data. 451 | */ 452 | int codec_get_buffer(AVCodecContext *s, AVFrame *frame); 453 | 454 | /** 455 | * A callback to be used for AVCodecContext.release_buffer along with 456 | * codec_get_buffer(). 457 | */ 458 | void codec_release_buffer(AVCodecContext *s, AVFrame *frame); 459 | 460 | /** 461 | * A callback to be used for AVFilterBuffer.free. 462 | * @param fb buffer to free. fb->priv must be a pointer to the FrameBuffer 463 | * containing the buffer data. 464 | */ 465 | void filter_release_buffer(AVFilterBuffer *fb); 466 | 467 | /** 468 | * Free all the buffers in the pool. This must be called after all the 469 | * buffers have been released. 
470 | */ 471 | void free_buffer_pool(FrameBuffer **pool); 472 | 473 | #define GET_PIX_FMT_NAME(pix_fmt)\ 474 | const char *name = av_get_pix_fmt_name(pix_fmt); 475 | 476 | #define GET_SAMPLE_FMT_NAME(sample_fmt)\ 477 | const char *name = av_get_sample_fmt_name(sample_fmt) 478 | 479 | #define GET_SAMPLE_RATE_NAME(rate)\ 480 | char name[16];\ 481 | snprintf(name, sizeof(name), "%d", rate); 482 | 483 | #define GET_CH_LAYOUT_NAME(ch_layout)\ 484 | char name[16];\ 485 | snprintf(name, sizeof(name), "0x%"PRIx64, ch_layout); 486 | 487 | #define GET_CH_LAYOUT_DESC(ch_layout)\ 488 | char name[128];\ 489 | av_get_channel_layout_string(name, sizeof(name), 0, ch_layout); 490 | 491 | #endif /* CMDUTILS_H */ 492 | -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/libs/libSDL.a: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iOSffmpegPlayer/player/cb9cc98df77ac4d0224742d8f68dafc592a7bf4b/TestPlayWithFFMPEGAndSDL/libs/libSDL.a -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/libs/libavcodec.a: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iOSffmpegPlayer/player/cb9cc98df77ac4d0224742d8f68dafc592a7bf4b/TestPlayWithFFMPEGAndSDL/libs/libavcodec.a -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/libs/libavdevice.a: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iOSffmpegPlayer/player/cb9cc98df77ac4d0224742d8f68dafc592a7bf4b/TestPlayWithFFMPEGAndSDL/libs/libavdevice.a -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/libs/libavfilter.a: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iOSffmpegPlayer/player/cb9cc98df77ac4d0224742d8f68dafc592a7bf4b/TestPlayWithFFMPEGAndSDL/libs/libavfilter.a -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/libs/libavformat.a: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iOSffmpegPlayer/player/cb9cc98df77ac4d0224742d8f68dafc592a7bf4b/TestPlayWithFFMPEGAndSDL/libs/libavformat.a -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/libs/libavutil.a: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iOSffmpegPlayer/player/cb9cc98df77ac4d0224742d8f68dafc592a7bf4b/TestPlayWithFFMPEGAndSDL/libs/libavutil.a -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/libs/libswresample.a: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iOSffmpegPlayer/player/cb9cc98df77ac4d0224742d8f68dafc592a7bf4b/TestPlayWithFFMPEGAndSDL/libs/libswresample.a -------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/libs/libswscale.a: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iOSffmpegPlayer/player/cb9cc98df77ac4d0224742d8f68dafc592a7bf4b/TestPlayWithFFMPEGAndSDL/libs/libswscale.a 
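A note on the FrameBuffer pool declared at the end of cmdutils.h above: the header only states the contract (the codec context's opaque field must point at the pool head, codec_get_buffer()/codec_release_buffer() act as the get_buffer/release_buffer callbacks, and free_buffer_pool() may only run after every buffer has been released). The fragment below is an illustrative sketch of that wiring, not code from this repository; dec_ctx, buffer_pool and the helper names are made up, and it targets the same pre-get_buffer2 FFmpeg API these sources use.

/* Sketch only: hooking the cmdutils FrameBuffer pool into a decoder context. */
#include "cmdutils.h"

static FrameBuffer *buffer_pool = NULL;              /* caller-owned head of the pool */

static void attach_buffer_pool(AVCodecContext *dec_ctx)
{
    /* Typically done only for codecs that support direct rendering (CODEC_CAP_DR1). */
    dec_ctx->opaque         = &buffer_pool;          /* codec_get_buffer() reads the pool head here */
    dec_ctx->get_buffer     = codec_get_buffer;      /* hand out pooled, refcounted frames */
    dec_ctx->release_buffer = codec_release_buffer;  /* drop a reference, return the frame to the pool */
}

static void teardown_buffer_pool(void)
{
    /* Valid only once every outstanding frame has been released. */
    free_buffer_pool(&buffer_pool);
}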
-------------------------------------------------------------------------------- /TestPlayWithFFMPEGAndSDL/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // TestPlayWithFFMPEGAndSDL 4 | // 5 | // Created by on 12-12-13. 6 | // Copyright (c) 2012 __MyCompanyName__. All rights reserved. 7 | // 8 | 9 | #import <UIKit/UIKit.h> 10 | 11 | #import "FSFFPLAYVideoPlayAppDelegate.h" 12 | 13 | int main(int argc, char *argv[]) 14 | { 15 | @autoreleasepool { 16 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([FSFFPLAYVideoPlayAppDelegate class])); 17 | } 18 | } 19 | --------------------------------------------------------------------------------
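For completeness, here is a stand-alone sketch of the OptionDef/parse_options() machinery that takes up most of cmdutils.h above. None of it comes from this repository: the program name, the "-i" option, opt_input() and handle_positional() are hypothetical, and only declarations actually present in cmdutils.h (OptionDef, HAS_ARG/OPT_EXIT, show_help(), opt_loglevel(), parse_loglevel(), parse_options()) are relied on.

/* Sketch only: a minimal option table and the usual two-pass command line parse. */
#include "cmdutils.h"

/* cmdutils.h requires the program to define these externs; the values are hypothetical. */
const char program_name[]    = "testplayer";
const int program_birth_year = 2012;
const int this_year          = 2013;

static const char *input_name = NULL;                /* set by -i or by a bare argument */

static int opt_input(void *optctx, const char *opt, const char *arg)
{
    input_name = arg;                                /* remember the file passed to -i */
    return 0;
}

static void handle_positional(void *optctx, const char *arg)
{
    input_name = arg;                                /* arguments without a leading option flag */
}

static const OptionDef options[] = {
    { "h",        OPT_EXIT, { .func_arg = show_help    }, "show help" },
    { "loglevel", HAS_ARG,  { .func_arg = opt_loglevel }, "set libav* logging level", "level" },
    { "i",        HAS_ARG,  { .func_arg = opt_input    }, "input file to play", "file" },
    { NULL },
};

static void parse_cmdline(int argc, char **argv)
{
    parse_loglevel(argc, argv, options);             /* honor -loglevel before anything logs */
    parse_options(NULL, argc, argv, options, handle_positional);
}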