├── .gitignore ├── Encoder Demo.xcodeproj └── project.pbxproj ├── Encoder Demo ├── AVEncoder.h ├── AVEncoder.mm ├── CameraServer.h ├── CameraServer.m ├── Default-568h@2x.png ├── Default.png ├── Default@2x.png ├── Encoder Demo-Info.plist ├── Encoder Demo-Prefix.pch ├── EncoderDemoAppDelegate.h ├── EncoderDemoAppDelegate.m ├── EncoderDemoViewController.h ├── EncoderDemoViewController.m ├── MP4Atom.h ├── MP4Atom.m ├── NALUnit.cpp ├── NALUnit.h ├── RTSPClientConnection.h ├── RTSPClientConnection.mm ├── RTSPMessage.h ├── RTSPMessage.m ├── RTSPServer.h ├── RTSPServer.m ├── VideoEncoder.h ├── VideoEncoder.m ├── en.lproj │ ├── InfoPlist.strings │ ├── MainStoryboard_iPad.storyboard │ └── MainStoryboard_iPhone.storyboard └── main.m ├── LICENSE └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | Encoder Demo.xcodeproj/project.xcworkspace/contents.xcworkspacedata 2 | Encoder Demo.xcodeproj/project.xcworkspace/xcuserdata/geraintd.xcuserdatad/UserInterfaceState.xcuserstate 3 | Encoder Demo.xcodeproj/xcuserdata/geraintd.xcuserdatad/xcschemes/Encoder Demo.xcscheme 4 | Encoder Demo.xcodeproj/xcuserdata/geraintd.xcuserdatad/xcschemes/xcschememanagement.plist 5 | -------------------------------------------------------------------------------- /Encoder Demo.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 46; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 841255A116A035E3001749D9 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 841255A016A035E3001749D9 /* UIKit.framework */; }; 11 | 841255A316A035E3001749D9 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 841255A216A035E3001749D9 /* Foundation.framework */; }; 12 | 841255A516A035E3001749D9 /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 841255A416A035E3001749D9 /* CoreGraphics.framework */; }; 13 | 841255AB16A035E3001749D9 /* InfoPlist.strings in Resources */ = {isa = PBXBuildFile; fileRef = 841255A916A035E3001749D9 /* InfoPlist.strings */; }; 14 | 841255AD16A035E3001749D9 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 841255AC16A035E3001749D9 /* main.m */; }; 15 | 841255B116A035E3001749D9 /* EncoderDemoAppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 841255B016A035E3001749D9 /* EncoderDemoAppDelegate.m */; }; 16 | 841255B316A035E3001749D9 /* Default.png in Resources */ = {isa = PBXBuildFile; fileRef = 841255B216A035E3001749D9 /* Default.png */; }; 17 | 841255B516A035E3001749D9 /* Default@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 841255B416A035E3001749D9 /* Default@2x.png */; }; 18 | 841255B716A035E3001749D9 /* Default-568h@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 841255B616A035E3001749D9 /* Default-568h@2x.png */; }; 19 | 841255BA16A035E3001749D9 /* MainStoryboard_iPhone.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 841255B816A035E3001749D9 /* MainStoryboard_iPhone.storyboard */; }; 20 | 841255BD16A035E3001749D9 /* MainStoryboard_iPad.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 841255BB16A035E3001749D9 /* MainStoryboard_iPad.storyboard */; }; 21 | 841255C016A035E3001749D9 /* EncoderDemoViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 841255BF16A035E3001749D9 /* 
EncoderDemoViewController.m */; }; 22 | 841255C716A035F1001749D9 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 841255C616A035F1001749D9 /* AVFoundation.framework */; }; 23 | 841255C916A035F9001749D9 /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 841255C816A035F9001749D9 /* CoreMedia.framework */; }; 24 | 841255CB16A09114001749D9 /* CoreVideo.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 841255CA16A09114001749D9 /* CoreVideo.framework */; }; 25 | 841255CE16A47A7D001749D9 /* AVEncoder.mm in Sources */ = {isa = PBXBuildFile; fileRef = 841255CD16A47A7D001749D9 /* AVEncoder.mm */; }; 26 | 841255D116A4848E001749D9 /* VideoEncoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 841255D016A4848E001749D9 /* VideoEncoder.m */; }; 27 | 841255D616A5AB8B001749D9 /* MP4Atom.m in Sources */ = {isa = PBXBuildFile; fileRef = 841255D516A5AB8B001749D9 /* MP4Atom.m */; }; 28 | 841255D916A714B7001749D9 /* NALUnit.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 841255D716A714B7001749D9 /* NALUnit.cpp */; }; 29 | 841255DC16A85472001749D9 /* RTSPServer.m in Sources */ = {isa = PBXBuildFile; fileRef = 841255DB16A85472001749D9 /* RTSPServer.m */; }; 30 | 841255E516B14E45001749D9 /* RTSPClientConnection.mm in Sources */ = {isa = PBXBuildFile; fileRef = 841255E416B14E45001749D9 /* RTSPClientConnection.mm */; }; 31 | 841399FA16B1842B00FAD610 /* RTSPMessage.m in Sources */ = {isa = PBXBuildFile; fileRef = 841399F916B1842B00FAD610 /* RTSPMessage.m */; }; 32 | 846119C716D3BF8D00468D98 /* CameraServer.m in Sources */ = {isa = PBXBuildFile; fileRef = 846119C616D3BF8D00468D98 /* CameraServer.m */; }; 33 | /* End PBXBuildFile section */ 34 | 35 | /* Begin PBXFileReference section */ 36 | 8412559C16A035E3001749D9 /* Encoder Demo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Encoder Demo.app"; sourceTree = BUILT_PRODUCTS_DIR; }; 37 | 841255A016A035E3001749D9 /* 
UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; }; 38 | 841255A216A035E3001749D9 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; }; 39 | 841255A416A035E3001749D9 /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; }; 40 | 841255A816A035E3001749D9 /* Encoder Demo-Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = "Encoder Demo-Info.plist"; sourceTree = ""; }; 41 | 841255AA16A035E3001749D9 /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/InfoPlist.strings; sourceTree = ""; }; 42 | 841255AC16A035E3001749D9 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 43 | 841255AE16A035E3001749D9 /* Encoder Demo-Prefix.pch */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Encoder Demo-Prefix.pch"; sourceTree = ""; }; 44 | 841255AF16A035E3001749D9 /* EncoderDemoAppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = EncoderDemoAppDelegate.h; sourceTree = ""; }; 45 | 841255B016A035E3001749D9 /* EncoderDemoAppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = EncoderDemoAppDelegate.m; sourceTree = ""; }; 46 | 841255B216A035E3001749D9 /* Default.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = Default.png; sourceTree = ""; }; 47 | 841255B416A035E3001749D9 /* Default@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Default@2x.png"; sourceTree = ""; }; 48 | 
841255B616A035E3001749D9 /* Default-568h@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Default-568h@2x.png"; sourceTree = ""; }; 49 | 841255B916A035E3001749D9 /* en */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = en; path = en.lproj/MainStoryboard_iPhone.storyboard; sourceTree = ""; }; 50 | 841255BC16A035E3001749D9 /* en */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = en; path = en.lproj/MainStoryboard_iPad.storyboard; sourceTree = ""; }; 51 | 841255BE16A035E3001749D9 /* EncoderDemoViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = EncoderDemoViewController.h; sourceTree = ""; }; 52 | 841255BF16A035E3001749D9 /* EncoderDemoViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = EncoderDemoViewController.m; sourceTree = ""; }; 53 | 841255C616A035F1001749D9 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; 54 | 841255C816A035F9001749D9 /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; }; 55 | 841255CA16A09114001749D9 /* CoreVideo.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreVideo.framework; path = System/Library/Frameworks/CoreVideo.framework; sourceTree = SDKROOT; }; 56 | 841255CC16A47A7D001749D9 /* AVEncoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AVEncoder.h; sourceTree = ""; }; 57 | 841255CD16A47A7D001749D9 /* AVEncoder.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = AVEncoder.mm; sourceTree = ""; }; 58 | 841255CF16A4848E001749D9 /* VideoEncoder.h */ = {isa = 
PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = VideoEncoder.h; sourceTree = ""; }; 59 | 841255D016A4848E001749D9 /* VideoEncoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = VideoEncoder.m; sourceTree = ""; }; 60 | 841255D416A5AB8B001749D9 /* MP4Atom.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MP4Atom.h; sourceTree = ""; }; 61 | 841255D516A5AB8B001749D9 /* MP4Atom.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = MP4Atom.m; sourceTree = ""; }; 62 | 841255D716A714B7001749D9 /* NALUnit.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = NALUnit.cpp; sourceTree = ""; }; 63 | 841255D816A714B7001749D9 /* NALUnit.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = NALUnit.h; sourceTree = ""; }; 64 | 841255DA16A85472001749D9 /* RTSPServer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTSPServer.h; sourceTree = ""; }; 65 | 841255DB16A85472001749D9 /* RTSPServer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RTSPServer.m; sourceTree = ""; }; 66 | 841255E316B14E44001749D9 /* RTSPClientConnection.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTSPClientConnection.h; sourceTree = ""; }; 67 | 841255E416B14E45001749D9 /* RTSPClientConnection.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = RTSPClientConnection.mm; sourceTree = ""; }; 68 | 841399F816B1842B00FAD610 /* RTSPMessage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTSPMessage.h; sourceTree = ""; }; 69 | 841399F916B1842B00FAD610 /* RTSPMessage.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = 
sourcecode.c.objc; path = RTSPMessage.m; sourceTree = ""; }; 70 | 846119C516D3BF8D00468D98 /* CameraServer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CameraServer.h; sourceTree = ""; }; 71 | 846119C616D3BF8D00468D98 /* CameraServer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraServer.m; sourceTree = ""; }; 72 | /* End PBXFileReference section */ 73 | 74 | /* Begin PBXFrameworksBuildPhase section */ 75 | 8412559916A035E3001749D9 /* Frameworks */ = { 76 | isa = PBXFrameworksBuildPhase; 77 | buildActionMask = 2147483647; 78 | files = ( 79 | 841255CB16A09114001749D9 /* CoreVideo.framework in Frameworks */, 80 | 841255C916A035F9001749D9 /* CoreMedia.framework in Frameworks */, 81 | 841255C716A035F1001749D9 /* AVFoundation.framework in Frameworks */, 82 | 841255A116A035E3001749D9 /* UIKit.framework in Frameworks */, 83 | 841255A316A035E3001749D9 /* Foundation.framework in Frameworks */, 84 | 841255A516A035E3001749D9 /* CoreGraphics.framework in Frameworks */, 85 | ); 86 | runOnlyForDeploymentPostprocessing = 0; 87 | }; 88 | /* End PBXFrameworksBuildPhase section */ 89 | 90 | /* Begin PBXGroup section */ 91 | 8412559116A035E3001749D9 = { 92 | isa = PBXGroup; 93 | children = ( 94 | 841255A616A035E3001749D9 /* Encoder Demo */, 95 | 841255CA16A09114001749D9 /* CoreVideo.framework */, 96 | 841255C816A035F9001749D9 /* CoreMedia.framework */, 97 | 841255C616A035F1001749D9 /* AVFoundation.framework */, 98 | 8412559F16A035E3001749D9 /* Frameworks */, 99 | 8412559D16A035E3001749D9 /* Products */, 100 | ); 101 | sourceTree = ""; 102 | }; 103 | 8412559D16A035E3001749D9 /* Products */ = { 104 | isa = PBXGroup; 105 | children = ( 106 | 8412559C16A035E3001749D9 /* Encoder Demo.app */, 107 | ); 108 | name = Products; 109 | sourceTree = ""; 110 | }; 111 | 8412559F16A035E3001749D9 /* Frameworks */ = { 112 | isa = PBXGroup; 113 | children = ( 114 | 841255A016A035E3001749D9 /* 
UIKit.framework */, 115 | 841255A216A035E3001749D9 /* Foundation.framework */, 116 | 841255A416A035E3001749D9 /* CoreGraphics.framework */, 117 | ); 118 | name = Frameworks; 119 | sourceTree = ""; 120 | }; 121 | 841255A616A035E3001749D9 /* Encoder Demo */ = { 122 | isa = PBXGroup; 123 | children = ( 124 | 841255AF16A035E3001749D9 /* EncoderDemoAppDelegate.h */, 125 | 841255B016A035E3001749D9 /* EncoderDemoAppDelegate.m */, 126 | 841255B816A035E3001749D9 /* MainStoryboard_iPhone.storyboard */, 127 | 841255BB16A035E3001749D9 /* MainStoryboard_iPad.storyboard */, 128 | 841255BE16A035E3001749D9 /* EncoderDemoViewController.h */, 129 | 841255BF16A035E3001749D9 /* EncoderDemoViewController.m */, 130 | 846119C516D3BF8D00468D98 /* CameraServer.h */, 131 | 846119C616D3BF8D00468D98 /* CameraServer.m */, 132 | 841255DD16A85477001749D9 /* RTSP */, 133 | 841255D316A57847001749D9 /* AVEncoder */, 134 | 841255A716A035E3001749D9 /* Supporting Files */, 135 | ); 136 | path = "Encoder Demo"; 137 | sourceTree = ""; 138 | }; 139 | 841255A716A035E3001749D9 /* Supporting Files */ = { 140 | isa = PBXGroup; 141 | children = ( 142 | 841255A816A035E3001749D9 /* Encoder Demo-Info.plist */, 143 | 841255A916A035E3001749D9 /* InfoPlist.strings */, 144 | 841255AC16A035E3001749D9 /* main.m */, 145 | 841255AE16A035E3001749D9 /* Encoder Demo-Prefix.pch */, 146 | 841255B216A035E3001749D9 /* Default.png */, 147 | 841255B416A035E3001749D9 /* Default@2x.png */, 148 | 841255B616A035E3001749D9 /* Default-568h@2x.png */, 149 | ); 150 | name = "Supporting Files"; 151 | sourceTree = ""; 152 | }; 153 | 841255D316A57847001749D9 /* AVEncoder */ = { 154 | isa = PBXGroup; 155 | children = ( 156 | 841255D716A714B7001749D9 /* NALUnit.cpp */, 157 | 841255D816A714B7001749D9 /* NALUnit.h */, 158 | 841255CC16A47A7D001749D9 /* AVEncoder.h */, 159 | 841255CD16A47A7D001749D9 /* AVEncoder.mm */, 160 | 841255CF16A4848E001749D9 /* VideoEncoder.h */, 161 | 841255D016A4848E001749D9 /* VideoEncoder.m */, 162 | 
841255D416A5AB8B001749D9 /* MP4Atom.h */, 163 | 841255D516A5AB8B001749D9 /* MP4Atom.m */, 164 | ); 165 | name = AVEncoder; 166 | sourceTree = ""; 167 | }; 168 | 841255DD16A85477001749D9 /* RTSP */ = { 169 | isa = PBXGroup; 170 | children = ( 171 | 841255DA16A85472001749D9 /* RTSPServer.h */, 172 | 841255DB16A85472001749D9 /* RTSPServer.m */, 173 | 841255E316B14E44001749D9 /* RTSPClientConnection.h */, 174 | 841255E416B14E45001749D9 /* RTSPClientConnection.mm */, 175 | 841399F816B1842B00FAD610 /* RTSPMessage.h */, 176 | 841399F916B1842B00FAD610 /* RTSPMessage.m */, 177 | ); 178 | name = RTSP; 179 | sourceTree = ""; 180 | }; 181 | /* End PBXGroup section */ 182 | 183 | /* Begin PBXNativeTarget section */ 184 | 8412559B16A035E3001749D9 /* Encoder Demo */ = { 185 | isa = PBXNativeTarget; 186 | buildConfigurationList = 841255C316A035E3001749D9 /* Build configuration list for PBXNativeTarget "Encoder Demo" */; 187 | buildPhases = ( 188 | 8412559816A035E3001749D9 /* Sources */, 189 | 8412559916A035E3001749D9 /* Frameworks */, 190 | 8412559A16A035E3001749D9 /* Resources */, 191 | ); 192 | buildRules = ( 193 | ); 194 | dependencies = ( 195 | ); 196 | name = "Encoder Demo"; 197 | productName = "Encoder Demo"; 198 | productReference = 8412559C16A035E3001749D9 /* Encoder Demo.app */; 199 | productType = "com.apple.product-type.application"; 200 | }; 201 | /* End PBXNativeTarget section */ 202 | 203 | /* Begin PBXProject section */ 204 | 8412559316A035E3001749D9 /* Project object */ = { 205 | isa = PBXProject; 206 | attributes = { 207 | CLASSPREFIX = EncoderDemo; 208 | LastUpgradeCheck = 0450; 209 | ORGANIZATIONNAME = "Geraint Davies"; 210 | }; 211 | buildConfigurationList = 8412559616A035E3001749D9 /* Build configuration list for PBXProject "Encoder Demo" */; 212 | compatibilityVersion = "Xcode 3.2"; 213 | developmentRegion = English; 214 | hasScannedForEncodings = 0; 215 | knownRegions = ( 216 | en, 217 | ); 218 | mainGroup = 8412559116A035E3001749D9; 219 | productRefGroup = 
8412559D16A035E3001749D9 /* Products */; 220 | projectDirPath = ""; 221 | projectRoot = ""; 222 | targets = ( 223 | 8412559B16A035E3001749D9 /* Encoder Demo */, 224 | ); 225 | }; 226 | /* End PBXProject section */ 227 | 228 | /* Begin PBXResourcesBuildPhase section */ 229 | 8412559A16A035E3001749D9 /* Resources */ = { 230 | isa = PBXResourcesBuildPhase; 231 | buildActionMask = 2147483647; 232 | files = ( 233 | 841255AB16A035E3001749D9 /* InfoPlist.strings in Resources */, 234 | 841255B316A035E3001749D9 /* Default.png in Resources */, 235 | 841255B516A035E3001749D9 /* Default@2x.png in Resources */, 236 | 841255B716A035E3001749D9 /* Default-568h@2x.png in Resources */, 237 | 841255BA16A035E3001749D9 /* MainStoryboard_iPhone.storyboard in Resources */, 238 | 841255BD16A035E3001749D9 /* MainStoryboard_iPad.storyboard in Resources */, 239 | ); 240 | runOnlyForDeploymentPostprocessing = 0; 241 | }; 242 | /* End PBXResourcesBuildPhase section */ 243 | 244 | /* Begin PBXSourcesBuildPhase section */ 245 | 8412559816A035E3001749D9 /* Sources */ = { 246 | isa = PBXSourcesBuildPhase; 247 | buildActionMask = 2147483647; 248 | files = ( 249 | 841255AD16A035E3001749D9 /* main.m in Sources */, 250 | 841255B116A035E3001749D9 /* EncoderDemoAppDelegate.m in Sources */, 251 | 841255C016A035E3001749D9 /* EncoderDemoViewController.m in Sources */, 252 | 841255CE16A47A7D001749D9 /* AVEncoder.mm in Sources */, 253 | 841255D116A4848E001749D9 /* VideoEncoder.m in Sources */, 254 | 841255D616A5AB8B001749D9 /* MP4Atom.m in Sources */, 255 | 841255D916A714B7001749D9 /* NALUnit.cpp in Sources */, 256 | 841255DC16A85472001749D9 /* RTSPServer.m in Sources */, 257 | 841255E516B14E45001749D9 /* RTSPClientConnection.mm in Sources */, 258 | 841399FA16B1842B00FAD610 /* RTSPMessage.m in Sources */, 259 | 846119C716D3BF8D00468D98 /* CameraServer.m in Sources */, 260 | ); 261 | runOnlyForDeploymentPostprocessing = 0; 262 | }; 263 | /* End PBXSourcesBuildPhase section */ 264 | 265 | /* Begin 
PBXVariantGroup section */ 266 | 841255A916A035E3001749D9 /* InfoPlist.strings */ = { 267 | isa = PBXVariantGroup; 268 | children = ( 269 | 841255AA16A035E3001749D9 /* en */, 270 | ); 271 | name = InfoPlist.strings; 272 | sourceTree = ""; 273 | }; 274 | 841255B816A035E3001749D9 /* MainStoryboard_iPhone.storyboard */ = { 275 | isa = PBXVariantGroup; 276 | children = ( 277 | 841255B916A035E3001749D9 /* en */, 278 | ); 279 | name = MainStoryboard_iPhone.storyboard; 280 | sourceTree = ""; 281 | }; 282 | 841255BB16A035E3001749D9 /* MainStoryboard_iPad.storyboard */ = { 283 | isa = PBXVariantGroup; 284 | children = ( 285 | 841255BC16A035E3001749D9 /* en */, 286 | ); 287 | name = MainStoryboard_iPad.storyboard; 288 | sourceTree = ""; 289 | }; 290 | /* End PBXVariantGroup section */ 291 | 292 | /* Begin XCBuildConfiguration section */ 293 | 841255C116A035E3001749D9 /* Debug */ = { 294 | isa = XCBuildConfiguration; 295 | buildSettings = { 296 | ALWAYS_SEARCH_USER_PATHS = NO; 297 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 298 | CLANG_CXX_LIBRARY = "libc++"; 299 | CLANG_ENABLE_OBJC_ARC = YES; 300 | CLANG_WARN_EMPTY_BODY = YES; 301 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 302 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 303 | COPY_PHASE_STRIP = NO; 304 | GCC_C_LANGUAGE_STANDARD = gnu99; 305 | GCC_DYNAMIC_NO_PIC = NO; 306 | GCC_OPTIMIZATION_LEVEL = 0; 307 | GCC_PREPROCESSOR_DEFINITIONS = ( 308 | "DEBUG=1", 309 | "$(inherited)", 310 | ); 311 | GCC_SYMBOLS_PRIVATE_EXTERN = NO; 312 | GCC_WARN_ABOUT_RETURN_TYPE = YES; 313 | GCC_WARN_UNINITIALIZED_AUTOS = YES; 314 | GCC_WARN_UNUSED_VARIABLE = YES; 315 | IPHONEOS_DEPLOYMENT_TARGET = 6.0; 316 | ONLY_ACTIVE_ARCH = YES; 317 | SDKROOT = iphoneos; 318 | TARGETED_DEVICE_FAMILY = "1,2"; 319 | }; 320 | name = Debug; 321 | }; 322 | 841255C216A035E3001749D9 /* Release */ = { 323 | isa = XCBuildConfiguration; 324 | buildSettings = { 325 | ALWAYS_SEARCH_USER_PATHS = NO; 326 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 327 | 
CLANG_CXX_LIBRARY = "libc++"; 328 | CLANG_ENABLE_OBJC_ARC = YES; 329 | CLANG_WARN_EMPTY_BODY = YES; 330 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 331 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 332 | COPY_PHASE_STRIP = YES; 333 | GCC_C_LANGUAGE_STANDARD = gnu99; 334 | GCC_WARN_ABOUT_RETURN_TYPE = YES; 335 | GCC_WARN_UNINITIALIZED_AUTOS = YES; 336 | GCC_WARN_UNUSED_VARIABLE = YES; 337 | IPHONEOS_DEPLOYMENT_TARGET = 6.0; 338 | OTHER_CFLAGS = "-DNS_BLOCK_ASSERTIONS=1"; 339 | SDKROOT = iphoneos; 340 | TARGETED_DEVICE_FAMILY = "1,2"; 341 | VALIDATE_PRODUCT = YES; 342 | }; 343 | name = Release; 344 | }; 345 | 841255C416A035E3001749D9 /* Debug */ = { 346 | isa = XCBuildConfiguration; 347 | buildSettings = { 348 | GCC_PRECOMPILE_PREFIX_HEADER = YES; 349 | GCC_PREFIX_HEADER = "Encoder Demo/Encoder Demo-Prefix.pch"; 350 | INFOPLIST_FILE = "Encoder Demo/Encoder Demo-Info.plist"; 351 | PRODUCT_NAME = "$(TARGET_NAME)"; 352 | WRAPPER_EXTENSION = app; 353 | }; 354 | name = Debug; 355 | }; 356 | 841255C516A035E3001749D9 /* Release */ = { 357 | isa = XCBuildConfiguration; 358 | buildSettings = { 359 | GCC_PRECOMPILE_PREFIX_HEADER = YES; 360 | GCC_PREFIX_HEADER = "Encoder Demo/Encoder Demo-Prefix.pch"; 361 | INFOPLIST_FILE = "Encoder Demo/Encoder Demo-Info.plist"; 362 | PRODUCT_NAME = "$(TARGET_NAME)"; 363 | WRAPPER_EXTENSION = app; 364 | }; 365 | name = Release; 366 | }; 367 | /* End XCBuildConfiguration section */ 368 | 369 | /* Begin XCConfigurationList section */ 370 | 8412559616A035E3001749D9 /* Build configuration list for PBXProject "Encoder Demo" */ = { 371 | isa = XCConfigurationList; 372 | buildConfigurations = ( 373 | 841255C116A035E3001749D9 /* Debug */, 374 | 841255C216A035E3001749D9 /* Release */, 375 | ); 376 | defaultConfigurationIsVisible = 0; 377 | defaultConfigurationName = Release; 378 | }; 379 | 841255C316A035E3001749D9 /* Build configuration list for PBXNativeTarget "Encoder Demo" */ = { 380 | isa = XCConfigurationList; 381 | 
buildConfigurations = ( 382 | 841255C416A035E3001749D9 /* Debug */, 383 | 841255C516A035E3001749D9 /* Release */, 384 | ); 385 | defaultConfigurationIsVisible = 0; 386 | defaultConfigurationName = Release; 387 | }; 388 | /* End XCConfigurationList section */ 389 | }; 390 | rootObject = 8412559316A035E3001749D9 /* Project object */; 391 | } 392 | -------------------------------------------------------------------------------- /Encoder Demo/AVEncoder.h: -------------------------------------------------------------------------------- 1 | // 2 | // AVEncoder.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 14/01/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | #import "AVFoundation/AVAssetWriter.h" 11 | #import "AVFoundation/AVAssetWriterInput.h" 12 | #import "AVFoundation/AVMediaFormat.h" 13 | #import "AVFoundation/AVVideoSettings.h" 14 | #import "sys/stat.h" 15 | #import "VideoEncoder.h" 16 | #import "MP4Atom.h" 17 | 18 | typedef int (^encoder_handler_t)(NSArray* data, double pts); 19 | typedef int (^param_handler_t)(NSData* params); 20 | 21 | @interface AVEncoder : NSObject 22 | 23 | + (AVEncoder*) encoderForHeight:(int) height andWidth:(int) width; 24 | 25 | - (void) encodeWithBlock:(encoder_handler_t) block onParams: (param_handler_t) paramsHandler; 26 | - (void) encodeFrame:(CMSampleBufferRef) sampleBuffer; 27 | - (NSData*) getConfigData; 28 | - (void) shutdown; 29 | 30 | 31 | @property (readonly, atomic) int bitspersecond; 32 | 33 | @end 34 | -------------------------------------------------------------------------------- /Encoder Demo/AVEncoder.mm: -------------------------------------------------------------------------------- 1 | // 2 | // AVEncoder.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 14/01/2013. 
6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import "AVEncoder.h" 10 | #import "NALUnit.h" 11 | 12 | static unsigned int to_host(unsigned char* p) 13 | { 14 | return (p[0] << 24) + (p[1] << 16) + (p[2] << 8) + p[3]; 15 | } 16 | 17 | #define OUTPUT_FILE_SWITCH_POINT (50 * 1024 * 1024) // 50 MB switch point 18 | #define MAX_FILENAME_INDEX 5 // filenames "capture1.mp4" wraps at capture5.mp4 19 | 20 | // store the calculated POC with a frame ready for timestamp assessment 21 | // (recalculating POC out of order will get an incorrect result) 22 | @interface EncodedFrame : NSObject 23 | 24 | - (EncodedFrame*) initWithData:(NSArray*) nalus andPOC:(int) poc; 25 | 26 | @property int poc; 27 | @property NSArray* frame; 28 | 29 | @end 30 | 31 | @implementation EncodedFrame 32 | 33 | @synthesize poc; 34 | @synthesize frame; 35 | 36 | - (EncodedFrame*) initWithData:(NSArray*) nalus andPOC:(int) POC 37 | { 38 | self.poc = POC; 39 | self.frame = nalus; 40 | return self; 41 | } 42 | 43 | @end 44 | 45 | 46 | @interface AVEncoder () 47 | 48 | { 49 | // initial writer, used to obtain SPS/PPS from header 50 | VideoEncoder* _headerWriter; 51 | 52 | // main encoder/writer 53 | VideoEncoder* _writer; 54 | 55 | // writer output file (input to our extractor) and monitoring 56 | NSFileHandle* _inputFile; 57 | dispatch_queue_t _readQueue; 58 | dispatch_source_t _readSource; 59 | 60 | // index of current file name 61 | BOOL _swapping; 62 | int _currentFile; 63 | int _height; 64 | int _width; 65 | 66 | // param set data 67 | NSData* _avcC; 68 | int _lengthSize; 69 | 70 | // POC 71 | POCState _pocState; 72 | int _prevPOC; 73 | 74 | // location of mdat 75 | BOOL _foundMDAT; 76 | uint64_t _posMDAT; 77 | int _bytesToNextAtom; 78 | BOOL _needParams; 79 | 80 | // tracking if NALU is next frame 81 | int _prev_nal_idc; 82 | int _prev_nal_type; 83 | // array of NSData comprising a single frame. 
each data is one nalu with no start code 84 | NSMutableArray* _pendingNALU; 85 | 86 | // FIFO for frame times 87 | NSMutableArray* _times; 88 | 89 | // FIFO for frames awaiting time assigment 90 | NSMutableArray* _frames; 91 | 92 | encoder_handler_t _outputBlock; 93 | param_handler_t _paramsBlock; 94 | 95 | // estimate bitrate over first second 96 | int _bitspersecond; 97 | double _firstpts; 98 | } 99 | 100 | - (void) initForHeight:(int) height andWidth:(int) width; 101 | 102 | @end 103 | 104 | @implementation AVEncoder 105 | 106 | @synthesize bitspersecond = _bitspersecond; 107 | 108 | + (AVEncoder*) encoderForHeight:(int) height andWidth:(int) width 109 | { 110 | AVEncoder* enc = [AVEncoder alloc]; 111 | [enc initForHeight:height andWidth:width]; 112 | return enc; 113 | } 114 | 115 | - (NSString*) makeFilename 116 | { 117 | NSString* filename = [NSString stringWithFormat:@"capture%d.mp4", _currentFile]; 118 | NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename]; 119 | return path; 120 | } 121 | - (void) initForHeight:(int)height andWidth:(int)width 122 | { 123 | _height = height; 124 | _width = width; 125 | NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"params.mp4"]; 126 | _headerWriter = [VideoEncoder encoderForPath:path Height:height andWidth:width]; 127 | _times = [NSMutableArray arrayWithCapacity:10]; 128 | 129 | // swap between 3 filenames 130 | _currentFile = 1; 131 | _writer = [VideoEncoder encoderForPath:[self makeFilename] Height:height andWidth:width]; 132 | } 133 | 134 | - (void) encodeWithBlock:(encoder_handler_t) block onParams: (param_handler_t) paramsHandler 135 | { 136 | _outputBlock = block; 137 | _paramsBlock = paramsHandler; 138 | _needParams = YES; 139 | _pendingNALU = nil; 140 | _firstpts = -1; 141 | _bitspersecond = 0; 142 | } 143 | 144 | - (BOOL) parseParams:(NSString*) path 145 | { 146 | NSFileHandle* file = [NSFileHandle fileHandleForReadingAtPath:path]; 147 | struct stat s; 148 | 
fstat([file fileDescriptor], &s); 149 | MP4Atom* movie = [MP4Atom atomAt:0 size:(int)s.st_size type:(OSType)('file') inFile:file]; 150 | MP4Atom* moov = [movie childOfType:(OSType)('moov') startAt:0]; 151 | MP4Atom* trak = nil; 152 | if (moov != nil) 153 | { 154 | for (;;) 155 | { 156 | trak = [moov nextChild]; 157 | if (trak == nil) 158 | { 159 | break; 160 | } 161 | 162 | if (trak.type == (OSType)('trak')) 163 | { 164 | MP4Atom* tkhd = [trak childOfType:(OSType)('tkhd') startAt:0]; 165 | NSData* verflags = [tkhd readAt:0 size:4]; 166 | unsigned char* p = (unsigned char*)[verflags bytes]; 167 | if (p[3] & 1) 168 | { 169 | break; 170 | } 171 | else 172 | { 173 | tkhd = nil; 174 | } 175 | } 176 | } 177 | } 178 | MP4Atom* stsd = nil; 179 | if (trak != nil) 180 | { 181 | MP4Atom* media = [trak childOfType:(OSType)('mdia') startAt:0]; 182 | if (media != nil) 183 | { 184 | MP4Atom* minf = [media childOfType:(OSType)('minf') startAt:0]; 185 | if (minf != nil) 186 | { 187 | MP4Atom* stbl = [minf childOfType:(OSType)('stbl') startAt:0]; 188 | if (stbl != nil) 189 | { 190 | stsd = [stbl childOfType:(OSType)('stsd') startAt:0]; 191 | } 192 | } 193 | } 194 | } 195 | if (stsd != nil) 196 | { 197 | MP4Atom* avc1 = [stsd childOfType:(OSType)('avc1') startAt:8]; 198 | if (avc1 != nil) 199 | { 200 | MP4Atom* esd = [avc1 childOfType:(OSType)('avcC') startAt:78]; 201 | if (esd != nil) 202 | { 203 | // this is the avcC record that we are looking for 204 | _avcC = [esd readAt:0 size:(int)esd.length]; 205 | if (_avcC != nil) 206 | { 207 | // extract size of length field 208 | unsigned char* p = (unsigned char*)[_avcC bytes]; 209 | _lengthSize = (p[4] & 3) + 1; 210 | 211 | avcCHeader avc((const BYTE*)[_avcC bytes], (int)[_avcC length]); 212 | _pocState.SetHeader(&avc); 213 | 214 | return YES; 215 | } 216 | } 217 | } 218 | } 219 | return NO; 220 | } 221 | 222 | - (void) onParamsCompletion 223 | { 224 | // the initial one-frame-only file has been completed 225 | // Extract the avcC 
structure and then start monitoring the 226 | // main file to extract video from the mdat chunk. 227 | if ([self parseParams:_headerWriter.path]) 228 | { 229 | if (_paramsBlock) 230 | { 231 | _paramsBlock(_avcC); 232 | } 233 | _headerWriter = nil; 234 | _swapping = NO; 235 | _inputFile = [NSFileHandle fileHandleForReadingAtPath:_writer.path]; 236 | _readQueue = dispatch_queue_create("uk.co.gdcl.avencoder.read", DISPATCH_QUEUE_SERIAL); 237 | 238 | _readSource = dispatch_source_create(DISPATCH_SOURCE_TYPE_READ, [_inputFile fileDescriptor], 0, _readQueue); 239 | dispatch_source_set_event_handler(_readSource, ^{ 240 | [self onFileUpdate]; 241 | }); 242 | dispatch_resume(_readSource); 243 | } 244 | } 245 | 246 | - (void) encodeFrame:(CMSampleBufferRef) sampleBuffer 247 | { 248 | @synchronized(self) 249 | { 250 | if (_needParams) 251 | { 252 | // the avcC record is needed for decoding and it's not written to the file until 253 | // completion. We get round that by writing the first frame to two files; the first 254 | // file (containing only one frame) is then finished, so we can extract the avcC record. 255 | // Only when we've got that do we start reading from the main file. 256 | _needParams = NO; 257 | if ([_headerWriter encodeFrame:sampleBuffer]) 258 | { 259 | [_headerWriter finishWithCompletionHandler:^{ 260 | [self onParamsCompletion]; 261 | }]; 262 | } 263 | } 264 | } 265 | CMTime prestime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 266 | double dPTS = (double)(prestime.value) / prestime.timescale; 267 | NSNumber* pts = [NSNumber numberWithDouble:dPTS]; 268 | 269 | @synchronized(_times) 270 | { 271 | [_times addObject:pts]; 272 | } 273 | @synchronized(self) 274 | { 275 | // switch output files when we reach a size limit 276 | // to avoid runaway storage use. 
277 | if (!_swapping) 278 | { 279 | struct stat st; 280 | fstat([_inputFile fileDescriptor], &st); 281 | if (st.st_size > OUTPUT_FILE_SWITCH_POINT) 282 | { 283 | _swapping = YES; 284 | VideoEncoder* oldVideo = _writer; 285 | 286 | // construct a new writer to the next filename 287 | if (++_currentFile > MAX_FILENAME_INDEX) 288 | { 289 | _currentFile = 1; 290 | } 291 | NSLog(@"Swap to file %d", _currentFile); 292 | _writer = [VideoEncoder encoderForPath:[self makeFilename] Height:_height andWidth:_width]; 293 | 294 | 295 | // to do this seamlessly requires a few steps in the right order 296 | // first, suspend the read source 297 | dispatch_source_cancel(_readSource); 298 | // execute the next step as a block on the same queue, to be sure the suspend is done 299 | dispatch_async(_readQueue, ^{ 300 | // finish the file, writing moov, before reading any more from the file 301 | // since we don't yet know where the mdat ends 302 | _readSource = nil; 303 | [oldVideo finishWithCompletionHandler:^{ 304 | [self swapFiles:oldVideo.path]; 305 | }]; 306 | }); 307 | } 308 | } 309 | [_writer encodeFrame:sampleBuffer]; 310 | } 311 | } 312 | 313 | - (void) swapFiles:(NSString*) oldPath 314 | { 315 | // save current position 316 | uint64_t pos = [_inputFile offsetInFile]; 317 | 318 | // re-read mdat length 319 | [_inputFile seekToFileOffset:_posMDAT]; 320 | NSData* hdr = [_inputFile readDataOfLength:4]; 321 | unsigned char* p = (unsigned char*) [hdr bytes]; 322 | int lenMDAT = to_host(p); 323 | 324 | // extract nalus from saved position to mdat end 325 | uint64_t posEnd = _posMDAT + lenMDAT; 326 | uint32_t cRead = (uint32_t)(posEnd - pos); 327 | [_inputFile seekToFileOffset:pos]; 328 | [self readAndDeliver:cRead]; 329 | 330 | // close and remove file 331 | [_inputFile closeFile]; 332 | _foundMDAT = false; 333 | _bytesToNextAtom = 0; 334 | [[NSFileManager defaultManager] removeItemAtPath:oldPath error:nil]; 335 | 336 | 337 | // open new file and set up dispatch source 338 | 
_inputFile = [NSFileHandle fileHandleForReadingAtPath:_writer.path]; 339 | _readSource = dispatch_source_create(DISPATCH_SOURCE_TYPE_READ, [_inputFile fileDescriptor], 0, _readQueue); 340 | dispatch_source_set_event_handler(_readSource, ^{ 341 | [self onFileUpdate]; 342 | }); 343 | dispatch_resume(_readSource); 344 | _swapping = NO; 345 | } 346 | 347 | 348 | - (void) readAndDeliver:(uint32_t) cReady 349 | { 350 | // Identify the individual NALUs and extract them 351 | while (cReady > _lengthSize) 352 | { 353 | NSData* lenField = [_inputFile readDataOfLength:_lengthSize]; 354 | cReady -= _lengthSize; 355 | unsigned char* p = (unsigned char*) [lenField bytes]; 356 | unsigned int lenNALU = to_host(p); 357 | 358 | if (lenNALU > cReady) 359 | { 360 | // whole NALU not present -- seek back to start of NALU and wait for more 361 | [_inputFile seekToFileOffset:[_inputFile offsetInFile] - 4]; 362 | break; 363 | } 364 | NSData* nalu = [_inputFile readDataOfLength:lenNALU]; 365 | cReady -= lenNALU; 366 | 367 | [self onNALU:nalu]; 368 | } 369 | } 370 | 371 | - (void) onFileUpdate 372 | { 373 | // called whenever there is more data to read in the main encoder output file. 374 | 375 | struct stat s; 376 | fstat([_inputFile fileDescriptor], &s); 377 | int cReady = (int)(s.st_size - [_inputFile offsetInFile]); 378 | 379 | // locate the mdat atom if needed 380 | while (!_foundMDAT && (cReady > 8)) 381 | { 382 | if (_bytesToNextAtom == 0) 383 | { 384 | NSData* hdr = [_inputFile readDataOfLength:8]; 385 | cReady -= 8; 386 | unsigned char* p = (unsigned char*) [hdr bytes]; 387 | int lenAtom = to_host(p); 388 | unsigned int nameAtom = to_host(p+4); 389 | if (nameAtom == (unsigned int)('mdat')) 390 | { 391 | _foundMDAT = true; 392 | _posMDAT = [_inputFile offsetInFile] - 8; 393 | } 394 | else 395 | { 396 | _bytesToNextAtom = lenAtom - 8; 397 | } 398 | } 399 | if (_bytesToNextAtom > 0) 400 | { 401 | int cThis = cReady < _bytesToNextAtom ? 
cReady :_bytesToNextAtom; 402 | _bytesToNextAtom -= cThis; 403 | [_inputFile seekToFileOffset:[_inputFile offsetInFile]+cThis]; 404 | cReady -= cThis; 405 | } 406 | } 407 | if (!_foundMDAT) 408 | { 409 | return; 410 | } 411 | 412 | // the mdat must be just encoded video. 413 | [self readAndDeliver:cReady]; 414 | } 415 | 416 | - (void) deliverFrame: (NSArray*) frame withTime:(double) pts 417 | { 418 | 419 | if (_firstpts < 0) 420 | { 421 | _firstpts = pts; 422 | } 423 | if ((pts - _firstpts) < 1) 424 | { 425 | int bytes = 0; 426 | for (NSData* data in frame) 427 | { 428 | bytes += [data length]; 429 | } 430 | _bitspersecond += (bytes * 8); 431 | } 432 | 433 | if (_outputBlock != nil) 434 | { 435 | _outputBlock(frame, pts); 436 | } 437 | 438 | } 439 | 440 | - (void) processStoredFrames 441 | { 442 | // first has the last timestamp and rest use up timestamps from the start 443 | int n = 0; 444 | for (EncodedFrame* f in _frames) 445 | { 446 | int index = 0; 447 | if (n == 0) 448 | { 449 | index = (int) [_frames count] - 1; 450 | } 451 | else 452 | { 453 | index = n-1; 454 | } 455 | double pts = 0; 456 | @synchronized(_times) 457 | { 458 | if ([_times count] > 0) 459 | { 460 | pts = [_times[index] doubleValue]; 461 | } 462 | } 463 | [self deliverFrame:f.frame withTime:pts]; 464 | n++; 465 | } 466 | @synchronized(_times) 467 | { 468 | [_times removeObjectsInRange:NSMakeRange(0, [_frames count])]; 469 | } 470 | [_frames removeAllObjects]; 471 | } 472 | 473 | - (void) onEncodedFrame 474 | { 475 | int poc = 0; 476 | for (NSData* d in _pendingNALU) 477 | { 478 | NALUnit nal((const BYTE*)[d bytes], (int)[d length]); 479 | if (_pocState.GetPOC(&nal, &poc)) 480 | { 481 | break; 482 | } 483 | } 484 | 485 | if (poc == 0) 486 | { 487 | [self processStoredFrames]; 488 | double pts = 0; 489 | int index = 0; 490 | @synchronized(_times) 491 | { 492 | if ([_times count] > 0) 493 | { 494 | pts = [_times[index] doubleValue]; 495 | [_times removeObjectAtIndex:index]; 496 | } 497 | } 498 | 
[self deliverFrame:_pendingNALU withTime:pts]; 499 | _prevPOC = 0; 500 | } 501 | else 502 | { 503 | EncodedFrame* f = [[EncodedFrame alloc] initWithData:_pendingNALU andPOC:poc]; 504 | if (poc > _prevPOC) 505 | { 506 | // all pending frames come before this, so share out the 507 | // timestamps in order of POC 508 | [self processStoredFrames]; 509 | _prevPOC = poc; 510 | } 511 | if (_frames == nil) 512 | { 513 | _frames = [NSMutableArray arrayWithCapacity:2]; 514 | } 515 | [_frames addObject:f]; 516 | } 517 | } 518 | 519 | // combine multiple NALUs into a single frame, and in the process, convert to BSF 520 | // by adding 00 00 01 startcodes before each NALU. 521 | - (void) onNALU:(NSData*) nalu 522 | { 523 | unsigned char* pNal = (unsigned char*)[nalu bytes]; 524 | int idc = pNal[0] & 0x60; 525 | int naltype = pNal[0] & 0x1f; 526 | 527 | if (_pendingNALU) 528 | { 529 | NALUnit nal(pNal, (int)[nalu length]); 530 | 531 | // we have existing data —is this the same frame? 532 | // typically there are a couple of NALUs per frame in iOS encoding. 533 | // This is not general-purpose: it assumes that arbitrary slice ordering is not allowed. 
534 | BOOL bNew = NO; 535 | 536 | // sei and param sets go with following nalu 537 | if (_prev_nal_type < 6) 538 | { 539 | if (naltype >= 6) 540 | { 541 | bNew = YES; 542 | } 543 | else if ((idc != _prev_nal_idc) && ((idc == 0) || (_prev_nal_idc == 0))) 544 | { 545 | bNew = YES; 546 | } 547 | else if ((naltype != _prev_nal_type) && (naltype == 5)) 548 | { 549 | bNew = YES; 550 | } 551 | else if ((naltype >= 1) && (naltype <= 5)) 552 | { 553 | nal.Skip(8); 554 | int first_mb = (int)nal.GetUE(); 555 | if (first_mb == 0) 556 | { 557 | bNew = YES; 558 | } 559 | } 560 | } 561 | 562 | if (bNew) 563 | { 564 | [self onEncodedFrame]; 565 | _pendingNALU = nil; 566 | } 567 | } 568 | _prev_nal_type = naltype; 569 | _prev_nal_idc = idc; 570 | if (_pendingNALU == nil) 571 | { 572 | _pendingNALU = [NSMutableArray arrayWithCapacity:2]; 573 | } 574 | [_pendingNALU addObject:nalu]; 575 | } 576 | 577 | - (NSData*) getConfigData 578 | { 579 | return [_avcC copy]; 580 | } 581 | 582 | - (void) shutdown 583 | { 584 | @synchronized(self) 585 | { 586 | _readSource = nil; 587 | if (_headerWriter) 588 | { 589 | [_headerWriter finishWithCompletionHandler:^{ 590 | _headerWriter = nil; 591 | }]; 592 | } 593 | if (_writer) 594 | { 595 | [_writer finishWithCompletionHandler:^{ 596 | _writer = nil; 597 | }]; 598 | } 599 | // !! wait for these to finish before returning and delete temp files 600 | } 601 | } 602 | 603 | @end 604 | -------------------------------------------------------------------------------- /Encoder Demo/CameraServer.h: -------------------------------------------------------------------------------- 1 | // 2 | // CameraServer.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 19/02/2013. 
6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | #import "AVFoundation/AVCaptureSession.h" 11 | #import "AVFoundation/AVCaptureOutput.h" 12 | #import "AVFoundation/AVCaptureDevice.h" 13 | #import "AVFoundation/AVCaptureInput.h" 14 | #import "AVFoundation/AVCaptureVideoPreviewLayer.h" 15 | #import "AVFoundation/AVMediaFormat.h" 16 | 17 | @interface CameraServer : NSObject 18 | 19 | + (CameraServer*) server; 20 | - (void) startup; 21 | - (void) shutdown; 22 | - (NSString*) getURL; 23 | - (AVCaptureVideoPreviewLayer*) getPreviewLayer; 24 | 25 | @end 26 | -------------------------------------------------------------------------------- /Encoder Demo/CameraServer.m: -------------------------------------------------------------------------------- 1 | // 2 | // CameraServer.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 19/02/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import "CameraServer.h" 10 | #import "AVEncoder.h" 11 | #import "RTSPServer.h" 12 | 13 | static CameraServer* theServer; 14 | 15 | @interface CameraServer () 16 | { 17 | AVCaptureSession* _session; 18 | AVCaptureVideoPreviewLayer* _preview; 19 | AVCaptureVideoDataOutput* _output; 20 | dispatch_queue_t _captureQueue; 21 | 22 | AVEncoder* _encoder; 23 | 24 | RTSPServer* _rtsp; 25 | } 26 | @end 27 | 28 | 29 | @implementation CameraServer 30 | 31 | + (void) initialize 32 | { 33 | // test recommended to avoid duplicate init via subclass 34 | if (self == [CameraServer class]) 35 | { 36 | theServer = [[CameraServer alloc] init]; 37 | } 38 | } 39 | 40 | + (CameraServer*) server 41 | { 42 | return theServer; 43 | } 44 | 45 | - (void) startup 46 | { 47 | if (_session == nil) 48 | { 49 | NSLog(@"Starting up server"); 50 | 51 | // create capture device with video input 52 | _session = [[AVCaptureSession alloc] init]; 53 | AVCaptureDevice* dev = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 54 | 
AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:dev error:nil]; 55 | [_session addInput:input]; 56 | 57 | // create an output for YUV output with self as delegate 58 | _captureQueue = dispatch_queue_create("uk.co.gdcl.avencoder.capture", DISPATCH_QUEUE_SERIAL); 59 | _output = [[AVCaptureVideoDataOutput alloc] init]; 60 | [_output setSampleBufferDelegate:self queue:_captureQueue]; 61 | NSDictionary* setcapSettings = [NSDictionary dictionaryWithObjectsAndKeys: 62 | [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey, 63 | nil]; 64 | _output.videoSettings = setcapSettings; 65 | [_session addOutput:_output]; 66 | 67 | // create an encoder 68 | _encoder = [AVEncoder encoderForHeight:480 andWidth:720]; 69 | [_encoder encodeWithBlock:^int(NSArray* data, double pts) { 70 | if (_rtsp != nil) 71 | { 72 | _rtsp.bitrate = _encoder.bitspersecond; 73 | [_rtsp onVideoData:data time:pts]; 74 | } 75 | return 0; 76 | } onParams:^int(NSData *data) { 77 | _rtsp = [RTSPServer setupListener:data]; 78 | return 0; 79 | }]; 80 | 81 | // start capture and a preview layer 82 | [_session startRunning]; 83 | 84 | 85 | _preview = [AVCaptureVideoPreviewLayer layerWithSession:_session]; 86 | _preview.videoGravity = AVLayerVideoGravityResizeAspectFill; 87 | } 88 | } 89 | 90 | - (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 91 | { 92 | // pass frame to encoder 93 | [_encoder encodeFrame:sampleBuffer]; 94 | } 95 | 96 | - (void) shutdown 97 | { 98 | NSLog(@"shutting down server"); 99 | if (_session) 100 | { 101 | [_session stopRunning]; 102 | _session = nil; 103 | } 104 | if (_rtsp) 105 | { 106 | [_rtsp shutdownServer]; 107 | } 108 | if (_encoder) 109 | { 110 | [ _encoder shutdown]; 111 | } 112 | } 113 | 114 | - (NSString*) getURL 115 | { 116 | NSString* ipaddr = [RTSPServer getIPAddress]; 117 | NSString* url = 
[NSString stringWithFormat:@"rtsp://%@/", ipaddr]; 118 | return url; 119 | } 120 | 121 | - (AVCaptureVideoPreviewLayer*) getPreviewLayer 122 | { 123 | return _preview; 124 | } 125 | 126 | @end 127 | -------------------------------------------------------------------------------- /Encoder Demo/Default-568h@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/teocci/RTSP-Server-iOS/7c371707946f82cc475f55f69eb3f079e57ae701/Encoder Demo/Default-568h@2x.png -------------------------------------------------------------------------------- /Encoder Demo/Default.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/teocci/RTSP-Server-iOS/7c371707946f82cc475f55f69eb3f079e57ae701/Encoder Demo/Default.png -------------------------------------------------------------------------------- /Encoder Demo/Default@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/teocci/RTSP-Server-iOS/7c371707946f82cc475f55f69eb3f079e57ae701/Encoder Demo/Default@2x.png -------------------------------------------------------------------------------- /Encoder Demo/Encoder Demo-Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleDisplayName 8 | ${PRODUCT_NAME} 9 | CFBundleExecutable 10 | ${EXECUTABLE_NAME} 11 | CFBundleIdentifier 12 | uk.co.gdcl.${PRODUCT_NAME:rfc1034identifier} 13 | CFBundleInfoDictionaryVersion 14 | 6.0 15 | CFBundleName 16 | ${PRODUCT_NAME} 17 | CFBundlePackageType 18 | APPL 19 | CFBundleShortVersionString 20 | 1.0 21 | CFBundleSignature 22 | ???? 
23 | CFBundleVersion 24 | 1.0 25 | LSRequiresIPhoneOS 26 | 27 | UIMainStoryboardFile 28 | MainStoryboard_iPhone 29 | UIMainStoryboardFile~ipad 30 | MainStoryboard_iPad 31 | UIRequiredDeviceCapabilities 32 | 33 | armv7 34 | 35 | UISupportedInterfaceOrientations 36 | 37 | UIInterfaceOrientationPortrait 38 | UIInterfaceOrientationLandscapeLeft 39 | UIInterfaceOrientationLandscapeRight 40 | 41 | UISupportedInterfaceOrientations~ipad 42 | 43 | UIInterfaceOrientationPortrait 44 | UIInterfaceOrientationPortraitUpsideDown 45 | UIInterfaceOrientationLandscapeLeft 46 | UIInterfaceOrientationLandscapeRight 47 | 48 | 49 | 50 | -------------------------------------------------------------------------------- /Encoder Demo/Encoder Demo-Prefix.pch: -------------------------------------------------------------------------------- 1 | // 2 | // Prefix header for all source files of the 'Encoder Demo' target in the 'Encoder Demo' project 3 | // 4 | 5 | #import 6 | 7 | #ifndef __IPHONE_5_0 8 | #warning "This project uses features only available in iOS SDK 5.0 and later." 9 | #endif 10 | 11 | #ifdef __OBJC__ 12 | #import 13 | #import 14 | #endif 15 | -------------------------------------------------------------------------------- /Encoder Demo/EncoderDemoAppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // EncoderDemoAppDelegate.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 11/01/2013. 
6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | 11 | @interface EncoderDemoAppDelegate : UIResponder 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | @end 16 | -------------------------------------------------------------------------------- /Encoder Demo/EncoderDemoAppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // EncoderDemoAppDelegate.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 11/01/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import "EncoderDemoAppDelegate.h" 10 | #import "CameraServer.h" 11 | #import "EncoderDemoViewController.h" 12 | 13 | @implementation EncoderDemoAppDelegate 14 | 15 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions 16 | { 17 | // Override point for customization after application launch. 18 | [[CameraServer server] startup]; 19 | 20 | return YES; 21 | } 22 | 23 | - (void)applicationWillResignActive:(UIApplication *)application 24 | { 25 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 26 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game. 27 | } 28 | 29 | - (void)applicationDidEnterBackground:(UIApplication *)application 30 | { 31 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 
32 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 33 | [[CameraServer server] shutdown]; 34 | } 35 | 36 | - (void)applicationWillEnterForeground:(UIApplication *)application 37 | { 38 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background. 39 | } 40 | 41 | - (void)applicationDidBecomeActive:(UIApplication *)application 42 | { 43 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 44 | [[CameraServer server] startup]; 45 | EncoderDemoViewController* view = (EncoderDemoViewController*) self.window.rootViewController; 46 | [view startPreview]; 47 | } 48 | 49 | - (void)applicationWillTerminate:(UIApplication *)application 50 | { 51 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 52 | } 53 | 54 | @end 55 | -------------------------------------------------------------------------------- /Encoder Demo/EncoderDemoViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // EncoderDemoViewController.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 11/01/2013. 
6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | 11 | @interface EncoderDemoViewController : UIViewController 12 | @property (strong, nonatomic) IBOutlet UIView *cameraView; 13 | @property (strong, nonatomic) IBOutlet UILabel *serverAddress; 14 | 15 | - (void) startPreview; 16 | 17 | @end 18 | -------------------------------------------------------------------------------- /Encoder Demo/EncoderDemoViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // EncoderDemoViewController.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 11/01/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import "EncoderDemoViewController.h" 10 | #import "CameraServer.h" 11 | 12 | @implementation EncoderDemoViewController 13 | 14 | @synthesize cameraView; 15 | @synthesize serverAddress; 16 | 17 | - (void)viewDidLoad 18 | { 19 | [super viewDidLoad]; 20 | [self startPreview]; 21 | } 22 | 23 | - (void) willAnimateRotationToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration 24 | { 25 | // this is not the most beautiful animation... 26 | AVCaptureVideoPreviewLayer* preview = [[CameraServer server] getPreviewLayer]; 27 | preview.frame = self.cameraView.bounds; 28 | [[preview connection] setVideoOrientation:toInterfaceOrientation]; 29 | } 30 | 31 | - (void) startPreview 32 | { 33 | AVCaptureVideoPreviewLayer* preview = [[CameraServer server] getPreviewLayer]; 34 | [preview removeFromSuperlayer]; 35 | preview.frame = self.cameraView.bounds; 36 | [[preview connection] setVideoOrientation:UIInterfaceOrientationPortrait]; 37 | 38 | [self.cameraView.layer addSublayer:preview]; 39 | 40 | self.serverAddress.text = [[CameraServer server] getURL]; 41 | } 42 | 43 | - (void)didReceiveMemoryWarning 44 | { 45 | [super didReceiveMemoryWarning]; 46 | // Dispose of any resources that can be recreated. 
47 | } 48 | @end 49 | -------------------------------------------------------------------------------- /Encoder Demo/MP4Atom.h: -------------------------------------------------------------------------------- 1 | // 2 | // MP4Atom.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 15/01/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | 11 | @interface MP4Atom : NSObject 12 | 13 | { 14 | NSFileHandle* _file; 15 | int64_t _offset; 16 | int64_t _length; 17 | OSType _type; 18 | int64_t _nextChild; 19 | } 20 | @property OSType type; 21 | @property int64_t length; 22 | 23 | + (MP4Atom*) atomAt:(int64_t) offset size:(int) length type:(OSType) fourcc inFile:(NSFileHandle*) handle; 24 | - (BOOL) init:(int64_t) offset size:(int) length type:(OSType) fourcc inFile:(NSFileHandle*) handle; 25 | - (NSData*) readAt:(int64_t) offset size:(int) length; 26 | - (BOOL) setChildOffset:(int64_t) offset; 27 | - (MP4Atom*) nextChild; 28 | - (MP4Atom*) childOfType:(OSType) fourcc startAt:(int64_t) offset; 29 | 30 | @end 31 | -------------------------------------------------------------------------------- /Encoder Demo/MP4Atom.m: -------------------------------------------------------------------------------- 1 | // 2 | // MP4Atom.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 15/01/2013. 
6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import "MP4Atom.h" 10 | 11 | static unsigned int to_host(unsigned char* p) 12 | { 13 | return (p[0] << 24) + (p[1] << 16) + (p[2] << 8) + p[3]; 14 | } 15 | 16 | @implementation MP4Atom 17 | 18 | @synthesize type = _type; 19 | @synthesize length = _length; 20 | 21 | + (MP4Atom*) atomAt:(int64_t) offset size:(int) length type:(OSType) fourcc inFile:(NSFileHandle*) handle 22 | { 23 | MP4Atom* atom = [MP4Atom alloc]; 24 | if (![atom init:offset size:length type:fourcc inFile:handle]) 25 | { 26 | return nil; 27 | } 28 | return atom; 29 | } 30 | 31 | - (BOOL) init:(int64_t) offset size:(int) length type:(OSType) fourcc inFile:(NSFileHandle*) handle 32 | { 33 | _file = handle; 34 | _offset = offset; 35 | _length = length; 36 | _type = fourcc; 37 | _nextChild = 0; 38 | 39 | return YES; 40 | } 41 | 42 | - (NSData*) readAt:(int64_t) offset size:(int) length 43 | { 44 | [_file seekToFileOffset:_offset + offset]; 45 | return [_file readDataOfLength:length]; 46 | } 47 | 48 | - (BOOL) setChildOffset:(int64_t) offset 49 | { 50 | _nextChild = offset; 51 | return YES; 52 | } 53 | 54 | - (MP4Atom*) nextChild 55 | { 56 | if (_nextChild <= (_length - 8)) 57 | { 58 | [_file seekToFileOffset:_offset + _nextChild]; 59 | NSData* data = [_file readDataOfLength:8]; 60 | int cHeader = 8; 61 | unsigned char* p = (unsigned char*) [data bytes]; 62 | int64_t len = to_host(p); 63 | OSType fourcc = to_host(p + 4); 64 | if (len == 1) 65 | { 66 | // 64-bit extended length 67 | cHeader+= 8; 68 | data = [_file readDataOfLength:8]; 69 | p = (unsigned char*) [data bytes]; 70 | len = to_host(p); 71 | len = (len << 32) + to_host(p + 4); 72 | } 73 | else if (len == 0) 74 | { 75 | // whole remaining parent space 76 | len = _length - _nextChild; 77 | } 78 | if (fourcc == (OSType)('uuid')) 79 | { 80 | cHeader += 16; 81 | } 82 | if ((len < 0) || ((len + _nextChild) > _length)) 83 | { 84 | return nil; 85 | } 86 | int64_t offset = 
_nextChild + cHeader; 87 | _nextChild += len; 88 | len -= cHeader; 89 | return [MP4Atom atomAt:offset+_offset size:(int)len type:fourcc inFile:_file]; 90 | } 91 | return nil; 92 | } 93 | 94 | - (MP4Atom*) childOfType:(OSType) fourcc startAt:(int64_t) offset 95 | { 96 | [self setChildOffset:offset]; 97 | MP4Atom* child = nil; 98 | do { 99 | child = [self nextChild]; 100 | } while ((child != nil) && (child.type != fourcc)); 101 | return child; 102 | } 103 | 104 | @end 105 | -------------------------------------------------------------------------------- /Encoder Demo/NALUnit.cpp: -------------------------------------------------------------------------------- 1 | 2 | // 3 | // NALUnit.cpp 4 | // 5 | // Implementation of Basic parsing of H.264 NAL Units 6 | // 7 | // Geraint Davies, March 2004 8 | // 9 | // Copyright (c) GDCL 2004-2008 http://www.gdcl.co.uk/license.htm 10 | 11 | #ifdef WIN32 12 | #include "StdAfx.h" 13 | #endif 14 | #include "NALUnit.h" 15 | 16 | 17 | // --- core NAL Unit implementation ------------------------------ 18 | 19 | NALUnit::NALUnit() 20 | : m_pStart(NULL), 21 | m_cBytes(0) 22 | { 23 | } 24 | 25 | bool 26 | NALUnit::GetStartCode(const BYTE*& pBegin, const BYTE*& pStart, int& cRemain) 27 | { 28 | // start code is any number of 00 followed by 00 00 01 29 | // We need to record the first 00 in pBegin and the first byte 30 | // following the startcode in pStart. 31 | // if no start code is found, pStart and cRemain should be unchanged. 
32 | 33 | const BYTE* pThis = pStart; 34 | int cBytes = cRemain; 35 | 36 | pBegin = NULL; 37 | while (cBytes>= 4) 38 | { 39 | if (pThis[0] == 0) 40 | { 41 | // remember first 00 42 | if (pBegin == NULL) 43 | { 44 | pBegin = pThis; 45 | } 46 | if ((pThis[1] == 0) && 47 | (pThis[2] == 1)) 48 | { 49 | // point to type byte of NAL unit 50 | pStart = pThis + 3; 51 | cRemain = cBytes - 3; 52 | return true; 53 | } 54 | } else { 55 | pBegin = NULL; 56 | } 57 | cBytes--; 58 | pThis++; 59 | } 60 | return false; 61 | } 62 | 63 | bool 64 | NALUnit::Parse(const BYTE* pBuffer, int cSpace, int LengthSize, bool bEnd) 65 | { 66 | // if we get the start code but not the whole 67 | // NALU, we can return false but still have the length property valid 68 | m_cBytes = 0; 69 | 70 | ResetBitstream(); 71 | 72 | if (LengthSize > 0) 73 | { 74 | m_pStartCodeStart = pBuffer; 75 | 76 | if (LengthSize > cSpace) 77 | { 78 | return false; 79 | } 80 | 81 | m_cBytes = 0; 82 | for (int i = 0; i < LengthSize; i++) 83 | { 84 | m_cBytes <<= 8; 85 | m_cBytes += *pBuffer++; 86 | } 87 | 88 | if ((m_cBytes+LengthSize) <= cSpace) 89 | { 90 | m_pStart = pBuffer; 91 | return true; 92 | } 93 | } else { 94 | // this is not length-delimited: we must look for start codes 95 | const BYTE* pBegin; 96 | if (GetStartCode(pBegin, pBuffer, cSpace)) 97 | { 98 | m_pStart = pBuffer; 99 | m_pStartCodeStart = pBegin; 100 | 101 | // either we find another startcode, or we continue to the 102 | // buffer end (if this is the last block of data) 103 | if (GetStartCode(pBegin, pBuffer, cSpace)) 104 | { 105 | m_cBytes = int(pBegin - m_pStart); 106 | return true; 107 | } else if (bEnd) 108 | { 109 | // current element extends to end of buffer 110 | m_cBytes = cSpace; 111 | return true; 112 | } 113 | } 114 | } 115 | return false; 116 | } 117 | 118 | // bitwise access to data 119 | void 120 | NALUnit::ResetBitstream() 121 | { 122 | m_idx = 0; 123 | m_nBits = 0; 124 | m_cZeros = 0; 125 | } 126 | 127 | void 128 | NALUnit::Skip(int 
nBits) 129 | { 130 | if (nBits < m_nBits) 131 | { 132 | m_nBits -= nBits; 133 | } else { 134 | nBits -= m_nBits; 135 | while (nBits >= 8) 136 | { 137 | GetBYTE(); 138 | nBits -= 8; 139 | } 140 | if (nBits) 141 | { 142 | m_byte = GetBYTE(); 143 | m_nBits = 8; 144 | 145 | m_nBits -= nBits; 146 | } 147 | } 148 | } 149 | 150 | // get the next byte, removing emulation prevention bytes 151 | BYTE 152 | NALUnit::GetBYTE() 153 | { 154 | if (m_idx >= m_cBytes) 155 | { 156 | return 0; 157 | } 158 | 159 | BYTE b = m_pStart[m_idx++]; 160 | 161 | // to avoid start-code emulation, a byte 0x03 is inserted 162 | // after any 00 00 pair. Discard that here. 163 | if (b == 0) 164 | { 165 | m_cZeros++; 166 | if ((m_idx < m_cBytes) && (m_cZeros == 2) && (m_pStart[m_idx] == 0x03)) 167 | { 168 | m_idx++; 169 | m_cZeros=0; 170 | } 171 | } else { 172 | m_cZeros = 0; 173 | } 174 | return b; 175 | } 176 | 177 | unsigned long 178 | NALUnit::GetBit() 179 | { 180 | if (m_nBits == 0) 181 | { 182 | m_byte = GetBYTE(); 183 | m_nBits = 8; 184 | } 185 | m_nBits--; 186 | return (m_byte >> m_nBits) & 0x1; 187 | } 188 | 189 | unsigned long 190 | NALUnit::GetWord(int nBits) 191 | { 192 | unsigned long u = 0; 193 | while (nBits > 0) 194 | { 195 | u <<= 1; 196 | u |= GetBit(); 197 | nBits--; 198 | } 199 | return u; 200 | } 201 | 202 | unsigned long 203 | NALUnit::GetUE() 204 | { 205 | // Exp-Golomb entropy coding: leading zeros, then a one, then 206 | // the data bits. The number of leading zeros is the number of 207 | // data bits, counting up from that number of 1s as the base. 
208 | // That is, if you see 209 | // 0001010 210 | // You have three leading zeros, so there are three data bits (010) 211 | // counting up from a base of 111: thus 111 + 010 = 1001 = 9 212 | int cZeros = 0; 213 | while (GetBit() == 0) 214 | { 215 | // check for partial data (Dmitri Vasilyev) 216 | if (NoMoreBits()) 217 | { 218 | return 0; 219 | } 220 | cZeros++; 221 | } 222 | return GetWord(cZeros) + ((1 << cZeros)-1); 223 | } 224 | 225 | 226 | long 227 | NALUnit::GetSE() 228 | { 229 | // same as UE but signed. 230 | // basically the unsigned numbers are used as codes to indicate signed numbers in pairs 231 | // in increasing value. Thus the encoded values 232 | // 0, 1, 2, 3, 4 233 | // mean 234 | // 0, 1, -1, 2, -2 etc 235 | 236 | unsigned long UE = GetUE(); 237 | bool bPositive = UE & 1; 238 | long SE = (UE + 1) >> 1; 239 | if (!bPositive) 240 | { 241 | SE = -SE; 242 | } 243 | return SE; 244 | } 245 | 246 | // --- sequence params parsing --------------- 247 | SeqParamSet::SeqParamSet() 248 | : m_cx(0), 249 | m_cy(0), 250 | m_FrameBits(0) 251 | { 252 | #ifdef WIN32 253 | SetRect(&m_rcFrame, 0, 0, 0, 0); 254 | #endif 255 | } 256 | 257 | void 258 | ScalingList(int size, NALUnit* pnalu) 259 | { 260 | long lastScale = 8; 261 | long nextScale = 8; 262 | for (int j = 0 ; j < size; j++) 263 | { 264 | if (nextScale != 0) 265 | { 266 | long delta = pnalu->GetSE(); 267 | nextScale = (lastScale + delta + 256) %256; 268 | } 269 | long scaling_list_j = (nextScale == 0) ? 
lastScale : nextScale; 270 | lastScale = scaling_list_j; 271 | } 272 | } 273 | 274 | 275 | bool 276 | SeqParamSet::Parse(NALUnit* pnalu) 277 | { 278 | if (pnalu->Type() != NALUnit::NAL_Sequence_Params) 279 | { 280 | return false; 281 | } 282 | 283 | // with the UE/SE type encoding, we must decode all the values 284 | // to get through to the ones we want 285 | pnalu->ResetBitstream(); 286 | pnalu->Skip(8); // type 287 | m_Profile = (int)pnalu->GetWord(8); 288 | m_Compatibility = (BYTE) pnalu->GetWord(8); 289 | m_Level = (int)pnalu->GetWord(8); 290 | 291 | /*int seq_param_id =*/ pnalu->GetUE(); 292 | 293 | if ((m_Profile == 100) || (m_Profile == 110) || (m_Profile == 122) || (m_Profile == 244) || 294 | (m_Profile == 44) || (m_Profile == 83) || (m_Profile == 86) || (m_Profile == 118) || (m_Profile == 128) 295 | ) 296 | { 297 | int chroma_fmt = (int)pnalu->GetUE(); 298 | if (chroma_fmt == 3) 299 | { 300 | pnalu->Skip(1); 301 | } 302 | /* int bit_depth_luma_minus8 = */ pnalu->GetUE(); 303 | /* int bit_depth_chroma_minus8 = */ pnalu->GetUE(); 304 | pnalu->Skip(1); 305 | int seq_scaling_matrix_present = (int)pnalu->GetBit(); 306 | if (seq_scaling_matrix_present) 307 | { 308 | // Y, Cr, Cb for 4x4 intra and inter, then 8x8 (just Y unless chroma_fmt is 3) 309 | int max_scaling_lists = (chroma_fmt == 3) ? 
12 : 8; 310 | for (int i = 0; i < max_scaling_lists; i++) 311 | { 312 | if (pnalu->GetBit()) 313 | { 314 | if (i < 6) 315 | { 316 | ScalingList(16, pnalu); 317 | } 318 | else 319 | { 320 | ScalingList(64, pnalu); 321 | } 322 | } 323 | } 324 | } 325 | } 326 | 327 | int log2_frame_minus4 = (int)pnalu->GetUE(); 328 | m_FrameBits = log2_frame_minus4 + 4; 329 | m_pocType = (int)pnalu->GetUE(); 330 | if (m_pocType == 0) 331 | { 332 | int log2_minus4 = (int)pnalu->GetUE(); 333 | m_pocLSBBits = log2_minus4 + 4; 334 | } else if (m_pocType == 1) 335 | { 336 | pnalu->Skip(1); // delta always zero 337 | /*int nsp_offset =*/ pnalu->GetSE(); 338 | /*int nsp_top_to_bottom = */ pnalu->GetSE(); 339 | int num_ref_in_cycle = (int)pnalu->GetUE(); 340 | for (int i = 0; i < num_ref_in_cycle; i++) 341 | { 342 | /*int sf_offset =*/ pnalu->GetSE(); 343 | } 344 | } 345 | else if (m_pocType != 2) 346 | { 347 | return false; 348 | } 349 | // else for POCtype == 2, no additional data in stream 350 | 351 | /*int num_ref_frames =*/ pnalu->GetUE(); 352 | /*int gaps_allowed =*/ pnalu->GetBit(); 353 | 354 | int mbs_width = (int)pnalu->GetUE(); 355 | int mbs_height = (int)pnalu->GetUE(); 356 | m_cx = (mbs_width+1) * 16; 357 | m_cy = (mbs_height+1) * 16; 358 | 359 | // smoke test validation of sps 360 | if ((m_cx > 2000) || (m_cy > 2000)) 361 | { 362 | return false; 363 | } 364 | 365 | // if this is false, then sizes are field sizes and need adjusting 366 | m_bFrameOnly = pnalu->GetBit() ? true : false; 367 | 368 | if (!m_bFrameOnly) 369 | { 370 | pnalu->Skip(1); // adaptive frame/field 371 | } 372 | pnalu->Skip(1); // direct 8x8 373 | 374 | #ifdef WIN32 375 | SetRect(&m_rcFrame, 0, 0, 0, 0); 376 | bool bCrop = pnalu->GetBit() ? 
true : false; 377 | if (bCrop) { 378 | // get cropping rect 379 | // store as exclusive, pixel parameters relative to frame 380 | m_rcFrame.left = pnalu->GetUE() * 2; 381 | m_rcFrame.right = pnalu->GetUE() * 2; 382 | m_rcFrame.top = pnalu->GetUE() * 2; 383 | m_rcFrame.bottom = pnalu->GetUE() * 2; 384 | 385 | // convert from offsets to absolute rect 386 | // can't test with IsRectEmpty until after this 387 | // change (Dmitri Vasilyev) 388 | m_rcFrame.right = m_cx - m_rcFrame.right; 389 | m_rcFrame.bottom = m_cy - m_rcFrame.bottom; 390 | } 391 | #endif 392 | // adjust rect from 2x2 units to pixels 393 | 394 | if (!m_bFrameOnly) 395 | { 396 | // adjust heights from field to frame 397 | m_cy *= 2; 398 | #ifdef WIN32 399 | m_rcFrame.top *= 2; 400 | m_rcFrame.bottom *= 2; 401 | #endif 402 | } 403 | 404 | // .. rest are not interesting yet 405 | m_nalu = *pnalu; 406 | return true; 407 | } 408 | 409 | // --- slice header -------------------- 410 | bool 411 | SliceHeader::Parse(NALUnit* pnalu, SeqParamSet* sps, bool bDeltaPresent) 412 | { 413 | switch(pnalu->Type()) 414 | { 415 | case NALUnit::NAL_IDR_Slice: 416 | case NALUnit::NAL_Slice: 417 | case NALUnit::NAL_PartitionA: 418 | // all these begin with a slice header 419 | break; 420 | 421 | default: 422 | return false; 423 | } 424 | 425 | // slice header has the 1-byte type, then one UE value, 426 | // then the frame number. 
427 | pnalu->ResetBitstream(); 428 | pnalu->Skip(8); // NALU type 429 | pnalu->GetUE(); // first mb in slice 430 | pnalu->GetUE(); // slice type 431 | pnalu->GetUE(); // pic param set id 432 | 433 | m_framenum = (int)pnalu->GetWord(sps->FrameBits()); 434 | 435 | m_bField = m_bBottom = false; 436 | if (sps->Interlaced()) 437 | { 438 | m_bField = pnalu->GetBit(); 439 | if (m_bField) 440 | { 441 | m_bBottom = pnalu->GetBit(); 442 | } 443 | } 444 | if (pnalu->Type() == NALUnit::NAL_IDR_Slice) 445 | { 446 | /* int idr_pic_id = */ pnalu->GetUE(); 447 | } 448 | m_poc_lsb = 0; 449 | if (sps->POCType() == 0) 450 | { 451 | m_poc_lsb = (int)pnalu->GetWord(sps->POCLSBBits()); 452 | m_pocDelta = 0; 453 | if (bDeltaPresent && !m_bField) 454 | { 455 | m_pocDelta = (int)pnalu->GetSE(); 456 | } 457 | } 458 | 459 | 460 | return true; 461 | } 462 | 463 | // --- SEI ---------------------- 464 | 465 | 466 | SEIMessage::SEIMessage(NALUnit* pnalu) 467 | { 468 | m_pnalu = pnalu; 469 | const BYTE* p = pnalu->Start(); 470 | p++; // nalu type byte 471 | m_type = 0; 472 | while (*p == 0xff) 473 | { 474 | m_type += 255; 475 | p++; 476 | } 477 | m_type += *p; 478 | p++; 479 | m_length = 0; 480 | while (*p == 0xff) 481 | { 482 | m_length += 255; 483 | p++; 484 | } 485 | m_length += *p; 486 | p++; 487 | m_idxPayload = int(p - m_pnalu->Start()); 488 | } 489 | 490 | avcCHeader::avcCHeader(const BYTE* header, int cBytes) 491 | { 492 | if (cBytes < 8) 493 | { 494 | return; 495 | } 496 | const BYTE* pEnd = header + cBytes; 497 | 498 | m_lengthSize = (header[4] & 3) + 1; 499 | 500 | int cSeq = header[5] & 0x1f; 501 | header += 6; 502 | for (int i = 0; i < cSeq; i++) 503 | { 504 | if ((header+2) > pEnd) 505 | { 506 | return; 507 | } 508 | int cThis = (header[0] << 8) + header[1]; 509 | header += 2; 510 | if ((header+cThis) > pEnd) 511 | { 512 | return; 513 | } 514 | if (i == 0) 515 | { 516 | NALUnit n(header, cThis); 517 | m_sps = n; 518 | } 519 | header += cThis; 520 | } 521 | if ((header + 3) >= pEnd) 
522 | { 523 | return; 524 | } 525 | int cPPS = header[0]; 526 | if (cPPS > 0) 527 | { 528 | int cThis = (header[1] << 8) + header[2]; 529 | header += 3; 530 | NALUnit n(header, cThis); 531 | m_pps = n; 532 | } 533 | } 534 | 535 | 536 | POCState::POCState() 537 | : m_prevLSB(0), 538 | m_prevMSB(0) 539 | { 540 | } 541 | 542 | void POCState::SetHeader(avcCHeader* avc) 543 | { 544 | m_avc = avc; 545 | m_sps.Parse(m_avc->sps()); 546 | NALUnit* pps = avc->pps(); 547 | pps->ResetBitstream(); 548 | /* int ppsid = */ pps->GetUE(); 549 | /* int spsid = */ pps->GetUE(); 550 | pps->Skip(1); 551 | m_deltaPresent = pps->GetBit() ? true : false; 552 | } 553 | 554 | bool POCState::GetPOC(NALUnit* nal, int* pPOC) 555 | { 556 | int maxlsb = 1 << (m_sps.POCLSBBits()); 557 | SliceHeader slice; 558 | if (slice.Parse(nal, &m_sps, m_deltaPresent)) 559 | { 560 | m_frameNum = slice.FrameNum(); 561 | 562 | int prevMSB = m_prevMSB; 563 | int prevLSB = m_prevLSB; 564 | if (nal->Type() == NALUnit::NAL_IDR_Slice) 565 | { 566 | prevLSB = prevMSB= 0; 567 | } 568 | // !! 
mmoc == 5 569 | 570 | int lsb = slice.POCLSB(); 571 | int MSB = prevMSB; 572 | if ((lsb < prevLSB) && ((prevLSB - lsb) >= (maxlsb / 2))) 573 | { 574 | MSB = prevMSB + maxlsb; 575 | } 576 | else if ((lsb > prevLSB) && ((lsb - prevLSB) > (maxlsb/2))) 577 | { 578 | MSB = prevMSB - maxlsb; 579 | } 580 | if (nal->IsRefPic()) 581 | { 582 | m_prevLSB = lsb; 583 | m_prevMSB = MSB; 584 | } 585 | 586 | *pPOC = MSB + lsb; 587 | m_lastlsb = lsb; 588 | return true; 589 | } 590 | return false; 591 | } 592 | 593 | 594 | 595 | 596 | 597 | 598 | 599 | 600 | -------------------------------------------------------------------------------- /Encoder Demo/NALUnit.h: -------------------------------------------------------------------------------- 1 | 2 | // 3 | // NALUnit.h 4 | // 5 | // Basic parsing of H.264 NAL Units 6 | // 7 | // Geraint Davies, March 2004 8 | // 9 | // Copyright (c) GDCL 2004-2008 http://www.gdcl.co.uk/license.htm 10 | 11 | 12 | 13 | #pragma once 14 | 15 | #ifndef WIN32 16 | typedef unsigned char BYTE; 17 | typedef unsigned long ULONG; 18 | #ifndef NULL 19 | #define NULL 0 20 | #endif 21 | #endif 22 | 23 | class NALUnit 24 | { 25 | public: 26 | NALUnit(); 27 | NALUnit(const BYTE* pStart, int len) 28 | { 29 | m_pStart = m_pStartCodeStart = pStart; 30 | m_cBytes = len; 31 | ResetBitstream(); 32 | } 33 | virtual ~NALUnit() {} 34 | 35 | // assignment copies a pointer into a fixed buffer managed elsewhere. 
We do not copy the data 36 | NALUnit(const NALUnit& r) 37 | { 38 | m_pStart = r.m_pStart; 39 | m_cBytes = r.m_cBytes; 40 | ResetBitstream(); 41 | } 42 | const NALUnit& operator=(const NALUnit& r) 43 | { 44 | m_pStart = r.m_pStart; 45 | m_cBytes = r.m_cBytes; 46 | ResetBitstream(); 47 | return *this; 48 | } 49 | 50 | enum eNALType 51 | { 52 | NAL_Slice = 1, 53 | NAL_PartitionA = 2, 54 | NAL_PartitionB = 3, 55 | NAL_PartitionC = 4, 56 | NAL_IDR_Slice = 5, 57 | NAL_SEI = 6, 58 | NAL_Sequence_Params = 7, 59 | NAL_Picture_Params = 8, 60 | NAL_AUD = 9, 61 | }; 62 | 63 | // identify a NAL unit within a buffer. 64 | // If LengthSize is non-zero, it is the number of bytes 65 | // of length field we expect. Otherwise, we expect start-code 66 | // delimiters. 67 | bool Parse(const BYTE* pBuffer, int cSpace, int LengthSize, bool bEnd); 68 | 69 | eNALType Type() 70 | { 71 | if (m_pStart == NULL) 72 | { 73 | return eNALType(0); 74 | } 75 | return eNALType(m_pStart[0] & 0x1F); 76 | } 77 | 78 | int Length() 79 | { 80 | return m_cBytes; 81 | } 82 | 83 | const BYTE* Start() 84 | { 85 | return m_pStart; 86 | } 87 | 88 | // bitwise access to data 89 | void ResetBitstream(); 90 | void Skip(int nBits); 91 | 92 | unsigned long GetWord(int nBits); 93 | unsigned long GetUE(); 94 | long GetSE(); 95 | BYTE GetBYTE(); 96 | unsigned long GetBit(); 97 | bool NoMoreBits() { return (m_idx >= m_cBytes) && (m_nBits == 0); } 98 | 99 | const BYTE* StartCodeStart() { return m_pStartCodeStart; } 100 | bool IsRefPic() 101 | { 102 | if (m_pStart && (m_pStart[0] & 0x60)) 103 | { 104 | return true; 105 | } 106 | return false; 107 | } 108 | 109 | 110 | private: 111 | bool GetStartCode(const BYTE*& pBegin, const BYTE*& pStart, int& cRemain); 112 | 113 | private: 114 | const BYTE* m_pStartCodeStart; 115 | const BYTE* m_pStart; 116 | int m_cBytes; 117 | 118 | // bitstream access 119 | int m_idx; 120 | int m_nBits; 121 | BYTE m_byte; 122 | int m_cZeros; 123 | }; 124 | 125 | 126 | 127 | // simple parser for the 
Sequence parameter set things that we need 128 | class SeqParamSet 129 | { 130 | public: 131 | SeqParamSet(); 132 | bool Parse(NALUnit* pnalu); 133 | int FrameBits() 134 | { 135 | return m_FrameBits; 136 | } 137 | long EncodedWidth() 138 | { 139 | return m_cx; 140 | } 141 | long EncodedHeight() 142 | { 143 | return m_cy; 144 | } 145 | #ifdef WIN32 146 | long CroppedWidth() 147 | { 148 | if (IsRectEmpty(&m_rcFrame)) 149 | { 150 | return EncodedWidth(); 151 | } 152 | return m_rcFrame.right - m_rcFrame.left; 153 | } 154 | long CroppedHeight() 155 | { 156 | if (IsRectEmpty(&m_rcFrame)) 157 | { 158 | return EncodedHeight(); 159 | } 160 | return m_rcFrame.bottom - m_rcFrame.top; 161 | } 162 | RECT* CropRect() 163 | { 164 | return &m_rcFrame; 165 | } 166 | #endif 167 | bool Interlaced() 168 | { 169 | return !m_bFrameOnly; 170 | } 171 | unsigned int Profile() { return m_Profile; } 172 | unsigned int Level() { return m_Level; } 173 | BYTE Compat() { return m_Compatibility; } 174 | NALUnit* NALU() {return &m_nalu; } 175 | int POCLSBBits() { return m_pocLSBBits; } 176 | int POCType() { return m_pocType; } 177 | 178 | private: 179 | NALUnit m_nalu; 180 | int m_FrameBits; 181 | long m_cx; 182 | long m_cy; 183 | #ifdef WIN32 184 | RECT m_rcFrame; 185 | #endif 186 | bool m_bFrameOnly; 187 | 188 | int m_Profile; 189 | int m_Level; 190 | BYTE m_Compatibility; 191 | int m_pocType; 192 | int m_pocLSBBits; 193 | }; 194 | 195 | // extract frame num from slice headers 196 | class SliceHeader 197 | { 198 | public: 199 | 200 | bool Parse(NALUnit* pnalu, SeqParamSet* sps, bool bDeltaPresent); 201 | int FrameNum() 202 | { 203 | return m_framenum; 204 | } 205 | bool IsField() { return m_bField; } 206 | bool IsBottom() { return m_bBottom; } 207 | int Delta() { return m_pocDelta; } 208 | int POCLSB() { return m_poc_lsb; } 209 | 210 | private: 211 | int m_framenum; 212 | int m_nBitsFrame; 213 | bool m_bFrameOnly; 214 | 215 | bool m_bField; 216 | bool m_bBottom; 217 | int m_pocDelta; 218 | int 
m_poc_lsb; 219 | }; 220 | 221 | // SEI message structure 222 | class SEIMessage 223 | { 224 | public: 225 | SEIMessage(NALUnit* pnalu); 226 | int Type() { return m_type; } 227 | int Length() { return m_length; } 228 | const BYTE* Payload() { return m_pnalu->Start() + m_idxPayload; } 229 | private: 230 | NALUnit* m_pnalu; 231 | int m_type; 232 | int m_length; 233 | int m_idxPayload; 234 | }; 235 | 236 | // avcC structure from MP4 237 | class avcCHeader 238 | { 239 | public: 240 | avcCHeader(const BYTE* header, int cBytes); 241 | NALUnit* sps() { return &m_sps; } 242 | NALUnit* pps() { return &m_pps; } 243 | long lengthSize() { return m_lengthSize; } 244 | 245 | private: 246 | long m_lengthSize; 247 | NALUnit m_sps; 248 | NALUnit m_pps; 249 | }; 250 | 251 | // NB this is NOT general-purpose. This 252 | // implements only the details that I've seen in 253 | // iOS encoding (poc type 0 only, and no checks for mmco 5) 254 | class POCState 255 | { 256 | public: 257 | POCState(); 258 | 259 | void SetHeader(avcCHeader* avc); 260 | bool GetPOC(NALUnit* nal, int* pPOC); 261 | int getFrameNum() 262 | { 263 | return m_frameNum; 264 | } 265 | int getLastLSB() 266 | { 267 | return m_lastlsb; 268 | } 269 | private: 270 | int m_prevLSB; 271 | int m_prevMSB; 272 | avcCHeader* m_avc; 273 | SeqParamSet m_sps; 274 | bool m_deltaPresent; 275 | int m_frameNum; 276 | int m_lastlsb; 277 | }; 278 | 279 | 280 | -------------------------------------------------------------------------------- /Encoder Demo/RTSPClientConnection.h: -------------------------------------------------------------------------------- 1 | // 2 | // RTSPClientConnection.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 24/01/2013. 
6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | #import "RTSPServer.h" 11 | 12 | @interface RTSPClientConnection : NSObject 13 | 14 | 15 | + (RTSPClientConnection*) createWithSocket:(CFSocketNativeHandle) s server:(RTSPServer*) server; 16 | 17 | - (void) onVideoData:(NSArray*) data time:(double) pts; 18 | - (void) shutdown; 19 | 20 | @end 21 | -------------------------------------------------------------------------------- /Encoder Demo/RTSPClientConnection.mm: -------------------------------------------------------------------------------- 1 | // 2 | // RTSPClientConnection.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 24/01/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import "RTSPClientConnection.h" 10 | #import "RTSPMessage.h" 11 | #import "NALUnit.h" 12 | #import "arpa/inet.h" 13 | 14 | void tonet_short(uint8_t* p, unsigned short s) 15 | { 16 | p[0] = (s >> 8) & 0xff; 17 | p[1] = s & 0xff; 18 | } 19 | void tonet_long(uint8_t* p, unsigned long l) 20 | { 21 | p[0] = (l >> 24) & 0xff; 22 | p[1] = (l >> 16) & 0xff; 23 | p[2] = (l >> 8) & 0xff; 24 | p[3] = l & 0xff; 25 | } 26 | 27 | static const char* Base64Mapping = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; 28 | static const int max_packet_size = 1200; 29 | 30 | NSString* encodeLong(unsigned long val, int nPad) 31 | { 32 | char ch[4]; 33 | int cch = 4 - nPad; 34 | for (int i = 0; i < cch; i++) 35 | { 36 | int shift = 6 * (cch - (i+1)); 37 | int bits = (val >> shift) & 0x3f; 38 | ch[i] = Base64Mapping[bits]; 39 | } 40 | for (int i = 0; i < nPad; i++) 41 | { 42 | ch[cch + i] = '='; 43 | } 44 | NSString* s = [[NSString alloc] initWithBytes:ch length:4 encoding:NSUTF8StringEncoding]; 45 | return s; 46 | } 47 | 48 | NSString* encodeToBase64(NSData* data) 49 | { 50 | NSString* s = @""; 51 | 52 | const uint8_t* p = (const uint8_t*) [data bytes]; 53 | int cBytes = (int)[data length]; 54 | 
while (cBytes >= 3) 55 | { 56 | unsigned long val = (p[0] << 16) + (p[1] << 8) + p[2]; 57 | p += 3; 58 | cBytes -= 3; 59 | 60 | s = [s stringByAppendingString:encodeLong(val, 0)]; 61 | } 62 | if (cBytes > 0) 63 | { 64 | int nPad; 65 | unsigned long val; 66 | if (cBytes == 1) 67 | { 68 | // pad 8 bits to 2 x 6 and add 2 == 69 | nPad = 2; 70 | val = p[0] << 4; 71 | } 72 | else 73 | { 74 | // must be two bytes -- pad 16 bits to 3 x 6 and add one = 75 | nPad = 1; 76 | val = (p[0] << 8) + p[1]; 77 | val = val << 2; 78 | } 79 | s = [s stringByAppendingString:encodeLong(val, nPad)]; 80 | } 81 | return s; 82 | } 83 | 84 | enum ServerState 85 | { 86 | ServerIdle, 87 | Setup, 88 | Playing, 89 | }; 90 | 91 | @interface RTSPClientConnection () 92 | { 93 | CFSocketRef _s; 94 | RTSPServer* _server; 95 | CFRunLoopSourceRef _rls; 96 | 97 | CFDataRef _addrRTP; 98 | CFSocketRef _sRTP; 99 | CFDataRef _addrRTCP; 100 | CFSocketRef _sRTCP; 101 | NSString* _session; 102 | ServerState _state; 103 | long _packets; 104 | long _bytesSent; 105 | long _ssrc; 106 | BOOL _bFirst; 107 | 108 | // time mapping using NTP 109 | uint64_t _ntpBase; 110 | uint64_t _rtpBase; 111 | double _ptsBase; 112 | 113 | // RTCP stats 114 | long _packetsReported; 115 | long _bytesReported; 116 | NSDate* _sentRTCP; 117 | 118 | // reader reports 119 | CFSocketRef _recvRTCP; 120 | CFRunLoopSourceRef _rlsRTCP; 121 | } 122 | 123 | - (RTSPClientConnection*) initWithSocket:(CFSocketNativeHandle) s Server:(RTSPServer*) server; 124 | - (void) onSocketData:(CFDataRef)data; 125 | - (void) onRTCP:(CFDataRef) data; 126 | 127 | @end 128 | 129 | static void onSocket ( 130 | CFSocketRef s, 131 | CFSocketCallBackType callbackType, 132 | CFDataRef address, 133 | const void *data, 134 | void *info 135 | ) 136 | { 137 | RTSPClientConnection* conn = (__bridge RTSPClientConnection*)info; 138 | switch (callbackType) 139 | { 140 | case kCFSocketDataCallBack: 141 | [conn onSocketData:(CFDataRef) data]; 142 | break; 143 | 144 | default: 145 
| NSLog(@"unexpected socket event"); 146 | break; 147 | } 148 | 149 | } 150 | 151 | static void onRTCP(CFSocketRef s, 152 | CFSocketCallBackType callbackType, 153 | CFDataRef address, 154 | const void *data, 155 | void *info 156 | ) 157 | { 158 | RTSPClientConnection* conn = (__bridge RTSPClientConnection*)info; 159 | switch (callbackType) 160 | { 161 | case kCFSocketDataCallBack: 162 | [conn onRTCP:(CFDataRef) data]; 163 | break; 164 | 165 | default: 166 | NSLog(@"unexpected socket event"); 167 | break; 168 | } 169 | } 170 | 171 | @implementation RTSPClientConnection 172 | 173 | + (RTSPClientConnection*) createWithSocket:(CFSocketNativeHandle) s server:(RTSPServer*) server 174 | { 175 | RTSPClientConnection* conn = [RTSPClientConnection alloc]; 176 | if ([conn initWithSocket:s Server:server] != nil) 177 | { 178 | return conn; 179 | } 180 | return nil; 181 | } 182 | 183 | - (RTSPClientConnection*) initWithSocket:(CFSocketNativeHandle)s Server:(RTSPServer *)server 184 | { 185 | _state = ServerIdle; 186 | _server = server; 187 | CFSocketContext info; 188 | memset(&info, 0, sizeof(info)); 189 | info.info = (void*)CFBridgingRetain(self); 190 | 191 | _s = CFSocketCreateWithNative(nil, s, kCFSocketDataCallBack, onSocket, &info); 192 | 193 | _rls = CFSocketCreateRunLoopSource(nil, _s, 0); 194 | CFRunLoopAddSource(CFRunLoopGetMain(), _rls, kCFRunLoopCommonModes); 195 | 196 | return self; 197 | } 198 | 199 | - (void) onSocketData:(CFDataRef)data 200 | { 201 | if (CFDataGetLength(data) == 0) 202 | { 203 | [self tearDown]; 204 | CFSocketInvalidate(_s); 205 | _s = nil; 206 | [_server shutdownConnection:self]; 207 | return; 208 | } 209 | RTSPMessage* msg = [RTSPMessage createWithData:data]; 210 | if (msg != nil) 211 | { 212 | NSString* response = nil; 213 | NSString* cmd = msg.command; 214 | if ([cmd caseInsensitiveCompare:@"options"] == NSOrderedSame) 215 | { 216 | response = [msg createResponse:200 text:@"OK"]; 217 | response = [response stringByAppendingString:@"Server: 
AVEncoderDemo/1.0\r\n"]; 218 | response = [response stringByAppendingString:@"Public: DESCRIBE, SETUP, TEARDOWN, PLAY, OPTIONS\r\n\r\n"]; 219 | } 220 | else if ([cmd caseInsensitiveCompare:@"describe"] == NSOrderedSame) 221 | { 222 | NSString* sdp = [self makeSDP]; 223 | response = [msg createResponse:200 text:@"OK"]; 224 | NSString* date = [NSDateFormatter localizedStringFromDate:[NSDate date] dateStyle:NSDateFormatterLongStyle timeStyle:NSDateFormatterLongStyle]; 225 | CFDataRef dlocaladdr = CFSocketCopyAddress(_s); 226 | struct sockaddr_in* localaddr = (struct sockaddr_in*) CFDataGetBytePtr(dlocaladdr); 227 | 228 | response = [response stringByAppendingFormat:@"Content-base: rtsp://%s/\r\n", inet_ntoa(localaddr->sin_addr)]; 229 | CFRelease(dlocaladdr); 230 | response = [response stringByAppendingFormat:@"Date: %@\r\nContent-Type: application/sdp\r\nContent-Length: %d\r\n\r\n", date, (int)[sdp length] ]; 231 | response = [response stringByAppendingString:sdp]; 232 | } 233 | else if ([cmd caseInsensitiveCompare:@"setup"] == NSOrderedSame) 234 | { 235 | NSString* transport = [msg valueForOption:@"transport"]; 236 | NSArray* props = [transport componentsSeparatedByString:@";"]; 237 | NSArray* ports = nil; 238 | for (NSString* s in props) 239 | { 240 | if ([s length] > 14) 241 | { 242 | if ([s compare:@"client_port=" options:0 range:NSMakeRange(0, 12)] == NSOrderedSame) 243 | { 244 | NSString* val = [s substringFromIndex:12]; 245 | ports = [val componentsSeparatedByString:@"-"]; 246 | break; 247 | } 248 | } 249 | } 250 | if ([ports count] == 2) 251 | { 252 | int portRTP = (int)[ports[0] integerValue]; 253 | int portRTCP = (int) [ports[1] integerValue]; 254 | 255 | NSString* session_name = [self createSession:portRTP rtcp:portRTCP]; 256 | if (session_name != nil) 257 | { 258 | response = [msg createResponse:200 text:@"OK"]; 259 | response = [response stringByAppendingFormat:@"Session: %@\r\nTransport: RTP/AVP;unicast;client_port=%d-%d;server_port=6970-6971\r\n\r\n", 
260 | session_name, 261 | portRTP,portRTCP]; 262 | } 263 | } 264 | if (response == nil) 265 | { 266 | // !! 267 | response = [msg createResponse:451 text:@"Need better error string here"]; 268 | } 269 | } 270 | else if ([cmd caseInsensitiveCompare:@"play"] == NSOrderedSame) 271 | { 272 | @synchronized(self) 273 | { 274 | if (_state != Setup) 275 | { 276 | response = [msg createResponse:451 text:@"Wrong state"]; 277 | } 278 | else 279 | { 280 | _state = Playing; 281 | _bFirst = YES; 282 | response = [msg createResponse:200 text:@"OK"]; 283 | response = [response stringByAppendingFormat:@"Session: %@\r\n\r\n", _session]; 284 | } 285 | } 286 | } 287 | else if ([cmd caseInsensitiveCompare:@"teardown"] == NSOrderedSame) 288 | { 289 | [self tearDown]; 290 | response = [msg createResponse:200 text:@"OK"]; 291 | } 292 | else 293 | { 294 | NSLog(@"RTSP method %@ not handled", cmd); 295 | response = [msg createResponse:451 text:@"Method not recognised"]; 296 | } 297 | if (response != nil) 298 | { 299 | NSData* dataResponse = [response dataUsingEncoding:NSUTF8StringEncoding]; 300 | CFSocketError e = CFSocketSendData(_s, NULL, (__bridge CFDataRef)(dataResponse), 2); 301 | if (e) 302 | { 303 | NSLog(@"send %ld", e); 304 | } 305 | } 306 | } 307 | } 308 | 309 | - (NSString*) makeSDP 310 | { 311 | NSData* config = [_server getConfigData]; 312 | 313 | avcCHeader avcC((const BYTE*)[config bytes], (int)[config length]); 314 | SeqParamSet seqParams; 315 | seqParams.Parse(avcC.sps()); 316 | int cx = (int)seqParams.EncodedWidth(); 317 | int cy = (int)seqParams.EncodedHeight(); 318 | 319 | NSString* profile_level_id = [NSString stringWithFormat:@"%02x%02x%02x", seqParams.Profile(), seqParams.Compat(), seqParams.Level()]; 320 | 321 | NSData* data = [NSData dataWithBytes:avcC.sps()->Start() length:avcC.sps()->Length()]; 322 | NSString* sps = encodeToBase64(data); 323 | data = [NSData dataWithBytes:avcC.pps()->Start() length:avcC.pps()->Length()]; 324 | NSString* pps = encodeToBase64(data); 
325 | 326 | // !! o=, s=, u=, c=, b=? control for track? 327 | unsigned long verid = random(); 328 | 329 | CFDataRef dlocaladdr = CFSocketCopyAddress(_s); 330 | struct sockaddr_in* localaddr = (struct sockaddr_in*) CFDataGetBytePtr(dlocaladdr); 331 | NSString* sdp = [NSString stringWithFormat:@"v=0\r\no=- %ld %ld IN IP4 %s\r\ns=Live stream from iOS\r\nc=IN IP4 0.0.0.0\r\nt=0 0\r\na=control:*\r\n", verid, verid, inet_ntoa(localaddr->sin_addr)]; 332 | CFRelease(dlocaladdr); 333 | 334 | int packets = (_server.bitrate / (max_packet_size * 8)) + 1; 335 | 336 | sdp = [sdp stringByAppendingFormat:@"m=video 0 RTP/AVP 96\r\nb=TIAS:%d\r\na=maxprate:%d.0000\r\na=control:streamid=1\r\n", _server.bitrate, packets]; 337 | sdp = [sdp stringByAppendingFormat:@"a=rtpmap:96 H264/90000\r\na=mimetype:string;\"video/H264\"\r\na=framesize:96 %d-%d\r\na=Width:integer;%d\r\na=Height:integer;%d\r\n", cx, cy, cx, cy]; 338 | sdp = [sdp stringByAppendingFormat:@"a=fmtp:96 packetization-mode=1;profile-level-id=%@;sprop-parameter-sets=%@,%@\r\n", profile_level_id, sps, pps]; 339 | return sdp; 340 | } 341 | 342 | - (NSString*) createSession:(int) portRTP rtcp:(int) portRTCP 343 | { 344 | // !! 
most basic possible for initial testing 345 | @synchronized(self) 346 | { 347 | CFDataRef data = CFSocketCopyPeerAddress(_s); 348 | struct sockaddr_in* paddr = (struct sockaddr_in*) CFDataGetBytePtr(data); 349 | paddr->sin_port = htons(portRTP); 350 | _addrRTP = CFDataCreate(nil, (uint8_t*) paddr, sizeof(struct sockaddr_in)); 351 | _sRTP = CFSocketCreate(nil, PF_INET, SOCK_DGRAM, IPPROTO_UDP, 0, nil, nil); 352 | 353 | paddr->sin_port = htons(portRTCP); 354 | _addrRTCP = CFDataCreate(nil, (uint8_t*) paddr, sizeof(struct sockaddr_in)); 355 | _sRTCP = CFSocketCreate(nil, PF_INET, SOCK_DGRAM, IPPROTO_UDP, 0, nil, nil); 356 | CFRelease(data); 357 | 358 | // reader reports received here 359 | CFSocketContext info; 360 | memset(&info, 0, sizeof(info)); 361 | info.info = (void*)CFBridgingRetain(self); 362 | _recvRTCP = CFSocketCreate(nil, PF_INET, SOCK_DGRAM, IPPROTO_UDP, kCFSocketDataCallBack, onRTCP, &info); 363 | 364 | struct sockaddr_in addr; 365 | addr.sin_addr.s_addr = INADDR_ANY; 366 | addr.sin_family = AF_INET; 367 | addr.sin_port = htons(6971); 368 | CFDataRef dataAddr = CFDataCreate(nil, (const uint8_t*)&addr, sizeof(addr)); 369 | CFSocketSetAddress(_recvRTCP, dataAddr); 370 | CFRelease(dataAddr); 371 | 372 | _rlsRTCP = CFSocketCreateRunLoopSource(nil, _recvRTCP, 0); 373 | CFRunLoopAddSource(CFRunLoopGetMain(), _rlsRTCP, kCFRunLoopCommonModes); 374 | 375 | // flag that setup is valid 376 | long sessionid = random(); 377 | _session = [NSString stringWithFormat:@"%ld", sessionid]; 378 | _state = Setup; 379 | _ssrc = random(); 380 | _packets = 0; 381 | _bytesSent = 0; 382 | _rtpBase = 0; 383 | 384 | _sentRTCP = nil; 385 | _packetsReported = 0; 386 | _bytesReported = 0; 387 | } 388 | return _session; 389 | } 390 | 391 | - (void) onVideoData:(NSArray*) data time:(double) pts 392 | { 393 | @synchronized(self) 394 | { 395 | if (_state != Playing) 396 | { 397 | return; 398 | } 399 | } 400 | 401 | const int rtp_header_size = 12; 402 | const int max_single_packet = 
max_packet_size - rtp_header_size; 403 | const int max_fragment_packet = max_single_packet - 2; 404 | unsigned char packet[max_packet_size]; 405 | 406 | int nNALUs = (int)[data count]; 407 | for (int i = 0; i < nNALUs; i++) 408 | { 409 | NSData* nalu = [data objectAtIndex:i]; 410 | int cBytes = (int)[nalu length]; 411 | BOOL bLast = (i == nNALUs-1); 412 | 413 | const unsigned char* pSource = (unsigned char*)[nalu bytes]; 414 | 415 | if (_bFirst) 416 | { 417 | if ((pSource[0] & 0x1f) != 5) 418 | { 419 | continue; 420 | } 421 | _bFirst = NO; 422 | NSLog(@"Playback starting at first IDR"); 423 | } 424 | 425 | if (cBytes < max_single_packet) 426 | { 427 | [self writeHeader:packet marker:bLast time:pts]; 428 | memcpy(packet + rtp_header_size, [nalu bytes], cBytes); 429 | [self sendPacket:packet length:(cBytes + rtp_header_size)]; 430 | } 431 | else 432 | { 433 | unsigned char NALU_Header = pSource[0]; 434 | pSource += 1; 435 | cBytes -= 1; 436 | BOOL bStart = YES; 437 | 438 | while (cBytes) 439 | { 440 | int cThis = (cBytes < max_fragment_packet)? 
cBytes : max_fragment_packet; 441 | BOOL bEnd = (cThis == cBytes); 442 | [self writeHeader:packet marker:(bLast && bEnd) time:pts]; 443 | unsigned char* pDest = packet + rtp_header_size; 444 | 445 | pDest[0] = (NALU_Header & 0xe0) + 28; // FU_A type 446 | unsigned char fu_header = (NALU_Header & 0x1f); 447 | if (bStart) 448 | { 449 | fu_header |= 0x80; 450 | bStart = false; 451 | } 452 | else if (bEnd) 453 | { 454 | fu_header |= 0x40; 455 | } 456 | pDest[1] = fu_header; 457 | pDest += 2; 458 | memcpy(pDest, pSource, cThis); 459 | pDest += cThis; 460 | [self sendPacket:packet length:(int)(pDest - packet)]; 461 | 462 | pSource += cThis; 463 | cBytes -= cThis; 464 | } 465 | } 466 | } 467 | } 468 | 469 | - (void) writeHeader:(uint8_t*) packet marker:(BOOL) bMarker time:(double) pts 470 | { 471 | packet[0] = 0x80; // v= 2 472 | if (bMarker) 473 | { 474 | packet[1] = 96 | 0x80; 475 | } 476 | else 477 | { 478 | packet[1] = 96; 479 | } 480 | unsigned short seq = _packets & 0xffff; 481 | tonet_short(packet+2, seq); 482 | 483 | // map time 484 | while (_rtpBase == 0) 485 | { 486 | _rtpBase = random(); 487 | _ptsBase = pts; 488 | NSDate* now = [NSDate date]; 489 | // ntp is based on 1900. There's a known fixed offset from 1900 to 1970. 
490 | NSDate* ref = [NSDate dateWithTimeIntervalSince1970:-2208988800L]; 491 | double interval = [now timeIntervalSinceDate:ref]; 492 | _ntpBase = (uint64_t)(interval * (1LL << 32)); 493 | } 494 | pts -= _ptsBase; 495 | uint64_t rtp = (uint64_t)(pts * 90000); 496 | rtp += _rtpBase; 497 | tonet_long(packet + 4, rtp); 498 | tonet_long(packet + 8, _ssrc); 499 | } 500 | 501 | - (void) sendPacket:(uint8_t*) packet length:(int) cBytes 502 | { 503 | @synchronized(self) 504 | { 505 | if (_sRTP) 506 | { 507 | CFDataRef data = CFDataCreate(nil, packet, cBytes); 508 | CFSocketSendData(_sRTP, _addrRTP, data, 0); 509 | CFRelease(data); 510 | } 511 | _packets++; 512 | _bytesSent += cBytes; 513 | 514 | // RTCP packets 515 | NSDate* now = [NSDate date]; 516 | if ((_sentRTCP == nil) || ([now timeIntervalSinceDate:_sentRTCP] >= 1)) 517 | { 518 | uint8_t buf[7 * sizeof(uint32_t)]; 519 | buf[0] = 0x80; 520 | buf[1] = 200; // type == SR 521 | tonet_short(buf+2, 6); // length (count of uint32_t minus 1) 522 | tonet_long(buf+4, _ssrc); 523 | tonet_long(buf+8, (_ntpBase >> 32)); 524 | tonet_long(buf+12, _ntpBase); 525 | tonet_long(buf+16, _rtpBase); 526 | tonet_long(buf+20, (_packets - _packetsReported)); 527 | tonet_long(buf+24, (_bytesSent - _bytesReported)); 528 | int lenRTCP = 28; 529 | if (_sRTCP) 530 | { 531 | CFDataRef dataRTCP = CFDataCreate(nil, buf, lenRTCP); 532 | CFSocketSendData(_sRTCP, _addrRTCP, dataRTCP, lenRTCP); 533 | CFRelease(dataRTCP); 534 | } 535 | 536 | _sentRTCP = now; 537 | _packetsReported = _packets; 538 | _bytesReported = _bytesSent; 539 | } 540 | } 541 | } 542 | 543 | - (void) onRTCP:(CFDataRef) data 544 | { 545 | // NSLog(@"RTCP recv"); 546 | } 547 | 548 | - (void) tearDown 549 | { 550 | @synchronized(self) 551 | { 552 | if (_sRTP) 553 | { 554 | CFSocketInvalidate(_sRTP); 555 | _sRTP = nil; 556 | } 557 | if (_sRTCP) 558 | { 559 | CFSocketInvalidate(_sRTCP); 560 | _sRTCP = nil; 561 | } 562 | if (_recvRTCP) 563 | { 564 | CFSocketInvalidate(_recvRTCP); 565 | 
_recvRTCP = nil; 566 | } 567 | _session = nil; 568 | } 569 | } 570 | 571 | - (void) shutdown 572 | { 573 | [self tearDown]; 574 | @synchronized(self) 575 | { 576 | CFSocketInvalidate(_s); 577 | _s = nil; 578 | } 579 | } 580 | @end 581 | -------------------------------------------------------------------------------- /Encoder Demo/RTSPMessage.h: -------------------------------------------------------------------------------- 1 | // 2 | // RTSPMessage.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 24/01/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | 11 | @interface RTSPMessage : NSObject 12 | 13 | 14 | + (RTSPMessage*) createWithData:(CFDataRef) data; 15 | 16 | - (NSString*) valueForOption:(NSString*) option; 17 | - (NSString*) createResponse:(int) code text:(NSString*) desc; 18 | 19 | @property NSString* command; 20 | @property int sequence; 21 | 22 | @end 23 | -------------------------------------------------------------------------------- /Encoder Demo/RTSPMessage.m: -------------------------------------------------------------------------------- 1 | // 2 | // RTSPMessage.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 24/01/2013. 
// Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm
//

#import "RTSPMessage.h"

// Parsed representation of one RTSP request: the command word from the
// request line plus case-insensitive access to the header options.
@interface RTSPMessage ()

{
    NSArray* _lines;     // request split on CRLF: [0] = request line, rest = headers
    NSString* _request;  // backing store for the `command` property
    int _cseq;           // backing store for the `sequence` property
}

- (RTSPMessage*) initWithData:(CFDataRef) data;

@end

@implementation RTSPMessage

@synthesize command = _request;
@synthesize sequence = _cseq;

+ (RTSPMessage*) createWithData:(CFDataRef) data
{
    RTSPMessage* msg = [[RTSPMessage alloc] initWithData:data];
    return msg;
}

// Returns nil when the data cannot be parsed as an RTSP request:
// not valid UTF-8, fewer than two CRLF-separated lines, or no CSeq header.
- (RTSPMessage*) initWithData:(CFDataRef) data
{
    self = [super init];
    // NSString init returns nil for invalid UTF-8; messaging nil below yields
    // an empty line array, which falls into the parse-error path.
    NSString* msg = [[NSString alloc] initWithData:(__bridge NSData*)data encoding:NSUTF8StringEncoding];
    _lines = [msg componentsSeparatedByString:@"\r\n"];
    if ([_lines count] < 2)
    {
        NSLog(@"msg parse error");
        return nil;
    }
    // Request line is "<METHOD> <url> RTSP/1.0" — keep just the method word.
    NSArray* lineone = [[_lines objectAtIndex:0] componentsSeparatedByString:@" "];
    _request = [lineone objectAtIndex:0];
    NSString* strSeq = [self valueForOption:@"CSeq"];
    if (strSeq == nil)
    {
        NSLog(@"no cseq");
        return nil;
    }
    _cseq = [strSeq intValue];

    return self;
}

// Case-insensitive header lookup; returns the whitespace-trimmed value,
// or nil when the option is not present.
- (NSString*) valueForOption:(NSString*) option
{
    for (NSUInteger i = 1; i < [_lines count]; i++)
    {
        NSString* line = [_lines objectAtIndex:i];
        // BUG FIX: split at the FIRST colon only. The previous code split on
        // every colon and required exactly two components, so any header whose
        // value itself contained ':' (absolute URLs, "npt=0:00" ranges, IPv6
        // literals) was never matched.
        NSRange sep = [line rangeOfString:@":"];
        if (sep.location != NSNotFound)
        {
            NSString* name = [line substringToIndex:sep.location];
            if ([option caseInsensitiveCompare:name] == NSOrderedSame)
            {
                NSString* val = [line substringFromIndex:sep.location + 1];
                val = [val stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceAndNewlineCharacterSet]];
                return val;
            }
        }
    }
    return nil;
}

// Builds the response status line plus the echoed CSeq header; the caller
// appends any further headers and the terminating blank line.
- (NSString*) createResponse:(int) code text:(NSString*) desc
{
    NSString* val = [NSString stringWithFormat:@"RTSP/1.0 %d %@\r\nCSeq: %d\r\n", code, desc, self.sequence];
    return
val; 80 | } 81 | 82 | @end 83 | -------------------------------------------------------------------------------- /Encoder Demo/RTSPServer.h: -------------------------------------------------------------------------------- 1 | // 2 | // RTSPServer.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 17/01/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | #import 11 | #include 12 | #include 13 | 14 | @interface RTSPServer : NSObject 15 | 16 | 17 | + (NSString*) getIPAddress; 18 | + (RTSPServer*) setupListener:(NSData*) configData; 19 | 20 | - (NSData*) getConfigData; 21 | - (void) onVideoData:(NSArray*) data time:(double) pts; 22 | - (void) shutdownConnection:(id) conn; 23 | - (void) shutdownServer; 24 | 25 | @property (readwrite, atomic) int bitrate; 26 | 27 | @end 28 | -------------------------------------------------------------------------------- /Encoder Demo/RTSPServer.m: -------------------------------------------------------------------------------- 1 | // 2 | // RTSPServer.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 17/01/2013. 
// Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm
//

#import "RTSPServer.h"
#import "RTSPClientConnection.h"
#import "ifaddrs.h"
#import "arpa/inet.h"

// Listens on TCP port 554 and spawns an RTSPClientConnection per client;
// fans incoming video frames out to every live connection.
@interface RTSPServer ()

{
    CFSocketRef _listener;         // accepting socket on port 554
    NSMutableArray* _connections;  // live RTSPClientConnection objects
    NSData* _configData;           // avcC configuration (SPS/PPS) for the SDP
    int _bitrate;                  // backing store for the `bitrate` property
}

- (RTSPServer*) init:(NSData*) configData;
- (void) onAccept:(CFSocketNativeHandle) childHandle;

@end

// CFSocket callback trampoline: forwards accept events to the server object
// stashed in the socket context's info pointer.
static void onSocket(CFSocketRef s,
                     CFSocketCallBackType callbackType,
                     CFDataRef address,
                     const void *data,
                     void *info)
{
    RTSPServer* server = (__bridge RTSPServer*)info;
    switch (callbackType)
    {
        case kCFSocketAcceptCallBack:
        {
            // for accept callbacks, `data` points at the native child handle
            CFSocketNativeHandle* pH = (CFSocketNativeHandle*) data;
            [server onAccept:*pH];
            break;
        }
        default:
            NSLog(@"unexpected socket event");
            break;
    }
}

@implementation RTSPServer

@synthesize bitrate = _bitrate;

+ (RTSPServer*) setupListener:(NSData*) configData
{
    RTSPServer* obj = [RTSPServer alloc];
    if (![obj init:configData])
    {
        return nil;
    }
    return obj;
}

// Creates the listening socket, binds it to INADDR_ANY:554 and schedules
// it on the main run loop. Returns self (bind failures are only logged).
- (RTSPServer*) init:(NSData*) configData
{
    _configData = configData;
    _connections = [NSMutableArray arrayWithCapacity:10];

    CFSocketContext info;
    memset(&info, 0, sizeof(info));
    info.info = (void*)CFBridgingRetain(self);

    _listener = CFSocketCreate(nil, PF_INET, SOCK_STREAM, IPPROTO_TCP, kCFSocketAcceptCallBack, onSocket, &info);

    // must set SO_REUSEADDR in case a client is still holding this address
    int t = 1;
    setsockopt(CFSocketGetNative(_listener), SOL_SOCKET, SO_REUSEADDR, &t, sizeof(t));

    struct sockaddr_in addr;
    // BUG FIX: zero the whole structure first — sin_zero (and sin_len on
    // BSD/Darwin) were previously left as uninitialized stack garbage before
    // being handed to CFSocketSetAddress.
    memset(&addr, 0, sizeof(addr));
    addr.sin_len = sizeof(addr);
    addr.sin_addr.s_addr = INADDR_ANY;
    addr.sin_family = AF_INET;
    addr.sin_port = htons(554);
    CFDataRef dataAddr =
    CFDataCreate(nil, (const uint8_t*)&addr, sizeof(addr));
    CFSocketError e = CFSocketSetAddress(_listener, dataAddr);
    CFRelease(dataAddr);

    if (e)
    {
        NSLog(@"bind error %d", (int) e);
    }

    CFRunLoopSourceRef rls = CFSocketCreateRunLoopSource(nil, _listener, 0);
    CFRunLoopAddSource(CFRunLoopGetMain(), rls, kCFRunLoopCommonModes);
    CFRelease(rls);

    return self;
}

- (NSData*) getConfigData
{
    return _configData;
}

// Accept callback: wrap the native handle in a connection object and track it.
- (void) onAccept:(CFSocketNativeHandle) childHandle
{
    RTSPClientConnection* conn = [RTSPClientConnection createWithSocket:childHandle server:self];
    if (conn != nil)
    {
        @synchronized(self)
        {
            NSLog(@"Client connected");
            [_connections addObject:conn];
        }
    }
}

// Fan one encoded frame (array of NALUs) out to every live connection.
- (void) onVideoData:(NSArray*) data time:(double) pts
{
    @synchronized(self)
    {
        for (RTSPClientConnection* conn in _connections)
        {
            [conn onVideoData:data time:pts];
        }
    }
}

- (void) shutdownConnection:(id)conn
{
    @synchronized(self)
    {
        NSLog(@"Client disconnected");
        [_connections removeObject:conn];
    }
}

// Closes every client connection and invalidates the listening socket.
- (void) shutdownServer
{
    @synchronized(self)
    {
        for (RTSPClientConnection* conn in _connections)
        {
            [conn shutdown];
        }
        _connections = [NSMutableArray arrayWithCapacity:10];
        if (_listener != nil)
        {
            CFSocketInvalidate(_listener);
            _listener = nil;
        }
    }
}

// Returns the IPv4 address of the wifi interface (en0), or nil when
// unavailable.
+ (NSString*) getIPAddress
{
    NSString* address;
    struct ifaddrs *interfaces = nil;

    // get all our interfaces and find the one that corresponds to wifi
    if (!getifaddrs(&interfaces))
    {
        for (struct ifaddrs* addr = interfaces; addr != NULL; addr = addr->ifa_next)
        {
            if (([[NSString stringWithUTF8String:addr->ifa_name]
isEqualToString:@"en0"]) &&
                // BUG FIX: getifaddrs(3) documents that ifa_addr may be NULL
                // for some entries — guard before dereferencing sa_family.
                (addr->ifa_addr != NULL) &&
                (addr->ifa_addr->sa_family == AF_INET))
            {
                struct sockaddr_in* sa = (struct sockaddr_in*) addr->ifa_addr;
                address = [NSString stringWithUTF8String:inet_ntoa(sa->sin_addr)];
                break;
            }
        }
    }
    // only free the list we actually obtained (interfaces stays nil when
    // getifaddrs fails)
    if (interfaces != NULL)
    {
        freeifaddrs(interfaces);
    }
    return address;
}

@end
--------------------------------------------------------------------------------
/Encoder Demo/VideoEncoder.h:
--------------------------------------------------------------------------------
//
//  VideoEncoder.h
//  Encoder Demo
//
//  Created by Geraint Davies on 14/01/2013.
//  Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm
//

// NOTE(review): the angle-bracket import target was stripped in this dump;
// restored as Foundation — confirm against the original file.
#import <Foundation/Foundation.h>
#import "AVFoundation/AVAssetWriter.h"
#import "AVFoundation/AVAssetWriterInput.h"
#import "AVFoundation/AVMediaFormat.h"
#import "AVFoundation/AVVideoSettings.h"

// Thin wrapper around AVAssetWriter that appends H264 video sample buffers
// to a QuickTime movie file at `path`.
@interface VideoEncoder : NSObject
{
    AVAssetWriter* _writer;
    AVAssetWriterInput* _writerInput;
    NSString* _path;
}

@property NSString* path;

+ (VideoEncoder*) encoderForPath:(NSString*) path Height:(int) height andWidth:(int) width;

- (void) initPath:(NSString*)path Height:(int) height andWidth:(int) width;
- (void) finishWithCompletionHandler:(void (^)(void))handler;
- (BOOL) encodeFrame:(CMSampleBufferRef) sampleBuffer;


@end
--------------------------------------------------------------------------------
/Encoder Demo/VideoEncoder.m:
--------------------------------------------------------------------------------
//
//  VideoEncoder.m
//  Encoder Demo
//
//  Created by Geraint Davies on 14/01/2013.
6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import "VideoEncoder.h" 10 | 11 | @implementation VideoEncoder 12 | 13 | @synthesize path = _path; 14 | 15 | + (VideoEncoder*) encoderForPath:(NSString*) path Height:(int) height andWidth:(int) width 16 | { 17 | VideoEncoder* enc = [VideoEncoder alloc]; 18 | [enc initPath:path Height:height andWidth:width]; 19 | return enc; 20 | } 21 | 22 | 23 | - (void) initPath:(NSString*)path Height:(int) height andWidth:(int) width 24 | { 25 | self.path = path; 26 | 27 | [[NSFileManager defaultManager] removeItemAtPath:self.path error:nil]; 28 | NSURL* url = [NSURL fileURLWithPath:self.path]; 29 | 30 | _writer = [AVAssetWriter assetWriterWithURL:url fileType:AVFileTypeQuickTimeMovie error:nil]; 31 | NSDictionary* settings = [NSDictionary dictionaryWithObjectsAndKeys: 32 | AVVideoCodecH264, AVVideoCodecKey, 33 | [NSNumber numberWithInt: width], AVVideoWidthKey, 34 | [NSNumber numberWithInt:height], AVVideoHeightKey, 35 | [NSDictionary dictionaryWithObjectsAndKeys: 36 | @YES, AVVideoAllowFrameReorderingKey, nil], 37 | AVVideoCompressionPropertiesKey, 38 | nil]; 39 | _writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings]; 40 | _writerInput.expectsMediaDataInRealTime = YES; 41 | [_writer addInput:_writerInput]; 42 | } 43 | 44 | - (void) finishWithCompletionHandler:(void (^)(void))handler 45 | { 46 | [_writer finishWritingWithCompletionHandler: handler]; 47 | } 48 | 49 | - (BOOL) encodeFrame:(CMSampleBufferRef) sampleBuffer 50 | { 51 | if (CMSampleBufferDataIsReady(sampleBuffer)) 52 | { 53 | if (_writer.status == AVAssetWriterStatusUnknown) 54 | { 55 | CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 56 | [_writer startWriting]; 57 | [_writer startSessionAtSourceTime:startTime]; 58 | } 59 | if (_writer.status == AVAssetWriterStatusFailed) 60 | { 61 | NSLog(@"writer error %@", _writer.error.localizedDescription); 62 | return NO; 
63 | } 64 | if (_writerInput.readyForMoreMediaData == YES) 65 | { 66 | [_writerInput appendSampleBuffer:sampleBuffer]; 67 | return YES; 68 | } 69 | } 70 | return NO; 71 | } 72 | 73 | @end 74 | -------------------------------------------------------------------------------- /Encoder Demo/en.lproj/InfoPlist.strings: -------------------------------------------------------------------------------- 1 | /* Localized versions of Info.plist keys */ 2 | 3 | -------------------------------------------------------------------------------- /Encoder Demo/en.lproj/MainStoryboard_iPad.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | -------------------------------------------------------------------------------- /Encoder Demo/en.lproj/MainStoryboard_iPhone.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | -------------------------------------------------------------------------------- /Encoder Demo/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 11/01/2013. 6 | // Copyright (c) 2013 Geraint Davies. All rights reserved. 
7 | // 8 | 9 | #import 10 | 11 | #import "EncoderDemoAppDelegate.h" 12 | 13 | int main(int argc, char *argv[]) 14 | { 15 | @autoreleasepool { 16 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([EncoderDemoAppDelegate class])); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2017 Jorge Frisancho 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## RTSP-Server-ioS 2 | 3 | This repository contains a basic RTSP Server using FFMpeg wrapper for iOS based on the [Hardware Video Encoding on iPhone — RTSP Server example][1]. 
4 | 5 | ### Disclaimer 6 | This repository contains a sample code intended to demonstrate the capabilities of the ffmpeg as a camera recorder. It is not intended to be used as-is in applications as a library dependency, and will not be maintained as such. Bug fix contributions are welcome, but issues and feature requests will not be addressed. 7 | 8 | ### Example Contents 9 | This sample code takes the following approach to this problem: 10 | 11 | - Only video is written using the `AVAssetWriter` instance, or it would be impossible to distinguish video from audio in the `mdat` atom. 12 | - Initially, I create two AVAssetWriter instances. The first frame is written to both, and then one instance is closed. Once the `moov` atom has been written to that file, I parse the file and assume that the parameters apply to both instances, since the initial conditions were the same. 13 | - Once I have the parameters, I use a dispatch_source object to trigger reads from the file whenever new data is written. The body of the `mdat` chunk consists of H264 NALUs, each preceded by a length field. Although the length of the `mdat` chunk is not known, we can safely assume that it will continue to the end of the file (until we finish the output file and the `moov` is added). 14 | - For RTP delivery of the data, we group the NALUs into frames by parsing the NALU headers. Since there are no AUDs marking the frame boundaries, this requires looking at several different elements of the NALU header. 15 | - Timestamps arrive with the uncompressed frames from the camera and are stored in a FIFO. These timestamps are applied to the compressed frames in the same order. Fortunately, the `AVAssetWriter` live encoder does not require re-ordering of frames. Update this is no longer true, and I now have a version that supports re-ordered frames. 16 | - When the file gets too large, a new instance of `AVAssetWriter` is used, so that the old temporary file can be deleted. 
Transition code must then wait for the old instance to be closed so that the remaining NALUs can be read from the `mdat` atom without reading past the end of that atom into the subsequent metadata. Finally, the new file is opened and timestamps are adjusted. The resulting compressed output is seamless. 17 | 18 | A little experimentation suggests that we are able to read compressed frames from file about 500ms or so after they are captured, and these frames then arrive around 200ms after that at the client app. 19 | 20 | ## Credits 21 | * [Hardware Video Encoding on iPhone][1] 22 | * [FFmpeg][2] 23 | 24 | ### Pre-requisites 25 | 26 | - FFmpeg 3.3 27 | - Xcode 8.3.2 28 | 29 | ## License 30 | 31 | The code supplied here is covered under the MIT Open Source License. 32 | 33 | [1]: http://www.gdcl.co.uk/2013/02/20/iOS-Video-Encoding.html 34 | [2]: https://www.ffmpeg.org/ 35 | --------------------------------------------------------------------------------