├── README.md ├── VideoToolBoxDecodeH264 ├── VideoToolBoxDecodeH264.xcodeproj │ ├── project.pbxproj │ ├── project.xcworkspace │ │ ├── contents.xcworkspacedata │ │ └── xcuserdata │ │ │ └── andong.xcuserdatad │ │ │ └── UserInterfaceState.xcuserstate │ └── xcuserdata │ │ └── andong.xcuserdatad │ │ ├── xcdebugger │ │ └── Breakpoints_v2.xcbkptlist │ │ └── xcschemes │ │ └── xcschememanagement.plist └── VideoToolBoxDecodeH264 │ ├── AAPLEAGLLayer.h │ ├── AAPLEAGLLayer.m │ ├── AppDelegate.h │ ├── AppDelegate.m │ ├── Assets.xcassets │ └── AppIcon.appiconset │ │ └── Contents.json │ ├── Base.lproj │ ├── LaunchScreen.storyboard │ └── Main.storyboard │ ├── H264DecodeTool.h │ ├── H264DecodeTool.m │ ├── H264EncodeTool.h │ ├── H264EncodeTool.m │ ├── Info.plist │ ├── ViewController.h │ ├── ViewController.m │ └── main.m └── VideoToolBoxEncodeH264 ├── VideoToolBoxEncodeH264.xcodeproj ├── project.pbxproj ├── project.xcworkspace │ ├── contents.xcworkspacedata │ └── xcuserdata │ │ └── andong.xcuserdatad │ │ └── UserInterfaceState.xcuserstate └── xcuserdata │ └── andong.xcuserdatad │ ├── xcdebugger │ └── Breakpoints_v2.xcbkptlist │ └── xcschemes │ └── xcschememanagement.plist └── VideoToolBoxEncodeH264 ├── AppDelegate.h ├── AppDelegate.m ├── Assets.xcassets └── AppIcon.appiconset │ └── Contents.json ├── Base.lproj ├── LaunchScreen.storyboard └── Main.storyboard ├── Info.plist ├── ViewController.h ├── ViewController.m └── main.m /README.md: -------------------------------------------------------------------------------- 1 | # iOS-VideoToolBox-demo 2 | iOS VideoToolBox encode&decode demo 3 | 4 | 详细讲解:[iOS-VideoToolbox硬编码H264](https://www.jianshu.com/p/67d0dd931ed6) 5 | 6 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 48; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 7639A82F20E9F39F00C5D26A /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7639A82E20E9F39F00C5D26A /* AppDelegate.m */; }; 11 | 7639A83220E9F39F00C5D26A /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 7639A83120E9F39F00C5D26A /* ViewController.m */; }; 12 | 7639A83520E9F39F00C5D26A /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7639A83320E9F39F00C5D26A /* Main.storyboard */; }; 13 | 7639A83720E9F39F00C5D26A /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 7639A83620E9F39F00C5D26A /* Assets.xcassets */; }; 14 | 7639A83A20E9F39F00C5D26A /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7639A83820E9F39F00C5D26A /* LaunchScreen.storyboard */; }; 15 | 7639A83D20E9F39F00C5D26A /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 7639A83C20E9F39F00C5D26A /* main.m */; }; 16 | 7639A84520E9F3D000C5D26A /* H264EncodeTool.m in Sources */ = {isa = PBXBuildFile; fileRef = 7639A84420E9F3D000C5D26A /* H264EncodeTool.m */; }; 17 | 7639A84820E9F3E000C5D26A /* H264DecodeTool.m in Sources */ = {isa = PBXBuildFile; fileRef = 7639A84720E9F3E000C5D26A /* H264DecodeTool.m */; }; 18 | 7639A84B20E9F85A00C5D26A /* AAPLEAGLLayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 7639A84A20E9F85A00C5D26A /* AAPLEAGLLayer.m */; }; 19 | /* End PBXBuildFile section */ 20 | 21 | /* Begin PBXFileReference section */ 22 | 7639A82A20E9F39F00C5D26A /* VideoToolBoxDecodeH264.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = VideoToolBoxDecodeH264.app; sourceTree = BUILT_PRODUCTS_DIR; }; 23 | 7639A82D20E9F39F00C5D26A /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 24 | 7639A82E20E9F39F00C5D26A /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 25 | 7639A83020E9F39F00C5D26A /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; }; 26 | 7639A83120E9F39F00C5D26A /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = ""; }; 27 | 7639A83420E9F39F00C5D26A /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 28 | 7639A83620E9F39F00C5D26A /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 29 | 7639A83920E9F39F00C5D26A /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 30 | 7639A83B20E9F39F00C5D26A /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 31 | 7639A83C20E9F39F00C5D26A /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 32 | 7639A84320E9F3D000C5D26A /* H264EncodeTool.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = H264EncodeTool.h; sourceTree = ""; }; 33 | 7639A84420E9F3D000C5D26A /* H264EncodeTool.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = H264EncodeTool.m; sourceTree = ""; }; 34 | 7639A84620E9F3E000C5D26A /* H264DecodeTool.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = H264DecodeTool.h; sourceTree = ""; }; 35 | 7639A84720E9F3E000C5D26A /* H264DecodeTool.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = H264DecodeTool.m; sourceTree = ""; }; 36 | 7639A84920E9F85A00C5D26A /* AAPLEAGLLayer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AAPLEAGLLayer.h; sourceTree = ""; }; 37 | 7639A84A20E9F85A00C5D26A /* AAPLEAGLLayer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AAPLEAGLLayer.m; sourceTree = ""; }; 38 | /* End PBXFileReference section */ 39 | 40 | /* Begin PBXFrameworksBuildPhase section */ 41 | 7639A82720E9F39F00C5D26A /* Frameworks */ = { 42 | isa = PBXFrameworksBuildPhase; 43 | buildActionMask = 2147483647; 44 | files = ( 45 | ); 46 | runOnlyForDeploymentPostprocessing = 0; 47 | }; 48 | /* End PBXFrameworksBuildPhase section */ 49 | 50 | /* Begin PBXGroup section */ 51 | 7639A82120E9F39E00C5D26A = { 52 | isa = PBXGroup; 53 | children = ( 54 | 7639A82C20E9F39F00C5D26A /* VideoToolBoxDecodeH264 */, 55 | 7639A82B20E9F39F00C5D26A /* Products */, 56 | ); 57 | sourceTree = ""; 58 | }; 59 | 7639A82B20E9F39F00C5D26A /* Products */ = { 60 | isa = PBXGroup; 61 | children = ( 62 | 7639A82A20E9F39F00C5D26A /* VideoToolBoxDecodeH264.app */, 63 | ); 64 | name = Products; 65 | sourceTree = ""; 66 | }; 67 | 7639A82C20E9F39F00C5D26A /* VideoToolBoxDecodeH264 */ = { 68 | isa = PBXGroup; 69 | children = ( 70 | 7639A82D20E9F39F00C5D26A /* AppDelegate.h */, 71 | 7639A82E20E9F39F00C5D26A /* AppDelegate.m */, 72 | 7639A84920E9F85A00C5D26A /* AAPLEAGLLayer.h */, 73 | 7639A84A20E9F85A00C5D26A /* AAPLEAGLLayer.m */, 74 | 7639A84320E9F3D000C5D26A /* H264EncodeTool.h */, 75 | 7639A84420E9F3D000C5D26A /* H264EncodeTool.m */, 76 | 7639A84620E9F3E000C5D26A /* H264DecodeTool.h */, 77 | 7639A84720E9F3E000C5D26A /* H264DecodeTool.m */, 78 | 7639A83020E9F39F00C5D26A /* ViewController.h */, 79 | 7639A83120E9F39F00C5D26A /* ViewController.m */, 80 | 7639A83320E9F39F00C5D26A /* Main.storyboard */, 81 | 7639A83620E9F39F00C5D26A /* Assets.xcassets */, 82 | 7639A83820E9F39F00C5D26A /* LaunchScreen.storyboard */, 83 | 7639A83B20E9F39F00C5D26A /* Info.plist */, 84 | 7639A83C20E9F39F00C5D26A /* main.m */, 85 | ); 86 | path = VideoToolBoxDecodeH264; 87 | sourceTree = ""; 88 | }; 89 | /* End PBXGroup section */ 90 | 91 | /* Begin PBXNativeTarget section */ 92 | 7639A82920E9F39F00C5D26A /* VideoToolBoxDecodeH264 */ = { 93 | isa = PBXNativeTarget; 94 | buildConfigurationList = 7639A84020E9F39F00C5D26A /* Build configuration list for PBXNativeTarget "VideoToolBoxDecodeH264" */; 95 | buildPhases = ( 96 | 7639A82620E9F39F00C5D26A /* Sources */, 97 | 7639A82720E9F39F00C5D26A /* Frameworks */, 98 | 7639A82820E9F39F00C5D26A /* Resources */, 99 | ); 100 | buildRules = ( 101 | ); 102 | dependencies = ( 103 | ); 104 | name = VideoToolBoxDecodeH264; 105 | productName = VideoToolBoxDecodeH264; 106 | productReference = 7639A82A20E9F39F00C5D26A /* VideoToolBoxDecodeH264.app */; 107 | productType = "com.apple.product-type.application"; 108 | }; 109 | /* End PBXNativeTarget section */ 110 | 111 | /* Begin PBXProject section */ 112 | 7639A82220E9F39F00C5D26A /* Project object */ = { 113 | isa = PBXProject; 114 | attributes = { 115 | LastUpgradeCheck = 0920; 116 | ORGANIZATIONNAME = AnDong; 117 | TargetAttributes = { 118 | 7639A82920E9F39F00C5D26A = { 119 | CreatedOnToolsVersion = 9.2; 120 | ProvisioningStyle = Automatic; 121 | }; 122 | }; 123 | }; 124 | buildConfigurationList = 7639A82520E9F39F00C5D26A /* Build configuration list for PBXProject "VideoToolBoxDecodeH264" */; 125 | compatibilityVersion = "Xcode 8.0"; 126 | developmentRegion = en; 127 | hasScannedForEncodings = 0; 128 | knownRegions = ( 129 | en, 130 | Base, 131 | ); 132 | mainGroup = 7639A82120E9F39E00C5D26A; 133 | productRefGroup = 7639A82B20E9F39F00C5D26A /* Products */; 134 | projectDirPath = ""; 135 | projectRoot = ""; 136 | targets = ( 137 | 7639A82920E9F39F00C5D26A /* VideoToolBoxDecodeH264 */, 138 | ); 139 | }; 140 | /* End PBXProject section */ 141 | 142 | /* Begin PBXResourcesBuildPhase section */ 143 | 7639A82820E9F39F00C5D26A /* Resources */ = { 144 | isa = PBXResourcesBuildPhase; 145 | buildActionMask = 2147483647; 146 | files = ( 147 | 7639A83A20E9F39F00C5D26A /* LaunchScreen.storyboard in Resources */, 148 | 7639A83720E9F39F00C5D26A /* Assets.xcassets in Resources */, 149 | 7639A83520E9F39F00C5D26A /* Main.storyboard in Resources */, 150 | ); 151 | runOnlyForDeploymentPostprocessing = 0; 152 | }; 153 | /* End PBXResourcesBuildPhase section */ 154 | 155 | /* Begin PBXSourcesBuildPhase section */ 156 | 7639A82620E9F39F00C5D26A /* Sources */ = { 157 | isa = PBXSourcesBuildPhase; 158 | buildActionMask = 2147483647; 159 | files = ( 160 | 7639A84B20E9F85A00C5D26A /* AAPLEAGLLayer.m in Sources */, 161 | 7639A83220E9F39F00C5D26A /* ViewController.m in Sources */, 162 | 7639A84820E9F3E000C5D26A /* H264DecodeTool.m in Sources */, 163 | 7639A83D20E9F39F00C5D26A /* main.m in Sources */, 164 | 7639A82F20E9F39F00C5D26A /* AppDelegate.m in Sources */, 165 | 7639A84520E9F3D000C5D26A /* H264EncodeTool.m in Sources */, 166 | ); 167 | runOnlyForDeploymentPostprocessing = 0; 168 | }; 169 | /* End PBXSourcesBuildPhase section */ 170 | 171 | /* Begin PBXVariantGroup section */ 172 | 7639A83320E9F39F00C5D26A /* Main.storyboard */ = { 173 | isa = PBXVariantGroup; 174 | children = ( 175 | 7639A83420E9F39F00C5D26A /* Base */, 176 | ); 177 | name = Main.storyboard; 178 | sourceTree = ""; 179 | }; 180 | 7639A83820E9F39F00C5D26A /* LaunchScreen.storyboard */ = { 181 | isa = PBXVariantGroup; 182 | children = ( 183 | 7639A83920E9F39F00C5D26A /* Base */, 184 | ); 185 | name = LaunchScreen.storyboard; 186 | sourceTree = ""; 187 | }; 188 | /* End PBXVariantGroup section */ 189 | 190 | /* Begin XCBuildConfiguration section */ 191 | 7639A83E20E9F39F00C5D26A /* Debug */ = { 192 | isa = XCBuildConfiguration; 193 | buildSettings = { 194 | ALWAYS_SEARCH_USER_PATHS = NO; 195 | CLANG_ANALYZER_NONNULL = YES; 196 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 197 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 198 | CLANG_CXX_LIBRARY = "libc++"; 199 | CLANG_ENABLE_MODULES = YES; 200 | CLANG_ENABLE_OBJC_ARC = YES; 201 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 202 | CLANG_WARN_BOOL_CONVERSION = YES; 203 | CLANG_WARN_COMMA = YES; 204 | CLANG_WARN_CONSTANT_CONVERSION = YES; 205 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 206 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 207 | CLANG_WARN_EMPTY_BODY = YES; 208 | CLANG_WARN_ENUM_CONVERSION = YES; 209 | CLANG_WARN_INFINITE_RECURSION = YES; 210 | CLANG_WARN_INT_CONVERSION = YES; 211 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 212 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 213 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 214 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 215 | CLANG_WARN_STRICT_PROTOTYPES = YES; 216 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 217 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 218 | CLANG_WARN_UNREACHABLE_CODE = YES; 219 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 220 | CODE_SIGN_IDENTITY = "iPhone Developer"; 221 | COPY_PHASE_STRIP = NO; 222 | DEBUG_INFORMATION_FORMAT = dwarf; 223 | ENABLE_STRICT_OBJC_MSGSEND = YES; 224 | ENABLE_TESTABILITY = YES; 225 | GCC_C_LANGUAGE_STANDARD = gnu11; 226 | GCC_DYNAMIC_NO_PIC = NO; 227 | GCC_NO_COMMON_BLOCKS = YES; 228 | GCC_OPTIMIZATION_LEVEL = 0; 229 | GCC_PREPROCESSOR_DEFINITIONS = ( 230 | "DEBUG=1", 231 | "$(inherited)", 232 | ); 233 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 234 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 235 | GCC_WARN_UNDECLARED_SELECTOR = YES; 236 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 237 | GCC_WARN_UNUSED_FUNCTION = YES; 238 | GCC_WARN_UNUSED_VARIABLE = YES; 239 | IPHONEOS_DEPLOYMENT_TARGET = 11.2; 240 | MTL_ENABLE_DEBUG_INFO = YES; 241 | ONLY_ACTIVE_ARCH = YES; 242 | SDKROOT = iphoneos; 243 | }; 244 | name = Debug; 245 | }; 246 | 7639A83F20E9F39F00C5D26A /* Release */ = { 247 | isa = XCBuildConfiguration; 248 | buildSettings = { 249 | ALWAYS_SEARCH_USER_PATHS = NO; 250 | CLANG_ANALYZER_NONNULL = YES; 251 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 252 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 253 | CLANG_CXX_LIBRARY = "libc++"; 254 | CLANG_ENABLE_MODULES = YES; 255 | CLANG_ENABLE_OBJC_ARC = YES; 256 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 257 | CLANG_WARN_BOOL_CONVERSION = YES; 258 | CLANG_WARN_COMMA = YES; 259 | CLANG_WARN_CONSTANT_CONVERSION = YES; 260 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 261 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 262 | CLANG_WARN_EMPTY_BODY = YES; 263 | CLANG_WARN_ENUM_CONVERSION = YES; 264 | CLANG_WARN_INFINITE_RECURSION = YES; 265 | CLANG_WARN_INT_CONVERSION = YES; 266 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 267 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 268 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 269 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 270 | CLANG_WARN_STRICT_PROTOTYPES = YES; 271 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 272 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 273 | CLANG_WARN_UNREACHABLE_CODE = YES; 274 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 275 | CODE_SIGN_IDENTITY = "iPhone Developer"; 276 | COPY_PHASE_STRIP = NO; 277 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 278 | ENABLE_NS_ASSERTIONS = NO; 279 | ENABLE_STRICT_OBJC_MSGSEND = YES; 280 | GCC_C_LANGUAGE_STANDARD = gnu11; 281 | GCC_NO_COMMON_BLOCKS = YES; 282 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 283 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 284 | GCC_WARN_UNDECLARED_SELECTOR = YES; 285 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 286 | GCC_WARN_UNUSED_FUNCTION = YES; 287 | GCC_WARN_UNUSED_VARIABLE = YES; 288 | IPHONEOS_DEPLOYMENT_TARGET = 11.2; 289 | MTL_ENABLE_DEBUG_INFO = NO; 290 | SDKROOT = iphoneos; 291 | VALIDATE_PRODUCT = YES; 292 | }; 293 | name = Release; 294 | }; 295 | 7639A84120E9F39F00C5D26A /* Debug */ = { 296 | isa = XCBuildConfiguration; 297 | buildSettings = { 298 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 299 | CODE_SIGN_STYLE = Automatic; 300 | DEVELOPMENT_TEAM = F2347ZTV6T; 301 | INFOPLIST_FILE = VideoToolBoxDecodeH264/Info.plist; 302 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 303 | PRODUCT_BUNDLE_IDENTIFIER = ACE.VideoToolBoxDecodeH264; 304 | PRODUCT_NAME = "$(TARGET_NAME)"; 305 | TARGETED_DEVICE_FAMILY = "1,2"; 306 | }; 307 | name = Debug; 308 | }; 309 | 7639A84220E9F39F00C5D26A /* Release */ = { 310 | isa = XCBuildConfiguration; 311 | buildSettings = { 312 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 313 | CODE_SIGN_STYLE = Automatic; 314 | DEVELOPMENT_TEAM = F2347ZTV6T; 315 | INFOPLIST_FILE = VideoToolBoxDecodeH264/Info.plist; 316 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 317 | PRODUCT_BUNDLE_IDENTIFIER = ACE.VideoToolBoxDecodeH264; 318 | PRODUCT_NAME = "$(TARGET_NAME)"; 319 | TARGETED_DEVICE_FAMILY = "1,2"; 320 | }; 321 | name = Release; 322 | }; 323 | /* End XCBuildConfiguration section */ 324 | 325 | /* Begin XCConfigurationList section */ 326 | 7639A82520E9F39F00C5D26A /* Build configuration list for PBXProject "VideoToolBoxDecodeH264" */ = { 327 | isa = XCConfigurationList; 328 | buildConfigurations = ( 329 | 7639A83E20E9F39F00C5D26A /* Debug */, 330 | 7639A83F20E9F39F00C5D26A /* Release */, 331 | ); 332 | defaultConfigurationIsVisible = 0; 333 | defaultConfigurationName = Release; 334 | }; 335 | 7639A84020E9F39F00C5D26A /* Build configuration list for PBXNativeTarget "VideoToolBoxDecodeH264" */ = { 336 | isa = XCConfigurationList; 337 | buildConfigurations = ( 338 | 7639A84120E9F39F00C5D26A /* Debug */, 339 | 7639A84220E9F39F00C5D26A /* Release */, 340 | ); 341 | defaultConfigurationIsVisible = 0; 342 | defaultConfigurationName = Release; 343 | }; 344 | /* End XCConfigurationList section */ 345 | }; 346 | rootObject = 7639A82220E9F39F00C5D26A /* Project object */; 347 | } 348 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264.xcodeproj/project.xcworkspace/xcuserdata/andong.xcuserdatad/UserInterfaceState.xcuserstate: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AceDong0803/iOS-VideoToolBox-demo/17ee6df9ac304ebfad229f73db29541444623bfa/VideoToolBoxDecodeH264/VideoToolBoxDecodeH264.xcodeproj/project.xcworkspace/xcuserdata/andong.xcuserdatad/UserInterfaceState.xcuserstate -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264.xcodeproj/xcuserdata/andong.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264.xcodeproj/xcuserdata/andong.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | VideoToolBoxDecodeH264.xcscheme 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/AAPLEAGLLayer.h: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (C) 2014 Apple Inc. All Rights Reserved. 3 | See LICENSE.txt for this sample’s licensing information 4 | 5 | Abstract: 6 | 7 | This CAEAGLLayer subclass demonstrates how to draw a CVPixelBufferRef using OpenGLES and display the timecode associated with that pixel buffer in the top right corner. 8 | 9 | */ 10 | 11 | //@import QuartzCore; 12 | #include 13 | #include 14 | 15 | @interface AAPLEAGLLayer : CAEAGLLayer 16 | @property CVPixelBufferRef pixelBuffer; 17 | - (id)initWithFrame:(CGRect)frame; 18 | - (void)resetRenderBuffer; 19 | @end 20 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/AAPLEAGLLayer.m: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (C) 2014 Apple Inc. All Rights Reserved. 3 | See LICENSE.txt for this sample’s licensing information 4 | 5 | Abstract: 6 | 7 | This CAEAGLLayer subclass demonstrates how to draw a CVPixelBufferRef using OpenGLES and display the timecode associated with that pixel buffer in the top right corner. 8 | 9 | */ 10 | 11 | #import "AAPLEAGLLayer.h" 12 | 13 | #import 14 | #import 15 | #include 16 | #import 17 | #include 18 | #include 19 | #include 20 | 21 | // Uniform index. 22 | enum 23 | { 24 | UNIFORM_Y, 25 | UNIFORM_UV, 26 | UNIFORM_ROTATION_ANGLE, 27 | UNIFORM_COLOR_CONVERSION_MATRIX, 28 | NUM_UNIFORMS 29 | }; 30 | GLint uniforms[NUM_UNIFORMS]; 31 | 32 | // Attribute index. 33 | enum 34 | { 35 | ATTRIB_VERTEX, 36 | ATTRIB_TEXCOORD, 37 | NUM_ATTRIBUTES 38 | }; 39 | 40 | // Color Conversion Constants (YUV to RGB) including adjustment from 16-235/16-240 (video range) 41 | 42 | // BT.601, which is the standard for SDTV. 43 | static const GLfloat kColorConversion601[] = { 44 | 1.164, 1.164, 1.164, 45 | 0.0, -0.392, 2.017, 46 | 1.596, -0.813, 0.0, 47 | }; 48 | 49 | // BT.709, which is the standard for HDTV. 50 | static const GLfloat kColorConversion709[] = { 51 | 1.164, 1.164, 1.164, 52 | 0.0, -0.213, 2.112, 53 | 1.793, -0.533, 0.0, 54 | }; 55 | 56 | 57 | 58 | @interface AAPLEAGLLayer () 59 | { 60 | // The pixel dimensions of the CAEAGLLayer. 61 | GLint _backingWidth; 62 | GLint _backingHeight; 63 | 64 | EAGLContext *_context; 65 | CVOpenGLESTextureRef _lumaTexture; 66 | CVOpenGLESTextureRef _chromaTexture; 67 | 68 | GLuint _frameBufferHandle; 69 | GLuint _colorBufferHandle; 70 | 71 | const GLfloat *_preferredConversion; 72 | } 73 | @property GLuint program; 74 | 75 | @end 76 | @implementation AAPLEAGLLayer 77 | @synthesize pixelBuffer = _pixelBuffer; 78 | 79 | -(CVPixelBufferRef) pixelBuffer 80 | { 81 | return _pixelBuffer; 82 | } 83 | 84 | - (void)setPixelBuffer:(CVPixelBufferRef)pb 85 | { 86 | if(_pixelBuffer) { 87 | CVPixelBufferRelease(_pixelBuffer); 88 | } 89 | _pixelBuffer = CVPixelBufferRetain(pb); 90 | 91 | int frameWidth = (int)CVPixelBufferGetWidth(_pixelBuffer); 92 | int frameHeight = (int)CVPixelBufferGetHeight(_pixelBuffer); 93 | [self displayPixelBuffer:_pixelBuffer width:frameWidth height:frameHeight]; 94 | } 95 | 96 | - (instancetype)initWithFrame:(CGRect)frame 97 | { 98 | self = [super init]; 99 | if (self) { 100 | CGFloat scale = [[UIScreen mainScreen] scale]; 101 | self.contentsScale = scale; 102 | 103 | self.opaque = TRUE; 104 | self.drawableProperties = @{ kEAGLDrawablePropertyRetainedBacking :[NSNumber numberWithBool:YES]}; 105 | 106 | [self setFrame:frame]; 107 | 108 | // Set the context into which the frames will be drawn. 109 | _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; 110 | 111 | if (!_context) { 112 | return nil; 113 | } 114 | 115 | // Set the default conversion to BT.709, which is the standard for HDTV. 116 | _preferredConversion = kColorConversion709; 117 | 118 | [self setupGL]; 119 | } 120 | 121 | return self; 122 | } 123 | 124 | - (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer width:(uint32_t)frameWidth height:(uint32_t)frameHeight 125 | { 126 | if (!_context || ![EAGLContext setCurrentContext:_context]) { 127 | return; 128 | } 129 | 130 | if(pixelBuffer == NULL) { 131 | NSLog(@"Pixel buffer is null"); 132 | return; 133 | } 134 | 135 | CVReturn err; 136 | 137 | size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer); 138 | 139 | /* 140 | Use the color attachment of the pixel buffer to determine the appropriate color conversion matrix. 141 | */ 142 | CFTypeRef colorAttachments = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL); 143 | 144 | if (CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo) { 145 | _preferredConversion = kColorConversion601; 146 | } 147 | else { 148 | _preferredConversion = kColorConversion709; 149 | } 150 | 151 | /* 152 | CVOpenGLESTextureCacheCreateTextureFromImage will create GLES texture optimally from CVPixelBufferRef. 153 | */ 154 | 155 | /* 156 | Create Y and UV textures from the pixel buffer. These textures will be drawn on the frame buffer Y-plane. 157 | */ 158 | 159 | CVOpenGLESTextureCacheRef _videoTextureCache; 160 | 161 | // Create CVOpenGLESTextureCacheRef for optimal CVPixelBufferRef to GLES texture conversion. 162 | err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache); 163 | if (err != noErr) { 164 | NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err); 165 | return; 166 | } 167 | 168 | glActiveTexture(GL_TEXTURE0); 169 | 170 | err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, 171 | _videoTextureCache, 172 | pixelBuffer, 173 | NULL, 174 | GL_TEXTURE_2D, 175 | GL_RED_EXT, 176 | frameWidth, 177 | frameHeight, 178 | GL_RED_EXT, 179 | GL_UNSIGNED_BYTE, 180 | 0, 181 | &_lumaTexture); 182 | if (err) { 183 | NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); 184 | } 185 | 186 | glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture)); 187 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 188 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 189 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 190 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 191 | 192 | if(planeCount == 2) { 193 | // UV-plane. 194 | glActiveTexture(GL_TEXTURE1); 195 | err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, 196 | _videoTextureCache, 197 | pixelBuffer, 198 | NULL, 199 | GL_TEXTURE_2D, 200 | GL_RG_EXT, 201 | frameWidth / 2, 202 | frameHeight / 2, 203 | GL_RG_EXT, 204 | GL_UNSIGNED_BYTE, 205 | 1, 206 | &_chromaTexture); 207 | if (err) { 208 | NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); 209 | } 210 | 211 | glBindTexture(CVOpenGLESTextureGetTarget(_chromaTexture), CVOpenGLESTextureGetName(_chromaTexture)); 212 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 213 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 214 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 215 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 216 | } 217 | 218 | glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle); 219 | 220 | // Set the view port to the entire view. 221 | glViewport(0, 0, _backingWidth, _backingHeight); 222 | 223 | glClearColor(0.0f, 0.0f, 0.0f, 1.0f); 224 | glClear(GL_COLOR_BUFFER_BIT); 225 | 226 | // Use shader program. 227 | glUseProgram(self.program); 228 | // glUniform1f(uniforms[UNIFORM_LUMA_THRESHOLD], 1); 229 | // glUniform1f(uniforms[UNIFORM_CHROMA_THRESHOLD], 1); 230 | glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0); 231 | glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion); 232 | 233 | // Set up the quad vertices with respect to the orientation and aspect ratio of the video. 234 | CGRect viewBounds = self.bounds; 235 | CGSize contentSize = CGSizeMake(frameWidth, frameHeight); 236 | CGRect vertexSamplingRect = AVMakeRectWithAspectRatioInsideRect(contentSize, viewBounds); 237 | 238 | // Compute normalized quad coordinates to draw the frame into. 239 | CGSize normalizedSamplingSize = CGSizeMake(0.0, 0.0); 240 | CGSize cropScaleAmount = CGSizeMake(vertexSamplingRect.size.width/viewBounds.size.width, 241 | vertexSamplingRect.size.height/viewBounds.size.height); 242 | 243 | // Normalize the quad vertices. 244 | if (cropScaleAmount.width > cropScaleAmount.height) { 245 | normalizedSamplingSize.width = 1.0; 246 | normalizedSamplingSize.height = cropScaleAmount.height/cropScaleAmount.width; 247 | } 248 | else { 249 | normalizedSamplingSize.width = cropScaleAmount.width/cropScaleAmount.height; 250 | normalizedSamplingSize.height = 1.0;; 251 | } 252 | 253 | /* 254 | The quad vertex data defines the region of 2D plane onto which we draw our pixel buffers. 255 | Vertex data formed using (-1,-1) and (1,1) as the bottom left and top right coordinates respectively, covers the entire screen. 256 | */ 257 | GLfloat quadVertexData [] = { 258 | -1 * normalizedSamplingSize.width, -1 * normalizedSamplingSize.height, 259 | normalizedSamplingSize.width, -1 * normalizedSamplingSize.height, 260 | -1 * normalizedSamplingSize.width, normalizedSamplingSize.height, 261 | normalizedSamplingSize.width, normalizedSamplingSize.height, 262 | }; 263 | 264 | // Update attribute values. 265 | glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, quadVertexData); 266 | glEnableVertexAttribArray(ATTRIB_VERTEX); 267 | 268 | /* 269 | The texture vertices are set up such that we flip the texture vertically. This is so that our top left origin buffers match OpenGL's bottom left texture coordinate system. 270 | */ 271 | CGRect textureSamplingRect = CGRectMake(0, 0, 1, 1); 272 | GLfloat quadTextureData[] = { 273 | CGRectGetMinX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect), 274 | CGRectGetMaxX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect), 275 | CGRectGetMinX(textureSamplingRect), CGRectGetMinY(textureSamplingRect), 276 | CGRectGetMaxX(textureSamplingRect), CGRectGetMinY(textureSamplingRect) 277 | }; 278 | 279 | glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, 0, 0, quadTextureData); 280 | glEnableVertexAttribArray(ATTRIB_TEXCOORD); 281 | 282 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); 283 | 284 | glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle); 285 | [_context presentRenderbuffer:GL_RENDERBUFFER]; 286 | 287 | [self cleanUpTextures]; 288 | // Periodic texture cache flush every frame 289 | CVOpenGLESTextureCacheFlush(_videoTextureCache, 0); 290 | 291 | if(_videoTextureCache) { 292 | CFRelease(_videoTextureCache); 293 | } 294 | } 295 | 296 | # pragma mark - OpenGL setup 297 | 298 | - (void)setupGL 299 | { 300 | if (!_context || ![EAGLContext setCurrentContext:_context]) { 301 | return; 302 | } 303 | 304 | [self setupBuffers]; 305 | [self loadShaders]; 306 | 307 | glUseProgram(self.program); 308 | 309 | // 0 and 1 are the texture IDs of _lumaTexture and _chromaTexture respectively. 310 | glUniform1i(uniforms[UNIFORM_Y], 0); 311 | glUniform1i(uniforms[UNIFORM_UV], 1); 312 | glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0); 313 | glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion); 314 | } 315 | 316 | #pragma mark - Utilities 317 | 318 | - (void)setupBuffers 319 | { 320 | glDisable(GL_DEPTH_TEST); 321 | 322 | glEnableVertexAttribArray(ATTRIB_VERTEX); 323 | glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0); 324 | 325 | glEnableVertexAttribArray(ATTRIB_TEXCOORD); 326 | glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0); 327 | 328 | [self createBuffers]; 329 | } 330 | 331 | - (void) createBuffers 332 | { 333 | glGenFramebuffers(1, &_frameBufferHandle); 334 | glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle); 335 | 336 | glGenRenderbuffers(1, &_colorBufferHandle); 337 | glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle); 338 | 339 | [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:self]; 340 | glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth); 341 | glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight); 342 | 343 | glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorBufferHandle); 344 | if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) { 345 | NSLog(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER)); 346 | } 347 | } 348 | 349 | - (void) releaseBuffers 350 | { 351 | if(_frameBufferHandle) { 352 | glDeleteFramebuffers(1, &_frameBufferHandle); 353 | _frameBufferHandle = 0; 354 | } 355 | 356 | if(_colorBufferHandle) { 357 | glDeleteRenderbuffers(1, &_colorBufferHandle); 358 | _colorBufferHandle = 0; 359 | } 360 | } 361 | 362 | - (void) resetRenderBuffer 363 | { 364 | if (!_context || ![EAGLContext setCurrentContext:_context]) { 365 | return; 366 | } 367 | 368 | [self releaseBuffers]; 369 | [self createBuffers]; 370 | } 371 | 372 | - (void) cleanUpTextures 373 | { 374 | if (_lumaTexture) { 375 | CFRelease(_lumaTexture); 376 | _lumaTexture = NULL; 377 | } 378 | 379 | if (_chromaTexture) { 380 | CFRelease(_chromaTexture); 381 | _chromaTexture = NULL; 382 | } 383 | } 384 | 385 | #pragma mark - OpenGL ES 2 shader compilation 386 | 387 | const GLchar *shader_fsh = (const GLchar*)"varying highp vec2 texCoordVarying;" 388 | "precision mediump float;" 389 | "uniform sampler2D SamplerY;" 390 | "uniform sampler2D SamplerUV;" 391 | "uniform mat3 colorConversionMatrix;" 392 | "void main()" 393 | "{" 394 | " mediump vec3 yuv;" 395 | " lowp vec3 rgb;" 396 | // Subtract constants to map the video range start at 0 397 | " yuv.x = (texture2D(SamplerY, texCoordVarying).r - (16.0/255.0));" 398 | " yuv.yz = (texture2D(SamplerUV, texCoordVarying).rg - vec2(0.5, 0.5));" 399 | " rgb = colorConversionMatrix * yuv;" 400 | " gl_FragColor = vec4(rgb, 1);" 401 | "}"; 402 | 403 | const GLchar *shader_vsh = (const GLchar*)"attribute vec4 position;" 404 | "attribute vec2 texCoord;" 405 | "uniform float preferredRotation;" 406 | "varying vec2 texCoordVarying;" 407 | "void main()" 408 | "{" 409 | " mat4 rotationMatrix = mat4(cos(preferredRotation), -sin(preferredRotation), 0.0, 0.0," 410 | " sin(preferredRotation), cos(preferredRotation), 0.0, 0.0," 411 | " 0.0, 0.0, 1.0, 0.0," 412 | " 0.0, 0.0, 0.0, 1.0);" 413 | " gl_Position = position * rotationMatrix;" 414 | " texCoordVarying = texCoord;" 415 | "}"; 416 | 417 | - (BOOL)loadShaders 418 | { 419 | GLuint vertShader = 0, fragShader = 0; 420 | 421 | // Create the shader program. 422 | self.program = glCreateProgram(); 423 | 424 | if(![self compileShaderString:&vertShader type:GL_VERTEX_SHADER shaderString:shader_vsh]) { 425 | NSLog(@"Failed to compile vertex shader"); 426 | return NO; 427 | } 428 | 429 | if(![self compileShaderString:&fragShader type:GL_FRAGMENT_SHADER shaderString:shader_fsh]) { 430 | NSLog(@"Failed to compile fragment shader"); 431 | return NO; 432 | } 433 | 434 | // Attach vertex shader to program. 435 | glAttachShader(self.program, vertShader); 436 | 437 | // Attach fragment shader to program. 438 | glAttachShader(self.program, fragShader); 439 | 440 | // Bind attribute locations. This needs to be done prior to linking. 441 | glBindAttribLocation(self.program, ATTRIB_VERTEX, "position"); 442 | glBindAttribLocation(self.program, ATTRIB_TEXCOORD, "texCoord"); 443 | 444 | // Link the program. 445 | if (![self linkProgram:self.program]) { 446 | NSLog(@"Failed to link program: %d", self.program); 447 | 448 | if (vertShader) { 449 | glDeleteShader(vertShader); 450 | vertShader = 0; 451 | } 452 | if (fragShader) { 453 | glDeleteShader(fragShader); 454 | fragShader = 0; 455 | } 456 | if (self.program) { 457 | glDeleteProgram(self.program); 458 | self.program = 0; 459 | } 460 | 461 | return NO; 462 | } 463 | 464 | // Get uniform locations. 465 | uniforms[UNIFORM_Y] = glGetUniformLocation(self.program, "SamplerY"); 466 | uniforms[UNIFORM_UV] = glGetUniformLocation(self.program, "SamplerUV"); 467 | // uniforms[UNIFORM_LUMA_THRESHOLD] = glGetUniformLocation(self.program, "lumaThreshold"); 468 | // uniforms[UNIFORM_CHROMA_THRESHOLD] = glGetUniformLocation(self.program, "chromaThreshold"); 469 | uniforms[UNIFORM_ROTATION_ANGLE] = glGetUniformLocation(self.program, "preferredRotation"); 470 | uniforms[UNIFORM_COLOR_CONVERSION_MATRIX] = glGetUniformLocation(self.program, "colorConversionMatrix"); 471 | 472 | // Release vertex and fragment shaders. 473 | if (vertShader) { 474 | glDetachShader(self.program, vertShader); 475 | glDeleteShader(vertShader); 476 | } 477 | if (fragShader) { 478 | glDetachShader(self.program, fragShader); 479 | glDeleteShader(fragShader); 480 | } 481 | 482 | return YES; 483 | } 484 | 485 | - (BOOL)compileShaderString:(GLuint *)shader type:(GLenum)type shaderString:(const GLchar*)shaderString 486 | { 487 | *shader = glCreateShader(type); 488 | glShaderSource(*shader, 1, &shaderString, NULL); 489 | glCompileShader(*shader); 490 | 491 | #if defined(DEBUG) 492 | GLint logLength; 493 | glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength); 494 | if (logLength > 0) { 495 | GLchar *log = (GLchar *)malloc(logLength); 496 | glGetShaderInfoLog(*shader, logLength, &logLength, log); 497 | NSLog(@"Shader compile log:\n%s", log); 498 | free(log); 499 | } 500 | #endif 501 | 502 | GLint status = 0; 503 | glGetShaderiv(*shader, GL_COMPILE_STATUS, &status); 504 | if (status == 0) { 505 | glDeleteShader(*shader); 506 | return NO; 507 | } 508 | 509 | return YES; 510 | } 511 | 512 | - (BOOL)compileShader:(GLuint *)shader type:(GLenum)type URL:(NSURL *)URL 513 | { 514 | NSError *error; 515 | NSString *sourceString = [[NSString alloc] initWithContentsOfURL:URL encoding:NSUTF8StringEncoding error:&error]; 516 | if (sourceString == nil) { 517 | NSLog(@"Failed to load vertex shader: %@", [error localizedDescription]); 518 | return NO; 519 | } 520 | 521 | const GLchar *source = (GLchar *)[sourceString UTF8String]; 522 | 523 | return [self compileShaderString:shader type:type shaderString:source]; 524 | } 525 | 526 | - (BOOL)linkProgram:(GLuint)prog 527 | { 528 | GLint status; 529 | glLinkProgram(prog); 530 | 531 | #if defined(DEBUG) 532 | GLint logLength; 533 | glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength); 534 | if (logLength > 0) { 535 | GLchar *log = (GLchar *)malloc(logLength); 536 | glGetProgramInfoLog(prog, logLength, &logLength, log); 537 | NSLog(@"Program link log:\n%s", log); 538 | free(log); 539 | } 540 | #endif 541 | 542 | glGetProgramiv(prog, GL_LINK_STATUS, &status); 543 | if (status == 0) { 544 | return NO; 545 | } 546 | 547 | return YES; 548 | } 549 | 550 | - (BOOL)validateProgram:(GLuint)prog 551 | { 552 | GLint logLength, status; 553 | 554 | glValidateProgram(prog); 555 | glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength); 556 | if (logLength > 0) { 557 | GLchar *log = (GLchar *)malloc(logLength); 558 | glGetProgramInfoLog(prog, logLength, &logLength, log); 559 | NSLog(@"Program validate log:\n%s", log); 560 | free(log); 561 | } 562 | 563 | glGetProgramiv(prog, GL_VALIDATE_STATUS, &status); 564 | if (status == 0) { 565 | return NO; 566 | } 567 | 568 | return YES; 569 | } 570 | 571 | - (void)dealloc 572 | { 573 | if (!_context || ![EAGLContext setCurrentContext:_context]) { 574 | return; 575 | } 576 | 577 | [self cleanUpTextures]; 578 | 579 | if(_pixelBuffer) { 580 | CVPixelBufferRelease(_pixelBuffer); 581 | } 582 | 583 | if (self.program) { 584 | glDeleteProgram(self.program); 585 | self.program = 0; 586 | } 587 | if(_context) { 588 | //[_context release]; 589 | _context = nil; 590 | } 591 | //[super dealloc]; 592 | } 593 | 594 | @end 595 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/AppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.h 3 | // VideoToolBoxDecodeH264 4 | // 5 | // Created by AnDong on 2018/7/2. 6 | // Copyright © 2018年 AnDong. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface AppDelegate : UIResponder 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | 16 | @end 17 | 18 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/AppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.m 3 | // VideoToolBoxDecodeH264 4 | // 5 | // Created by AnDong on 2018/7/2. 6 | // Copyright © 2018年 AnDong. All rights reserved. 7 | // 8 | 9 | #import "AppDelegate.h" 10 | 11 | @interface AppDelegate () 12 | 13 | @end 14 | 15 | @implementation AppDelegate 16 | 17 | 18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 19 | // Override point for customization after application launch. 20 | return YES; 21 | } 22 | 23 | 24 | - (void)applicationWillResignActive:(UIApplication *)application { 25 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 26 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 27 | } 28 | 29 | 30 | - (void)applicationDidEnterBackground:(UIApplication *)application { 31 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 32 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 33 | } 34 | 35 | 36 | - (void)applicationWillEnterForeground:(UIApplication *)application { 37 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 38 | } 39 | 40 | 41 | - (void)applicationDidBecomeActive:(UIApplication *)application { 42 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 43 | } 44 | 45 | 46 | - (void)applicationWillTerminate:(UIApplication *)application { 47 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 48 | } 49 | 50 | 51 | @end 52 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | } 88 | ], 89 | "info" : { 90 | "version" : 1, 91 | "author" : "xcode" 92 | } 93 | } -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/H264DecodeTool.h: -------------------------------------------------------------------------------- 1 | // 2 | // H264DecodeTool.h 3 | // VideoToolBoxDecodeH264 4 | // 5 | // Created by AnDong on 2018/7/2. 6 | // Copyright © 2018年 AnDong. All rights reserved. 7 | // 8 | 9 | #import 10 | #import 11 | #import 12 | 13 | @protocol H264DecodeFrameCallbackDelegate 14 | 15 | //回调sps和pps数据 16 | - (void)gotDecodedFrame:(CVImageBufferRef )imageBuffer; 17 | 18 | @end 19 | 20 | @interface H264DecodeTool : NSObject 21 | 22 | -(BOOL)initH264Decoder; 23 | 24 | //解码nalu 25 | -(void)decodeNalu:(uint8_t *)frame size:(uint32_t)frameSize; 26 | 27 | - (void)endDecode; 28 | 29 | @property (weak, nonatomic) id delegate; 30 | 31 | @end 32 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/H264DecodeTool.m: -------------------------------------------------------------------------------- 1 | // 2 | // H264DecodeTool.m 3 | // VideoToolBoxDecodeH264 4 | // 5 | // Created by AnDong on 2018/7/2. 6 | // Copyright © 2018年 AnDong. All rights reserved. 7 | // 8 | 9 | #import "H264DecodeTool.h" 10 | 11 | @interface H264DecodeTool(){ 12 | 13 | //解码session 14 | VTDecompressionSessionRef _decoderSession; 15 | 16 | //解码format 封装了sps和pps 17 | CMVideoFormatDescriptionRef _decoderFormatDescription; 18 | 19 | //sps & pps 20 | uint8_t *_sps; 21 | NSInteger _spsSize; 22 | uint8_t *_pps; 23 | NSInteger _ppsSize; 24 | 25 | } 26 | 27 | @end 28 | 29 | @implementation H264DecodeTool 30 | 31 | - (BOOL)initH264Decoder{ 32 | if(_decoderSession){ 33 | return YES; 34 | } 35 | 36 | const uint8_t* const parameterSetPointers[2] = { _sps, _pps }; 37 | const size_t parameterSetSizes[2] = { _spsSize, _ppsSize }; 38 | 39 | //用sps 和pps 实例化_decoderFormatDescription 40 | OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault, 41 | 2, //参数个数 42 | parameterSetPointers, 43 | parameterSetSizes, 44 | 4, //nal startcode开始的size 45 | &_decoderFormatDescription); 46 | 47 | if(status == noErr) { 48 | NSDictionary* destinationPixelBufferAttributes = @{ 49 | (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], 50 | //硬解必须是 kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange 51 | // 或者是kCVPixelFormatType_420YpCbCr8Planar 52 | //因为iOS是 nv12 其他是nv21 53 | (id)kCVPixelBufferWidthKey : [NSNumber numberWithInt:1280], 54 | (id)kCVPixelBufferHeightKey : [NSNumber numberWithInt:960], 55 | //这里宽高和编码反的 两倍关系 56 | (id)kCVPixelBufferOpenGLCompatibilityKey : [NSNumber numberWithBool:YES] 57 | }; 58 | 59 | 60 | 61 | VTDecompressionOutputCallbackRecord callBackRecord; 62 | callBackRecord.decompressionOutputCallback = didDecompress; 63 | callBackRecord.decompressionOutputRefCon = (__bridge void *)self; 64 | status = VTDecompressionSessionCreate(kCFAllocatorDefault, 65 | _decoderFormatDescription, 66 | NULL, 67 | (__bridge CFDictionaryRef)destinationPixelBufferAttributes, 68 | &callBackRecord, 69 | &_decoderSession); 70 | VTSessionSetProperty(_decoderSession, kVTDecompressionPropertyKey_ThreadCount, (__bridge CFTypeRef)[NSNumber numberWithInt:1]); 71 | VTSessionSetProperty(_decoderSession, kVTDecompressionPropertyKey_RealTime, kCFBooleanTrue); 72 | } else { 73 | NSLog(@"IOS8VT: reset decoder session failed status=%d", status); 74 | return NO; 75 | } 76 | 77 | return YES; 78 | } 79 | 80 | //解码回调 81 | static void didDecompress( void *decompressionOutputRefCon, void *sourceFrameRefCon, OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef pixelBuffer, CMTime presentationTimeStamp, CMTime presentationDuration ){ 82 | CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon; 83 | 84 | //持有pixelBuffer数据,否则会被释放 85 | *outputPixelBuffer = CVPixelBufferRetain(pixelBuffer); 86 | H264DecodeTool *decoder = (__bridge H264DecodeTool *)decompressionOutputRefCon; 87 | if (decoder.delegate) 88 | { 89 | [decoder.delegate gotDecodedFrame:pixelBuffer]; 90 | } 91 | } 92 | 93 | 94 | //解码nalu裸数据 95 | -(void) decodeNalu:(uint8_t *)frame size:(uint32_t)frameSize 96 | { 97 | // NSLog(@"------------开始解码"); 98 | 99 | //获取nalu type 100 | int nalu_type = (frame[4] & 0x1F); 101 | CVPixelBufferRef pixelBuffer = NULL; 102 | 103 | //填充nalu size 去掉start code 替换成nalu size 104 | uint32_t nalSize = (uint32_t)(frameSize - 4); 105 | uint8_t *pNalSize = (uint8_t*)(&nalSize); 106 | frame[0] = *(pNalSize + 3); 107 | frame[1] = *(pNalSize + 2); 108 | frame[2] = *(pNalSize + 1); 109 | frame[3] = *(pNalSize); 110 | 111 | switch (nalu_type) 112 | { 113 | case 0x05: 114 | //关键帧 115 | if([self initH264Decoder]) 116 | { 117 | pixelBuffer = [self decode:frame size:frameSize]; 118 | } 119 | break; 120 | case 0x07: 121 | //sps 122 | _spsSize = frameSize - 4; 123 | _sps = malloc(_spsSize); 124 | memcpy(_sps, &frame[4], _spsSize); 125 | break; 126 | case 0x08: 127 | { 128 | //pps 129 | _ppsSize = frameSize - 4; 130 | _pps = malloc(_ppsSize); 131 | memcpy(_pps, &frame[4], _ppsSize); 132 | break; 133 | } 134 | default: 135 | { 136 | // B/P frame 137 | if([self initH264Decoder]) 138 | { 139 | pixelBuffer = [self decode:frame size:frameSize]; 140 | } 141 | break; 142 | } 143 | 144 | 145 | } 146 | } 147 | 148 | 149 | //解码帧数据 150 | - (CVPixelBufferRef)decode:(uint8_t *)frame size:(uint32_t)frameSize{ 151 | CVPixelBufferRef outputPixelBuffer = NULL; 152 | 153 | CMBlockBufferRef blockBuffer = NULL; 154 | 155 | //创建CMBlockBufferRef 156 | OSStatus status = CMBlockBufferCreateWithMemoryBlock(NULL, 157 | (void *)frame, 158 | frameSize, 159 | kCFAllocatorNull, 160 | NULL, 161 | 0, 162 | frameSize, 163 | FALSE, 164 | &blockBuffer); 165 | if (status == kCMBlockBufferNoErr) { 166 | 167 | CMSampleBufferRef sampleBuffer = NULL; 168 | const size_t sampleSizeArray[] = {frameSize}; 169 | 170 | //创建sampleBuffer 171 | status = CMSampleBufferCreateReady(kCFAllocatorDefault, 172 | blockBuffer, 173 | _decoderFormatDescription , 174 | 1, 0, NULL, 1, sampleSizeArray, 175 | &sampleBuffer); 176 | 177 | if (status == kCMBlockBufferNoErr && sampleBuffer) { 178 | VTDecodeFrameFlags flags = 0; 179 | VTDecodeInfoFlags flagOut = 0; 180 | //CMSampleBufferRef丢进去解码 181 | OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(_decoderSession, 182 | sampleBuffer, 183 | flags, 184 | &outputPixelBuffer, 185 | &flagOut); 186 | 187 | if(decodeStatus == kVTInvalidSessionErr) { 188 | NSLog(@"IOS8VT: Invalid session, reset decoder session"); 189 | } else if(decodeStatus == kVTVideoDecoderBadDataErr) { 190 | NSLog(@"IOS8VT: decode failed status=%d(Bad data)", decodeStatus); 191 | } else if(decodeStatus != noErr) { 192 | NSLog(@"IOS8VT: decode failed status=%d", decodeStatus); 193 | } 194 | CFRelease(sampleBuffer); 195 | } 196 | CFRelease(blockBuffer); 197 | } 198 | //返回pixelBuffer数据 199 | return outputPixelBuffer; 200 | } 201 | 202 | - (void)endDecode{ 203 | 204 | if(_decoderSession) { 205 | VTDecompressionSessionInvalidate(_decoderSession); 206 | CFRelease(_decoderSession); 207 | _decoderSession = NULL; 208 | } 209 | 210 | if(_decoderFormatDescription) { 211 | CFRelease(_decoderFormatDescription); 212 | _decoderFormatDescription = NULL; 213 | } 214 | 215 | if (_sps) { 216 | free(_sps); 217 | } 218 | 219 | if (_pps) { 220 | free(_pps); 221 | } 222 | 223 | _ppsSize = _spsSize = 0; 224 | } 225 | 226 | 227 | 228 | @end 229 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/H264EncodeTool.h: -------------------------------------------------------------------------------- 1 | // 2 | // H264EncodeTool.h 3 | // VideoToolBoxDecodeH264 4 | // 5 | // Created by AnDong on 2018/7/2. 6 | // Copyright © 2018年 AnDong. All rights reserved. 7 | // 8 | 9 | #import 10 | #import 11 | 12 | @protocol H264EncodeCallBackDelegate 13 | 14 | //回调sps和pps数据 15 | - (void)gotSpsPps:(NSData*)sps pps:(NSData*)pps; 16 | 17 | //回调H264数据和是否是关键帧 18 | - (void)gotEncodedData:(NSData*)data isKeyFrame:(BOOL)isKeyFrame; 19 | 20 | @end 21 | 22 | @interface H264EncodeTool : NSObject 23 | 24 | //初始化视频宽高 25 | - (void) initEncode:(int)width height:(int)height; 26 | 27 | //编码CMSampleBufferRef 28 | - (void) encode:(CMSampleBufferRef )sampleBuffer; 29 | 30 | //停止编码 31 | - (void) stopEncode; 32 | 33 | @property (weak, nonatomic) id delegate; 34 | 35 | @end 36 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/H264EncodeTool.m: -------------------------------------------------------------------------------- 1 | // 2 | // H264EncodeTool.m 3 | // VideoToolBoxDecodeH264 4 | // 5 | // Created by AnDong on 2018/7/2. 6 | // Copyright © 2018年 AnDong. All rights reserved. 7 | // 8 | 9 | #import "H264EncodeTool.h" 10 | #import 11 | 12 | @interface H264EncodeTool (){ 13 | 14 | //帧号 15 | int frameNO; 16 | 17 | //编码队列 18 | dispatch_queue_t encodeQueue; 19 | 20 | //编码session 21 | VTCompressionSessionRef encodingSession; 22 | 23 | //sps和pps 24 | NSData *sps; 25 | NSData *pps; 26 | } 27 | 28 | 29 | @end 30 | 31 | @implementation H264EncodeTool 32 | 33 | - (instancetype)init{ 34 | 35 | if (self = [super init]) { 36 | frameNO = 0; 37 | encodeQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0); 38 | sps = nil; 39 | pps = nil; 40 | } 41 | return self; 42 | } 43 | 44 | - (void)initEncode:(int)width height:(int)height{ 45 | 46 | dispatch_async(encodeQueue, ^{ 47 | OSStatus status = VTCompressionSessionCreate(NULL, width, height, kCMVideoCodecType_H264, NULL, NULL, NULL, didCompressH264, (__bridge void *)(self), &encodingSession); 48 | NSLog(@"H264: VTCompressionSessionCreate %d", (int)status); 49 | if (status != 0) 50 | { 51 | NSLog(@"H264: Unable to create a H264 session"); 52 | return ; 53 | } 54 | 55 | // 设置实时编码输出(避免延迟) 56 | VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue); 57 | VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Baseline_AutoLevel); 58 | 59 | // 设置关键帧(GOPsize)间隔 60 | int frameInterval = 24; 61 | CFNumberRef frameIntervalRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &frameInterval); 62 | VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, frameIntervalRef); 63 | 64 | //设置期望帧率 65 | int fps = 24; 66 | CFNumberRef fpsRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &fps); 67 | VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_ExpectedFrameRate, fpsRef); 68 | 69 | 70 | //设置码率,均值,单位是byte 71 | int bitRate = width * height * 3 * 4 * 8; 72 | CFNumberRef bitRateRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &bitRate); 73 | VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_AverageBitRate, bitRateRef); 74 | 75 | //设置码率,上限,单位是bps 76 | int bitRateLimit = width * height * 3 * 4; 77 | CFNumberRef bitRateLimitRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &bitRateLimit); 78 | VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_DataRateLimits, bitRateLimitRef); 79 | 80 | //开始编码 81 | VTCompressionSessionPrepareToEncodeFrames(encodingSession); 82 | }); 83 | 84 | } 85 | 86 | // 编码完成回调 87 | void didCompressH264(void *outputCallbackRefCon, void *sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer) { 88 | NSLog(@"didCompressH264 called with status %d infoFlags %d", (int)status, (int)infoFlags); 89 | if (status != 0) { 90 | return; 91 | } 92 | if (!CMSampleBufferDataIsReady(sampleBuffer)) { 93 | NSLog(@"didCompressH264 data is not ready "); 94 | return; 95 | } 96 | H264EncodeTool* encoder = (__bridge H264EncodeTool*)outputCallbackRefCon; 97 | bool keyframe = !CFDictionaryContainsKey( (CFArrayGetValueAtIndex(CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true), 0)), kCMSampleAttachmentKey_NotSync); 98 | 99 | // 判断当前帧是否为关键帧 100 | // 获取sps & pps数据 101 | if (keyframe) 102 | { 103 | CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer); 104 | size_t sparameterSetSize, sparameterSetCount; 105 | const uint8_t *sparameterSet; 106 | OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0 ); 107 | if (statusCode == noErr) 108 | { 109 | // 获得了sps,再获取pps 110 | size_t pparameterSetSize, pparameterSetCount; 111 | const uint8_t *pparameterSet; 112 | OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0 ); 113 | if (statusCode == noErr) 114 | { 115 | // 获取SPS和PPS data 116 | NSData *sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize]; 117 | NSData *pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize]; 118 | if (encoder.delegate) 119 | { 120 | //回调解码完成的sps和pps数据 121 | [encoder.delegate gotSpsPps:sps pps:pps]; 122 | } 123 | } 124 | } 125 | } 126 | 127 | CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer); 128 | size_t length, totalLength; 129 | char *dataPointer; 130 | 131 | //这里获取了数据指针,和NALU的帧总长度,前四个字节里面保存的 132 | OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer); 133 | if (statusCodeRet == noErr) { 134 | size_t bufferOffset = 0; 135 | static const int AVCCHeaderLength = 4; // 返回的nalu数据前四个字节不是0001的startcode,而是大端模式的帧长度length 136 | 137 | // 循环获取nalu数据 138 | while (bufferOffset < totalLength - AVCCHeaderLength) { 139 | uint32_t NALUnitLength = 0; 140 | // 读取NALU长度的数据 141 | memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength); 142 | 143 | // 从大端转系统端 144 | NALUnitLength = CFSwapInt32BigToHost(NALUnitLength); 145 | 146 | NSData* data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffset + AVCCHeaderLength) length:NALUnitLength]; 147 | if (encoder.delegate) { 148 | [encoder.delegate gotEncodedData:data isKeyFrame:keyframe]; 149 | } 150 | // 移动到下一个NALU单元 151 | bufferOffset += AVCCHeaderLength + NALUnitLength; 152 | } 153 | } 154 | 155 | } 156 | 157 | 158 | //编码sampleBuffer 159 | - (void) encode:(CMSampleBufferRef )sampleBuffer 160 | { 161 | CVImageBufferRef imageBuffer = (CVImageBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer); 162 | // 帧时间,如果不设置会导致时间轴过长。 163 | CMTime presentationTimeStamp = CMTimeMake(frameNO++, 1000); 164 | VTEncodeInfoFlags flags; 165 | OSStatus statusCode = VTCompressionSessionEncodeFrame(encodingSession, 166 | imageBuffer, 167 | presentationTimeStamp, 168 | kCMTimeInvalid, 169 | NULL, NULL, &flags); 170 | if (statusCode != noErr) { 171 | NSLog(@"H264: VTCompressionSessionEncodeFrame failed with %d", (int)statusCode); 172 | 173 | if (encodingSession) { 174 | VTCompressionSessionInvalidate(encodingSession); 175 | CFRelease(encodingSession); 176 | encodingSession = NULL; 177 | } 178 | return; 179 | } 180 | NSLog(@"H264: VTCompressionSessionEncodeFrame Success"); 181 | } 182 | 183 | - (void)stopEncode 184 | { 185 | if (encodingSession) { 186 | VTCompressionSessionCompleteFrames(encodingSession, kCMTimeInvalid); 187 | VTCompressionSessionInvalidate(encodingSession); 188 | CFRelease(encodingSession); 189 | encodingSession = NULL; 190 | frameNO = 0; 191 | } 192 | } 193 | 194 | @end 195 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | NSCameraUsageDescription 6 | Use Camera To Capture Videos 7 | CFBundleDevelopmentRegion 8 | $(DEVELOPMENT_LANGUAGE) 9 | CFBundleExecutable 10 | $(EXECUTABLE_NAME) 11 | CFBundleIdentifier 12 | $(PRODUCT_BUNDLE_IDENTIFIER) 13 | CFBundleInfoDictionaryVersion 14 | 6.0 15 | CFBundleName 16 | $(PRODUCT_NAME) 17 | CFBundlePackageType 18 | APPL 19 | CFBundleShortVersionString 20 | 1.0 21 | CFBundleVersion 22 | 1 23 | LSRequiresIPhoneOS 24 | 25 | UILaunchStoryboardName 26 | LaunchScreen 27 | UIMainStoryboardFile 28 | Main 29 | UIRequiredDeviceCapabilities 30 | 31 | armv7 32 | 33 | UISupportedInterfaceOrientations 34 | 35 | UIInterfaceOrientationPortrait 36 | UIInterfaceOrientationLandscapeLeft 37 | UIInterfaceOrientationLandscapeRight 38 | 39 | UISupportedInterfaceOrientations~ipad 40 | 41 | UIInterfaceOrientationPortrait 42 | UIInterfaceOrientationPortraitUpsideDown 43 | UIInterfaceOrientationLandscapeLeft 44 | UIInterfaceOrientationLandscapeRight 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/ViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.h 3 | // VideoToolBoxDecodeH264 4 | // 5 | // Created by AnDong on 2018/7/2. 6 | // Copyright © 2018年 AnDong. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface ViewController : UIViewController 12 | 13 | 14 | @end 15 | 16 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/ViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.m 3 | // VideoToolBoxDecodeH264 4 | // 5 | // Created by AnDong on 2018/7/2. 6 | // Copyright © 2018年 AnDong. All rights reserved. 7 | // 8 | 9 | #import "ViewController.h" 10 | #import 11 | #import 12 | #import "AAPLEAGLLayer.h" 13 | #import "H264EncodeTool.h" 14 | #import "H264DecodeTool.h" 15 | 16 | @interface ViewController (){ 17 | //录制队列 18 | dispatch_queue_t captureQueue; 19 | } 20 | 21 | 22 | @property (nonatomic,strong)AVCaptureSession *captureSession; //输入和输出数据传输session 23 | @property (nonatomic,strong)AVCaptureDeviceInput *captureDeviceInput; //从AVdevice获得输入数据 24 | @property (nonatomic,strong)AVCaptureVideoDataOutput *captureDeviceOutput; //获取输出数据 25 | @property (nonatomic,strong)AVCaptureConnection *connection; //connection 26 | @property (nonatomic,strong)AVCaptureVideoPreviewLayer *previewLayer; //摄像头预览layer 27 | @property (nonatomic,strong)AAPLEAGLLayer *playLayer; //解码后播放layer 28 | 29 | @property (nonatomic,strong)UIButton *startBtn; 30 | @property (nonatomic,strong)UILabel *titleLabel; 31 | @property (nonatomic,strong)UILabel *firstLabel; 32 | @property (nonatomic,strong)UILabel *secondLabel; 33 | 34 | 35 | //编解码器 36 | @property (nonatomic,strong)H264DecodeTool *h264Decoder; 37 | @property (nonatomic,strong)H264EncodeTool *h264Encoder; 38 | 39 | @end 40 | 41 | @implementation ViewController 42 | 43 | - (void)viewDidLoad { 44 | [super viewDidLoad]; 45 | 46 | //初始化UI和参数 47 | [self initUIAndParameter]; 48 | 49 | [self configH264Decoder]; 50 | [self configH264Encoder]; 51 | } 52 | 53 | 54 | - (void)initUIAndParameter{ 55 | 56 | [self.view addSubview:self.startBtn]; 57 | [self.view addSubview:self.titleLabel]; 58 | [self.view addSubview:self.firstLabel]; 59 | [self.view addSubview:self.secondLabel]; 60 | 61 | //初始化队列 62 | captureQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0); 63 | } 64 | 65 | - (void)configH264Encoder{ 66 | if (!self.h264Encoder) { 67 | self.h264Encoder = [[H264EncodeTool alloc]init]; 68 | //640 * 480 69 | [self.h264Encoder initEncode:640 height:480]; 70 | self.h264Encoder.delegate = self; 71 | } 72 | } 73 | 74 | - (void)configH264Decoder{ 75 | if (!self.h264Decoder) { 76 | self.h264Decoder = [[H264DecodeTool alloc] init]; 77 | self.h264Decoder.delegate = self; 78 | } 79 | } 80 | 81 | #pragma mark - EventHandle 82 | - (void)startBtnAction{ 83 | BOOL isRunning = self.captureSession && self.captureSession.running; 84 | 85 | if (isRunning) { 86 | //停止采集编码 87 | [self.startBtn setTitle:@"Start" forState:UIControlStateNormal]; 88 | [self endCaputureSession]; 89 | } 90 | else{ 91 | //开始采集编码 92 | [self.startBtn setTitle:@"End" forState:UIControlStateNormal]; 93 | [self startCaputureSession]; 94 | } 95 | } 96 | 97 | - (void)startCaputureSession{ 98 | //填充编码器和解码器 99 | // [self configH264Decoder]; 100 | [self configH264Encoder]; 101 | 102 | //填充预览 103 | [self initCapture]; 104 | [self initPreviewLayer]; 105 | [self initPlayLayer]; 106 | 107 | //开始采集 108 | [self.captureSession startRunning]; 109 | } 110 | 111 | - (void)endCaputureSession{ 112 | //停止采集 113 | [self.captureSession stopRunning]; 114 | [self.previewLayer removeFromSuperlayer]; 115 | [self.playLayer removeFromSuperlayer]; 116 | 117 | //停止编码 118 | [self.h264Encoder stopEncode]; 119 | 120 | //停止解码 121 | [self.h264Decoder endDecode]; 122 | 123 | self.h264Decoder = nil; 124 | self.h264Encoder = nil; 125 | } 126 | 127 | 128 | #pragma mark - 摄像头采集端 129 | 130 | //初始化摄像头采集端 131 | - (void)initCapture{ 132 | 133 | self.captureSession = [[AVCaptureSession alloc]init]; 134 | 135 | //设置录制720p 136 | self.captureSession.sessionPreset = AVCaptureSessionPreset640x480; 137 | 138 | AVCaptureDevice *inputCamera = [self cameraWithPostion:AVCaptureDevicePositionBack]; 139 | 140 | self.captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:inputCamera error:nil]; 141 | 142 | if ([self.captureSession canAddInput:self.captureDeviceInput]) { 143 | [self.captureSession addInput:self.captureDeviceInput]; 144 | } 145 | 146 | self.captureDeviceOutput = [[AVCaptureVideoDataOutput alloc] init]; 147 | [self.captureDeviceOutput setAlwaysDiscardsLateVideoFrames:NO]; 148 | 149 | //设置YUV420p输出 150 | [self.captureDeviceOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; 151 | 152 | [self.captureDeviceOutput setSampleBufferDelegate:self queue:captureQueue]; 153 | 154 | if ([self.captureSession canAddOutput:self.captureDeviceOutput]) { 155 | [self.captureSession addOutput:self.captureDeviceOutput]; 156 | } 157 | 158 | //建立连接 159 | self.connection = [self.captureDeviceOutput connectionWithMediaType:AVMediaTypeVideo]; 160 | [self.connection setVideoOrientation:AVCaptureVideoOrientationPortrait]; 161 | } 162 | 163 | //config 摄像头预览layer 164 | - (void)initPreviewLayer{ 165 | self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession]; 166 | CGFloat height = (self.view.frame.size.height - 100)/2.0 - 20; 167 | CGFloat width = self.view.frame.size.width - 100; 168 | [self.previewLayer setVideoGravity:AVLayerVideoGravityResizeAspect]; 169 | [self.previewLayer setFrame:CGRectMake(100, 100,width,height)]; 170 | [self.view.layer addSublayer:self.previewLayer]; 171 | } 172 | 173 | - (void)initPlayLayer{ 174 | CGFloat height = (self.view.frame.size.height - 100)/2.0 - 20; 175 | CGFloat width = self.view.frame.size.width - 100; 176 | self.playLayer = [[AAPLEAGLLayer alloc] initWithFrame:CGRectMake(100, (self.view.frame.size.height - 100)/2.0 + 100,width,height)]; 177 | self.playLayer.backgroundColor = [UIColor whiteColor].CGColor; 178 | [self.view.layer addSublayer:self.playLayer]; 179 | } 180 | 181 | 182 | //兼容iOS10以上获取AVCaptureDevice 183 | - (AVCaptureDevice *)cameraWithPostion:(AVCaptureDevicePosition)position{ 184 | NSString *version = [UIDevice currentDevice].systemVersion; 185 | if (version.doubleValue >= 10.0) { 186 | // iOS10以上 187 | AVCaptureDeviceDiscoverySession *devicesIOS10 = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:position]; 188 | NSArray *devicesIOS = devicesIOS10.devices; 189 | for (AVCaptureDevice *device in devicesIOS) { 190 | if ([device position] == position) { 191 | return device; 192 | } 193 | } 194 | return nil; 195 | } else { 196 | // iOS10以下 197 | NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; 198 | for (AVCaptureDevice *device in devices) 199 | { 200 | if ([device position] == position) 201 | { 202 | return device; 203 | } 204 | } 205 | return nil; 206 | } 207 | } 208 | 209 | - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{ 210 | if (connection == self.connection) { 211 | [self.h264Encoder encode:sampleBuffer]; 212 | } 213 | } 214 | 215 | #pragma mark - 编码回调 216 | - (void)gotSpsPps:(NSData *)sps pps:(NSData *)pps{ 217 | const char bytes[] = "\x00\x00\x00\x01"; 218 | size_t length = (sizeof bytes) - 1; 219 | NSData *ByteHeader = [NSData dataWithBytes:bytes length:length]; 220 | //sps 221 | NSMutableData *h264Data = [[NSMutableData alloc] init]; 222 | [h264Data appendData:ByteHeader]; 223 | [h264Data appendData:sps]; 224 | [self.h264Decoder decodeNalu:(uint8_t *)[h264Data bytes] size:(uint32_t)h264Data.length]; 225 | 226 | 227 | //pps 228 | [h264Data resetBytesInRange:NSMakeRange(0, [h264Data length])]; 229 | [h264Data setLength:0]; 230 | [h264Data appendData:ByteHeader]; 231 | [h264Data appendData:pps]; 232 | [self.h264Decoder decodeNalu:(uint8_t *)[h264Data bytes] size:(uint32_t)h264Data.length]; 233 | } 234 | 235 | - (void)gotEncodedData:(NSData *)data isKeyFrame:(BOOL)isKeyFrame{ 236 | const char bytes[] = "\x00\x00\x00\x01"; 237 | size_t length = (sizeof bytes) - 1; 238 | NSData *ByteHeader = [NSData dataWithBytes:bytes length:length]; 239 | NSMutableData *h264Data = [[NSMutableData alloc] init]; 240 | [h264Data appendData:ByteHeader]; 241 | [h264Data appendData:data]; 242 | [self.h264Decoder decodeNalu:(uint8_t *)[h264Data bytes] size:(uint32_t)h264Data.length]; 243 | } 244 | 245 | 246 | #pragma mark - 解码回调 247 | - (void)gotDecodedFrame:(CVImageBufferRef)imageBuffer{ 248 | if(imageBuffer) 249 | { 250 | //解码回来的数据绘制播放 251 | self.playLayer.pixelBuffer = imageBuffer; 252 | CVPixelBufferRelease(imageBuffer); 253 | } 254 | } 255 | 256 | #pragma mark - Getters 257 | 258 | - (UIButton *)startBtn{ 259 | if (!_startBtn) { 260 | _startBtn = [[UIButton alloc]initWithFrame:CGRectMake(220, 30, 100, 50)]; 261 | [_startBtn setBackgroundColor:[UIColor cyanColor]]; 262 | [_startBtn setTitleColor:[UIColor blackColor] forState:UIControlStateNormal]; 263 | [_startBtn setTitle:@"start" forState:UIControlStateNormal]; 264 | [_startBtn addTarget:self action:@selector(startBtnAction) forControlEvents:UIControlEventTouchUpInside]; 265 | } 266 | return _startBtn; 267 | } 268 | 269 | - (UILabel *)titleLabel{ 270 | if (!_titleLabel) { 271 | _titleLabel = [[UILabel alloc]initWithFrame:CGRectMake(50, 40, 150, 30)]; 272 | _titleLabel.textColor = [UIColor blackColor]; 273 | _titleLabel.text = @"测试H264解码"; 274 | } 275 | return _titleLabel; 276 | } 277 | 278 | 279 | - (UILabel *)firstLabel{ 280 | if (!_firstLabel) { 281 | _firstLabel = [[UILabel alloc]initWithFrame:CGRectMake(0, self.view.frame.size.height/4.0f, 100, 30)]; 282 | _firstLabel.textColor = [UIColor blackColor]; 283 | _firstLabel.font = [UIFont boldSystemFontOfSize:14.0f]; 284 | _firstLabel.text = @"摄像头采集数据"; 285 | } 286 | return _firstLabel; 287 | } 288 | 289 | 290 | - (UILabel *)secondLabel{ 291 | if (!_secondLabel) { 292 | _secondLabel = [[UILabel alloc]initWithFrame:CGRectMake(0, self.view.frame.size.height*3/4.0f, 100, 30)]; 293 | _secondLabel.textColor = [UIColor blackColor]; 294 | _secondLabel.font = [UIFont boldSystemFontOfSize:14.0f]; 295 | _secondLabel.text = @"解码后播放数据"; 296 | } 297 | return _secondLabel; 298 | } 299 | 300 | 301 | @end 302 | -------------------------------------------------------------------------------- /VideoToolBoxDecodeH264/VideoToolBoxDecodeH264/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // VideoToolBoxDecodeH264 4 | // 5 | // Created by AnDong on 2018/7/2. 6 | // Copyright © 2018年 AnDong. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "AppDelegate.h" 11 | 12 | int main(int argc, char * argv[]) { 13 | @autoreleasepool { 14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /VideoToolBoxEncodeH264/VideoToolBoxEncodeH264.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 48; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 7692F9FF20E0B76900B6FAD1 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7692F9FE20E0B76900B6FAD1 /* AppDelegate.m */; }; 11 | 7692FA0220E0B76900B6FAD1 /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 7692FA0120E0B76900B6FAD1 /* ViewController.m */; }; 12 | 7692FA0520E0B76900B6FAD1 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7692FA0320E0B76900B6FAD1 /* Main.storyboard */; }; 13 | 7692FA0720E0B76900B6FAD1 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 7692FA0620E0B76900B6FAD1 /* Assets.xcassets */; }; 14 | 7692FA0A20E0B76900B6FAD1 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7692FA0820E0B76900B6FAD1 /* LaunchScreen.storyboard */; }; 15 | 7692FA0D20E0B76900B6FAD1 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 7692FA0C20E0B76900B6FAD1 /* main.m */; }; 16 | 7692FA1520E0B78800B6FAD1 /* VideoToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 7692FA1420E0B78800B6FAD1 /* VideoToolbox.framework */; }; 17 | /* End PBXBuildFile section */ 18 | 19 | /* Begin PBXFileReference section */ 20 | 7692F9FA20E0B76900B6FAD1 /* VideoToolBoxEncodeH264.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = VideoToolBoxEncodeH264.app; sourceTree = BUILT_PRODUCTS_DIR; }; 21 | 7692F9FD20E0B76900B6FAD1 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 22 | 7692F9FE20E0B76900B6FAD1 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 23 | 7692FA0020E0B76900B6FAD1 /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; }; 24 | 7692FA0120E0B76900B6FAD1 /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = ""; }; 25 | 7692FA0420E0B76900B6FAD1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 26 | 7692FA0620E0B76900B6FAD1 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 27 | 7692FA0920E0B76900B6FAD1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 28 | 7692FA0B20E0B76900B6FAD1 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 29 | 7692FA0C20E0B76900B6FAD1 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 30 | 7692FA1420E0B78800B6FAD1 /* VideoToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = VideoToolbox.framework; path = System/Library/Frameworks/VideoToolbox.framework; sourceTree = SDKROOT; }; 31 | /* End PBXFileReference section */ 32 | 33 | /* Begin PBXFrameworksBuildPhase section */ 34 | 7692F9F720E0B76900B6FAD1 /* Frameworks */ = { 35 | isa = PBXFrameworksBuildPhase; 36 | buildActionMask = 2147483647; 37 | files = ( 38 | 7692FA1520E0B78800B6FAD1 /* VideoToolbox.framework in Frameworks */, 39 | ); 40 | runOnlyForDeploymentPostprocessing = 0; 41 | }; 42 | /* End PBXFrameworksBuildPhase section */ 43 | 44 | /* Begin PBXGroup section */ 45 | 7692F9F120E0B76900B6FAD1 = { 46 | isa = PBXGroup; 47 | children = ( 48 | 7692F9FC20E0B76900B6FAD1 /* VideoToolBoxEncodeH264 */, 49 | 7692F9FB20E0B76900B6FAD1 /* Products */, 50 | 7692FA1320E0B78800B6FAD1 /* Frameworks */, 51 | ); 52 | sourceTree = ""; 53 | }; 54 | 7692F9FB20E0B76900B6FAD1 /* Products */ = { 55 | isa = PBXGroup; 56 | children = ( 57 | 7692F9FA20E0B76900B6FAD1 /* VideoToolBoxEncodeH264.app */, 58 | ); 59 | name = Products; 60 | sourceTree = ""; 61 | }; 62 | 7692F9FC20E0B76900B6FAD1 /* VideoToolBoxEncodeH264 */ = { 63 | isa = PBXGroup; 64 | children = ( 65 | 7692F9FD20E0B76900B6FAD1 /* AppDelegate.h */, 66 | 7692F9FE20E0B76900B6FAD1 /* AppDelegate.m */, 67 | 7692FA0020E0B76900B6FAD1 /* ViewController.h */, 68 | 7692FA0120E0B76900B6FAD1 /* ViewController.m */, 69 | 7692FA0320E0B76900B6FAD1 /* Main.storyboard */, 70 | 7692FA0620E0B76900B6FAD1 /* Assets.xcassets */, 71 | 7692FA0820E0B76900B6FAD1 /* LaunchScreen.storyboard */, 72 | 7692FA0B20E0B76900B6FAD1 /* Info.plist */, 73 | 7692FA0C20E0B76900B6FAD1 /* main.m */, 74 | ); 75 | path = VideoToolBoxEncodeH264; 76 | sourceTree = ""; 77 | }; 78 | 7692FA1320E0B78800B6FAD1 /* Frameworks */ = { 79 | isa = PBXGroup; 80 | children = ( 81 | 7692FA1420E0B78800B6FAD1 /* VideoToolbox.framework */, 82 | ); 83 | name = Frameworks; 84 | sourceTree = ""; 85 | }; 86 | /* End PBXGroup section */ 87 | 88 | /* Begin PBXNativeTarget section */ 89 | 7692F9F920E0B76900B6FAD1 /* VideoToolBoxEncodeH264 */ = { 90 | isa = PBXNativeTarget; 91 | buildConfigurationList = 7692FA1020E0B76900B6FAD1 /* Build configuration list for PBXNativeTarget "VideoToolBoxEncodeH264" */; 92 | buildPhases = ( 93 | 7692F9F620E0B76900B6FAD1 /* Sources */, 94 | 7692F9F720E0B76900B6FAD1 /* Frameworks */, 95 | 7692F9F820E0B76900B6FAD1 /* Resources */, 96 | ); 97 | buildRules = ( 98 | ); 99 | dependencies = ( 100 | ); 101 | name = VideoToolBoxEncodeH264; 102 | productName = VideoToolBoxEncodeH264; 103 | productReference = 7692F9FA20E0B76900B6FAD1 /* VideoToolBoxEncodeH264.app */; 104 | productType = "com.apple.product-type.application"; 105 | }; 106 | /* End PBXNativeTarget section */ 107 | 108 | /* Begin PBXProject section */ 109 | 7692F9F220E0B76900B6FAD1 /* Project object */ = { 110 | isa = PBXProject; 111 | attributes = { 112 | LastUpgradeCheck = 0920; 113 | ORGANIZATIONNAME = AnDong; 114 | TargetAttributes = { 115 | 7692F9F920E0B76900B6FAD1 = { 116 | CreatedOnToolsVersion = 9.2; 117 | ProvisioningStyle = Automatic; 118 | }; 119 | }; 120 | }; 121 | buildConfigurationList = 7692F9F520E0B76900B6FAD1 /* Build configuration list for PBXProject "VideoToolBoxEncodeH264" */; 122 | compatibilityVersion = "Xcode 8.0"; 123 | developmentRegion = en; 124 | hasScannedForEncodings = 0; 125 | knownRegions = ( 126 | en, 127 | Base, 128 | ); 129 | mainGroup = 7692F9F120E0B76900B6FAD1; 130 | productRefGroup = 7692F9FB20E0B76900B6FAD1 /* Products */; 131 | projectDirPath = ""; 132 | projectRoot = ""; 133 | targets = ( 134 | 7692F9F920E0B76900B6FAD1 /* VideoToolBoxEncodeH264 */, 135 | ); 136 | }; 137 | /* End PBXProject section */ 138 | 139 | /* Begin PBXResourcesBuildPhase section */ 140 | 7692F9F820E0B76900B6FAD1 /* Resources */ = { 141 | isa = PBXResourcesBuildPhase; 142 | buildActionMask = 2147483647; 143 | files = ( 144 | 7692FA0A20E0B76900B6FAD1 /* LaunchScreen.storyboard in Resources */, 145 | 7692FA0720E0B76900B6FAD1 /* Assets.xcassets in Resources */, 146 | 7692FA0520E0B76900B6FAD1 /* Main.storyboard in Resources */, 147 | ); 148 | runOnlyForDeploymentPostprocessing = 0; 149 | }; 150 | /* End PBXResourcesBuildPhase section */ 151 | 152 | /* Begin PBXSourcesBuildPhase section */ 153 | 7692F9F620E0B76900B6FAD1 /* Sources */ = { 154 | isa = PBXSourcesBuildPhase; 155 | buildActionMask = 2147483647; 156 | files = ( 157 | 7692FA0220E0B76900B6FAD1 /* ViewController.m in Sources */, 158 | 7692FA0D20E0B76900B6FAD1 /* main.m in Sources */, 159 | 7692F9FF20E0B76900B6FAD1 /* AppDelegate.m in Sources */, 160 | ); 161 | runOnlyForDeploymentPostprocessing = 0; 162 | }; 163 | /* End PBXSourcesBuildPhase section */ 164 | 165 | /* Begin PBXVariantGroup section */ 166 | 7692FA0320E0B76900B6FAD1 /* Main.storyboard */ = { 167 | isa = PBXVariantGroup; 168 | children = ( 169 | 7692FA0420E0B76900B6FAD1 /* Base */, 170 | ); 171 | name = Main.storyboard; 172 | sourceTree = ""; 173 | }; 174 | 7692FA0820E0B76900B6FAD1 /* LaunchScreen.storyboard */ = { 175 | isa = PBXVariantGroup; 176 | children = ( 177 | 7692FA0920E0B76900B6FAD1 /* Base */, 178 | ); 179 | name = LaunchScreen.storyboard; 180 | sourceTree = ""; 181 | }; 182 | /* End PBXVariantGroup section */ 183 | 184 | /* Begin XCBuildConfiguration section */ 185 | 7692FA0E20E0B76900B6FAD1 /* Debug */ = { 186 | isa = XCBuildConfiguration; 187 | buildSettings = { 188 | ALWAYS_SEARCH_USER_PATHS = NO; 189 | CLANG_ANALYZER_NONNULL = YES; 190 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 191 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 192 | CLANG_CXX_LIBRARY = "libc++"; 193 | CLANG_ENABLE_MODULES = YES; 194 | CLANG_ENABLE_OBJC_ARC = YES; 195 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 196 | CLANG_WARN_BOOL_CONVERSION = YES; 197 | CLANG_WARN_COMMA = YES; 198 | CLANG_WARN_CONSTANT_CONVERSION = YES; 199 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 200 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 201 | CLANG_WARN_EMPTY_BODY = YES; 202 | CLANG_WARN_ENUM_CONVERSION = YES; 203 | CLANG_WARN_INFINITE_RECURSION = YES; 204 | CLANG_WARN_INT_CONVERSION = YES; 205 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 206 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 207 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 208 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 209 | CLANG_WARN_STRICT_PROTOTYPES = YES; 210 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 211 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 212 | CLANG_WARN_UNREACHABLE_CODE = YES; 213 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 214 | CODE_SIGN_IDENTITY = "iPhone Developer"; 215 | COPY_PHASE_STRIP = NO; 216 | DEBUG_INFORMATION_FORMAT = dwarf; 217 | ENABLE_STRICT_OBJC_MSGSEND = YES; 218 | ENABLE_TESTABILITY = YES; 219 | GCC_C_LANGUAGE_STANDARD = gnu11; 220 | GCC_DYNAMIC_NO_PIC = NO; 221 | GCC_NO_COMMON_BLOCKS = YES; 222 | GCC_OPTIMIZATION_LEVEL = 0; 223 | GCC_PREPROCESSOR_DEFINITIONS = ( 224 | "DEBUG=1", 225 | "$(inherited)", 226 | ); 227 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 228 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 229 | GCC_WARN_UNDECLARED_SELECTOR = YES; 230 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 231 | GCC_WARN_UNUSED_FUNCTION = YES; 232 | GCC_WARN_UNUSED_VARIABLE = YES; 233 | IPHONEOS_DEPLOYMENT_TARGET = 11.2; 234 | MTL_ENABLE_DEBUG_INFO = YES; 235 | ONLY_ACTIVE_ARCH = YES; 236 | SDKROOT = iphoneos; 237 | }; 238 | name = Debug; 239 | }; 240 | 7692FA0F20E0B76900B6FAD1 /* Release */ = { 241 | isa = XCBuildConfiguration; 242 | buildSettings = { 243 | ALWAYS_SEARCH_USER_PATHS = NO; 244 | CLANG_ANALYZER_NONNULL = YES; 245 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 246 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 247 | CLANG_CXX_LIBRARY = "libc++"; 248 | CLANG_ENABLE_MODULES = YES; 249 | CLANG_ENABLE_OBJC_ARC = YES; 250 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 251 | CLANG_WARN_BOOL_CONVERSION = YES; 252 | CLANG_WARN_COMMA = YES; 253 | CLANG_WARN_CONSTANT_CONVERSION = YES; 254 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 255 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 256 | CLANG_WARN_EMPTY_BODY = YES; 257 | CLANG_WARN_ENUM_CONVERSION = YES; 258 | CLANG_WARN_INFINITE_RECURSION = YES; 259 | CLANG_WARN_INT_CONVERSION = YES; 260 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 261 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 262 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 263 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 264 | CLANG_WARN_STRICT_PROTOTYPES = YES; 265 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 266 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 267 | CLANG_WARN_UNREACHABLE_CODE = YES; 268 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 269 | CODE_SIGN_IDENTITY = "iPhone Developer"; 270 | COPY_PHASE_STRIP = NO; 271 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 272 | ENABLE_NS_ASSERTIONS = NO; 273 | ENABLE_STRICT_OBJC_MSGSEND = YES; 274 | GCC_C_LANGUAGE_STANDARD = gnu11; 275 | GCC_NO_COMMON_BLOCKS = YES; 276 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 277 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 278 | GCC_WARN_UNDECLARED_SELECTOR = YES; 279 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 280 | GCC_WARN_UNUSED_FUNCTION = YES; 281 | GCC_WARN_UNUSED_VARIABLE = YES; 282 | IPHONEOS_DEPLOYMENT_TARGET = 11.2; 283 | MTL_ENABLE_DEBUG_INFO = NO; 284 | SDKROOT = iphoneos; 285 | VALIDATE_PRODUCT = YES; 286 | }; 287 | name = Release; 288 | }; 289 | 7692FA1120E0B76900B6FAD1 /* Debug */ = { 290 | isa = XCBuildConfiguration; 291 | buildSettings = { 292 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 293 | CODE_SIGN_STYLE = Automatic; 294 | DEVELOPMENT_TEAM = F2347ZTV6T; 295 | INFOPLIST_FILE = VideoToolBoxEncodeH264/Info.plist; 296 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 297 | PRODUCT_BUNDLE_IDENTIFIER = ACE.VideoToolBoxEncodeH264; 298 | PRODUCT_NAME = "$(TARGET_NAME)"; 299 | TARGETED_DEVICE_FAMILY = "1,2"; 300 | }; 301 | name = Debug; 302 | }; 303 | 7692FA1220E0B76900B6FAD1 /* Release */ = { 304 | isa = XCBuildConfiguration; 305 | buildSettings = { 306 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 307 | CODE_SIGN_STYLE = Automatic; 308 | DEVELOPMENT_TEAM = F2347ZTV6T; 309 | INFOPLIST_FILE = VideoToolBoxEncodeH264/Info.plist; 310 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 311 | PRODUCT_BUNDLE_IDENTIFIER = ACE.VideoToolBoxEncodeH264; 312 | PRODUCT_NAME = "$(TARGET_NAME)"; 313 | TARGETED_DEVICE_FAMILY = "1,2"; 314 | }; 315 | name = Release; 316 | }; 317 | /* End XCBuildConfiguration section */ 318 | 319 | /* Begin XCConfigurationList section */ 320 | 7692F9F520E0B76900B6FAD1 /* Build configuration list for PBXProject "VideoToolBoxEncodeH264" */ = { 321 | isa = XCConfigurationList; 322 | buildConfigurations = ( 323 | 7692FA0E20E0B76900B6FAD1 /* Debug */, 324 | 7692FA0F20E0B76900B6FAD1 /* Release */, 325 | ); 326 | defaultConfigurationIsVisible = 0; 327 | defaultConfigurationName = Release; 328 | }; 329 | 7692FA1020E0B76900B6FAD1 /* Build configuration list for PBXNativeTarget "VideoToolBoxEncodeH264" */ = { 330 | isa = XCConfigurationList; 331 | buildConfigurations = ( 332 | 7692FA1120E0B76900B6FAD1 /* Debug */, 333 | 7692FA1220E0B76900B6FAD1 /* Release */, 334 | ); 335 | defaultConfigurationIsVisible = 0; 336 | defaultConfigurationName = Release; 337 | }; 338 | /* End XCConfigurationList section */ 339 | }; 340 | rootObject = 7692F9F220E0B76900B6FAD1 /* Project object */; 341 | } 342 | -------------------------------------------------------------------------------- /VideoToolBoxEncodeH264/VideoToolBoxEncodeH264.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /VideoToolBoxEncodeH264/VideoToolBoxEncodeH264.xcodeproj/project.xcworkspace/xcuserdata/andong.xcuserdatad/UserInterfaceState.xcuserstate: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AceDong0803/iOS-VideoToolBox-demo/17ee6df9ac304ebfad229f73db29541444623bfa/VideoToolBoxEncodeH264/VideoToolBoxEncodeH264.xcodeproj/project.xcworkspace/xcuserdata/andong.xcuserdatad/UserInterfaceState.xcuserstate -------------------------------------------------------------------------------- /VideoToolBoxEncodeH264/VideoToolBoxEncodeH264.xcodeproj/xcuserdata/andong.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | -------------------------------------------------------------------------------- /VideoToolBoxEncodeH264/VideoToolBoxEncodeH264.xcodeproj/xcuserdata/andong.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | VideoToolBoxEncodeH264.xcscheme 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /VideoToolBoxEncodeH264/VideoToolBoxEncodeH264/AppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.h 3 | // VideoToolBoxEncodeH264 4 | // 5 | // Created by AnDong on 2018/6/25. 6 | // Copyright © 2018年 AnDong. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface AppDelegate : UIResponder 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | 16 | @end 17 | 18 | -------------------------------------------------------------------------------- /VideoToolBoxEncodeH264/VideoToolBoxEncodeH264/AppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.m 3 | // VideoToolBoxEncodeH264 4 | // 5 | // Created by AnDong on 2018/6/25. 6 | // Copyright © 2018年 AnDong. All rights reserved. 7 | // 8 | 9 | #import "AppDelegate.h" 10 | 11 | @interface AppDelegate () 12 | 13 | @end 14 | 15 | @implementation AppDelegate 16 | 17 | 18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 19 | // Override point for customization after application launch. 20 | return YES; 21 | } 22 | 23 | 24 | - (void)applicationWillResignActive:(UIApplication *)application { 25 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 26 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 27 | } 28 | 29 | 30 | - (void)applicationDidEnterBackground:(UIApplication *)application { 31 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 32 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 33 | } 34 | 35 | 36 | - (void)applicationWillEnterForeground:(UIApplication *)application { 37 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 38 | } 39 | 40 | 41 | - (void)applicationDidBecomeActive:(UIApplication *)application { 42 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 43 | } 44 | 45 | 46 | - (void)applicationWillTerminate:(UIApplication *)application { 47 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 48 | } 49 | 50 | 51 | @end 52 | -------------------------------------------------------------------------------- /VideoToolBoxEncodeH264/VideoToolBoxEncodeH264/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | } 88 | ], 89 | "info" : { 90 | "version" : 1, 91 | "author" : "xcode" 92 | } 93 | } -------------------------------------------------------------------------------- /VideoToolBoxEncodeH264/VideoToolBoxEncodeH264/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /VideoToolBoxEncodeH264/VideoToolBoxEncodeH264/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /VideoToolBoxEncodeH264/VideoToolBoxEncodeH264/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | NSCameraUsageDescription 8 | Use Camera To Capture Videos 9 | CFBundleExecutable 10 | $(EXECUTABLE_NAME) 11 | CFBundleIdentifier 12 | $(PRODUCT_BUNDLE_IDENTIFIER) 13 | CFBundleInfoDictionaryVersion 14 | 6.0 15 | CFBundleName 16 | $(PRODUCT_NAME) 17 | CFBundlePackageType 18 | APPL 19 | CFBundleShortVersionString 20 | 1.0 21 | CFBundleVersion 22 | 1 23 | LSRequiresIPhoneOS 24 | 25 | UILaunchStoryboardName 26 | LaunchScreen 27 | UIMainStoryboardFile 28 | Main 29 | UIRequiredDeviceCapabilities 30 | 31 | armv7 32 | 33 | UISupportedInterfaceOrientations 34 | 35 | UIInterfaceOrientationPortrait 36 | UIInterfaceOrientationLandscapeLeft 37 | UIInterfaceOrientationLandscapeRight 38 | 39 | UISupportedInterfaceOrientations~ipad 40 | 41 | UIInterfaceOrientationPortrait 42 | UIInterfaceOrientationPortraitUpsideDown 43 | UIInterfaceOrientationLandscapeLeft 44 | UIInterfaceOrientationLandscapeRight 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /VideoToolBoxEncodeH264/VideoToolBoxEncodeH264/ViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.h 3 | // VideoToolBoxEncodeH264 4 | // 5 | // Created by AnDong on 2018/6/25. 6 | // Copyright © 2018年 AnDong. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface ViewController : UIViewController 12 | 13 | 14 | @end 15 | 16 | -------------------------------------------------------------------------------- /VideoToolBoxEncodeH264/VideoToolBoxEncodeH264/ViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.m 3 | // VideoToolBoxEncodeH264 4 | // 5 | // Created by AnDong on 2018/6/25. 6 | // Copyright © 2018年 AnDong. All rights reserved. 7 | // 8 | 9 | #import "ViewController.h" 10 | #import 11 | #import 12 | 13 | static NSString *const H264FilePath = @"test.h264"; 14 | 15 | @interface ViewController () 16 | { 17 | int frameNO;//帧号 18 | //录制队列 19 | dispatch_queue_t captureQueue; 20 | 21 | //编码队列 22 | dispatch_queue_t encodeQueue; 23 | 24 | //编码session 25 | VTCompressionSessionRef encodingSession; 26 | } 27 | 28 | @property (nonatomic,strong)AVCaptureSession *captureSession; //输入和输出数据传输session 29 | @property (nonatomic,strong)AVCaptureDeviceInput *captureDeviceInput; //从AVdevice获得输入数据 30 | @property (nonatomic,strong)AVCaptureVideoDataOutput *captureDeviceOutput; //获取输出数据 31 | @property (nonatomic,strong)AVCaptureVideoPreviewLayer *previewLayer; //预览layer 32 | 33 | @property (nonatomic,strong)NSFileHandle *h264FileHandle; //句柄 34 | 35 | @property (nonatomic,strong)UIButton *startBtn; 36 | @property (nonatomic,strong)UILabel *titleLabel; 37 | 38 | @property ()int abc; 39 | 40 | @end 41 | 42 | @implementation ViewController 43 | 44 | - (void)viewDidLoad { 45 | [super viewDidLoad]; 46 | 47 | //初始化UI和参数 48 | [self initUIAndParameter]; 49 | 50 | } 51 | 52 | 53 | - (void)initUIAndParameter{ 54 | 55 | [self.view addSubview:self.startBtn]; 56 | [self.view addSubview:self.titleLabel]; 57 | 58 | //初始化队列 59 | captureQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0); 60 | encodeQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0); 61 | } 62 | 63 | #pragma mark - EventHanle 64 | 65 | - (void)startBtnAction{ 66 | BOOL isRunning = self.captureSession && self.captureSession.running; 67 | 68 | if (isRunning) { 69 | //停止采集编码 70 | [self.startBtn setTitle:@"Start" forState:UIControlStateNormal]; 71 | [self endCaputureSession]; 72 | } 73 | else{ 74 | //开始采集编码 75 | [self.startBtn setTitle:@"End" forState:UIControlStateNormal]; 76 | [self startCaputureSession]; 77 | } 78 | } 79 | 80 | 81 | - (void)startCaputureSession{ 82 | 83 | [self initCapture]; 84 | [self initPreviewLayer]; 85 | [self initVideoToolBox]; 86 | [self configFileHandle]; 87 | 88 | //开始采集 89 | [self.captureSession startRunning]; 90 | } 91 | 92 | - (void)endCaputureSession{ 93 | //停止采集 94 | [self.captureSession stopRunning]; 95 | [self.previewLayer removeFromSuperlayer]; 96 | [self EndVideoToolBox]; 97 | [self closeFileHandle]; 98 | } 99 | 100 | #pragma mark - 摄像头采集端 101 | 102 | //初始化摄像头采集端 103 | - (void)initCapture{ 104 | 105 | self.captureSession = [[AVCaptureSession alloc]init]; 106 | 107 | //设置录制720p 108 | self.captureSession.sessionPreset = AVCaptureSessionPreset640x480; 109 | 110 | AVCaptureDevice *inputCamera = [self cameraWithPostion:AVCaptureDevicePositionBack]; 111 | 112 | self.captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:inputCamera error:nil]; 113 | 114 | if ([self.captureSession canAddInput:self.captureDeviceInput]) { 115 | [self.captureSession addInput:self.captureDeviceInput]; 116 | } 117 | 118 | self.captureDeviceOutput = [[AVCaptureVideoDataOutput alloc] init]; 119 | [self.captureDeviceOutput setAlwaysDiscardsLateVideoFrames:NO]; 120 | 121 | //设置YUV420p输出 122 | [self.captureDeviceOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; 123 | 124 | [self.captureDeviceOutput setSampleBufferDelegate:self queue:captureQueue]; 125 | 126 | if ([self.captureSession canAddOutput:self.captureDeviceOutput]) { 127 | [self.captureSession addOutput:self.captureDeviceOutput]; 128 | } 129 | 130 | //建立连接 131 | AVCaptureConnection *connection = [self.captureDeviceOutput connectionWithMediaType:AVMediaTypeVideo]; 132 | [connection setVideoOrientation:AVCaptureVideoOrientationPortrait]; 133 | } 134 | 135 | //config 摄像头预览layer 136 | - (void)initPreviewLayer{ 137 | self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession]; 138 | [self.previewLayer setVideoGravity:AVLayerVideoGravityResizeAspect]; 139 | [self.previewLayer setFrame:self.view.bounds]; 140 | [self.view.layer addSublayer:self.previewLayer]; 141 | } 142 | 143 | 144 | //兼容iOS10以上获取AVCaptureDevice 145 | - (AVCaptureDevice *)cameraWithPostion:(AVCaptureDevicePosition)position{ 146 | NSString *version = [UIDevice currentDevice].systemVersion; 147 | if (version.doubleValue >= 10.0) { 148 | // iOS10以上 149 | AVCaptureDeviceDiscoverySession *devicesIOS10 = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:position]; 150 | NSArray *devicesIOS = devicesIOS10.devices; 151 | for (AVCaptureDevice *device in devicesIOS) { 152 | if ([device position] == position) { 153 | return device; 154 | } 155 | } 156 | return nil; 157 | } else { 158 | // iOS10以下 159 | NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; 160 | for (AVCaptureDevice *device in devices) 161 | { 162 | if ([device position] == position) 163 | { 164 | return device; 165 | } 166 | } 167 | return nil; 168 | } 169 | } 170 | 171 | - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{ 172 | dispatch_sync(encodeQueue, ^{ 173 | [self encode:sampleBuffer]; 174 | }); 175 | } 176 | 177 | 178 | 179 | #pragma mark - VideoToolBox编码 180 | 181 | - (void)initVideoToolBox { 182 | dispatch_sync(encodeQueue , ^{ 183 | frameNO = 0; 184 | int width = 480, height = 640; 185 | OSStatus status = VTCompressionSessionCreate(NULL, width, height, kCMVideoCodecType_H264, NULL, NULL, NULL, didCompressH264, (__bridge void *)(self), &encodingSession); 186 | NSLog(@"H264: VTCompressionSessionCreate %d", (int)status); 187 | if (status != 0) 188 | { 189 | NSLog(@"H264: Unable to create a H264 session"); 190 | return ; 191 | } 192 | 193 | // 设置实时编码输出(避免延迟) 194 | VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue); 195 | VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Baseline_AutoLevel); 196 | 197 | // 设置关键帧(GOPsize)间隔 198 | int frameInterval = 24; 199 | CFNumberRef frameIntervalRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &frameInterval); 200 | VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, frameIntervalRef); 201 | 202 | //设置期望帧率 203 | int fps = 24; 204 | CFNumberRef fpsRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &fps); 205 | VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_ExpectedFrameRate, fpsRef); 206 | 207 | 208 | //设置码率,均值,单位是byte 209 | int bitRate = width * height * 3 * 4 * 8; 210 | CFNumberRef bitRateRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &bitRate); 211 | VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_AverageBitRate, bitRateRef); 212 | 213 | //设置码率,上限,单位是bps 214 | int bitRateLimit = width * height * 3 * 4; 215 | CFNumberRef bitRateLimitRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &bitRateLimit); 216 | VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_DataRateLimits, bitRateLimitRef); 217 | 218 | //开始编码 219 | VTCompressionSessionPrepareToEncodeFrames(encodingSession); 220 | }); 221 | } 222 | 223 | //编码sampleBuffer 224 | - (void) encode:(CMSampleBufferRef )sampleBuffer 225 | { 226 | CVImageBufferRef imageBuffer = (CVImageBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer); 227 | // 帧时间,如果不设置会导致时间轴过长。 228 | CMTime presentationTimeStamp = CMTimeMake(frameNO++, 1000); 229 | VTEncodeInfoFlags flags; 230 | OSStatus statusCode = VTCompressionSessionEncodeFrame(encodingSession, 231 | imageBuffer, 232 | presentationTimeStamp, 233 | kCMTimeInvalid, 234 | NULL, NULL, &flags); 235 | if (statusCode != noErr) { 236 | NSLog(@"H264: VTCompressionSessionEncodeFrame failed with %d", (int)statusCode); 237 | 238 | VTCompressionSessionInvalidate(encodingSession); 239 | CFRelease(encodingSession); 240 | encodingSession = NULL; 241 | return; 242 | } 243 | NSLog(@"H264: VTCompressionSessionEncodeFrame Success"); 244 | } 245 | 246 | // 编码完成回调 247 | void didCompressH264(void *outputCallbackRefCon, void *sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer) { 248 | NSLog(@"didCompressH264 called with status %d infoFlags %d", (int)status, (int)infoFlags); 249 | if (status != 0) { 250 | return; 251 | } 252 | if (!CMSampleBufferDataIsReady(sampleBuffer)) { 253 | NSLog(@"didCompressH264 data is not ready "); 254 | return; 255 | } 256 | ViewController* encoder = (__bridge ViewController*)outputCallbackRefCon; 257 | bool keyframe = !CFDictionaryContainsKey( (CFArrayGetValueAtIndex(CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true), 0)), kCMSampleAttachmentKey_NotSync); 258 | 259 | // 判断当前帧是否为关键帧 260 | // 获取sps & pps数据 261 | if (keyframe) 262 | { 263 | CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer); 264 | size_t sparameterSetSize, sparameterSetCount; 265 | const uint8_t *sparameterSet; 266 | OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0 ); 267 | if (statusCode == noErr) 268 | { 269 | // 获得了sps,再获取pps 270 | size_t pparameterSetSize, pparameterSetCount; 271 | const uint8_t *pparameterSet; 272 | OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0 ); 273 | if (statusCode == noErr) 274 | { 275 | // 获取SPS和PPS data 276 | NSData *sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize]; 277 | NSData *pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize]; 278 | if (encoder) 279 | { 280 | [encoder gotSpsPps:sps pps:pps]; 281 | } 282 | } 283 | } 284 | } 285 | 286 | CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer); 287 | size_t length, totalLength; 288 | char *dataPointer; 289 | 290 | //这里获取了数据指针,和NALU的帧总长度,前四个字节里面保存的 291 | OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer); 292 | if (statusCodeRet == noErr) { 293 | size_t bufferOffset = 0; 294 | static const int AVCCHeaderLength = 4; // 返回的nalu数据前四个字节不是0001的startcode,而是大端模式的帧长度length 295 | 296 | // 循环获取nalu数据 297 | while (bufferOffset < totalLength - AVCCHeaderLength) { 298 | uint32_t NALUnitLength = 0; 299 | // 读取NALU长度的数据 300 | memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength); 301 | 302 | // 从大端转系统端 303 | NALUnitLength = CFSwapInt32BigToHost(NALUnitLength); 304 | 305 | NSData* data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffset + AVCCHeaderLength) length:NALUnitLength]; 306 | [encoder gotEncodedData:data]; 307 | 308 | // 移动到下一个NALU单元 309 | bufferOffset += AVCCHeaderLength + NALUnitLength; 310 | } 311 | } 312 | 313 | } 314 | 315 | //填充SPS和PPS数据 316 | - (void)gotSpsPps:(NSData*)sps pps:(NSData*)pps 317 | { 318 | NSLog(@"gotSpsPps %d %d", (int)[sps length], (int)[pps length]); 319 | const char bytes[] = "\x00\x00\x00\x01"; 320 | size_t length = (sizeof bytes) - 1; //string literals have implicit trailing '\0' 321 | NSData *ByteHeader = [NSData dataWithBytes:bytes length:length]; 322 | [self.h264FileHandle writeData:ByteHeader]; 323 | [self.h264FileHandle writeData:sps]; 324 | [self.h264FileHandle writeData:ByteHeader]; 325 | [self.h264FileHandle writeData:pps]; 326 | 327 | } 328 | 329 | //填充NALU数据 330 | - (void)gotEncodedData:(NSData*)data 331 | { 332 | NSLog(@"gotEncodedData %d", (int)[data length]); 333 | if (self.h264FileHandle != NULL) 334 | { 335 | const char bytes[] = "\x00\x00\x00\x01"; 336 | size_t length = (sizeof bytes) - 1; //string literals have implicit trailing '\0' 337 | NSData *ByteHeader = [NSData dataWithBytes:bytes length:length]; 338 | [self.h264FileHandle writeData:ByteHeader]; 339 | [self.h264FileHandle writeData:data]; 340 | } 341 | } 342 | 343 | - (void)EndVideoToolBox 344 | { 345 | VTCompressionSessionCompleteFrames(encodingSession, kCMTimeInvalid); 346 | VTCompressionSessionInvalidate(encodingSession); 347 | CFRelease(encodingSession); 348 | encodingSession = NULL; 349 | } 350 | 351 | 352 | 353 | #pragma mark - private Methods 354 | - (void)configFileHandle{ 355 | NSString *filePath = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject] stringByAppendingPathComponent:H264FilePath]; 356 | NSFileManager *fileManager = [NSFileManager defaultManager]; 357 | //文件存在的话先删除文件 358 | if ([fileManager fileExistsAtPath:filePath]) { 359 | [fileManager removeItemAtPath:filePath error:nil]; 360 | } 361 | [fileManager createFileAtPath:filePath contents:nil attributes:nil]; 362 | self.h264FileHandle = [NSFileHandle fileHandleForWritingAtPath:filePath]; 363 | 364 | if (!self.h264FileHandle) { 365 | NSLog(@"创建H264文件句柄失败"); 366 | } 367 | } 368 | 369 | - (void)closeFileHandle{ 370 | if (self.h264FileHandle) { 371 | [self.h264FileHandle closeFile]; 372 | self.h264FileHandle = nil; 373 | } 374 | } 375 | 376 | #pragma mark - Getters 377 | 378 | - (UIButton *)startBtn{ 379 | if (!_startBtn) { 380 | _startBtn = [[UIButton alloc]initWithFrame:CGRectMake(220, 30, 100, 50)]; 381 | [_startBtn setBackgroundColor:[UIColor cyanColor]]; 382 | [_startBtn setTitleColor:[UIColor blackColor] forState:UIControlStateNormal]; 383 | [_startBtn setTitle:@"start" forState:UIControlStateNormal]; 384 | [_startBtn addTarget:self action:@selector(startBtnAction) forControlEvents:UIControlEventTouchUpInside]; 385 | } 386 | return _startBtn; 387 | } 388 | 389 | - (UILabel *)titleLabel{ 390 | if (!_titleLabel) { 391 | _titleLabel = [[UILabel alloc]initWithFrame:CGRectMake(50, 30, 150, 30)]; 392 | _titleLabel.textColor = [UIColor blackColor]; 393 | _titleLabel.text = @"测试H264编码"; 394 | } 395 | return _titleLabel; 396 | } 397 | 398 | @end 399 | -------------------------------------------------------------------------------- /VideoToolBoxEncodeH264/VideoToolBoxEncodeH264/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // VideoToolBoxEncodeH264 4 | // 5 | // Created by AnDong on 2018/6/25. 6 | // Copyright © 2018年 AnDong. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "AppDelegate.h" 11 | 12 | int main(int argc, char * argv[]) { 13 | @autoreleasepool { 14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); 15 | } 16 | } 17 | --------------------------------------------------------------------------------