├── .gitignore ├── LICENSE ├── README.md ├── example ├── CSVideoReverse.xcodeproj │ ├── project.pbxproj │ └── project.xcworkspace │ │ └── contents.xcworkspacedata └── CSVideoReverse │ ├── AppDelegate.h │ ├── AppDelegate.m │ ├── Assets.xcassets │ └── AppIcon.appiconset │ │ └── Contents.json │ ├── Base.lproj │ ├── LaunchScreen.storyboard │ └── Main.storyboard │ ├── Info.plist │ ├── ViewController.h │ ├── ViewController.m │ ├── input.mov │ └── main.m ├── input.gif ├── output.gif └── src ├── CSVideoReverse.h └── CSVideoReverse.m /.gitignore: -------------------------------------------------------------------------------- 1 | # Xcode 2 | # 3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore 4 | 5 | ## Build generated 6 | build/ 7 | DerivedData/ 8 | 9 | ## Various settings 10 | *.pbxuser 11 | !default.pbxuser 12 | *.mode1v3 13 | !default.mode1v3 14 | *.mode2v3 15 | !default.mode2v3 16 | *.perspectivev3 17 | !default.perspectivev3 18 | xcuserdata/ 19 | 20 | ## Other 21 | *.moved-aside 22 | *.xcuserstate 23 | 24 | ## Obj-C/Swift specific 25 | *.hmap 26 | *.ipa 27 | *.dSYM.zip 28 | *.dSYM 29 | 30 | # CocoaPods 31 | # 32 | # We recommend against adding the Pods directory to your .gitignore. However 33 | # you should judge for yourself, the pros and cons are mentioned at: 34 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control 35 | # 36 | # Pods/ 37 | 38 | # Carthage 39 | # 40 | # Add this line if you want to avoid checking in source code from Carthage dependencies. 41 | # Carthage/Checkouts 42 | 43 | Carthage/Build 44 | 45 | # fastlane 46 | # 47 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the 48 | # screenshots whenever they are needed. 49 | # For more information about the recommended setup visit: 50 | # https://github.com/fastlane/fastlane/blob/master/fastlane/docs/Gitignore.md 51 | 52 | fastlane/report.xml 53 | fastlane/screenshots 54 | 55 | #Code Injection 56 | # 57 | # After new code Injection tools there's a generated folder /iOSInjectionProject 58 | # https://github.com/johnno1962/injectionforxcode 59 | 60 | iOSInjectionProject/ 61 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Christopher Sung 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | CSVideoReverse 2 | ============== 3 | 4 | A simple Objective-C class for creating a reversed (silent) version of a video file. Reversal occurs in its own thread, input frames are read in passes to reduce memory usage, and a delegate can be called upon completion or error. 5 | 6 | ![Input video](https://github.com/chrissung/CSVideoReverse/blob/master/input.gif) 7 | 8 | ![Output video](https://github.com/chrissung/CSVideoReverse/blob/master/output.gif) 9 | 10 | Usage Example 11 | ------------- 12 | Create an instance of the class, set any custom reader settings, and call the main method with the `inputPath` of the video to be reversed, and the `outputPath` where the finished result will reside: 13 | 14 | ``` objective-c 15 | NSString *inputPath = [[NSBundle mainBundle] pathForResource:@"input" ofType:@"mov"]; 16 | 17 | // create a path for our reversed output video 18 | NSString *documentsPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0]; 19 | outputPath = [documentsPath stringByAppendingFormat:@"/reversed.mov"]; 20 | 21 | // get instance of our reverse video class 22 | CSVideoReverse *reverser = [[CSVideoReverse alloc] init]; 23 | reverser.delegate = self; 24 | 25 | // if custom reader settings are desired 26 | reverser.readerOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey, nil]; 27 | 28 | // now start the reversal process 29 | [reverser reverseVideoAtPath:inputPath outputPath:outputPath]; 30 | ``` 31 | 32 | Then implement the delegate method of the class to get the result: 33 | 34 | ``` objective-c 35 | #pragma mark CSVideoReverseDelegate Methods 36 | - (void)didFinishReverse:(bool)success withError:(NSError *)error { 37 | if (!success) { 38 | NSLog(@"%s error: %@", __FUNCTION__, error.localizedDescription); 39 | } 40 | else { 41 | // show the reversed video located at outputPath 42 | } 43 | } 44 | ``` 45 | 46 | Example Project 47 | --------------- 48 | Build the XCode project at `example/CSVideoReverse.xcodeproj` to see it in action with reverse video playback - you may want to sub in your own .mov or .mp4 file in `example/CSVideoReverse/ViewController.m`. At some point, I'll add a UIImagePickerController so you can simply choose from Camera Roll instead of having to reference an asset within the app bundle itself. 49 | 50 | Licenses 51 | -------- 52 | 53 | All source code is licensed under the [MIT-License](https://github.com/chrissung/CSVideoReverse/blob/master/LICENSE). 54 | 55 | -------------------------------------------------------------------------------- /example/CSVideoReverse.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 46; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 83B1F58A1E6CC43500D87C25 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 83B1F5891E6CC43500D87C25 /* main.m */; }; 11 | 83B1F58D1E6CC43500D87C25 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 83B1F58C1E6CC43500D87C25 /* AppDelegate.m */; }; 12 | 83B1F5901E6CC43500D87C25 /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 83B1F58F1E6CC43500D87C25 /* ViewController.m */; }; 13 | 83B1F5931E6CC43500D87C25 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 83B1F5911E6CC43500D87C25 /* Main.storyboard */; }; 14 | 83B1F5951E6CC43500D87C25 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 83B1F5941E6CC43500D87C25 /* Assets.xcassets */; }; 15 | 83B1F5981E6CC43500D87C25 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 83B1F5961E6CC43500D87C25 /* LaunchScreen.storyboard */; }; 16 | 83B1F5A41E6CD27A00D87C25 /* input.mov in Resources */ = {isa = PBXBuildFile; fileRef = 83B1F5A31E6CD27A00D87C25 /* input.mov */; }; 17 | 83B1F5AF1E6D055500D87C25 /* CSVideoReverse.m in Sources */ = {isa = PBXBuildFile; fileRef = 83B1F5AE1E6D055500D87C25 /* CSVideoReverse.m */; }; 18 | /* End PBXBuildFile section */ 19 | 20 | /* Begin PBXFileReference section */ 21 | 83B1F5851E6CC43500D87C25 /* CSVideoReverse.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = CSVideoReverse.app; sourceTree = BUILT_PRODUCTS_DIR; }; 22 | 83B1F5891E6CC43500D87C25 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 23 | 83B1F58B1E6CC43500D87C25 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 24 | 83B1F58C1E6CC43500D87C25 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 25 | 83B1F58E1E6CC43500D87C25 /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; }; 26 | 83B1F58F1E6CC43500D87C25 /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = ""; }; 27 | 83B1F5921E6CC43500D87C25 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 28 | 83B1F5941E6CC43500D87C25 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 29 | 83B1F5971E6CC43500D87C25 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 30 | 83B1F5991E6CC43500D87C25 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 31 | 83B1F5A31E6CD27A00D87C25 /* input.mov */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = input.mov; sourceTree = ""; }; 32 | 83B1F5AD1E6D055500D87C25 /* CSVideoReverse.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CSVideoReverse.h; sourceTree = ""; }; 33 | 83B1F5AE1E6D055500D87C25 /* CSVideoReverse.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CSVideoReverse.m; sourceTree = ""; }; 34 | 
/* End PBXFileReference section */ 35 | 36 | /* Begin PBXFrameworksBuildPhase section */ 37 | 83B1F5821E6CC43500D87C25 /* Frameworks */ = { 38 | isa = PBXFrameworksBuildPhase; 39 | buildActionMask = 2147483647; 40 | files = ( 41 | ); 42 | runOnlyForDeploymentPostprocessing = 0; 43 | }; 44 | /* End PBXFrameworksBuildPhase section */ 45 | 46 | /* Begin PBXGroup section */ 47 | 83B1F57C1E6CC43500D87C25 = { 48 | isa = PBXGroup; 49 | children = ( 50 | 83B1F5AC1E6D055500D87C25 /* src */, 51 | 83B1F5871E6CC43500D87C25 /* CSVideoReverse */, 52 | 83B1F5861E6CC43500D87C25 /* Products */, 53 | ); 54 | sourceTree = ""; 55 | }; 56 | 83B1F5861E6CC43500D87C25 /* Products */ = { 57 | isa = PBXGroup; 58 | children = ( 59 | 83B1F5851E6CC43500D87C25 /* CSVideoReverse.app */, 60 | ); 61 | name = Products; 62 | sourceTree = ""; 63 | }; 64 | 83B1F5871E6CC43500D87C25 /* CSVideoReverse */ = { 65 | isa = PBXGroup; 66 | children = ( 67 | 83B1F5A31E6CD27A00D87C25 /* input.mov */, 68 | 83B1F58B1E6CC43500D87C25 /* AppDelegate.h */, 69 | 83B1F58C1E6CC43500D87C25 /* AppDelegate.m */, 70 | 83B1F58E1E6CC43500D87C25 /* ViewController.h */, 71 | 83B1F58F1E6CC43500D87C25 /* ViewController.m */, 72 | 83B1F5911E6CC43500D87C25 /* Main.storyboard */, 73 | 83B1F5941E6CC43500D87C25 /* Assets.xcassets */, 74 | 83B1F5961E6CC43500D87C25 /* LaunchScreen.storyboard */, 75 | 83B1F5991E6CC43500D87C25 /* Info.plist */, 76 | 83B1F5881E6CC43500D87C25 /* Supporting Files */, 77 | ); 78 | path = CSVideoReverse; 79 | sourceTree = ""; 80 | }; 81 | 83B1F5881E6CC43500D87C25 /* Supporting Files */ = { 82 | isa = PBXGroup; 83 | children = ( 84 | 83B1F5891E6CC43500D87C25 /* main.m */, 85 | ); 86 | name = "Supporting Files"; 87 | sourceTree = ""; 88 | }; 89 | 83B1F5AC1E6D055500D87C25 /* src */ = { 90 | isa = PBXGroup; 91 | children = ( 92 | 83B1F5AD1E6D055500D87C25 /* CSVideoReverse.h */, 93 | 83B1F5AE1E6D055500D87C25 /* CSVideoReverse.m */, 94 | ); 95 | name = src; 96 | path = ../src; 97 | sourceTree = ""; 98 | }; 99 | /* End PBXGroup section */ 100 | 101 | /* Begin PBXNativeTarget section */ 102 | 83B1F5841E6CC43500D87C25 /* CSVideoReverse */ = { 103 | isa = PBXNativeTarget; 104 | buildConfigurationList = 83B1F59C1E6CC43500D87C25 /* Build configuration list for PBXNativeTarget "CSVideoReverse" */; 105 | buildPhases = ( 106 | 83B1F5811E6CC43500D87C25 /* Sources */, 107 | 83B1F5821E6CC43500D87C25 /* Frameworks */, 108 | 83B1F5831E6CC43500D87C25 /* Resources */, 109 | ); 110 | buildRules = ( 111 | ); 112 | dependencies = ( 113 | ); 114 | name = CSVideoReverse; 115 | productName = CSVideoReverse; 116 | productReference = 83B1F5851E6CC43500D87C25 /* CSVideoReverse.app */; 117 | productType = "com.apple.product-type.application"; 118 | }; 119 | /* End PBXNativeTarget section */ 120 | 121 | /* Begin PBXProject section */ 122 | 83B1F57D1E6CC43500D87C25 /* Project object */ = { 123 | isa = PBXProject; 124 | attributes = { 125 | LastUpgradeCheck = 0820; 126 | ORGANIZATIONNAME = chrissung; 127 | TargetAttributes = { 128 | 83B1F5841E6CC43500D87C25 = { 129 | CreatedOnToolsVersion = 8.2.1; 130 | DevelopmentTeam = 7SAU9GW3RA; 131 | ProvisioningStyle = Automatic; 132 | }; 133 | }; 134 | }; 135 | buildConfigurationList = 83B1F5801E6CC43500D87C25 /* Build configuration list for PBXProject "CSVideoReverse" */; 136 | compatibilityVersion = "Xcode 3.2"; 137 | developmentRegion = English; 138 | hasScannedForEncodings = 0; 139 | knownRegions = ( 140 | en, 141 | Base, 142 | ); 143 | mainGroup = 83B1F57C1E6CC43500D87C25; 144 | productRefGroup = 83B1F5861E6CC43500D87C25 /* 
Products */; 145 | projectDirPath = ""; 146 | projectRoot = ""; 147 | targets = ( 148 | 83B1F5841E6CC43500D87C25 /* CSVideoReverse */, 149 | ); 150 | }; 151 | /* End PBXProject section */ 152 | 153 | /* Begin PBXResourcesBuildPhase section */ 154 | 83B1F5831E6CC43500D87C25 /* Resources */ = { 155 | isa = PBXResourcesBuildPhase; 156 | buildActionMask = 2147483647; 157 | files = ( 158 | 83B1F5981E6CC43500D87C25 /* LaunchScreen.storyboard in Resources */, 159 | 83B1F5A41E6CD27A00D87C25 /* input.mov in Resources */, 160 | 83B1F5951E6CC43500D87C25 /* Assets.xcassets in Resources */, 161 | 83B1F5931E6CC43500D87C25 /* Main.storyboard in Resources */, 162 | ); 163 | runOnlyForDeploymentPostprocessing = 0; 164 | }; 165 | /* End PBXResourcesBuildPhase section */ 166 | 167 | /* Begin PBXSourcesBuildPhase section */ 168 | 83B1F5811E6CC43500D87C25 /* Sources */ = { 169 | isa = PBXSourcesBuildPhase; 170 | buildActionMask = 2147483647; 171 | files = ( 172 | 83B1F5AF1E6D055500D87C25 /* CSVideoReverse.m in Sources */, 173 | 83B1F5901E6CC43500D87C25 /* ViewController.m in Sources */, 174 | 83B1F58D1E6CC43500D87C25 /* AppDelegate.m in Sources */, 175 | 83B1F58A1E6CC43500D87C25 /* main.m in Sources */, 176 | ); 177 | runOnlyForDeploymentPostprocessing = 0; 178 | }; 179 | /* End PBXSourcesBuildPhase section */ 180 | 181 | /* Begin PBXVariantGroup section */ 182 | 83B1F5911E6CC43500D87C25 /* Main.storyboard */ = { 183 | isa = PBXVariantGroup; 184 | children = ( 185 | 83B1F5921E6CC43500D87C25 /* Base */, 186 | ); 187 | name = Main.storyboard; 188 | sourceTree = ""; 189 | }; 190 | 83B1F5961E6CC43500D87C25 /* LaunchScreen.storyboard */ = { 191 | isa = PBXVariantGroup; 192 | children = ( 193 | 83B1F5971E6CC43500D87C25 /* Base */, 194 | ); 195 | name = LaunchScreen.storyboard; 196 | sourceTree = ""; 197 | }; 198 | /* End PBXVariantGroup section */ 199 | 200 | /* Begin XCBuildConfiguration section */ 201 | 83B1F59A1E6CC43500D87C25 /* Debug */ = { 202 | isa = XCBuildConfiguration; 203 | buildSettings = { 204 | ALWAYS_SEARCH_USER_PATHS = NO; 205 | CLANG_ANALYZER_NONNULL = YES; 206 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 207 | CLANG_CXX_LIBRARY = "libc++"; 208 | CLANG_ENABLE_MODULES = YES; 209 | CLANG_ENABLE_OBJC_ARC = YES; 210 | CLANG_WARN_BOOL_CONVERSION = YES; 211 | CLANG_WARN_CONSTANT_CONVERSION = YES; 212 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 213 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 214 | CLANG_WARN_EMPTY_BODY = YES; 215 | CLANG_WARN_ENUM_CONVERSION = YES; 216 | CLANG_WARN_INFINITE_RECURSION = YES; 217 | CLANG_WARN_INT_CONVERSION = YES; 218 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 219 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 220 | CLANG_WARN_UNREACHABLE_CODE = YES; 221 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 222 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 223 | COPY_PHASE_STRIP = NO; 224 | DEBUG_INFORMATION_FORMAT = dwarf; 225 | ENABLE_STRICT_OBJC_MSGSEND = YES; 226 | ENABLE_TESTABILITY = YES; 227 | GCC_C_LANGUAGE_STANDARD = gnu99; 228 | GCC_DYNAMIC_NO_PIC = NO; 229 | GCC_NO_COMMON_BLOCKS = YES; 230 | GCC_OPTIMIZATION_LEVEL = 0; 231 | GCC_PREPROCESSOR_DEFINITIONS = ( 232 | "DEBUG=1", 233 | "$(inherited)", 234 | ); 235 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 236 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 237 | GCC_WARN_UNDECLARED_SELECTOR = YES; 238 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 239 | GCC_WARN_UNUSED_FUNCTION = YES; 240 | GCC_WARN_UNUSED_VARIABLE = YES; 241 | IPHONEOS_DEPLOYMENT_TARGET = 10.0; 242 | MTL_ENABLE_DEBUG_INFO = YES; 243 | ONLY_ACTIVE_ARCH = YES; 244 
| SDKROOT = iphoneos; 245 | }; 246 | name = Debug; 247 | }; 248 | 83B1F59B1E6CC43500D87C25 /* Release */ = { 249 | isa = XCBuildConfiguration; 250 | buildSettings = { 251 | ALWAYS_SEARCH_USER_PATHS = NO; 252 | CLANG_ANALYZER_NONNULL = YES; 253 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 254 | CLANG_CXX_LIBRARY = "libc++"; 255 | CLANG_ENABLE_MODULES = YES; 256 | CLANG_ENABLE_OBJC_ARC = YES; 257 | CLANG_WARN_BOOL_CONVERSION = YES; 258 | CLANG_WARN_CONSTANT_CONVERSION = YES; 259 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 260 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 261 | CLANG_WARN_EMPTY_BODY = YES; 262 | CLANG_WARN_ENUM_CONVERSION = YES; 263 | CLANG_WARN_INFINITE_RECURSION = YES; 264 | CLANG_WARN_INT_CONVERSION = YES; 265 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 266 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 267 | CLANG_WARN_UNREACHABLE_CODE = YES; 268 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 269 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 270 | COPY_PHASE_STRIP = NO; 271 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 272 | ENABLE_NS_ASSERTIONS = NO; 273 | ENABLE_STRICT_OBJC_MSGSEND = YES; 274 | GCC_C_LANGUAGE_STANDARD = gnu99; 275 | GCC_NO_COMMON_BLOCKS = YES; 276 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 277 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 278 | GCC_WARN_UNDECLARED_SELECTOR = YES; 279 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 280 | GCC_WARN_UNUSED_FUNCTION = YES; 281 | GCC_WARN_UNUSED_VARIABLE = YES; 282 | IPHONEOS_DEPLOYMENT_TARGET = 10.0; 283 | MTL_ENABLE_DEBUG_INFO = NO; 284 | SDKROOT = iphoneos; 285 | VALIDATE_PRODUCT = YES; 286 | }; 287 | name = Release; 288 | }; 289 | 83B1F59D1E6CC43500D87C25 /* Debug */ = { 290 | isa = XCBuildConfiguration; 291 | buildSettings = { 292 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 293 | DEVELOPMENT_TEAM = 7SAU9GW3RA; 294 | INFOPLIST_FILE = CSVideoReverse/Info.plist; 295 | IPHONEOS_DEPLOYMENT_TARGET = 10.0; 296 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 297 | PRODUCT_BUNDLE_IDENTIFIER = com.chrissung.CSVideoReverse; 298 | PRODUCT_NAME = "$(TARGET_NAME)"; 299 | TARGETED_DEVICE_FAMILY = "1,2"; 300 | }; 301 | name = Debug; 302 | }; 303 | 83B1F59E1E6CC43500D87C25 /* Release */ = { 304 | isa = XCBuildConfiguration; 305 | buildSettings = { 306 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 307 | DEVELOPMENT_TEAM = 7SAU9GW3RA; 308 | INFOPLIST_FILE = CSVideoReverse/Info.plist; 309 | IPHONEOS_DEPLOYMENT_TARGET = 10.0; 310 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 311 | PRODUCT_BUNDLE_IDENTIFIER = com.chrissung.CSVideoReverse; 312 | PRODUCT_NAME = "$(TARGET_NAME)"; 313 | TARGETED_DEVICE_FAMILY = "1,2"; 314 | }; 315 | name = Release; 316 | }; 317 | /* End XCBuildConfiguration section */ 318 | 319 | /* Begin XCConfigurationList section */ 320 | 83B1F5801E6CC43500D87C25 /* Build configuration list for PBXProject "CSVideoReverse" */ = { 321 | isa = XCConfigurationList; 322 | buildConfigurations = ( 323 | 83B1F59A1E6CC43500D87C25 /* Debug */, 324 | 83B1F59B1E6CC43500D87C25 /* Release */, 325 | ); 326 | defaultConfigurationIsVisible = 0; 327 | defaultConfigurationName = Release; 328 | }; 329 | 83B1F59C1E6CC43500D87C25 /* Build configuration list for PBXNativeTarget "CSVideoReverse" */ = { 330 | isa = XCConfigurationList; 331 | buildConfigurations = ( 332 | 83B1F59D1E6CC43500D87C25 /* Debug */, 333 | 83B1F59E1E6CC43500D87C25 /* Release */, 334 | ); 335 | defaultConfigurationIsVisible = 0; 336 | defaultConfigurationName = Release; 337 | }; 338 | /* End XCConfigurationList 
section */ 339 | }; 340 | rootObject = 83B1F57D1E6CC43500D87C25 /* Project object */; 341 | } 342 | -------------------------------------------------------------------------------- /example/CSVideoReverse.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /example/CSVideoReverse/AppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.h 3 | // CSVideoReverse 4 | // 5 | // Created by Chris Sung on 3/5/17. 6 | // Copyright © 2017 chrissung. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface AppDelegate : UIResponder 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | 16 | @end 17 | 18 | -------------------------------------------------------------------------------- /example/CSVideoReverse/AppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.m 3 | // CSVideoReverse 4 | // 5 | // Created by Chris Sung on 3/5/17. 6 | // Copyright © 2017 chrissung. All rights reserved. 7 | // 8 | 9 | #import "AppDelegate.h" 10 | 11 | @interface AppDelegate () 12 | 13 | @end 14 | 15 | @implementation AppDelegate 16 | 17 | 18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 19 | // Override point for customization after application launch. 20 | return YES; 21 | } 22 | 23 | 24 | - (void)applicationWillResignActive:(UIApplication *)application { 25 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 26 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 27 | } 28 | 29 | 30 | - (void)applicationDidEnterBackground:(UIApplication *)application { 31 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 32 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 33 | } 34 | 35 | 36 | - (void)applicationWillEnterForeground:(UIApplication *)application { 37 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 38 | } 39 | 40 | 41 | - (void)applicationDidBecomeActive:(UIApplication *)application { 42 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 43 | } 44 | 45 | 46 | - (void)applicationWillTerminate:(UIApplication *)application { 47 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 
48 | } 49 | 50 | 51 | @end 52 | -------------------------------------------------------------------------------- /example/CSVideoReverse/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "29x29", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "29x29", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "40x40", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "40x40", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "60x60", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "60x60", 31 | "scale" : "3x" 32 | } 33 | ], 34 | "info" : { 35 | "version" : 1, 36 | "author" : "xcode" 37 | } 38 | } -------------------------------------------------------------------------------- /example/CSVideoReverse/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /example/CSVideoReverse/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /example/CSVideoReverse/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleDisplayName 8 | CSVideoReverse 9 | CFBundleExecutable 10 | $(EXECUTABLE_NAME) 11 | CFBundleIdentifier 12 | $(PRODUCT_BUNDLE_IDENTIFIER) 13 | CFBundleInfoDictionaryVersion 14 | 6.0 15 | CFBundleName 16 | $(PRODUCT_NAME) 17 | CFBundlePackageType 18 | APPL 19 | CFBundleShortVersionString 20 | 1.0 21 | CFBundleVersion 22 | 1 23 | LSRequiresIPhoneOS 24 | 25 | UILaunchStoryboardName 26 | LaunchScreen 27 | UIMainStoryboardFile 28 | Main 29 | UIRequiredDeviceCapabilities 30 | 31 | armv7 32 | 33 | UISupportedInterfaceOrientations 34 | 35 | UIInterfaceOrientationPortrait 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /example/CSVideoReverse/ViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.h 3 | // CSVideoReverse 4 | // 5 | // Created by Chris Sung on 3/5/17. 6 | // Copyright © 2017 chrissung. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | #import "CSVideoReverse.h" 12 | 13 | @interface ViewController : UIViewController 14 | 15 | 16 | @end 17 | 18 | -------------------------------------------------------------------------------- /example/CSVideoReverse/ViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.m 3 | // Example usage for CSVideoReverse class 4 | // 5 | // Created by Chris Sung on 3/5/17. 6 | // Copyright © 2017 chrissung. All rights reserved. 
7 | // 8 | 9 | #import "ViewController.h" 10 | 11 | @interface ViewController () 12 | 13 | @end 14 | 15 | @implementation ViewController { 16 | NSString *outputPath; 17 | AVPlayer *avPlayer; 18 | } 19 | 20 | - (void)viewDidLoad { 21 | [super viewDidLoad]; 22 | 23 | // get our test input file 24 | NSString *inputPath = [[NSBundle mainBundle] pathForResource:@"input" ofType:@"mov"]; 25 | 26 | // create a path for our reversed output video 27 | NSString *documentsPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0]; 28 | outputPath = [documentsPath stringByAppendingFormat:@"/reversed.mov"]; 29 | 30 | // get instance of our reverse video class 31 | CSVideoReverse *reverser = [[CSVideoReverse alloc] init]; 32 | reverser.delegate = self; 33 | reverser.showDebug = YES; // NSLog the details from the reversal processing? 34 | 35 | // if custom reader settings are desired 36 | // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange seems to be the most common pixel type among Instagram, Facebook, Twitter, et al 37 | reverser.readerOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey, nil]; 38 | 39 | // now start the reversal process 40 | [reverser reverseVideoAtPath:inputPath outputPath:outputPath]; 41 | } 42 | 43 | #pragma mark CSVideoReverseDelegate Methods 44 | - (void)didFinishReverse:(bool)success withError:(NSError *)error { 45 | if (!success) { 46 | NSLog(@"%s error: %@", __FUNCTION__, error.localizedDescription); 47 | return; 48 | } 49 | 50 | // othewise, let's show the reversed video 51 | [self showReversedVideo]; 52 | } 53 | 54 | - (void)showReversedVideo { 55 | NSURL *outputUrl = [NSURL fileURLWithPath:outputPath isDirectory:NO]; 56 | AVURLAsset *asset = [AVURLAsset URLAssetWithURL:outputUrl options:nil]; 57 | 58 | AVPlayerItem *item = [[AVPlayerItem alloc] initWithAsset:asset]; 59 | avPlayer = [[AVPlayer alloc] initWithPlayerItem:item]; 60 | 61 | AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:avPlayer]; 62 | 63 | // get the view size 64 | CGSize size = self.view.bounds.size; 65 | 66 | // get the video size 67 | AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 68 | CGFloat outputWidth = videoTrack.naturalSize.width; 69 | CGFloat outputHeight = videoTrack.naturalSize.height; 70 | 71 | // handle any orientation specifics 72 | CGAffineTransform txf = [videoTrack preferredTransform]; 73 | bool isPortrait = NO; 74 | 75 | if (txf.a == 0 && txf.b == 1.0 && txf.c == -1.0 && txf.d == 0) { // PortraitUp 76 | isPortrait = YES; 77 | } 78 | else if (txf.a == 0 && txf.b == -1.0 && txf.c == 1.0 && txf.d == 0) { // PortraitDown 79 | isPortrait = YES; 80 | } 81 | 82 | // swap dims if relevant 83 | if (isPortrait && outputWidth > outputHeight) { 84 | outputWidth = videoTrack.naturalSize.height; 85 | outputHeight = videoTrack.naturalSize.width; 86 | } 87 | else if (!isPortrait && outputHeight > outputWidth) { 88 | outputWidth = videoTrack.naturalSize.height; 89 | outputHeight = videoTrack.naturalSize.width; 90 | } 91 | 92 | // scale the playerLayer to the view 93 | CGFloat widthScale = outputWidth / size.width; 94 | CGFloat heightScale = outputHeight / size.height; 95 | CGFloat maxScale = widthScale > heightScale ? 
widthScale : heightScale; 96 | 97 | CGFloat displayWidth = outputWidth / maxScale; 98 | CGFloat displayHeight = outputHeight / maxScale; 99 | 100 | float x = (size.width - displayWidth) / 2.0; 101 | float y = (size.height - displayHeight) / 2.0; 102 | 103 | playerLayer.frame = CGRectMake(x, y, displayWidth, displayHeight); 104 | [playerLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill]; 105 | [self.view.layer addSublayer:playerLayer]; 106 | 107 | // set up looping 108 | [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil]; 109 | [[NSNotificationCenter defaultCenter] addObserverForName:AVPlayerItemDidPlayToEndTimeNotification 110 | object:nil 111 | queue:nil 112 | usingBlock:^(NSNotification *note) { 113 | [avPlayer seekToTime:kCMTimeZero]; // loop 114 | [avPlayer play]; 115 | }]; 116 | 117 | // add tap events to the view 118 | [[self view] addGestureRecognizer:[[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleTap:)]]; 119 | 120 | [avPlayer play]; 121 | } 122 | 123 | // dual purpose tap for start/stop of video 124 | - (void)handleTap:(UITapGestureRecognizer *)sender { 125 | if (sender.state == UIGestureRecognizerStateEnded) { 126 | if (avPlayer.rate > 0 && !avPlayer.error) { // playing, so pause 127 | [avPlayer pause]; 128 | } 129 | else { // stopped, so play 130 | [avPlayer play]; 131 | } 132 | } 133 | } 134 | 135 | - (void)didReceiveMemoryWarning { 136 | [super didReceiveMemoryWarning]; 137 | // Dispose of any resources that can be recreated. 138 | } 139 | 140 | @end 141 | -------------------------------------------------------------------------------- /example/CSVideoReverse/input.mov: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chrissung/CSVideoReverse/9ba2dd6a4d86f418d2e1a9156b7d816a64b835a5/example/CSVideoReverse/input.mov -------------------------------------------------------------------------------- /example/CSVideoReverse/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // CSVideoReverse 4 | // 5 | // Created by Chris Sung on 3/5/17. 6 | // Copyright © 2017 chrissung. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "AppDelegate.h" 11 | 12 | int main(int argc, char * argv[]) { 13 | @autoreleasepool { 14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /input.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chrissung/CSVideoReverse/9ba2dd6a4d86f418d2e1a9156b7d816a64b835a5/input.gif -------------------------------------------------------------------------------- /output.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chrissung/CSVideoReverse/9ba2dd6a4d86f418d2e1a9156b7d816a64b835a5/output.gif -------------------------------------------------------------------------------- /src/CSVideoReverse.h: -------------------------------------------------------------------------------- 1 | // 2 | // CSVideoReverse.h 3 | // 4 | // Created by Chris Sung on 3/5/17. 5 | // Copyright © 2017 chrissung. All rights reserved. 
6 | //
7 | 
8 | #import <Foundation/Foundation.h>
9 | #import <AVFoundation/AVFoundation.h>
10 | 
11 | @protocol CSVideoReverseDelegate <NSObject>
12 | @optional
13 | - (void)didFinishReverse:(bool)success withError:(NSError *)error;
14 | @end
15 | 
16 | @interface CSVideoReverse : NSObject {
17 | 
18 | }
19 | 
20 | /*---------------------------------------------------------------*/
21 | // Properties
22 | /*---------------------------------------------------------------*/
23 | 
24 | @property (weak, nonatomic) id<CSVideoReverseDelegate> delegate;
25 | 
26 | @property (readwrite, nonatomic) BOOL showDebug;
27 | 
28 | @property (strong, nonatomic) NSDictionary* readerOutputSettings;
29 | 
30 | 
31 | /*---------------------------------------------------------------*/
32 | // Methods
33 | /*---------------------------------------------------------------*/
34 | 
35 | - (void)reverseVideoAtPath:(NSString *)inputPath outputPath:(NSString *)outputPath;
36 | 
37 | @end
38 | 
39 | 
-------------------------------------------------------------------------------- /src/CSVideoReverse.m: --------------------------------------------------------------------------------
1 | //
2 | // CSVideoReverse.m
3 | //
4 | // Created by Chris Sung on 3/5/17.
5 | // Copyright © 2017 chrissung. All rights reserved.
6 | //
7 | 
8 | #import "CSVideoReverse.h"
9 | 
10 | @interface CSVideoReverse ()
11 | 
12 | @end
13 | 
14 | 
15 | @implementation CSVideoReverse {
16 | AVAssetReader *assetReader;
17 | AVAssetWriter *assetWriter;
18 | 
19 | AVAssetReaderTrackOutput *assetReaderOutput;
20 | AVAssetWriterInput *assetWriterInput;
21 | AVAssetWriterInputPixelBufferAdaptor *assetWriterInputAdaptor;
22 | }
23 | 
24 | - (id)init {
25 | self = [super init];
26 | if (self) {
27 | // Set default vals for member properties.
28 | }
29 | return self;
30 | }
31 | 
32 | - (void)dealloc {
33 | if (self.showDebug)
34 | NSLog(@"%s", __FUNCTION__);
35 | }
36 | 
37 | /*---------------------------------------------------------------*/
38 | // delegate-related methods
39 | /*---------------------------------------------------------------*/
40 | 
41 | - (void)conveyErrorWithMessage:(NSString*)message {
42 | 
43 | if (self.delegate != nil && [self.delegate respondsToSelector:@selector(didFinishReverse:withError:)]) {
44 | // convey on the main thread
45 | NSDictionary *userInfo = @{
46 | NSLocalizedDescriptionKey: NSLocalizedString(message, nil)
47 | };
48 | NSError *error = [NSError errorWithDomain:@"CSVideoReverse"
49 | code:-1
50 | userInfo:userInfo];
51 | 
52 | dispatch_async(dispatch_get_main_queue(),^{
53 | [self.delegate didFinishReverse:NO withError:error];
54 | });
55 | }
56 | }
57 | 
58 | - (void)conveySuccess {
59 | if (self.delegate != nil && [self.delegate respondsToSelector:@selector(didFinishReverse:withError:)]) {
60 | // convey on the main thread
61 | dispatch_async(dispatch_get_main_queue(),^{
62 | [self.delegate didFinishReverse:YES withError:nil];
63 | });
64 | }
65 | }
66 | 
67 | /*---------------------------------------------------------------*/
68 | // main method
69 | /*---------------------------------------------------------------*/
70 | 
71 | // read input in multi-pass increments and write in reverse
72 | - (void)reverseVideoAtPath:(NSString *)inputPath outputPath:(NSString *)outputPath {
73 | 
74 | // check input path
75 | if (![[NSFileManager defaultManager] fileExistsAtPath:inputPath]) {
76 | NSString *msg = [NSString stringWithFormat:@"input file does not exist: %@", inputPath];
77 | NSLog(@"%s %@", __FUNCTION__, msg);
78 | [self conveyErrorWithMessage:msg];
79 | return;
80 | }
81 | 
82 | // make sure nothing exists at output path
83 | 
[[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil]; 84 | 85 | NSURL *inputUrl = [NSURL fileURLWithPath:inputPath isDirectory:NO]; 86 | AVURLAsset *inputAsset = [AVURLAsset URLAssetWithURL:inputUrl options:nil]; 87 | 88 | // make sure we have something to reverse 89 | NSArray *videoTracks = [inputAsset tracksWithMediaType:AVMediaTypeVideo]; 90 | if (videoTracks.count<1) { 91 | NSString *msg = [NSString stringWithFormat:@"no video tracks found in: %@", inputPath]; 92 | if (self.showDebug) NSLog(@"%s %@", __FUNCTION__, msg); 93 | [self conveyErrorWithMessage:msg]; 94 | return; 95 | } 96 | 97 | // create unique name for the processing thread 98 | NSString *reverseQueueDescription = [NSString stringWithFormat:@"%@ reverse queue", self]; 99 | 100 | // create main serialization queue 101 | dispatch_queue_t reverseQueue = dispatch_queue_create([reverseQueueDescription UTF8String], NULL); 102 | 103 | if (self.showDebug) 104 | NSLog(@"%s analyzing input", __FUNCTION__); 105 | 106 | // let's reverse this many frames in each pass 107 | int numSamplesInPass = 100; // write to output in frame increments 108 | 109 | dispatch_async(reverseQueue, ^{ 110 | NSError *error = nil; 111 | 112 | // for timing if desired 113 | NSDate *methodStart; 114 | NSTimeInterval reconTime; 115 | 116 | // Initialize the reader 117 | assetReader = [[AVAssetReader alloc] initWithAsset:inputAsset error:&error]; 118 | AVAssetTrack *videoTrack = [[inputAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 119 | 120 | float fps = videoTrack.nominalFrameRate; 121 | 122 | // initialize reader output with base config if not defined before method call 123 | if (self.readerOutputSettings == nil) 124 | self.readerOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey, nil]; 125 | assetReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:self.readerOutputSettings]; 126 | assetReaderOutput.supportsRandomAccess = YES; 127 | [assetReader addOutput:assetReaderOutput]; 128 | [assetReader startReading]; 129 | 130 | if (self.showDebug) 131 | NSLog(@"%s size: %f x %f", __FUNCTION__, videoTrack.naturalSize.width, videoTrack.naturalSize.height); 132 | 133 | CGFloat outputWidth = videoTrack.naturalSize.width; 134 | CGFloat outputHeight = videoTrack.naturalSize.height; 135 | 136 | // main array to hold presentation times 137 | NSMutableArray *revSampleTimes = [[NSMutableArray alloc] init]; 138 | 139 | // for timing 140 | methodStart = [NSDate date]; 141 | 142 | // now go through the reader output to get some recon on frame presentation times 143 | CMSampleBufferRef sample; 144 | int localCount = 0; 145 | while((sample = [assetReaderOutput copyNextSampleBuffer])) { 146 | CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sample); 147 | NSValue *presentationValue = [NSValue valueWithBytes:&presentationTime objCType:@encode(CMTime)]; 148 | [revSampleTimes addObject:presentationValue]; 149 | CFRelease(sample); 150 | sample = NULL; 151 | 152 | localCount++; 153 | } 154 | 155 | if (self.showDebug) { 156 | reconTime = [[NSDate date] timeIntervalSinceDate:methodStart]; 157 | NSLog(@"%s full read time: %f", __FUNCTION__, reconTime); 158 | NSLog(@"%s frames: %d; array count: %lu", __FUNCTION__, localCount, (unsigned long)[revSampleTimes count]); 159 | NSLog(@"%s duration: %lld / %d", __FUNCTION__, inputAsset.duration.value, inputAsset.duration.timescale); 160 | } 
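// NOTE: the remainder of this method works in fixed-size passes: the recorded sample times
// are split into groups of numSamplesInPass, each pass's time range is re-read (last pass
// first), and its frames are appended in reverse order while reusing the forward
// presentation times -- so peak memory stays roughly proportional to numSamplesInPass rather
// than to the whole clip (e.g. 230 recorded frames with numSamplesInPass = 100 gives
// ceil(230/100) = 3 passes).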
161 | 
162 | // if no frames, format the error and return
163 | if (revSampleTimes.count<1) {
164 | NSString *msg = [NSString stringWithFormat:@"no video frames found in: %@", inputPath];
165 | if (self.showDebug) NSLog(@"%s %@", __FUNCTION__, msg);
166 | [self conveyErrorWithMessage:msg];
167 | return;
168 | }
169 | 
170 | // create pass info since the reversal may be too large to be achieved in one pass
171 | 
172 | // each pass is defined by a time range which we can specify each time we re-init the asset reader
173 | 
174 | // array that holds the pass info
175 | NSMutableArray *passDicts = [[NSMutableArray alloc] init];
176 | 
177 | NSValue *initEventValue = [revSampleTimes objectAtIndex:0];
178 | CMTime initEventTime = [initEventValue CMTimeValue];
179 | 
180 | CMTime passStartTime = [initEventValue CMTimeValue];
181 | CMTime passEndTime = [initEventValue CMTimeValue];
182 | 
183 | int timeStartIndex = -1;
184 | int timeEndIndex = -1;
185 | int frameStartIndex = -1;
186 | int frameEndIndex = -1;
187 | 
188 | NSValue *timeEventValue, *frameEventValue;
189 | NSValue *passStartValue, *passEndValue;
190 | CMTime timeEventTime, frameEventTime;
191 | 
192 | int totalPasses = (int)ceil((float)revSampleTimes.count / (float)numSamplesInPass);
193 | 
194 | BOOL initNewPass = NO;
195 | for (NSInteger i=0; i<revSampleTimes.count; i++) {
196 | 
197 | // get the event time and the mirrored (reverse-order) frame time for this index
198 | timeEventValue = [revSampleTimes objectAtIndex:i];
199 | timeEventTime = [timeEventValue CMTimeValue];
200 | frameEventValue = [revSampleTimes objectAtIndex:(revSampleTimes.count - 1 - i)];
201 | frameEventTime = [frameEventValue CMTimeValue];
202 | 
203 | // each sample extends the end of the current pass
204 | passEndTime = timeEventTime;
205 | timeEndIndex = (int)i;
206 | frameEndIndex = (int)(revSampleTimes.count - 1 - i);
207 | 
208 | if (i%numSamplesInPass == 0) { // pass boundary
209 | if (i>0) {
210 | passStartValue = [NSValue valueWithBytes:&passStartTime objCType:@encode(CMTime)];
211 | passEndValue = [NSValue valueWithBytes:&passEndTime objCType:@encode(CMTime)];
212 | NSDictionary *dict = @{
213 | @"passStartTime": passStartValue,
214 | @"passEndTime": passEndValue,
215 | @"timeStartIndex" : [NSNumber numberWithLong:timeStartIndex],
216 | @"timeEndIndex": [NSNumber numberWithLong:timeEndIndex],
217 | @"frameStartIndex" : [NSNumber numberWithLong:frameStartIndex],
218 | @"frameEndIndex": [NSNumber numberWithLong:frameEndIndex]
219 | };
220 | [passDicts addObject:dict];
221 | }
222 | initNewPass = YES;
223 | }
224 | 
225 | // if new pass then init the main vars
226 | if (initNewPass) {
227 | passStartTime = timeEventTime;
228 | timeStartIndex = (int)i;
229 | frameStartIndex = (int)(revSampleTimes.count - 1 - i);
230 | initNewPass = NO;
231 | }
232 | }
233 | 
234 | // handle last pass
235 | if ((passDicts.count < totalPasses) || revSampleTimes.count%numSamplesInPass != 0) {
236 | passStartValue = [NSValue valueWithBytes:&passStartTime objCType:@encode(CMTime)];
237 | passEndValue = [NSValue valueWithBytes:&passEndTime objCType:@encode(CMTime)];
238 | NSDictionary *dict = @{
239 | @"passStartTime": passStartValue,
240 | @"passEndTime": passEndValue,
241 | @"timeStartIndex" : [NSNumber numberWithLong:timeStartIndex],
242 | @"timeEndIndex": [NSNumber numberWithLong:timeEndIndex],
243 | @"frameStartIndex" : [NSNumber numberWithLong:frameStartIndex],
244 | @"frameEndIndex": [NSNumber numberWithLong:frameEndIndex]
245 | };
246 | [passDicts addObject:dict];
247 | }
248 | 
249 | //// writer setup
250 | 
251 | // set the desired output URL for the file created by the export process
252 | NSURL *outputURL = [NSURL fileURLWithPath:outputPath isDirectory:NO];
253 | 
254 | // initialize the writer -- NOTE: this assumes a QT output file type
255 | assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL
256 | fileType:AVFileTypeQuickTimeMovie // AVFileTypeMPEG4
257 | error:&error];
258 | NSDictionary *writerOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
259 | AVVideoCodecH264, AVVideoCodecKey,
260 | [NSNumber numberWithInt:outputWidth], AVVideoWidthKey,
261 | [NSNumber numberWithInt:outputHeight], AVVideoHeightKey,
262 | nil];
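// NOTE: only a video writer input is set up below -- H.264 at the track's natural size, with no
// audio input -- which is why the reversed output is silent; the source track's preferredTransform
// is copied onto the writer input so the original orientation is preserved.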
263 | 
264 | assetWriterInput = [AVAssetWriterInput
265 | assetWriterInputWithMediaType:AVMediaTypeVideo
266 | outputSettings:writerOutputSettings];
267 | 
268 | [assetWriterInput setExpectsMediaDataInRealTime:NO];
269 | [assetWriterInput setTransform:[videoTrack preferredTransform]];
270 | 
271 | // create the pixel buffer adaptor needed to add presentation time to output frames
272 | AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
273 | assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterInput
274 | sourcePixelBufferAttributes:nil];
275 | [assetWriter addInput:assetWriterInput];
276 | 
277 | [assetWriter startWriting];
278 | [assetWriter startSessionAtSourceTime:initEventTime];
279 | 
280 | int frameCount = 0; // master frame counter
281 | int fpsInt = (int)(fps + 0.5);
282 | 
283 | if (self.showDebug)
284 | NSLog(@"%s --- writing ---", __FUNCTION__);
285 | 
286 | // now go through the read passes and write to output
287 | for (NSInteger z=passDicts.count-1; z>=0; z--) {
288 | NSDictionary *dict = [passDicts objectAtIndex:z];
289 | 
290 | passStartValue = dict[@"passStartTime"];
291 | passStartTime = [passStartValue CMTimeValue];
292 | 
293 | passEndValue = dict[@"passEndTime"];
294 | passEndTime = [passEndValue CMTimeValue];
295 | 
296 | CMTime passDuration = CMTimeSubtract(passEndTime, passStartTime);
297 | 
298 | int timeStartIx = (int)[dict[@"timeStartIndex"] longValue];
299 | int timeEndIx = (int)[dict[@"timeEndIndex"] longValue];
300 | 
301 | int frameStartIx = (int)[dict[@"frameStartIndex"] longValue];
302 | int frameEndIx = (int)[dict[@"frameEndIndex"] longValue];
303 | 
304 | if (self.showDebug) {
305 | NSLog(@"%s pass %ld: range: %lld to %lld", __FUNCTION__, (long)z, passStartTime.value, passEndTime.value);
306 | NSLog(@"%s pass %ld: duration: %lld / %d", __FUNCTION__, (long)z, passDuration.value, passDuration.timescale);
307 | NSLog(@"%s pass %ld: time: %d to %d", __FUNCTION__, (long)z, timeStartIx, timeEndIx);
308 | NSLog(@"%s pass %ld: frame: %d to %d", __FUNCTION__, (long)z, frameStartIx, frameEndIx);
309 | }
310 | 
311 | CMTimeRange localRange = CMTimeRangeMake(passStartTime,passDuration);
312 | NSValue *localRangeValue = [NSValue valueWithBytes:&localRange objCType:@encode(CMTimeRange)];
313 | NSMutableArray *localRanges = [[NSMutableArray alloc] init];
314 | [localRanges addObject:localRangeValue];
315 | 
316 | // make sure we have no remaining samples from last time range
317 | while((sample = [assetReaderOutput copyNextSampleBuffer])) {
318 | CFRelease(sample);
319 | }
320 | 
321 | // reset the reader to the range of the pass
322 | [assetReaderOutput resetForReadingTimeRanges:localRanges];
323 | if (self.showDebug)
324 | NSLog(@"%s pass %ld: set time range", __FUNCTION__, (long)z);
325 | 
326 | // read in the samples of the pass
327 | NSMutableArray *samples = [[NSMutableArray alloc] init];
328 | while((sample = [assetReaderOutput copyNextSampleBuffer])) {
329 | [samples addObject:(__bridge id)sample];
330 | CFRelease(sample);
331 | }
332 | 
333 | // append samples to output using the recorded frame times
334 | for (NSInteger i=0; i<samples.count; i++) {
335 | 
336 | if (frameCount >= revSampleTimes.count) {
337 | NSLog(@"%s pass %ld: more samples than recorded frames!
%d >= %lu ", __FUNCTION__, (long)z, frameCount, (unsigned long)revSampleTimes.count); 338 | break; 339 | } 340 | 341 | // get the orig presentation time (from start to end) 342 | NSValue *eventValue = [revSampleTimes objectAtIndex:frameCount]; 343 | CMTime eventTime = [eventValue CMTimeValue]; 344 | 345 | // take the image/pixel buffer from tail end of the array 346 | CVPixelBufferRef imageBufferRef = CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)samples[samples.count - i - 1]); 347 | 348 | // append frames to output 349 | BOOL append_ok = NO; 350 | int j = 0; 351 | while (!append_ok && j < fpsInt) { 352 | 353 | if (adaptor.assetWriterInput.readyForMoreMediaData) { 354 | append_ok = [adaptor appendPixelBuffer:imageBufferRef withPresentationTime:eventTime]; 355 | if (!append_ok) 356 | NSLog(@"%s Problem appending frame at time: %lld", __FUNCTION__, eventTime.value); 357 | } 358 | else { 359 | // adaptor not ready 360 | [NSThread sleepForTimeInterval:0.05]; 361 | } 362 | 363 | j++; 364 | } 365 | 366 | if (!append_ok) 367 | NSLog(@"%s error appending frame %d; times %d", __FUNCTION__, frameCount, j); 368 | 369 | frameCount++; 370 | } 371 | 372 | // release the samples array for this pass 373 | samples = nil; 374 | } 375 | 376 | // tell asset writer to finish 377 | [assetWriterInput markAsFinished]; 378 | 379 | [assetWriter finishWritingWithCompletionHandler:^(){ 380 | if (self.showDebug) 381 | NSLog(@"%s finished writing", __FUNCTION__); 382 | 383 | // display the total execution time 384 | NSDate *methodFinish = [NSDate date]; 385 | NSTimeInterval procTime = [methodFinish timeIntervalSinceDate:methodStart]; 386 | 387 | if (self.showDebug) 388 | NSLog(@"%s reversed %d frames in %f sec", __FUNCTION__, frameCount, procTime); 389 | 390 | [self conveySuccess]; 391 | }]; 392 | }); 393 | } 394 | 395 | @end 396 | --------------------------------------------------------------------------------
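The README's Example Project section mentions eventually letting the example app pick a clip from the Camera Roll via UIImagePickerController instead of referencing the bundled `input.mov`. A minimal sketch of what that flow might look like in `ViewController` -- illustrative only, not part of the repo, and assuming the `CSVideoReverse` instance (here called `reverser`) and the `outputPath` ivar are kept around:

``` objective-c
#import <MobileCoreServices/MobileCoreServices.h> // for kUTTypeMovie

// ViewController would additionally adopt UIImagePickerControllerDelegate and
// UINavigationControllerDelegate, and the app would need an NSPhotoLibraryUsageDescription
// entry in Info.plist.

- (void)pickVideoToReverse {
  UIImagePickerController *picker = [[UIImagePickerController alloc] init];
  picker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
  picker.mediaTypes = @[(NSString *)kUTTypeMovie]; // movies only
  picker.delegate = self;
  [self presentViewController:picker animated:YES completion:nil];
}

- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
  // local file URL of the chosen movie
  NSURL *pickedUrl = info[UIImagePickerControllerMediaURL];
  [picker dismissViewControllerAnimated:YES completion:nil];

  // hand the picked file to the reverser exactly as the bundled input.mov is handled today
  [reverser reverseVideoAtPath:pickedUrl.path outputPath:outputPath];
}

- (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker {
  [picker dismissViewControllerAnimated:YES completion:nil];
}
```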