├── LivePhotos
│   ├── LivePhotos.xcodeproj
│   │   ├── project.pbxproj
│   │   ├── project.xcworkspace
│   │   │   ├── contents.xcworkspacedata
│   │   │   ├── xcshareddata
│   │   │   │   └── IDEWorkspaceChecks.plist
│   │   │   └── xcuserdata
│   │   │       └── bytedance.xcuserdatad
│   │   │           └── UserInterfaceState.xcuserstate
│   │   └── xcuserdata
│   │       └── bytedance.xcuserdatad
│   │           ├── xcdebugger
│   │           │   └── Breakpoints_v2.xcbkptlist
│   │           └── xcschemes
│   │               └── xcschememanagement.plist
│   └── LivePhotos
│       ├── AppDelegate.swift
│       ├── Assets.xcassets
│       │   ├── AccentColor.colorset
│       │   │   └── Contents.json
│       │   ├── AppIcon.appiconset
│       │   │   └── Contents.json
│       │   └── Contents.json
│       ├── Base.lproj
│       │   ├── LaunchScreen.storyboard
│       │   └── Main.storyboard
│       ├── Info.plist
│       ├── LivePhotos
│       │   ├── AVAsset+Extension.swift
│       │   └── LivePhotos.swift
│       ├── SceneDelegate.swift
│       ├── Tools
│       │   └── Toast.swift
│       └── ViewController
│           ├── LivePhotosViewController+Asemble.swift
│           ├── LivePhotosViewController+Disassemble.swift
│           └── LivePhotosViewController.swift
└── README.md

/LivePhotos/LivePhotos.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 |     archiveVersion = 1;
4 |     classes = {
5 |     };
6 |     objectVersion = 56;
7 |     objects = {
8 | 
9 | /* Begin PBXBuildFile section */
10 |         2B9E9F0329D5FC2B00964111 /* LivePhotos.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2B9E9F0229D5FC2B00964111 /* LivePhotos.swift */; };
11 |         2B9E9F0529D752CE00964111 /* LivePhotosViewController+Disassemble.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2B9E9F0429D752CE00964111 /* LivePhotosViewController+Disassemble.swift */; };
12 |         2B9E9F0729D752ED00964111 /* LivePhotosViewController+Asemble.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2B9E9F0629D752ED00964111 /* LivePhotosViewController+Asemble.swift */; };
13 |         2BDDE72229D366EB00B70661 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2BDDE72129D366EB00B70661 /* AppDelegate.swift */; };
14 |         2BDDE72429D366EB00B70661 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2BDDE72329D366EB00B70661 /* SceneDelegate.swift */; };
15 |         2BDDE72629D366EB00B70661 /* LivePhotosViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2BDDE72529D366EB00B70661 /* LivePhotosViewController.swift */; };
16 |         2BDDE72929D366EB00B70661 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 2BDDE72729D366EB00B70661 /* Main.storyboard */; };
17 |         2BDDE72B29D366EC00B70661 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 2BDDE72A29D366EC00B70661 /* Assets.xcassets */; };
18 |         2BDDE72E29D366EC00B70661 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 2BDDE72C29D366EC00B70661 /* LaunchScreen.storyboard */; };
19 |         2BE2BEA629E2BC8E00539863 /* AVAsset+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2BE2BEA529E2BC8E00539863 /* AVAsset+Extension.swift */; };
20 |         2BF9EFCB29DF3B2D00CF447F /* Toast.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2BF9EFCA29DF3B2D00CF447F /* Toast.swift */; };
21 | /* End PBXBuildFile section */
22 | 
23 | /* Begin PBXFileReference section */
24 |         2B9E9F0229D5FC2B00964111 /* LivePhotos.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LivePhotos.swift; sourceTree = ""; };
25 |         2B9E9F0429D752CE00964111 /* LivePhotosViewController+Disassemble.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "LivePhotosViewController+Disassemble.swift"; sourceTree = ""; };
26 |         2B9E9F0629D752ED00964111 /* 
LivePhotosViewController+Asemble.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "LivePhotosViewController+Asemble.swift"; sourceTree = ""; }; 27 | 2BDDE71E29D366EB00B70661 /* LivePhotos.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = LivePhotos.app; sourceTree = BUILT_PRODUCTS_DIR; }; 28 | 2BDDE72129D366EB00B70661 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 29 | 2BDDE72329D366EB00B70661 /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = ""; }; 30 | 2BDDE72529D366EB00B70661 /* LivePhotosViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LivePhotosViewController.swift; sourceTree = ""; }; 31 | 2BDDE72829D366EB00B70661 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 32 | 2BDDE72A29D366EC00B70661 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 33 | 2BDDE72D29D366EC00B70661 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 34 | 2BDDE72F29D366EC00B70661 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 35 | 2BE2BEA529E2BC8E00539863 /* AVAsset+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAsset+Extension.swift"; sourceTree = ""; }; 36 | 2BF9EFCA29DF3B2D00CF447F /* Toast.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Toast.swift; sourceTree = ""; }; 37 | /* End PBXFileReference section */ 38 | 39 | /* Begin PBXFrameworksBuildPhase section */ 40 | 2BDDE71B29D366EB00B70661 /* Frameworks */ = { 41 | isa = PBXFrameworksBuildPhase; 42 | buildActionMask = 2147483647; 43 | files = ( 44 | ); 45 | runOnlyForDeploymentPostprocessing = 0; 46 | }; 47 | /* End PBXFrameworksBuildPhase section */ 48 | 49 | /* Begin PBXGroup section */ 50 | 2B9E9F0829D752F400964111 /* ViewController */ = { 51 | isa = PBXGroup; 52 | children = ( 53 | 2B9E9F0429D752CE00964111 /* LivePhotosViewController+Disassemble.swift */, 54 | 2B9E9F0629D752ED00964111 /* LivePhotosViewController+Asemble.swift */, 55 | 2BDDE72529D366EB00B70661 /* LivePhotosViewController.swift */, 56 | ); 57 | path = ViewController; 58 | sourceTree = ""; 59 | }; 60 | 2B9E9F0929D7531200964111 /* LivePhotos */ = { 61 | isa = PBXGroup; 62 | children = ( 63 | 2B9E9F0229D5FC2B00964111 /* LivePhotos.swift */, 64 | 2BE2BEA529E2BC8E00539863 /* AVAsset+Extension.swift */, 65 | ); 66 | path = LivePhotos; 67 | sourceTree = ""; 68 | }; 69 | 2BDDE71529D366EB00B70661 = { 70 | isa = PBXGroup; 71 | children = ( 72 | 2BDDE72029D366EB00B70661 /* LivePhotos */, 73 | 2BDDE71F29D366EB00B70661 /* Products */, 74 | ); 75 | sourceTree = ""; 76 | }; 77 | 2BDDE71F29D366EB00B70661 /* Products */ = { 78 | isa = PBXGroup; 79 | children = ( 80 | 2BDDE71E29D366EB00B70661 /* LivePhotos.app */, 81 | ); 82 | name = Products; 83 | sourceTree = ""; 84 | }; 85 | 2BDDE72029D366EB00B70661 /* LivePhotos */ = { 86 | isa = PBXGroup; 87 | children = ( 88 | 2BDDE72F29D366EC00B70661 /* Info.plist */, 89 | 2BDDE72129D366EB00B70661 /* AppDelegate.swift */, 90 | 2BDDE72329D366EB00B70661 
/* SceneDelegate.swift */, 91 | 2BDDE72A29D366EC00B70661 /* Assets.xcassets */, 92 | 2BDDE72C29D366EC00B70661 /* LaunchScreen.storyboard */, 93 | 2BDDE72729D366EB00B70661 /* Main.storyboard */, 94 | 2B9E9F0929D7531200964111 /* LivePhotos */, 95 | 2B9E9F0829D752F400964111 /* ViewController */, 96 | 2BF9EFCC29DF451600CF447F /* Tools */, 97 | ); 98 | path = LivePhotos; 99 | sourceTree = ""; 100 | }; 101 | 2BF9EFCC29DF451600CF447F /* Tools */ = { 102 | isa = PBXGroup; 103 | children = ( 104 | 2BF9EFCA29DF3B2D00CF447F /* Toast.swift */, 105 | ); 106 | path = Tools; 107 | sourceTree = ""; 108 | }; 109 | /* End PBXGroup section */ 110 | 111 | /* Begin PBXNativeTarget section */ 112 | 2BDDE71D29D366EB00B70661 /* LivePhotos */ = { 113 | isa = PBXNativeTarget; 114 | buildConfigurationList = 2BDDE73229D366EC00B70661 /* Build configuration list for PBXNativeTarget "LivePhotos" */; 115 | buildPhases = ( 116 | 2BDDE71A29D366EB00B70661 /* Sources */, 117 | 2BDDE71B29D366EB00B70661 /* Frameworks */, 118 | 2BDDE71C29D366EB00B70661 /* Resources */, 119 | ); 120 | buildRules = ( 121 | ); 122 | dependencies = ( 123 | ); 124 | name = LivePhotos; 125 | productName = LivePhotos; 126 | productReference = 2BDDE71E29D366EB00B70661 /* LivePhotos.app */; 127 | productType = "com.apple.product-type.application"; 128 | }; 129 | /* End PBXNativeTarget section */ 130 | 131 | /* Begin PBXProject section */ 132 | 2BDDE71629D366EB00B70661 /* Project object */ = { 133 | isa = PBXProject; 134 | attributes = { 135 | BuildIndependentTargetsInParallel = 1; 136 | LastSwiftUpdateCheck = 1410; 137 | LastUpgradeCheck = 1410; 138 | TargetAttributes = { 139 | 2BDDE71D29D366EB00B70661 = { 140 | CreatedOnToolsVersion = 14.1; 141 | }; 142 | }; 143 | }; 144 | buildConfigurationList = 2BDDE71929D366EB00B70661 /* Build configuration list for PBXProject "LivePhotos" */; 145 | compatibilityVersion = "Xcode 14.0"; 146 | developmentRegion = en; 147 | hasScannedForEncodings = 0; 148 | knownRegions = ( 149 | en, 150 | Base, 151 | ); 152 | mainGroup = 2BDDE71529D366EB00B70661; 153 | productRefGroup = 2BDDE71F29D366EB00B70661 /* Products */; 154 | projectDirPath = ""; 155 | projectRoot = ""; 156 | targets = ( 157 | 2BDDE71D29D366EB00B70661 /* LivePhotos */, 158 | ); 159 | }; 160 | /* End PBXProject section */ 161 | 162 | /* Begin PBXResourcesBuildPhase section */ 163 | 2BDDE71C29D366EB00B70661 /* Resources */ = { 164 | isa = PBXResourcesBuildPhase; 165 | buildActionMask = 2147483647; 166 | files = ( 167 | 2BDDE72E29D366EC00B70661 /* LaunchScreen.storyboard in Resources */, 168 | 2BDDE72B29D366EC00B70661 /* Assets.xcassets in Resources */, 169 | 2BDDE72929D366EB00B70661 /* Main.storyboard in Resources */, 170 | ); 171 | runOnlyForDeploymentPostprocessing = 0; 172 | }; 173 | /* End PBXResourcesBuildPhase section */ 174 | 175 | /* Begin PBXSourcesBuildPhase section */ 176 | 2BDDE71A29D366EB00B70661 /* Sources */ = { 177 | isa = PBXSourcesBuildPhase; 178 | buildActionMask = 2147483647; 179 | files = ( 180 | 2B9E9F0329D5FC2B00964111 /* LivePhotos.swift in Sources */, 181 | 2BE2BEA629E2BC8E00539863 /* AVAsset+Extension.swift in Sources */, 182 | 2BDDE72629D366EB00B70661 /* LivePhotosViewController.swift in Sources */, 183 | 2B9E9F0529D752CE00964111 /* LivePhotosViewController+Disassemble.swift in Sources */, 184 | 2B9E9F0729D752ED00964111 /* LivePhotosViewController+Asemble.swift in Sources */, 185 | 2BDDE72229D366EB00B70661 /* AppDelegate.swift in Sources */, 186 | 2BDDE72429D366EB00B70661 /* SceneDelegate.swift in Sources */, 187 | 
2BF9EFCB29DF3B2D00CF447F /* Toast.swift in Sources */, 188 | ); 189 | runOnlyForDeploymentPostprocessing = 0; 190 | }; 191 | /* End PBXSourcesBuildPhase section */ 192 | 193 | /* Begin PBXVariantGroup section */ 194 | 2BDDE72729D366EB00B70661 /* Main.storyboard */ = { 195 | isa = PBXVariantGroup; 196 | children = ( 197 | 2BDDE72829D366EB00B70661 /* Base */, 198 | ); 199 | name = Main.storyboard; 200 | sourceTree = ""; 201 | }; 202 | 2BDDE72C29D366EC00B70661 /* LaunchScreen.storyboard */ = { 203 | isa = PBXVariantGroup; 204 | children = ( 205 | 2BDDE72D29D366EC00B70661 /* Base */, 206 | ); 207 | name = LaunchScreen.storyboard; 208 | sourceTree = ""; 209 | }; 210 | /* End PBXVariantGroup section */ 211 | 212 | /* Begin XCBuildConfiguration section */ 213 | 2BDDE73029D366EC00B70661 /* Debug */ = { 214 | isa = XCBuildConfiguration; 215 | buildSettings = { 216 | ALWAYS_SEARCH_USER_PATHS = NO; 217 | CLANG_ANALYZER_NONNULL = YES; 218 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 219 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; 220 | CLANG_ENABLE_MODULES = YES; 221 | CLANG_ENABLE_OBJC_ARC = YES; 222 | CLANG_ENABLE_OBJC_WEAK = YES; 223 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 224 | CLANG_WARN_BOOL_CONVERSION = YES; 225 | CLANG_WARN_COMMA = YES; 226 | CLANG_WARN_CONSTANT_CONVERSION = YES; 227 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 228 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 229 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 230 | CLANG_WARN_EMPTY_BODY = YES; 231 | CLANG_WARN_ENUM_CONVERSION = YES; 232 | CLANG_WARN_INFINITE_RECURSION = YES; 233 | CLANG_WARN_INT_CONVERSION = YES; 234 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 235 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 236 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 237 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 238 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 239 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 240 | CLANG_WARN_STRICT_PROTOTYPES = YES; 241 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 242 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 243 | CLANG_WARN_UNREACHABLE_CODE = YES; 244 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 245 | COPY_PHASE_STRIP = NO; 246 | DEBUG_INFORMATION_FORMAT = dwarf; 247 | ENABLE_STRICT_OBJC_MSGSEND = YES; 248 | ENABLE_TESTABILITY = YES; 249 | GCC_C_LANGUAGE_STANDARD = gnu11; 250 | GCC_DYNAMIC_NO_PIC = NO; 251 | GCC_NO_COMMON_BLOCKS = YES; 252 | GCC_OPTIMIZATION_LEVEL = 0; 253 | GCC_PREPROCESSOR_DEFINITIONS = ( 254 | "DEBUG=1", 255 | "$(inherited)", 256 | ); 257 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 258 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 259 | GCC_WARN_UNDECLARED_SELECTOR = YES; 260 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 261 | GCC_WARN_UNUSED_FUNCTION = YES; 262 | GCC_WARN_UNUSED_VARIABLE = YES; 263 | IPHONEOS_DEPLOYMENT_TARGET = 16.1; 264 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; 265 | MTL_FAST_MATH = YES; 266 | ONLY_ACTIVE_ARCH = YES; 267 | SDKROOT = iphoneos; 268 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; 269 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 270 | }; 271 | name = Debug; 272 | }; 273 | 2BDDE73129D366EC00B70661 /* Release */ = { 274 | isa = XCBuildConfiguration; 275 | buildSettings = { 276 | ALWAYS_SEARCH_USER_PATHS = NO; 277 | CLANG_ANALYZER_NONNULL = YES; 278 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 279 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; 280 | CLANG_ENABLE_MODULES = YES; 281 | CLANG_ENABLE_OBJC_ARC = YES; 282 | CLANG_ENABLE_OBJC_WEAK = YES; 283 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 284 | 
CLANG_WARN_BOOL_CONVERSION = YES; 285 | CLANG_WARN_COMMA = YES; 286 | CLANG_WARN_CONSTANT_CONVERSION = YES; 287 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 288 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 289 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 290 | CLANG_WARN_EMPTY_BODY = YES; 291 | CLANG_WARN_ENUM_CONVERSION = YES; 292 | CLANG_WARN_INFINITE_RECURSION = YES; 293 | CLANG_WARN_INT_CONVERSION = YES; 294 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 295 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 296 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 297 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 298 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 299 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 300 | CLANG_WARN_STRICT_PROTOTYPES = YES; 301 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 302 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 303 | CLANG_WARN_UNREACHABLE_CODE = YES; 304 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 305 | COPY_PHASE_STRIP = NO; 306 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 307 | ENABLE_NS_ASSERTIONS = NO; 308 | ENABLE_STRICT_OBJC_MSGSEND = YES; 309 | GCC_C_LANGUAGE_STANDARD = gnu11; 310 | GCC_NO_COMMON_BLOCKS = YES; 311 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 312 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 313 | GCC_WARN_UNDECLARED_SELECTOR = YES; 314 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 315 | GCC_WARN_UNUSED_FUNCTION = YES; 316 | GCC_WARN_UNUSED_VARIABLE = YES; 317 | IPHONEOS_DEPLOYMENT_TARGET = 16.1; 318 | MTL_ENABLE_DEBUG_INFO = NO; 319 | MTL_FAST_MATH = YES; 320 | SDKROOT = iphoneos; 321 | SWIFT_COMPILATION_MODE = wholemodule; 322 | SWIFT_OPTIMIZATION_LEVEL = "-O"; 323 | VALIDATE_PRODUCT = YES; 324 | }; 325 | name = Release; 326 | }; 327 | 2BDDE73329D366EC00B70661 /* Debug */ = { 328 | isa = XCBuildConfiguration; 329 | buildSettings = { 330 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 331 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 332 | CODE_SIGN_STYLE = Automatic; 333 | CURRENT_PROJECT_VERSION = 1; 334 | DEVELOPMENT_TEAM = Q2T8TN4ZW6; 335 | GENERATE_INFOPLIST_FILE = YES; 336 | INFOPLIST_FILE = LivePhotos/Info.plist; 337 | INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = QwQ; 338 | INFOPLIST_KEY_NSPhotoLibraryUsageDescription = QwQ; 339 | INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; 340 | INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen; 341 | INFOPLIST_KEY_UIMainStoryboardFile = Main; 342 | INFOPLIST_KEY_UISupportedInterfaceOrientations = UIInterfaceOrientationPortrait; 343 | LD_RUNPATH_SEARCH_PATHS = ( 344 | "$(inherited)", 345 | "@executable_path/Frameworks", 346 | ); 347 | MARKETING_VERSION = 1.0; 348 | PRODUCT_BUNDLE_IDENTIFIER = layer.practice.LivePhotos; 349 | PRODUCT_NAME = "$(TARGET_NAME)"; 350 | SWIFT_EMIT_LOC_STRINGS = YES; 351 | SWIFT_VERSION = 5.0; 352 | TARGETED_DEVICE_FAMILY = "1,2"; 353 | }; 354 | name = Debug; 355 | }; 356 | 2BDDE73429D366EC00B70661 /* Release */ = { 357 | isa = XCBuildConfiguration; 358 | buildSettings = { 359 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 360 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 361 | CODE_SIGN_STYLE = Automatic; 362 | CURRENT_PROJECT_VERSION = 1; 363 | DEVELOPMENT_TEAM = Q2T8TN4ZW6; 364 | GENERATE_INFOPLIST_FILE = YES; 365 | INFOPLIST_FILE = LivePhotos/Info.plist; 366 | INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = QwQ; 367 | INFOPLIST_KEY_NSPhotoLibraryUsageDescription = QwQ; 368 | INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; 369 | INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen; 370 
| INFOPLIST_KEY_UIMainStoryboardFile = Main;
371 |                 INFOPLIST_KEY_UISupportedInterfaceOrientations = UIInterfaceOrientationPortrait;
372 |                 LD_RUNPATH_SEARCH_PATHS = (
373 |                     "$(inherited)",
374 |                     "@executable_path/Frameworks",
375 |                 );
376 |                 MARKETING_VERSION = 1.0;
377 |                 PRODUCT_BUNDLE_IDENTIFIER = layer.practice.LivePhotos;
378 |                 PRODUCT_NAME = "$(TARGET_NAME)";
379 |                 SWIFT_EMIT_LOC_STRINGS = YES;
380 |                 SWIFT_VERSION = 5.0;
381 |                 TARGETED_DEVICE_FAMILY = "1,2";
382 |             };
383 |             name = Release;
384 |         };
385 | /* End XCBuildConfiguration section */
386 | 
387 | /* Begin XCConfigurationList section */
388 |         2BDDE71929D366EB00B70661 /* Build configuration list for PBXProject "LivePhotos" */ = {
389 |             isa = XCConfigurationList;
390 |             buildConfigurations = (
391 |                 2BDDE73029D366EC00B70661 /* Debug */,
392 |                 2BDDE73129D366EC00B70661 /* Release */,
393 |             );
394 |             defaultConfigurationIsVisible = 0;
395 |             defaultConfigurationName = Release;
396 |         };
397 |         2BDDE73229D366EC00B70661 /* Build configuration list for PBXNativeTarget "LivePhotos" */ = {
398 |             isa = XCConfigurationList;
399 |             buildConfigurations = (
400 |                 2BDDE73329D366EC00B70661 /* Debug */,
401 |                 2BDDE73429D366EC00B70661 /* Release */,
402 |             );
403 |             defaultConfigurationIsVisible = 0;
404 |             defaultConfigurationName = Release;
405 |         };
406 | /* End XCConfigurationList section */
407 |     };
408 |     rootObject = 2BDDE71629D366EB00B70661 /* Project object */;
409 | }
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |     <key>IDEDidComputeMac32BitWarning</key>
6 |     <true/>
7 | </dict>
8 | </plist>
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos.xcodeproj/project.xcworkspace/xcuserdata/bytedance.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LLLLLayer/Live-Photos/5998174e393b2b55673c6f9dcdd6b1eb461ae5ab/LivePhotos/LivePhotos.xcodeproj/project.xcworkspace/xcuserdata/bytedance.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos.xcodeproj/xcuserdata/bytedance.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist:
--------------------------------------------------------------------------------
[XML breakpoint list; the markup was stripped when this dump was generated and the breakpoint locations are not recoverable.]
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos.xcodeproj/xcuserdata/bytedance.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |     <key>SchemeUserState</key>
6 |     <dict>
7 |         <key>LivePhotos.xcscheme_^#shared#^_</key>
8 |         <dict>
9 |             <key>orderHint</key>
10 |             <integer>0</integer>
11 |         </dict>
12 |     </dict>
13 | </dict>
14 | </plist>
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | //  AppDelegate.swift
3 | //  LivePhotos
4 | //
5 | //  Created by yangjie.layer on 2023/3/29.
6 | //
7 | 
8 | import UIKit
9 | 
10 | @main
11 | class AppDelegate: UIResponder, UIApplicationDelegate {
12 | 
13 |     func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
14 |         // Override point for customization after application launch.
15 |         return true
16 |     }
17 | 
18 |     // MARK: UISceneSession Lifecycle
19 | 
20 |     func application(_ application: UIApplication, configurationForConnecting connectingSceneSession: UISceneSession, options: UIScene.ConnectionOptions) -> UISceneConfiguration {
21 |         // Called when a new scene session is being created.
22 |         // Use this method to select a configuration to create the new scene with.
23 |         return UISceneConfiguration(name: "Default Configuration", sessionRole: connectingSceneSession.role)
24 |     }
25 | 
26 |     func application(_ application: UIApplication, didDiscardSceneSessions sceneSessions: Set<UISceneSession>) {
27 |         // Called when the user discards a scene session.
28 |         // If any sessions were discarded while the application was not running, this will be called shortly after application:didFinishLaunchingWithOptions.
29 |         // Use this method to release any resources that were specific to the discarded scenes, as they will not return.
30 |     }
31 | }
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 |   "colors" : [
3 |     {
4 |       "idiom" : "universal"
5 |     }
6 |   ],
7 |   "info" : {
8 |     "author" : "xcode",
9 |     "version" : 1
10 |   }
11 | }
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 |   "images" : [
3 |     {
4 |       "idiom" : "universal",
5 |       "platform" : "ios",
6 |       "size" : "1024x1024"
7 |     }
8 |   ],
9 |   "info" : {
10 |     "author" : "xcode",
11 |     "version" : 1
12 |   }
13 | }
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 |   "info" : {
3 |     "author" : "xcode",
4 |     "version" : 1
5 |   }
6 | }
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
[Interface Builder XML; the markup was stripped when this dump was generated. This is the app's launch screen scene.]
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
[Interface Builder XML; the markup was stripped when this dump was generated. It hosts the LivePhotosViewController scene that Info.plist references via UISceneStoryboardFile.]
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |     <key>UIApplicationSceneManifest</key>
6 |     <dict>
7 |         <key>UIApplicationSupportsMultipleScenes</key>
8 |         <false/>
9 |         <key>UISceneConfigurations</key>
10 |         <dict>
11 |             <key>UIWindowSceneSessionRoleApplication</key>
12 |             <array>
13 |                 <dict>
14 |                     <key>UISceneConfigurationName</key>
15 |                     <string>Default Configuration</string>
16 |                     <key>UISceneDelegateClassName</key>
17 |                     <string>$(PRODUCT_MODULE_NAME).SceneDelegate</string>
18 |                     <key>UISceneStoryboardFile</key>
19 |                     <string>Main</string>
20 |                 </dict>
21 |             </array>
22 |         </dict>
23 |     </dict>
24 | </dict>
25 | </plist>
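
A quick sketch of how the AVAsset helpers in the next file can be driven. This is an editorial example, not part of the project; `inspectVideo` and its URL parameter are hypothetical.

    import AVFoundation

    // Hypothetical call site for the helpers declared in AVAsset+Extension.swift below.
    func inspectVideo(at url: URL) async throws {
        let asset = AVURLAsset(url: url)
        let estimated = try await asset.frameCount()        // duration (s) × nominal frame rate
        let exact = try await asset.frameCount(exact: true) // decodes every sample buffer, slower
        // One-frame range at the midpoint — later used as the Live Photo still-image time.
        let stillRange = try await asset.makeStillImageTimeRange(percent: 0.5, inFrameCount: exact)
        print(estimated, exact, stillRange.start.seconds, stillRange.duration.seconds)
    }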
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos/LivePhotos/AVAsset+Extension.swift:
--------------------------------------------------------------------------------
1 | //
2 | //  AVAsset+Extension.swift
3 | //  LivePhotos
4 | //
5 | //  Created by yangjie.layer on 2023/4/9.
6 | //
7 | 
8 | import UIKit
9 | import AVFoundation
10 | 
11 | extension AVAsset {
12 |     /// Number of video frames. With `exact == false` this is a cheap estimate
13 |     /// (duration × nominal frame rate); with `exact == true` every sample buffer is decoded and counted.
14 |     func frameCount(exact: Bool = false) async throws -> Int {
15 |         let videoReader = try AVAssetReader(asset: self)
16 |         guard let videoTrack = try await self.loadTracks(withMediaType: .video).first else { return 0 }
17 |         if !exact {
18 |             async let duration = CMTimeGetSeconds(self.load(.duration))
19 |             async let nominalFrameRate = Float64(videoTrack.load(.nominalFrameRate))
20 |             return try await Int(duration * nominalFrameRate)
21 |         }
22 |         let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil)
23 |         videoReader.add(videoReaderOutput)
24 |         videoReader.startReading()
25 |         var frameCount = 0
26 |         while let _ = videoReaderOutput.copyNextSampleBuffer() {
27 |             frameCount += 1
28 |         }
29 |         videoReader.cancelReading()
30 |         return frameCount
31 |     }
32 | 
33 |     /// A one-frame time range located `percent` of the way through the asset,
34 |     /// used as the Live Photo's still-image time.
35 |     func makeStillImageTimeRange(percent: Float, inFrameCount: Int = 0) async throws -> CMTimeRange {
36 |         var time = try await self.load(.duration)
37 |         var frameCount = inFrameCount
38 |         if frameCount == 0 {
39 |             frameCount = try await self.frameCount(exact: true)
40 |         }
41 |         let duration = Int64(Float(time.value) / Float(frameCount))
42 |         time.value = Int64(Float(time.value) * percent)
43 |         return CMTimeRangeMake(start: time, duration: CMTimeMake(value: duration, timescale: time.timescale))
44 |     }
45 | }
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos/LivePhotos/LivePhotos.swift:
--------------------------------------------------------------------------------
1 | //
2 | //  LivePhotos.swift
3 | //  LivePhotos
4 | //
5 | //  Created by yangjie.layer on 2023/3/31.
6 | // 7 | 8 | import UIKit 9 | import Photos 10 | import AVFoundation 11 | import MobileCoreServices 12 | 13 | enum LivePhotosError: Error { 14 | case noCachesDirectory 15 | } 16 | 17 | enum LivePhotosDisassembleError: Error { 18 | case requestDataFailed 19 | case noFilenameExtension 20 | } 21 | 22 | enum LivePhotosAssembleError: Error { 23 | case addPhotoIdentifierFailed 24 | case createDestinationImageFailed 25 | case writingVideoFailed 26 | case writingAudioFailed 27 | case requestFailed 28 | case loadTracksFailed 29 | } 30 | 31 | actor LivePhotos { 32 | static let sharedInstance = LivePhotos() 33 | } 34 | 35 | // MARK: - disassemble 36 | 37 | extension LivePhotos { 38 | 39 | func disassemble(livePhoto: PHLivePhoto) async throws -> (URL, URL) { 40 | let assetResources = PHAssetResource.assetResources(for: livePhoto) 41 | let list = try await withThrowingTaskGroup(of: (PHAssetResource, Data).self) { taskGroup in 42 | for assetResource in assetResources { 43 | taskGroup.addTask { 44 | return try await withCheckedThrowingContinuation { continuation in 45 | let dataBuffer = NSMutableData() 46 | let options = PHAssetResourceRequestOptions() 47 | options.isNetworkAccessAllowed = true 48 | PHAssetResourceManager.default().requestData(for: assetResource, options: options) { data in 49 | dataBuffer.append(data) 50 | } completionHandler: { error in 51 | guard error == nil else { 52 | continuation.resume(throwing: LivePhotosDisassembleError.requestDataFailed) 53 | return 54 | } 55 | continuation.resume(returning: (assetResource, dataBuffer as Data)) 56 | } 57 | } 58 | } 59 | } 60 | var results: [(PHAssetResource, Data)] = [] 61 | for try await result in taskGroup { 62 | results.append(result) 63 | } 64 | return results 65 | } 66 | guard let photo = (list.first { $0.0.type == .photo }), 67 | let video = (list.first { $0.0.type == .pairedVideo }) else { 68 | throw LivePhotosDisassembleError.requestDataFailed 69 | } 70 | let cachesDirectory = try cachesDirectory() 71 | let photoURL = try save(photo.0, data: photo.1, to: cachesDirectory) 72 | let videoURL = try save(video.0, data: video.1, to: cachesDirectory) 73 | return (photoURL, videoURL) 74 | } 75 | 76 | private func save(_ assetResource: PHAssetResource, data: Data, to url: URL) throws -> URL { 77 | guard let ext = UTType(assetResource.uniformTypeIdentifier)?.preferredFilenameExtension else { 78 | throw LivePhotosDisassembleError.noFilenameExtension 79 | } 80 | let destinationURL = url.appendingPathComponent(NSUUID().uuidString).appendingPathExtension(ext as String) 81 | try data.write(to: destinationURL, options: [Data.WritingOptions.atomic]) 82 | return destinationURL 83 | } 84 | } 85 | 86 | // MARK: - Assemble 87 | 88 | extension LivePhotos { 89 | 90 | func assemble(photoURL: URL, videoURL: URL, progress: ((Float) -> Void)? 
= nil) async throws -> (PHLivePhoto, (URL, URL)) { 91 | let cacheDirectory = try cachesDirectory() 92 | let identifier = UUID().uuidString 93 | let pairedPhotoURL = try addIdentifier( 94 | identifier, 95 | fromPhotoURL: photoURL, 96 | to: cacheDirectory.appendingPathComponent(identifier).appendingPathExtension("jpg")) 97 | let pairedVideoURL = try await addIdentifier( 98 | identifier, 99 | fromVideoURL: videoURL, 100 | to: cacheDirectory.appendingPathComponent(identifier).appendingPathExtension("mov"), 101 | progress: progress) 102 | 103 | let livePhoto = try await withCheckedThrowingContinuation({ continuation in 104 | PHLivePhoto.request( 105 | withResourceFileURLs: [pairedPhotoURL, pairedVideoURL], 106 | placeholderImage: nil, 107 | targetSize: .zero, 108 | contentMode: .aspectFill) { livePhoto, info in 109 | if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded { 110 | return 111 | } 112 | if let livePhoto { 113 | continuation.resume(returning: livePhoto) 114 | } else { 115 | continuation.resume(throwing: LivePhotosAssembleError.requestFailed) 116 | } 117 | } 118 | }) 119 | return (livePhoto, (pairedPhotoURL, pairedVideoURL)) 120 | } 121 | 122 | private func addIdentifier(_ identifier: String, fromPhotoURL photoURL: URL, to destinationURL: URL) throws -> URL { 123 | guard let imageSource = CGImageSourceCreateWithURL(photoURL as CFURL, nil), 124 | let imageRef = CGImageSourceCreateImageAtIndex(imageSource, 0, nil), 125 | var imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [AnyHashable : Any] else { 126 | throw LivePhotosAssembleError.addPhotoIdentifierFailed 127 | } 128 | let identifierInfo = ["17" : identifier] 129 | imageProperties[kCGImagePropertyMakerAppleDictionary] = identifierInfo 130 | guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, UTType.jpeg.identifier as CFString, 1, nil) else { 131 | throw LivePhotosAssembleError.createDestinationImageFailed 132 | } 133 | CGImageDestinationAddImage(imageDestination, imageRef, imageProperties as CFDictionary) 134 | if CGImageDestinationFinalize(imageDestination) { 135 | return destinationURL 136 | } else { 137 | throw LivePhotosAssembleError.createDestinationImageFailed 138 | } 139 | } 140 | 141 | private func addIdentifier( 142 | _ identifier: String, 143 | fromVideoURL videoURL: URL, 144 | to destinationURL: URL, 145 | progress: ((Float) -> Void)? 
= nil 146 | ) async throws -> URL { 147 | 148 | let asset = AVURLAsset(url: videoURL) 149 | // --- Reader --- 150 | 151 | // Create the video reader 152 | let videoReader = try AVAssetReader(asset: asset) 153 | 154 | // Create the video reader output 155 | guard let videoTrack = try await asset.loadTracks(withMediaType: .video).first else { throw LivePhotosAssembleError.loadTracksFailed } 156 | let videoReaderOutputSettings : [String : Any] = [kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_32BGRA] 157 | let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderOutputSettings) 158 | 159 | // Add the video reader output to video reader 160 | videoReader.add(videoReaderOutput) 161 | 162 | // Create the audio reader 163 | let audioReader = try AVAssetReader(asset: asset) 164 | 165 | // Create the audio reader output 166 | guard let audioTrack = try await asset.loadTracks(withMediaType: .audio).first else { throw LivePhotosAssembleError.loadTracksFailed } 167 | let audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil) 168 | 169 | // Add the audio reader output to audioReader 170 | audioReader.add(audioReaderOutput) 171 | 172 | // --- Writer --- 173 | 174 | // Create the asset writer 175 | let assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov) 176 | 177 | // Create the video writer input 178 | let videoWriterInputOutputSettings : [String : Any] = [ 179 | AVVideoCodecKey : AVVideoCodecType.h264, 180 | AVVideoWidthKey : try await videoTrack.load(.naturalSize).width, 181 | AVVideoHeightKey : try await videoTrack.load(.naturalSize).height] 182 | let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoWriterInputOutputSettings) 183 | videoWriterInput.transform = try await videoTrack.load(.preferredTransform) 184 | videoWriterInput.expectsMediaDataInRealTime = true 185 | 186 | // Add the video writer input to asset writer 187 | assetWriter.add(videoWriterInput) 188 | 189 | // Create the audio writer input 190 | let audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil) 191 | audioWriterInput.expectsMediaDataInRealTime = false 192 | 193 | // Add the audio writer input to asset writer 194 | assetWriter.add(audioWriterInput) 195 | 196 | // Create the identifier metadata 197 | let identifierMetadata = metadataItem(for: identifier) 198 | // Create still image time metadata track 199 | let stillImageTimeMetadataAdaptor = stillImageTimeMetadataAdaptor() 200 | assetWriter.metadata = [identifierMetadata] 201 | assetWriter.add(stillImageTimeMetadataAdaptor.assetWriterInput) 202 | 203 | // Start the asset writer 204 | assetWriter.startWriting() 205 | assetWriter.startSession(atSourceTime: .zero) 206 | 207 | // Add still image metadata 208 | let frameCount = try await asset.frameCount() 209 | let stillImagePercent: Float = 0.5 210 | await stillImageTimeMetadataAdaptor.append( 211 | AVTimedMetadataGroup( 212 | items: [stillImageTimeMetadataItem()], 213 | timeRange: try asset.makeStillImageTimeRange(percent: stillImagePercent, inFrameCount: frameCount))) 214 | 215 | async let writingVideoFinished: Bool = withCheckedThrowingContinuation { continuation in 216 | Task { 217 | videoReader.startReading() 218 | var currentFrameCount = 0 219 | videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "videoWriterInputQueue")) { 220 | while videoWriterInput.isReadyForMoreMediaData { 221 | if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() { 222 | 
currentFrameCount += 1 223 | if let progress { 224 | let progressValue = min(Float(currentFrameCount)/Float(frameCount), 1.0) 225 | Task { @MainActor in 226 | progress(progressValue) 227 | } 228 | } 229 | if !videoWriterInput.append(sampleBuffer) { 230 | videoReader.cancelReading() 231 | continuation.resume(throwing: LivePhotosAssembleError.writingVideoFailed) 232 | return 233 | } 234 | } else { 235 | videoWriterInput.markAsFinished() 236 | continuation.resume(returning: true) 237 | return 238 | } 239 | } 240 | } 241 | } 242 | } 243 | 244 | async let writingAudioFinished: Bool = withCheckedThrowingContinuation { continuation in 245 | Task { 246 | audioReader.startReading() 247 | audioWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "audioWriterInputQueue")) { 248 | while audioWriterInput.isReadyForMoreMediaData { 249 | if let sampleBuffer = audioReaderOutput.copyNextSampleBuffer() { 250 | if !audioWriterInput.append(sampleBuffer) { 251 | audioReader.cancelReading() 252 | continuation.resume(throwing: LivePhotosAssembleError.writingAudioFailed) 253 | return 254 | } 255 | } else { 256 | audioWriterInput.markAsFinished() 257 | continuation.resume(returning: true) 258 | return 259 | } 260 | } 261 | } 262 | } 263 | } 264 | 265 | await (_, _) = try (writingVideoFinished, writingAudioFinished) 266 | await assetWriter.finishWriting() 267 | return destinationURL 268 | } 269 | 270 | private func metadataItem(for identifier: String) -> AVMetadataItem { 271 | let item = AVMutableMetadataItem() 272 | item.keySpace = AVMetadataKeySpace.quickTimeMetadata // "mdta" 273 | item.dataType = "com.apple.metadata.datatype.UTF-8" 274 | item.key = AVMetadataKey.quickTimeMetadataKeyContentIdentifier as any NSCopying & NSObjectProtocol // "com.apple.quicktime.content.identifier" 275 | item.value = identifier as any NSCopying & NSObjectProtocol 276 | return item 277 | } 278 | 279 | private func stillImageTimeMetadataAdaptor() -> AVAssetWriterInputMetadataAdaptor { 280 | let quickTimeMetadataKeySpace = AVMetadataKeySpace.quickTimeMetadata.rawValue // "mdta" 281 | let stillImageTimeKey = "com.apple.quicktime.still-image-time" 282 | let spec: [NSString : Any] = [ 283 | kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString : "\(quickTimeMetadataKeySpace)/\(stillImageTimeKey)", 284 | kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString : kCMMetadataBaseDataType_SInt8] 285 | var desc : CMFormatDescription? = nil 286 | CMMetadataFormatDescriptionCreateWithMetadataSpecifications( 287 | allocator: kCFAllocatorDefault, 288 | metadataType: kCMMetadataFormatType_Boxed, 289 | metadataSpecifications: [spec] as CFArray, 290 | formatDescriptionOut: &desc) 291 | let input = AVAssetWriterInput( 292 | mediaType: .metadata, 293 | outputSettings: nil, 294 | sourceFormatHint: desc) 295 | return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input) 296 | } 297 | 298 | private func stillImageTimeMetadataItem() -> AVMetadataItem { 299 | let item = AVMutableMetadataItem() 300 | item.key = "com.apple.quicktime.still-image-time" as any NSCopying & NSObjectProtocol 301 | item.keySpace = AVMetadataKeySpace.quickTimeMetadata // "mdta" 302 | item.value = 0 as any NSCopying & NSObjectProtocol 303 | item.dataType = kCMMetadataBaseDataType_SInt8 as String // "com.apple.metadata.datatype.int8" 304 | return item 305 | } 306 | } 307 | 308 | extension LivePhotos { 309 | 310 | private func cachesDirectory() throws -> URL { 311 | if let cachesDirectoryURL = try? 
FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: false) {
312 |             let cachesDirectory = cachesDirectoryURL.appendingPathComponent("livePhotos", isDirectory: true)
313 |             // fileExists(atPath:) expects a file-system path; URL.absoluteString would include the "file://" scheme and never match.
314 |             if !FileManager.default.fileExists(atPath: cachesDirectory.path) {
315 |                 try? FileManager.default.createDirectory(at: cachesDirectory, withIntermediateDirectories: true, attributes: nil)
316 |             }
317 |             return cachesDirectory
318 |         }
319 |         throw LivePhotosError.noCachesDirectory
320 |     }
321 | }
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos/SceneDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | //  SceneDelegate.swift
3 | //  LivePhotos
4 | //
5 | //  Created by yangjie.layer on 2023/3/29.
6 | //
7 | 
8 | import UIKit
9 | 
10 | class SceneDelegate: UIResponder, UIWindowSceneDelegate {
11 | 
12 |     var window: UIWindow?
13 | 
14 |     func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) {
15 |         // Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`.
16 |         // If using a storyboard, the `window` property will automatically be initialized and attached to the scene.
17 |         // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead).
18 |         guard let _ = (scene as? UIWindowScene) else { return }
19 |     }
20 | 
21 |     func sceneDidDisconnect(_ scene: UIScene) {
22 |         // Called as the scene is being released by the system.
23 |         // This occurs shortly after the scene enters the background, or when its session is discarded.
24 |         // Release any resources associated with this scene that can be re-created the next time the scene connects.
25 |         // The scene may re-connect later, as its session was not necessarily discarded (see `application:didDiscardSceneSessions` instead).
26 |     }
27 | 
28 |     func sceneDidBecomeActive(_ scene: UIScene) {
29 |         // Called when the scene has moved from an inactive state to an active state.
30 |         // Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive.
31 |     }
32 | 
33 |     func sceneWillResignActive(_ scene: UIScene) {
34 |         // Called when the scene will move from an active state to an inactive state.
35 |         // This may occur due to temporary interruptions (ex. an incoming phone call).
36 |     }
37 | 
38 |     func sceneWillEnterForeground(_ scene: UIScene) {
39 |         // Called as the scene transitions from the background to the foreground.
40 |         // Use this method to undo the changes made on entering the background.
41 |     }
42 | 
43 |     func sceneDidEnterBackground(_ scene: UIScene) {
44 |         // Called as the scene transitions from the foreground to the background.
45 |         // Use this method to save data, release shared resources, and store enough scene-specific state information
46 |         // to restore the scene back to its current state.
47 |     }
48 | }
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos/Tools/Toast.swift:
--------------------------------------------------------------------------------
1 | //
2 | //  Toast.swift
3 | //  LivePhotos
4 | //
5 | //  Created by yangjie.layer on 2023/4/7.
6 | // 7 | 8 | import UIKit 9 | 10 | enum Toast { 11 | static func show(_ text: String) { 12 | Task { @MainActor in 13 | guard let windowScene = UIApplication.shared.connectedScenes.first(where: { $0 is UIWindowScene }) as? UIWindowScene, 14 | let keyWindow = windowScene.windows.first(where: { $0.isKeyWindow }), 15 | let rootViewController = keyWindow.rootViewController else { 16 | return 17 | } 18 | let topViewController: UIViewController? 19 | if let presentedViewController = rootViewController.presentedViewController { 20 | topViewController = presentedViewController 21 | } else if let navigationController = rootViewController as? UINavigationController { 22 | topViewController = navigationController.topViewController 23 | } else if let tabBarController = rootViewController as? UITabBarController { 24 | topViewController = tabBarController.selectedViewController 25 | } else { 26 | topViewController = rootViewController 27 | } 28 | guard let view = topViewController?.view else { 29 | return 30 | } 31 | let toastLabel = UILabel() 32 | toastLabel.text = text 33 | toastLabel.textAlignment = .center 34 | toastLabel.backgroundColor = .label 35 | toastLabel.textColor = .systemBackground 36 | toastLabel.layer.cornerRadius = 4.0 37 | toastLabel.layer.masksToBounds = true 38 | toastLabel.sizeToFit() 39 | view.addSubview(toastLabel) 40 | toastLabel.translatesAutoresizingMaskIntoConstraints = false 41 | NSLayoutConstraint.activate([ 42 | toastLabel.centerXAnchor.constraint(equalTo: view.centerXAnchor), 43 | toastLabel.centerYAnchor.constraint(equalTo: view.centerYAnchor), 44 | toastLabel.widthAnchor.constraint(equalToConstant: toastLabel.frame.width + 10), 45 | toastLabel.heightAnchor.constraint(equalToConstant: toastLabel.frame.height + 10) 46 | ]) 47 | DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { 48 | toastLabel.removeFromSuperview() 49 | } 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /LivePhotos/LivePhotos/ViewController/LivePhotosViewController+Asemble.swift: -------------------------------------------------------------------------------- 1 | // 2 | // LivePhotosViewController+Asemble.swift 3 | // LivePhotos 4 | // 5 | // Created by yangjie.layer on 2023/4/1. 6 | // 7 | 8 | import UIKit 9 | import PhotosUI 10 | 11 | // MARK: - Asemble Action 12 | 13 | extension LivePhotosViewController { 14 | 15 | func saveButtonDidSelect(_ sender: UIButton) { 16 | guard let (photoURL, videURL) = asembleURLs.value, 17 | let photoURL, let videURL else { 18 | return 19 | } 20 | PHPhotoLibrary.shared().performChanges({ 21 | let creationRequest = PHAssetCreationRequest.forAsset() 22 | let options = PHAssetResourceCreationOptions() 23 | creationRequest.addResource(with: PHAssetResourceType.photo, fileURL: photoURL, options: options) 24 | creationRequest.addResource(with: PHAssetResourceType.pairedVideo, fileURL: videURL, options: options) 25 | }, completionHandler: { success, _ in 26 | Toast.show(success ? 
"Saved successfully" : "An error occurred") 27 | }) 28 | } 29 | 30 | func pickPhotoButtonDidSelect(_ sender: UIButton) { 31 | pick(.any(of: [.images])) 32 | } 33 | 34 | func pickVideoButtonDidSelect(_ sender: UIButton) { 35 | pick(.any(of: [.videos])) 36 | } 37 | 38 | func assemblePicker(_ picker: PHPickerViewController, didFinishPicking results: [PHPickerResult]) { 39 | guard let itemProvider = results.first?.itemProvider else { 40 | return 41 | } 42 | if itemProvider.canLoadObject(ofClass: UIImage.self) { 43 | itemProvider.loadObject(ofClass: UIImage.self) { [weak self] photo, error in 44 | guard let self, let photo = photo as? UIImage else { 45 | return 46 | } 47 | Task { @MainActor in 48 | self.leftImageView.image = photo 49 | } 50 | do { 51 | let cachesDirectory = try self.cachesDirectory() 52 | let targetURL = cachesDirectory.appendingPathComponent(NSUUID().uuidString).appendingPathExtension("jpg") 53 | try photo.pngData()?.write(to: targetURL) 54 | self.photoURL.send(targetURL) 55 | } catch { 56 | Toast.show("An error occurred") 57 | } 58 | } 59 | } else if itemProvider.hasItemConformingToTypeIdentifier(UTType.movie.identifier) { 60 | itemProvider.loadFileRepresentation(forTypeIdentifier: itemProvider.registeredTypeIdentifiers.first!) { [weak self] url, error in 61 | guard let self, let url = url else { 62 | return 63 | } 64 | do { 65 | let cachesDirectory = try self.cachesDirectory() 66 | let targetURL = cachesDirectory.appendingPathComponent(NSUUID().uuidString).appendingPathExtension(url.lastPathComponent) 67 | try FileManager.default.copyItem(at: url, to: targetURL) 68 | self.videoURL.send(targetURL) 69 | self.playVideo(targetURL) 70 | } catch { 71 | Toast.show("An error occurred") 72 | } 73 | } 74 | } 75 | } 76 | 77 | private func cachesDirectory() throws -> URL { 78 | let cachesDirectoryURL = try FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: false) 79 | let cachesDirectory = cachesDirectoryURL.appendingPathComponent("asemble", isDirectory: true) 80 | if !FileManager.default.fileExists(atPath: cachesDirectory.absoluteString) { 81 | try FileManager.default.createDirectory(at: cachesDirectory, withIntermediateDirectories: true, attributes: nil) 82 | } 83 | return cachesDirectory 84 | } 85 | } 86 | 87 | extension LivePhotosViewController { 88 | 89 | func setupAsembleSubscibers() { 90 | photoURL 91 | .combineLatest(videoURL) 92 | .receive(on: DispatchQueue.main) 93 | .sink { [weak self] photoURL, videoURL in 94 | guard let self, 95 | self.style.value == .assemble, 96 | let photoURL = photoURL, 97 | let videoURL = videoURL else { 98 | return 99 | } 100 | self.assemble(photo: photoURL, video: videoURL) 101 | }.store(in: &subscriptions) 102 | } 103 | 104 | func assemble(photo: URL, video: URL) { 105 | progressView.progress = 0 106 | Task { 107 | let (livePhoto, (photoURL, videoURL)) = try await LivePhotos.sharedInstance.assemble(photoURL:photo, videoURL:video) { [weak self] process in 108 | guard let self else { return } 109 | self.progressView.progress = process 110 | } 111 | Task { @MainActor in 112 | self.livePhotoView.livePhoto = livePhoto 113 | } 114 | asembleURLs.send((photoURL, videoURL)) 115 | } 116 | } 117 | 118 | } 119 | -------------------------------------------------------------------------------- /LivePhotos/LivePhotos/ViewController/LivePhotosViewController+Disassemble.swift: -------------------------------------------------------------------------------- 1 | // 2 | // LivePhotosViewController+Disassemble.swift 3 | // 
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos/ViewController/LivePhotosViewController+Disassemble.swift:
--------------------------------------------------------------------------------
1 | //
2 | //  LivePhotosViewController+Disassemble.swift
3 | //  LivePhotos
4 | //
5 | //  Created by yangjie.layer on 2023/4/1.
6 | //
7 | 
8 | import AVKit
9 | import Combine
10 | import PhotosUI
11 | 
12 | extension LivePhotosViewController {
13 | 
14 |     /// Pick a Live Photo from the photo library
15 |     func pickButtonDidSelect(_ sender: UIButton) {
16 |         pick(.any(of: [.livePhotos]))
17 |     }
18 | 
19 |     /// Save the photo
20 |     func savePhotoButtonDidSelect(_ sender: UIButton) {
21 |         // Proceed only when the disassembled photo file exists on disk.
22 |         // fileExists(atPath:) needs URL.path; absoluteString includes the "file://" scheme and never matches.
23 |         guard let photoURL = self.photoURL.value,
24 |               FileManager.default.fileExists(atPath: photoURL.path) else {
25 |             return
26 |         }
27 |         PHPhotoLibrary.shared().performChanges({
28 |             PHAssetChangeRequest.creationRequestForAssetFromImage(atFileURL: photoURL)
29 |         }, completionHandler: { success, error in
30 |             Toast.show("\(success ? "Saved successfully" : "Save failed")")
31 |         })
32 |     }
33 | 
34 |     /// Save the video
35 |     func saveVideoButtonDidSelect(_ sender: UIButton) {
36 |         guard let videoURL = self.videoURL.value,
37 |               FileManager.default.fileExists(atPath: videoURL.path) else {
38 |             return
39 |         }
40 |         PHPhotoLibrary.shared().performChanges({
41 |             PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: videoURL)
42 |         }, completionHandler: { success, error in
43 |             Toast.show("\(success ? "Saved successfully" : "Save failed")")
44 |         })
45 |     }
46 | 
47 |     func disassemblePicker(_ picker: PHPickerViewController, didFinishPicking results: [PHPickerResult]) {
48 |         guard let itemProvider = results.first?.itemProvider,
49 |               itemProvider.canLoadObject(ofClass: PHLivePhoto.self) else {
50 |             Toast.show("Pick failed")
51 |             return
52 |         }
53 |         itemProvider.loadObject(ofClass: PHLivePhoto.self) { [weak self] livePhoto, _ in
54 |             Task { @MainActor in
55 |                 guard let self, let livePhoto = livePhoto as? PHLivePhoto else {
56 |                     Toast.show("Load failed")
57 |                     return
58 |                 }
59 |                 self.livePhotoView.livePhoto = livePhoto
60 |                 self.disassemble(livePhoto: livePhoto)
61 |             }
62 |         }
63 |     }
64 | }
65 | 
66 | extension LivePhotosViewController {
67 | 
68 |     func disassemble(livePhoto: PHLivePhoto) {
69 |         self.progressView.progress = 0
70 |         Task {
71 |             do {
72 |                 // Disassemble the livePhoto
73 |                 let (photoURL, videoURL) = try await LivePhotos.sharedInstance.disassemble(livePhoto: livePhoto)
74 |                 await MainActor.run { self.progressView.progress = 1 }
75 |                 self.photoURL.send(photoURL)
76 |                 self.videoURL.send(videoURL)
77 |                 // Show the photo
78 |                 if FileManager.default.fileExists(atPath: photoURL.path) {
79 |                     guard let photo = UIImage(contentsOfFile: photoURL.path) else { return }
80 |                     await MainActor.run { leftImageView.image = photo }
81 |                 }
82 |                 // Show the video
83 |                 if FileManager.default.fileExists(atPath: videoURL.path) {
84 |                     playVideo(URL(fileURLWithPath: videoURL.path))
85 |                 }
86 |             } catch {
87 |                 await MainActor.run { Toast.show("Disassemble failed") }
88 |             }
89 |         }
90 |     }
91 | }
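
And the reverse direction, again as an editorial sketch (`exportParts` is hypothetical): splitting a picked Live Photo and re-saving its parts as independent library assets.

    import Photos

    // Hypothetical call site for LivePhotos.disassemble(livePhoto:).
    func exportParts(of livePhoto: PHLivePhoto) async throws {
        // Writes the still image and the paired video into the app's caches directory.
        let (photoURL, videoURL) = try await LivePhotos.sharedInstance.disassemble(livePhoto: livePhoto)
        try await PHPhotoLibrary.shared().performChanges {
            // Each part becomes an ordinary, independent library asset.
            _ = PHAssetChangeRequest.creationRequestForAssetFromImage(atFileURL: photoURL)
            _ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: videoURL)
        }
    }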
--------------------------------------------------------------------------------
/LivePhotos/LivePhotos/ViewController/LivePhotosViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | //  ViewController.swift
3 | //  LivePhotos
4 | //
5 | //  Created by yangjie.layer on 2023/3/29.
6 | //
7 | 
8 | import UIKit
9 | import AVKit
10 | import Combine
11 | import PhotosUI
12 | 
13 | enum LivePhotosPageStyle: Int {
14 |     case disassemble // Live Photos disassemble
15 |     case assemble // Live Photos assemble
16 | }
17 | 
18 | class LivePhotosViewController: UIViewController {
19 | 
20 |     // MARK: - Properties
21 | 
22 |     var subscriptions = Set<AnyCancellable>()
23 | 
24 |     /// Current scene type
25 |     let style = CurrentValueSubject<LivePhotosPageStyle, Never>(.disassemble)
26 | 
27 |     /// Current photo URL
28 |     let photoURL = CurrentValueSubject<URL?, Never>(nil)
29 | 
30 |     /// Current video URL
31 |     let videoURL = CurrentValueSubject<URL?, Never>(nil)
32 | 
33 |     /// Assembled photo/video URLs
34 |     let asembleURLs = CurrentValueSubject<(URL?, URL?)?, Never>(nil)
35 | 
36 |     // MARK: - UI Properties
37 | 
38 |     /// NavigationItem's segmentedControl, switches between disassemble and assemble
39 |     private lazy var segmentedControl = {
40 |         let segmentedControl = UISegmentedControl(items: ["Disassemble", "Assemble"])
41 |         segmentedControl.backgroundColor = .gray.withAlphaComponent(0.1)
42 |         segmentedControl.addAction(UIAction(handler: { [weak self] _ in
43 |             guard let self else { return }
44 |             self.segmentValueDidChange(segmentedControl)
45 |         }), for: .valueChanged)
46 |         return segmentedControl
47 |     }()
48 | 
49 |     /// NavigationItem's icon, shows whether it is currently disassemble or assemble
50 |     private var iconImageView = UIImageView()
51 | 
52 |     /// The main button at the bottom, for picking or saving a Live Photo
53 |     private lazy var mainButton = {
54 |         var config = UIButton.Configuration.filled()
55 |         config.buttonSize = .large
56 |         config.cornerStyle = .large
57 |         config.image = UIImage(systemName: "photo.on.rectangle.angled")
58 |         config.imagePadding = 10.0
59 |         config.titleTextAttributesTransformer = UIConfigurationTextAttributesTransformer { incoming in
60 |             var outgoing = incoming
61 |             outgoing.font = .boldSystemFont(ofSize: 15.0)
62 |             return outgoing
63 |         }
64 |         let button = UIButton(configuration: config)
65 |         button.configurationUpdateHandler = { button in
66 |             let title: String
67 |             let color: UIColor
68 |             switch(self.style.value) {
69 |             case .disassemble:
70 |                 title = "Pick A Live Photo"
71 |                 color = .systemBlue
72 |             case .assemble:
73 |                 title = "Save the Live Photo"
74 |                 color = .systemPink
75 |             }
76 |             button.configuration?.title = title
77 |             button.configuration?.baseBackgroundColor = color
78 |         }
79 |         button.addAction(UIAction(handler: { [weak self] _ in
80 |             guard let self else { return }
81 |             switch(self.style.value) {
82 |             case .disassemble:
83 |                 self.pickButtonDidSelect(button)
84 |             case .assemble:
85 |                 self.saveButtonDidSelect(button)
86 |             }
87 |         }), for: .touchUpInside)
88 |         return button
89 |     }()
90 | 
91 |     /// Live Photo display container
92 |     let livePhotoView = {
93 |         let livePhotoView = PHLivePhotoView()
94 |         livePhotoView.backgroundColor = .gray.withAlphaComponent(0.1)
95 |         livePhotoView.contentMode = .scaleAspectFit
96 |         livePhotoView.layer.cornerRadius = 8.0
97 |         return livePhotoView
98 |     }()
99 | 
100 |     /// Icon on the Live Photo display container
101 |     private var livePhotoIcon = UIImageView()
102 | 
103 |     /// Photo display container
104 |     let leftImageView = {
105 |         let imageView = UIImageView()
106 |         imageView.backgroundColor = .gray.withAlphaComponent(0.1)
107 |         imageView.contentMode = .scaleAspectFit
108 |         imageView.layer.cornerRadius = 8.0
109 |         return imageView
110 |     }()
111 | 
112 |     /// Video display container
113 |     private let rightPlayerViewController = {
114 |         let playerViewController = AVPlayerViewController()
115 | 
playerViewController.view.backgroundColor = .gray.withAlphaComponent(0.1) 116 | playerViewController.view.layer.cornerRadius = 8.0 117 | return playerViewController 118 | }() 119 | 120 | /// Left photo button 121 | private lazy var leftButton = { 122 | var config = UIButton.Configuration.filled() 123 | config.buttonSize = .large 124 | config.cornerStyle = .large 125 | config.titleTextAttributesTransformer = UIConfigurationTextAttributesTransformer { incoming in 126 | var outgoing = incoming 127 | outgoing.font = .boldSystemFont(ofSize: 15.0) 128 | return outgoing 129 | } 130 | let button = UIButton(configuration: config) 131 | button.configurationUpdateHandler = { button in 132 | let title: String 133 | let color: UIColor 134 | switch(self.style.value) { 135 | case .disassemble: 136 | title = "Save Photo" 137 | color = .systemBlue 138 | case .assemble: 139 | title = "Pick Photo" 140 | color = .systemPink 141 | } 142 | button.configuration?.title = title 143 | button.configuration?.baseBackgroundColor = color 144 | } 145 | button.addAction(UIAction(handler: { [weak self] _ in 146 | guard let self else { return } 147 | switch(self.style.value) { 148 | case .disassemble: 149 | self.savePhotoButtonDidSelect(button) 150 | case .assemble: 151 | self.pickPhotoButtonDidSelect(button) 152 | } 153 | }), for: .touchUpInside) 154 | return button 155 | }() 156 | 157 | /// Right video button 158 | private lazy var rightButton = { 159 | var config = UIButton.Configuration.filled() 160 | config.buttonSize = .large 161 | config.cornerStyle = .large 162 | config.titleTextAttributesTransformer = UIConfigurationTextAttributesTransformer { incoming in 163 | var outgoing = incoming 164 | outgoing.font = .boldSystemFont(ofSize: 15.0) 165 | return outgoing 166 | } 167 | let button = UIButton(configuration: config) 168 | button.configurationUpdateHandler = { button in 169 | let title: String 170 | let color: UIColor 171 | switch(self.style.value) { 172 | case .disassemble: 173 | title = "Save Video" 174 | color = .systemBlue 175 | case .assemble: 176 | title = "Pick Video" 177 | color = .systemPink 178 | } 179 | button.configuration?.title = title 180 | button.configuration?.baseBackgroundColor = color 181 | } 182 | button.addAction(UIAction(handler: { [weak self] _ in 183 | guard let self else { return } 184 | switch(self.style.value) { 185 | case .disassemble: 186 | self.saveVideoButtonDidSelect(button) 187 | case .assemble: 188 | self.pickVideoButtonDidSelect(button) 189 | } 190 | }), for: .touchUpInside) 191 | return button 192 | }() 193 | 194 | let progressView: UIProgressView = UIProgressView() 195 | 196 | 197 | // MARK: - Lifecycle 198 | 199 | override func viewDidLoad() { 200 | super.viewDidLoad() 201 | setupUI() 202 | setupSubscibers() 203 | setUPAuthorization() 204 | } 205 | 206 | /// Adapt to light and dark mode switching 207 | override func traitCollectionDidChange(_ previousTraitCollection: UITraitCollection?) 
    {
        super.traitCollectionDidChange(previousTraitCollection)
        updateLivePhotoIcon()
    }
}

// MARK: - Set up and update the UI

extension LivePhotosViewController {

    private func setupUI() {
        let stackView = UIStackView(arrangedSubviews: [segmentedControl, iconImageView])
        stackView.spacing = 10.0
        navigationItem.titleView = stackView
        view.addSubview(mainButton)
        mainButton.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            mainButton.leftAnchor.constraint(equalTo: view.leftAnchor, constant: 20.0),
            mainButton.rightAnchor.constraint(equalTo: view.rightAnchor, constant: -20.0),
            mainButton.bottomAnchor.constraint(equalTo: view.bottomAnchor, constant: -40.0),
            mainButton.heightAnchor.constraint(equalToConstant: 40.0)
        ])
        view.addSubview(livePhotoView)
        livePhotoView.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            livePhotoView.leftAnchor.constraint(equalTo: view.leftAnchor, constant: 20.0),
            livePhotoView.rightAnchor.constraint(equalTo: view.rightAnchor, constant: -20.0),
            livePhotoView.topAnchor.constraint(equalTo: view.safeAreaLayoutGuide.topAnchor, constant: 10.0),
            livePhotoView.heightAnchor.constraint(equalToConstant: 220.0)
        ])
        view.addSubview(livePhotoIcon)
        livePhotoIcon.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            livePhotoIcon.leftAnchor.constraint(equalTo: livePhotoView.leftAnchor, constant: 10.0),
            livePhotoIcon.topAnchor.constraint(equalTo: livePhotoView.topAnchor, constant: 10.0),
            livePhotoIcon.widthAnchor.constraint(equalToConstant: 30.0),
            livePhotoIcon.heightAnchor.constraint(equalToConstant: 30.0)
        ])
        view.addSubview(leftImageView)
        leftImageView.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            leftImageView.leftAnchor.constraint(equalTo: view.leftAnchor, constant: 20.0),
            leftImageView.rightAnchor.constraint(equalTo: view.centerXAnchor, constant: -10.0),
            leftImageView.topAnchor.constraint(equalTo: livePhotoView.bottomAnchor, constant: 20.0),
            leftImageView.heightAnchor.constraint(equalToConstant: 220.0)
        ])
        view.addSubview(rightPlayerViewController.view)
        rightPlayerViewController.view.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            rightPlayerViewController.view.leftAnchor.constraint(equalTo: view.centerXAnchor, constant: 10.0),
            rightPlayerViewController.view.rightAnchor.constraint(equalTo: view.rightAnchor, constant: -20.0),
            rightPlayerViewController.view.topAnchor.constraint(equalTo: livePhotoView.bottomAnchor, constant: 20.0),
            rightPlayerViewController.view.heightAnchor.constraint(equalToConstant: 220.0)
        ])
        view.addSubview(leftButton)
        leftButton.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            leftButton.leftAnchor.constraint(equalTo: view.leftAnchor, constant: 20.0),
            leftButton.rightAnchor.constraint(equalTo: view.centerXAnchor, constant: -10.0),
            leftButton.topAnchor.constraint(equalTo: leftImageView.bottomAnchor, constant: 20.0),
            leftButton.heightAnchor.constraint(equalToConstant: 40.0)
        ])
        view.addSubview(rightButton)
        rightButton.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            rightButton.leftAnchor.constraint(equalTo: view.centerXAnchor, constant: 10.0),
            rightButton.rightAnchor.constraint(equalTo: view.rightAnchor, constant: -20.0),
            rightButton.topAnchor.constraint(equalTo: rightPlayerViewController.view.bottomAnchor, constant: 20.0),
            rightButton.heightAnchor.constraint(equalToConstant: 40.0)
        ])

        view.addSubview(progressView)
        progressView.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            progressView.leftAnchor.constraint(equalTo: mainButton.leftAnchor),
            progressView.rightAnchor.constraint(equalTo: mainButton.rightAnchor),
            progressView.bottomAnchor.constraint(equalTo: mainButton.topAnchor, constant: -20.0),
            progressView.heightAnchor.constraint(equalToConstant: 10.0)
        ])

        updateLivePhotoIcon()
    }

    private func updateLivePhotoIcon() {
        // `.label` adapts to the current interface style automatically,
        // so the same symbol image works in both light and dark mode.
        livePhotoIcon.image = UIImage(systemName: "livephoto")?
            .withTintColor(.label, renderingMode: .alwaysOriginal)
    }

    private func setupSubscribers() {
        style.sink { [weak self] style in
            guard let self else { return }
            self.segmentedControl.selectedSegmentIndex = style.rawValue
        }.store(in: &subscriptions)

        style.sink { [weak self] style in
            guard let self else { return }
            let image: UIImage?
            let config = UIImage.SymbolConfiguration(weight: .bold)
            switch style {
            case .disassemble:
                image = UIImage(systemName: "rectangle.expand.vertical", withConfiguration: config)
            case .assemble:
                image = UIImage(systemName: "rectangle.compress.vertical", withConfiguration: config)
            }
            self.iconImageView.image = image?.withTintColor(.label, renderingMode: .alwaysOriginal)
        }.store(in: &subscriptions)

        style.sink { [weak self] _ in
            guard let self else { return }
            self.mainButton.setNeedsUpdateConfiguration()
            self.leftButton.setNeedsUpdateConfiguration()
            self.rightButton.setNeedsUpdateConfiguration()
        }.store(in: &subscriptions)

        style.sink { [weak self] style in
            guard let self else { return }
            let color: UIColor
            switch style {
            case .disassemble:
                color = .systemBlue
            case .assemble:
                color = .systemPink
            }
            self.progressView.progressTintColor = color
        }.store(in: &subscriptions)

        // Loop the video when playback reaches the end
        NotificationCenter.default.publisher(for: .AVPlayerItemDidPlayToEndTime).sink { [weak self] _ in
            if let player = self?.rightPlayerViewController.player {
                player.seek(to: CMTimeMake(value: 0, timescale: 600))
                player.play()
            }
        }.store(in: &subscriptions)

        setupAsembleSubscibers() // Defined in LivePhotosViewController+Asemble.swift
    }
}

// MARK: - Actions

extension LivePhotosViewController {

    private func setupAuthorization() {
        Task {
            await PHPhotoLibrary.requestAuthorization(for: .readWrite)
        }
    }

    private func segmentValueDidChange(_ sender: UISegmentedControl) {
        progressView.progress = 0
        livePhotoView.livePhoto = nil
        leftImageView.image = nil
        rightPlayerViewController.player = nil
        photoURL.send(nil)
        videoURL.send(nil)
        asembleURLs.send(nil)
        style.send(LivePhotosPageStyle(rawValue: sender.selectedSegmentIndex)!)
    }
    func playVideo(_ url: URL) {
        Task {
            let asset = AVAsset(url: url)
            if try await asset.load(.isPlayable) {
                let playerItem = AVPlayerItem(url: url)
                let player = AVPlayer(playerItem: playerItem)
                rightPlayerViewController.player = player
                player.play()
            }
        }
    }
}

// MARK: - PHPickerViewControllerDelegate

extension LivePhotosViewController: PHPickerViewControllerDelegate {

    /// Present a `PHPickerViewController`
    func pick(_ filter: PHPickerFilter) {
        var config = PHPickerConfiguration()
        config.filter = filter
        config.selectionLimit = 1
        config.preferredAssetRepresentationMode = .current
        let picker = PHPickerViewController(configuration: config)
        picker.delegate = self
        present(picker, animated: true, completion: nil)
    }

    func picker(_ picker: PHPickerViewController, didFinishPicking results: [PHPickerResult]) {
        defer { picker.dismiss(animated: true) }
        switch style.value {
        case .disassemble:
            disassemblePicker(picker, didFinishPicking: results)
        case .assemble:
            assemblePicker(picker, didFinishPicking: results)
        }
    }
}

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------

# Adding Live Photos to Your App

In 2015, Apple introduced the iPhone 6s and iPhone 6s Plus together with Live Photos. At the time it was a groundbreaking new way of taking pictures, capturing moments in motion and bringing still photos to life. When you take a Live Photo, the iPhone records everything that happens during the 1.5 seconds before and after the shutter. Users can choose a different key photo, add fun effects, edit the Live Photo, and share it with family and friends.

This article introduces the technical concepts behind Live Photos and implements disassembling and assembling Live Photos in Swift. Here is what the two modes look like:

| ![Disassemble](https://raw.githubusercontent.com/LLLLLayer/Galaxy/main/resources/images/live_photos/Disassemble.gif) | ![Asemble](https://raw.githubusercontent.com/LLLLLayer/Galaxy/main/resources/images/live_photos/Asemble.gif) |
| :---: | :---: |
| Disassemble a Live Photo into a photo and a video | Assemble a Live Photo from an (unrelated) photo and video |

> All APIs covered in this article target **iOS 16.0+**, and the code makes heavy use of Swift structured concurrency, so some familiarity with it is assumed.

> The project code is open source; see [here](https://github.com/LLLLLayer/Live-Photos).

## The Live Photo format

Below is a Live Photo of cherry blossoms once taken at Wuhan University. If we AirDrop the Live Photo directly to a Mac, we get a single `HEIC` photo. But if we check **Options -> All Photos Data** on the share sheet before sending, the transfer produces a folder containing an `HEIC` photo and a `MOV` video:

*[Image: LivePhotoGif]*

*[Image: LivePhotoSave]*

As we can see, a Live Photo consists of two paired resources, linked by a shared identifier:

### A JPEG image with special Metadata

Images carry properties. For most image file formats, the [`CGImageSource`](https://developer.apple.com/documentation/imageio/cgimagesource) type reads this data efficiently. You can inspect all of a photo's metadata with [The Photo Investigator](https://apps.apple.com/us/app/photo-investigator-view-edit/id571574618) app:

![image-20230410011403546](https://raw.githubusercontent.com/LLLLLayer/Galaxy/main/resources/images/live_photos/ThePhotoInvestigator.png)

When you take a photo, the Apple camera automatically attaches various kinds of metadata. Most of it is self-explanatory: location lives in the GPS metadata, camera information in the EXIF metadata.

[`kCGImagePropertyMakerAppleDictionary`](https://developer.apple.com/documentation/imageio/kcgimagepropertymakerappledictionary) is the key-value dictionary attached to photos taken by Apple cameras. Key "17" is the LivePhotoVideoIndex in the Maker Apple dictionary — the Live Photo identifier key. See [Apple Tags](https://exiftool.org/TagNames/Apple.html) for the full list.

A Live Photo requires a JPEG image with this special metadata:

```
[kCGImagePropertyMakerAppleDictionary : [17 : <identifier>]]
```
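To check from code whether an image already carries this identifier, we can read the Maker Apple dictionary back out with the same Image I/O calls used later in this article. A minimal sketch — the helper is not part of the sample project, and its name is hypothetical:

```swift
import ImageIO

/// Returns the Live Photo identifier stored in an image file, if any.
func livePhotoIdentifier(of photoURL: URL) -> String? {
    guard let imageSource = CGImageSourceCreateWithURL(photoURL as CFURL, nil),
          let properties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [AnyHashable: Any],
          let makerApple = properties[kCGImagePropertyMakerAppleDictionary] as? [AnyHashable: Any] else {
        return nil
    }
    // "17" is the LivePhotoVideoIndex key in the Maker Apple dictionary.
    return makerApple["17"] as? String
}
```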
### A MOV video file with special Metadata

> [By default, Live Photo capture encodes the video portion of the Live Photo with the H.264 codec](https://developer.apple.com/documentation/avfoundation/avcapturephotooutput/2866560-availablelivephotovideocodectype).

[`AVAsset`](https://developer.apple.com/documentation/avfoundation/avasset) is the class that models timed audiovisual media. It is not itself a media resource (such as a QuickTime movie, an MP3 audio file, or media streamed over HTTP Live Streaming (HLS)), but it acts as a container for one.

An `AVAsset` is a container of one or more `AVAssetTrack` instances, each modeling a track of a uniform media type. The most common track types are `audio` and `video`, but an asset may also contain supplementary tracks such as `closedCaption`, `subtitle`, and `metadata`:

```swift
static let audio: AVMediaType          // The media contains audio media.
static let closedCaption: AVMediaType  // The media contains closed-caption content.
static let depthData: AVMediaType      // The media contains depth data.
static let metadataObject: AVMediaType // The media contains metadata objects.
static let muxed: AVMediaType          // The media contains muxed media.
static let subtitle: AVMediaType       // The media contains subtitles.
static let text: AVMediaType           // The media contains text.
static let timecode: AVMediaType       // The media contains a time code.
static let video: AVMediaType          // The media contains video.
```

![image-20230415154610857](https://raw.githubusercontent.com/LLLLLayer/Galaxy/main/resources/images/live_photos/image-20230415154610857.png)

`AVAsset` stores descriptive metadata about its media. AVFoundation simplifies working with metadata through its `AVMetadataItem` class. At its simplest, an instance of `AVMetadataItem` is a key-value pair representing a single metadata value, such as a movie's title or an album's artwork. AVFoundation groups related metadata into key spaces:

- Format-specific [`keySpace`](https://developer.apple.com/documentation/coremedia/cmmetadata/metadata_identifier_keyspaces)s. AVFoundation defines several format-specific key spaces, roughly corresponding to particular container or file formats, such as `quickTimeMetadata`, `iTunes`, and `id3`. A single asset may contain metadata values across multiple key spaces.
- The common `keySpace`. To normalize access to frequently used metadata, such as creation date or description, the common key space gives access to a limited set of metadata values shared by several key spaces.

A Live Photo requires specific top-level metadata in the `AVMetadataKeySpace.quickTimeMetadata` key space:

```swift
["com.apple.quicktime.content.identifier" : <identifier>]
```

> "com.apple.quicktime.content.identifier" is `AVMetadataKey.quickTimeMetadataKeyContentIdentifier`.
>
> This identifier is the same one used in "A JPEG image with special Metadata" above.

It also requires a timed metadata track for the still image:

```swift
["MetadataIdentifier" : "mdta/com.apple.quicktime.still-image-time",
 "MetadataDataType" : "com.apple.metadata.datatype.int8"]
```

> "MetadataIdentifier" is `kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier`
> "mdta" is `AVMetadataKeySpace.quickTimeMetadata`
> "MetadataDataType" is `kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType`
> "com.apple.metadata.datatype.int8" is `kCMMetadataBaseDataType_SInt8`

And metadata on that timed metadata track, which tells the system where the still image sits on the video's timeline:

```
["com.apple.quicktime.still-image-time" : 0]
```
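As with the image, we can verify that a video already carries the content identifier by loading its top-level metadata and filtering with `AVMetadataItem`. A minimal sketch, again not part of the sample project and with a hypothetical function name:

```swift
import AVFoundation

/// Returns the Live Photo content identifier stored in a video file, if any.
func contentIdentifier(of videoURL: URL) async throws -> String? {
    let asset = AVURLAsset(url: videoURL)
    let metadata = try await asset.load(.metadata)
    let items = AVMetadataItem.metadataItems(
        from: metadata,
        filteredByIdentifier: .quickTimeMetadataContentIdentifier)
    // `stringValue` is an asynchronously loaded property as well.
    return try await items.first?.load(.stringValue)
}
```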
## PHLivePhoto and PHLivePhotoView

```swift
class PHLivePhoto : NSObject
class PHLivePhotoView : UIView
```

[`PHLivePhoto`](https://developer.apple.com/documentation/photokit/phlivephoto) is the displayable, in-code representation of a Live Photo. On iOS we use this class to reference a Live Photo from, for example, the user's photo library, and assign the `PHLivePhoto` to a [`PHLivePhotoView`](https://developer.apple.com/documentation/photokit/phlivephotoview) to display it. `PHLivePhotoView` renders the Live Photo and provides the same interactive playback as the Photos app.

`PHLivePhoto` is to a Live Photo what `UIImage` is to a still image. A `UIImage` is not just an image data file but a ready-to-display image for a `UIImageView`; likewise, a `PHLivePhoto` is not just the Live Photo assets in the library but a Live Photo ready to display in a `PHLivePhotoView`.

On iOS we can fetch a Live Photo from the user's library with [`UIImagePickerController`](https://developer.apple.com/documentation/uikit/uiimagepickercontroller), or with [`PHAsset`](https://developer.apple.com/documentation/photokit/phasset) and [`PHImageManager`](https://developer.apple.com/documentation/photokit/phimagemanager), or create one from asset resources. On iOS 14.0 and later we can also use [`PHPickerViewController`](https://developer.apple.com/documentation/photokit/phpickerviewcontroller) to pick a Live Photo from the library.

**Sample code using `UIImagePickerController`:**

```swift
func pickLivePhoto(_ sender: AnyObject) {
    let imagePicker = UIImagePickerController()
    imagePicker.sourceType = .photoLibrary
    imagePicker.allowsEditing = false
    imagePicker.delegate = self
    imagePicker.mediaTypes = [UTType.livePhoto.identifier, UTType.image.identifier]
    present(imagePicker, animated: true, completion: nil)
}

// MARK: UIImagePickerControllerDelegate

func imagePickerController(
    _ picker: UIImagePickerController,
    didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]
) {
    guard let mediaType = info[.mediaType] as? String,
          mediaType == UTType.livePhoto.identifier,
          let livePhoto = info[.livePhoto] as? PHLivePhoto else {
        return
    }
    livePhotoView.livePhoto = livePhoto
}
```

> Note that `mediaTypes` must include the image type alongside the Live Photo type, otherwise the app throws at runtime:
>
> ```
> *** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: 'The Live Photo type cannot be specified without the Image media type'
> terminating with uncaught exception of type NSException
> ```
>
> As a consequence, the `mediaType` the delegate receives may be a still image rather than a Live Photo, so check it or prompt the user accordingly.

**Sample code using `PHAsset` and `PHImageManager`:**

```swift
let fetchOptions = PHFetchOptions()
fetchOptions.predicate = NSPredicate(
    format: "(mediaSubtype & %d) != 0",
    PHAssetMediaSubtype.photoLive.rawValue)
let images = PHAsset.fetchAssets(with: .image, options: fetchOptions)
PHImageManager.default().requestLivePhoto(
    for: images.firstObject!,
    targetSize: .zero,
    contentMode: .default,
    options: nil) { [weak self] livePhoto, _ in
        guard let self else { return }
        self.livePhotoView.livePhoto = livePhoto
}
```

**Sample code using `PHPickerViewController`:**

```swift
func pickLivePhoto(_ sender: UIButton) {
    var config = PHPickerConfiguration()
    config.selectionLimit = 1
    config.filter = .any(of: [.livePhotos])
    config.preferredAssetRepresentationMode = .current
    let picker = PHPickerViewController(configuration: config)
    picker.delegate = self
    present(picker, animated: true, completion: nil)
}

// MARK: - PHPickerViewControllerDelegate

func picker(
    _ picker: PHPickerViewController,
    didFinishPicking results: [PHPickerResult]
) {
    defer { picker.dismiss(animated: true) }
    guard let itemProvider = results.first?.itemProvider,
          itemProvider.canLoadObject(ofClass: PHLivePhoto.self) else {
        return
    }
    itemProvider.loadObject(ofClass: PHLivePhoto.self) { [weak self] livePhoto, _ in
        Task { @MainActor in
            guard let self, let livePhoto else { return }
            self.livePhotoView.livePhoto = livePhoto as? PHLivePhoto
        }
    }
}
```
`PHPickerResult` is the type of an asset selected from the library. Its [`let itemProvider: NSItemProvider`](https://developer.apple.com/documentation/photokit/phpickerresult/3606600-itemprovider) property is a representation of the selected asset. [`NSItemProvider`](https://developer.apple.com/documentation/foundation/nsitemprovider) transfers data or files between processes. `canLoadObject(ofClass:)` indicates whether the provider can load an object of the given class, and `loadObject(ofClass:completionHandler:)` loads such an object asynchronously. From it we obtain the `livePhoto` for display and disassembly.

> Note that the `loadObject(ofClass:)` completion is not called on the main thread; hop back to the main thread before updating the UI.

> `PHPickerViewController` has privacy built in (no full photo-library permission required), runs in a separate process, and supports multiple selection, search, and more; see [Meet the new Photos picker](https://developer.apple.com/videos/play/wwdc2020/10652/). The rest of this article uses it.

## Disassembling a Live Photo into a photo and a video

In the code that follows, we implement both disassembly and assembly in an `actor LivePhotos`, which provides a `sharedInstance` singleton:

```swift
// LivePhotos.swift
actor LivePhotos {
    static let sharedInstance = LivePhotos()
}
```

In the sample project's `LivePhotosViewController+Disassemble.swift`, we call `disassemble(livePhoto:)` like this:

```swift
func disassemble(livePhoto: PHLivePhoto) {
    Task {
        do {
            // Disassemble the livePhoto
            let (photoURL, videoURL) = try await LivePhotos.sharedInstance.disassemble(livePhoto: livePhoto)
            // Show the photo
            if FileManager.default.fileExists(atPath: photoURL.path) {
                guard let photo = UIImage(contentsOfFile: photoURL.path) else { return }
                await MainActor.run { leftImageView.image = photo }
            }
            // Show the video
            if FileManager.default.fileExists(atPath: videoURL.path) {
                playVideo(URL(fileURLWithPath: videoURL.path))
            }
        } catch {
            await MainActor.run { Toast.show("Disassemble failed") }
        }
    }
}
```

`disassemble(livePhoto:)` is an async function that can throw, so we call it with `try await` wrapped in a `Task {...}`. It returns two URLs, one for the photo and one for the video, which we use for display. If disassembly throws, we show an error toast.

Now let's look at the implementation of `disassemble(livePhoto:)`:

```swift
func disassemble(livePhoto: PHLivePhoto) async throws -> (URL, URL) {
    // 1
    let assetResources = PHAssetResource.assetResources(for: livePhoto)
    // 5
    let list = try await withThrowingTaskGroup(of: (PHAssetResource, Data).self) { taskGroup in
        for assetResource in assetResources {
            taskGroup.addTask {
                // 3
                return try await withCheckedThrowingContinuation { continuation in
                    let dataBuffer = NSMutableData()
                    // 2
                    let options = PHAssetResourceRequestOptions()
                    options.isNetworkAccessAllowed = true
                    PHAssetResourceManager.default().requestData(for: assetResource, options: options) { data in
                        dataBuffer.append(data)
                    } completionHandler: { error in
                        // 4
                        guard error == nil else {
                            continuation.resume(throwing: LivePhotosDisassembleError.requestDataFailed)
                            return
                        }
                        continuation.resume(returning: (assetResource, dataBuffer as Data))
                    }
                }
            }
        }
        // 6
        var results: [(PHAssetResource, Data)] = []
        for try await result in taskGroup {
            results.append(result)
        }
        return results
    }
    // ...
}
```

Let's walk through this part:

1. [`assetResources(for:)`](https://developer.apple.com/documentation/photokit/phassetresource/1623988-assetresources) returns the list of data resources associated with an asset, `[PHAssetResource]`. Because the input is a `PHLivePhoto`, we get two resources, whose types we can inspect in the console:

   ```
   (lldb) po assetResources[0].uniformTypeIdentifier
   "public.heic"
   (lldb) po assetResources[1].uniformTypeIdentifier
   "com.apple.quicktime-movie"
   ```

2. We want to turn each resource into a `Data` object, which `requestData(for:options:dataReceivedHandler:completionHandler:)` does by asynchronously requesting the underlying data of the given asset resource. We set `options.isNetworkAccessAllowed` to `true` so the photo may be downloaded from iCloud. The data-received handler delivers chunks of the requested data, which we concatenate ourselves; in the `completionHandler` we receive the final outcome.
3. Because we expose an async function, we use `withCheckedThrowingContinuation(function:_:)` to suspend the current task and run the closure until we get a result or an error is thrown, bridging the callback-based code into structured concurrency.
4. We use the `error` parameter of the `completionHandler` to resume the `continuation` with either a result or an error.
5. Since there are two resources, we process the conversions in parallel by starting two child tasks with `withThrowingTaskGroup(of:returning:body:)`.
6. We await the child tasks in the task group and return a result of type `[(PHAssetResource, Data)]`.

And the remainder:

```swift
func disassemble(livePhoto: PHLivePhoto) async throws -> (URL, URL) {
    // ...
    // 7
    guard let photo = (list.first { $0.0.type == .photo }),
          let video = (list.first { $0.0.type == .pairedVideo }) else {
        throw LivePhotosDisassembleError.requestDataFailed
    }
    // 8
    let cachesDirectory = try cachesDirectory()
    let photoURL = try save(photo.0, data: photo.1, to: cachesDirectory)
    let videoURL = try save(video.0, data: video.1, to: cachesDirectory)
    return (photoURL, videoURL)
}

private func save(_ assetResource: PHAssetResource, data: Data, to url: URL) throws -> URL {
    // 9
    guard let ext = UTType(assetResource.uniformTypeIdentifier)?.preferredFilenameExtension else {
        throw LivePhotosDisassembleError.noFilenameExtension
    }
    let destinationURL = url.appendingPathComponent(NSUUID().uuidString).appendingPathExtension(ext as String)
    try data.write(to: destinationURL, options: [Data.WritingOptions.atomic])
    return destinationURL
}
```

7. Using the `type` property of `PHAssetResource`, we locate the photo tuple and the video tuple, throwing an error if either is missing.
8. We write the `Data` of each `PHAssetResource` into the caches directory.
9. `uniformTypeIdentifier` is the resource's uniform type identifier, used on Apple platforms to uniquely identify a given category or type of item. `UTType`'s `init(_:)` converts it into the `heic` or `mov` file extension.

With that, we have the photo and video URLs disassembled from the Live Photo, ready for display or saving.
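The `cachesDirectory()` helper used above (and again during assembly) is not shown in the article. A minimal sketch of what it might look like — the error case name is hypothetical:

```swift
/// A possible implementation of the cachesDirectory() helper: returns a fresh
/// subdirectory of the user caches directory for output files.
private func cachesDirectory() throws -> URL {
    guard let cachesDirectoryURL = FileManager.default.urls(
        for: .cachesDirectory, in: .userDomainMask).first else {
        throw LivePhotosDisassembleError.cachesDirectoryNotFound // hypothetical error case
    }
    let outputDirectoryURL = cachesDirectoryURL.appendingPathComponent(UUID().uuidString)
    try FileManager.default.createDirectory(at: outputDirectoryURL, withIntermediateDirectories: true)
    return outputDirectoryURL
}
```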
## Creating a Live Photo from a photo and a video

As noted earlier, creating a Live Photo requires pairing the photo and the video with a shared identifier. We add this identifier to the metadata of both to produce a valid Live Photo.

In the sample project's `LivePhotosViewController+Asemble.swift`, the Live Photo creation API is used like this:

```swift
func assemble(photo: URL, video: URL) {
    progressView.progress = 0
    Task {
        let livePhoto = try await LivePhotos.sharedInstance.assemble(photoURL: photo, videoURL: video) { [weak self] progress in
            guard let self else { return }
            self.progressView.progress = progress
        }
        Task { @MainActor in
            self.livePhotoView.livePhoto = livePhoto
        }
    }
}
```

The signature of the assembly function is:

```swift
func assemble(photoURL: URL, videoURL: URL, progress: ((Float) -> Void)? = nil) async throws -> PHLivePhoto
```

It takes `photoURL`, `videoURL`, and a progress callback `progress`, and this async function ultimately returns a `PHLivePhoto`.

Assembly has three steps: produce the processed `pairedPhotoURL`, produce the processed `pairedVideoURL`, and create a `PHLivePhoto` from the two URLs (a sketch of step 3 appears at the end of the video section below):

```swift
func assemble(photoURL: URL, videoURL: URL, progress: ((Float) -> Void)? = nil) async throws -> PHLivePhoto {
    let cacheDirectory = try cachesDirectory()
    let identifier = UUID().uuidString
    // 1
    let pairedPhotoURL = try addIdentifier(
        identifier,
        fromPhotoURL: photoURL,
        to: cacheDirectory.appendingPathComponent(identifier).appendingPathExtension("jpg"))
    // 2
    let pairedVideoURL = try await addIdentifier(
        identifier,
        fromVideoURL: videoURL,
        to: cacheDirectory.appendingPathComponent(identifier).appendingPathExtension("mov"),
        progress: progress)
    // 3
    return try await withCheckedThrowingContinuation({ continuation in
        // Create a `PHLivePhoto` with the `pairedPhotoURL` and the `pairedVideoURL`.
    })
}
```

### Adding the Metadata to the photo

The [Image I/O](https://developer.apple.com/documentation/imageio) framework lets us open an image and write the identifier into `kCGImagePropertyMakerAppleDictionary` under the special property key `17`:

```swift
private func addIdentifier(
    _ identifier: String,
    fromPhotoURL photoURL: URL,
    to destinationURL: URL
) throws -> URL {
    // 1
    guard let imageSource = CGImageSourceCreateWithURL(photoURL as CFURL, nil),
          // 2
          let imageRef = CGImageSourceCreateImageAtIndex(imageSource, 0, nil),
          // 3
          var imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [AnyHashable : Any] else {
        throw LivePhotosAssembleError.addPhotoIdentifierFailed
    }
    // 4
    let identifierInfo = ["17" : identifier]
    imageProperties[kCGImagePropertyMakerAppleDictionary] = identifierInfo
    // 5
    guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, UTType.jpeg.identifier as CFString, 1, nil) else {
        throw LivePhotosAssembleError.createDestinationImageFailed
    }
    // 6
    CGImageDestinationAddImage(imageDestination, imageRef, imageProperties as CFDictionary)
    // 7
    if CGImageDestinationFinalize(imageDestination) {
        return destinationURL
    } else {
        throw LivePhotosAssembleError.createDestinationImageFailed
    }
}
```

In the code above:

1. [`CGImageSourceCreateWithURL(_:_:)`](https://developer.apple.com/documentation/imageio/1465262-cgimagesourcecreatewithurl) creates an image source that reads from the given URL, of type [`CGImageSource`](https://developer.apple.com/documentation/imageio/cgimagesource#3702930), which can read data for most image file formats and retrieve metadata, thumbnails, and so on. The `url` parameter is the image's URL; `options` is a [dictionary](https://developer.apple.com/documentation/imageio/cgimagesource#3702930) of additional creation options, such as whether to cache the decoded image or to create a thumbnail.
2. [`CGImageSourceCreateImageAtIndex(_:_:_:)`](https://developer.apple.com/documentation/imageio/1465011-cgimagesourcecreateimageatindex) creates an image object of type [`CGImage`](https://developer.apple.com/documentation/coregraphics/cgimage) from the data at the given index in the image source. `isrc` is the image source containing the data; `index` is the zero-based index of the desired image; `options` is a dictionary of additional creation options.

   > If we prefer, we can also obtain `imageRef` via `Data`:
   >
   > ```swift
   > let data = try? Data(contentsOf: imageURL)
   > let imageRef = UIImage(data: data)?.cgImage
   > ```

3. [`CGImageSourceCopyPropertiesAtIndex(_:_:_:)`](https://developer.apple.com/documentation/imageio/1465363-cgimagesourcecopypropertiesatind) returns the properties of the image at the given index in the image source, as a [`CFDictionary`](https://developer.apple.com/documentation/corefoundation/cfdictionary). The parameters are again `isrc`, `index`, and `options`.
4. We write the special metadata into `kCGImagePropertyMakerAppleDictionary` of `imageProperties`.
5. [`CGImageDestinationCreateWithURL(_:_:_:_:)`](https://developer.apple.com/documentation/imageio/1465361-cgimagedestinationcreatewithurl) creates a destination that writes image data to the given URL, of type [`CGImageDestination`](https://developer.apple.com/documentation/imageio/cgimagedestination), an abstract interface for saving image data; for example, it can produce an image that also contains a thumbnail, and it can attach metadata. `url` is the URL to write the image data to (this object overwrites any data at that URL); `type` is the [uniform type identifier](https://developer.apple.com/documentation/uniformtypeidentifiers) of the resulting image file, a common type that maps to MIME and file types; `count` is the number of images to include in the file; `options` is reserved for future use, so pass `nil`.
6. [`CGImageDestinationAddImage(_:_:_:)`](https://developer.apple.com/documentation/imageio/1464962-cgimagedestinationaddimage) adds an image to the `CGImageDestination`. `idst` is the destination to modify, `image` the image to add, and `properties` an optional dictionary specifying the added image's [properties](https://developer.apple.com/documentation/imageio/image_properties/individual_image_properties).
7. [`CGImageDestinationFinalize(_:)`](https://developer.apple.com/documentation/imageio/1464968-cgimagedestinationfinalize) is the final step of saving the image; it returns a `Bool` indicating success, and any output produced before calling it is invalid. After calling it, no more data can be added to the `CGImageDestination`.

### Adding the Metadata to the video

#### Relevant classes and APIs

Adding the metadata to the video is more involved: we need to rewrite the video with AVFoundation's [`AVAssetReader`](https://developer.apple.com/documentation/avfoundation/avassetreader) and [`AVAssetWriter`](https://developer.apple.com/documentation/avfoundation/avassetwriter). Let's take a quick look at their concepts and the functions we will use:

**AVAssetReader**

1. An `AVAssetReader` is associated with a single `AVAsset`, the video object. You add `AVAssetReaderOutput`s to it to read data; an `AVAssetReaderOutput` likewise needs its reader to do its job. One `AVAssetReader` can have multiple `AVAssetReaderOutput`s.
2. `AVAssetReaderTrackOutput`, a subclass of `AVAssetReaderOutput`, reads media data from a single `AVAssetTrack`. You create one as a track data reader from the asset's track of a given `AVMediaType`.
3. `assetReader.startReading()` tells the `AVAssetReaderTrackOutput`s that they can start reading data — audio data, video data, or otherwise.
4. `assetReaderOutput.copyNextSampleBuffer()` reads the next piece of data.
5. `assetReader.cancelReading()` stops reading.

**AVAssetWriter**

1. `AVAssetWriter` manages writing; `AVAssetWriterInput` performs the writing. One `AVAssetWriter` can have multiple `AVAssetWriterInput`s.
2. `assetWriter.startWriting()` tells the `AVAssetWriterInput`s that writing can begin.
3. `assetWriter.startSession(atSourceTime: .zero)` starts writing data from time zero.
4. `assetWriterInput.isReadyForMoreMediaData` is a Boolean indicating whether the input is ready to accept more media data.
5. With multiple `AVAssetWriterInput`s, when one input fills its buffer, its data is not processed immediately; the writer waits until the other inputs have written the corresponding duration before processing the data.

#### Overall steps

We assemble the Live Photo in these overall steps:

1. Create the `AVAssetReader`s and their corresponding `AVAssetReaderTrackOutput`s: `videoReaderOutput` and `audioReaderOutput`.
2. Create the `AVAssetWriter` and its corresponding `AVAssetWriterInput`s: `videoWriterInput` and `audioWriterInput`.
3. Write the prepared identifier metadata with the `AVAssetWriter` (it can only be set before writing begins).
4. Add the `assetWriterInput` of the `AVAssetWriterInputMetadataAdaptor` to the `AVAssetWriter`.
5. Put the `AVAssetWriter` into the writing state.
6. Write the metadata of the timed metadata track using the `AVAssetReader` and the `AVAssetWriterInputMetadataAdaptor`.
7. Put `videoReaderOutput`, `audioReaderOutput`, `videoWriterInput`, and `audioWriterInput` into the reading/writing state.
8. As each `AVAssetReaderOutput` reads track data, write it with the corresponding `AVAssetWriterInput`.
9. When all data has been read, stop the `AVAssetReader` and mark all `AVAssetWriterInput`s as finished.
10. Wait for the `AVAssetWriter` to reach the completed state; the video is then created.

#### Implementation

Now for the concrete implementation, starting with the metadata at the core of "a MOV video file with special Metadata".

Create an `AVMetadataItem` carrying the identifier — the same identifier used for the photo:

```swift
private func metadataItem(for identifier: String) -> AVMetadataItem {
    let item = AVMutableMetadataItem()
    item.keySpace = AVMetadataKeySpace.quickTimeMetadata // "mdta"
    item.dataType = "com.apple.metadata.datatype.UTF-8"
    item.key = AVMetadataKey.quickTimeMetadataKeyContentIdentifier as any NSCopying & NSObjectProtocol // "com.apple.quicktime.content.identifier"
    item.value = identifier as any NSCopying & NSObjectProtocol
    return item
}
```

Create the still image's timed metadata track:

```swift
private func stillImageTimeMetadataAdaptor() -> AVAssetWriterInputMetadataAdaptor {
    let quickTimeMetadataKeySpace = AVMetadataKeySpace.quickTimeMetadata.rawValue // "mdta"
    let stillImageTimeKey = "com.apple.quicktime.still-image-time"
    let spec: [NSString : Any] = [
        kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString : "\(quickTimeMetadataKeySpace)/\(stillImageTimeKey)",
        kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString : kCMMetadataBaseDataType_SInt8]
    var desc: CMFormatDescription? = nil
    CMMetadataFormatDescriptionCreateWithMetadataSpecifications(
        allocator: kCFAllocatorDefault,
        metadataType: kCMMetadataFormatType_Boxed,
        metadataSpecifications: [spec] as CFArray,
        formatDescriptionOut: &desc)
    let input = AVAssetWriterInput(
        mediaType: .metadata,
        outputSettings: nil,
        sourceFormatHint: desc)
    return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input)
}
```

Create the metadata for the still image's timed metadata track:

```swift
private func stillImageTimeMetadataItem() -> AVMetadataItem {
    let item = AVMutableMetadataItem()
    item.key = "com.apple.quicktime.still-image-time" as any NSCopying & NSObjectProtocol
    item.keySpace = AVMetadataKeySpace.quickTimeMetadata // "mdta"
    item.value = 0 as any NSCopying & NSObjectProtocol
    item.dataType = kCMMetadataBaseDataType_SInt8 as String // "com.apple.metadata.datatype.int8"
    return item
}
```

Next, the identifier-adding logic itself. First we create the `AVAssetReader`s and their corresponding `AVAssetReaderTrackOutput`s (`videoReaderOutput`, `audioReaderOutput`), then the `AVAssetWriter` and its corresponding `AVAssetWriterInput`s (`videoWriterInput`, `audioWriterInput`). This covers steps 1 and 2:

```swift
private func addIdentifier(
    _ identifier: String,
    fromVideoURL videoURL: URL,
    to destinationURL: URL,
    progress: ((Float) -> Void)?
    = nil
) async throws -> URL {
    let asset = AVURLAsset(url: videoURL)

    // --- Reader ---

    // Create the video reader
    let videoReader = try AVAssetReader(asset: asset)

    // Create the video reader output
    guard let videoTrack = try await asset.loadTracks(withMediaType: .video).first else {
        throw LivePhotosAssembleError.loadTracksFailed
    }
    let videoReaderOutputSettings: [String : Any] = [kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_32BGRA]
    let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderOutputSettings)

    // Add the video reader output to the video reader
    videoReader.add(videoReaderOutput)

    // Create the audio reader
    let audioReader = try AVAssetReader(asset: asset)

    // Create the audio reader output
    guard let audioTrack = try await asset.loadTracks(withMediaType: .audio).first else {
        throw LivePhotosAssembleError.loadTracksFailed
    }
    let audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)

    // Add the audio reader output to the audio reader
    audioReader.add(audioReaderOutput)

    // --- Writer ---

    // Create the asset writer
    let assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov)

    // Create the video writer input
    let naturalSize = try await videoTrack.load(.naturalSize)
    let videoWriterInputOutputSettings: [String : Any] = [
        AVVideoCodecKey : AVVideoCodecType.h264,
        AVVideoWidthKey : naturalSize.width,
        AVVideoHeightKey : naturalSize.height]
    let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoWriterInputOutputSettings)
    videoWriterInput.transform = try await videoTrack.load(.preferredTransform)
    videoWriterInput.expectsMediaDataInRealTime = true

    // Add the video writer input to the asset writer
    assetWriter.add(videoWriterInput)

    // Create the audio writer input
    let audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
    audioWriterInput.expectsMediaDataInRealTime = false

    // Add the audio writer input to the asset writer
    assetWriter.add(audioWriterInput)

    // ...
}
```

Next, we write the prepared identifier metadata with the `AVAssetWriter` (it can only be set before writing begins), add the `assetWriterInput` of the `AVAssetWriterInputMetadataAdaptor` to the `AVAssetWriter`, and put the `AVAssetWriter` into the writing state. This covers steps 3, 4, and 5:

```swift
private func addIdentifier(
    _ identifier: String,
    fromVideoURL videoURL: URL,
    to destinationURL: URL,
    progress: ((Float) -> Void)? = nil
) async throws -> URL {
    // ...

    // Create the identifier metadata
    let identifierMetadata = metadataItem(for: identifier)
    // Create the still image time metadata track
    let stillImageTimeMetadataAdaptor = self.stillImageTimeMetadataAdaptor()
    assetWriter.metadata = [identifierMetadata]
    assetWriter.add(stillImageTimeMetadataAdaptor.assetWriterInput)

    // Start the asset writer
    assetWriter.startWriting()
    assetWriter.startSession(atSourceTime: .zero)

    // ...
}
```

Next, we write the metadata of the timed metadata track using the `AVAssetReader` and the `AVAssetWriterInputMetadataAdaptor`. This covers step 6:

```swift
private func addIdentifier(
    _ identifier: String,
    fromVideoURL videoURL: URL,
    to destinationURL: URL,
    progress: ((Float) -> Void)? = nil
) async throws -> URL {
    // ...

    let frameCount = try await asset.frameCount()
    let stillImagePercent: Float = 0.5
    let stillImageTimeRange = try await asset.makeStillImageTimeRange(
        percent: stillImagePercent,
        inFrameCount: frameCount)
    _ = stillImageTimeMetadataAdaptor.append(
        AVTimedMetadataGroup(
            items: [stillImageTimeMetadataItem()],
            timeRange: stillImageTimeRange))

    // ...
}
```

> The methods that compute an `AVAsset`'s frame count and the still image's `CMTimeRange`:
>
> ```swift
> extension AVAsset {
>     func frameCount(exact: Bool = false) async throws -> Int {
>         guard let videoTrack = try await loadTracks(withMediaType: .video).first else { return 0 }
>         if !exact {
>             // Estimate: duration × nominal frame rate
>             let duration = CMTimeGetSeconds(try await load(.duration))
>             let nominalFrameRate = Float64(try await videoTrack.load(.nominalFrameRate))
>             return Int(duration * nominalFrameRate)
>         }
>         // Exact count: decode and count every sample buffer
>         let videoReader = try AVAssetReader(asset: self)
>         let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil)
>         videoReader.add(videoReaderOutput)
>         videoReader.startReading()
>         var frameCount = 0
>         while videoReaderOutput.copyNextSampleBuffer() != nil {
>             frameCount += 1
>         }
>         videoReader.cancelReading()
>         return frameCount
>     }
>
>     func makeStillImageTimeRange(percent: Float, inFrameCount: Int = 0) async throws -> CMTimeRange {
>         var time = try await load(.duration)
>         var frameCount = inFrameCount
>         if frameCount == 0 {
>             frameCount = try await self.frameCount(exact: true)
>         }
>         // Duration of a single frame, in the asset's timescale
>         let duration = Int64(Float(time.value) / Float(frameCount))
>         // Place the still image `percent` of the way through the video
>         time.value = Int64(Float(time.value) * percent)
>         return CMTimeRangeMake(start: time, duration: CMTimeMake(value: duration, timescale: time.timescale))
>     }
> }
> ```

Next, `videoReaderOutput`, `audioReaderOutput`, `videoWriterInput`, and `audioWriterInput` enter the reading/writing state: as soon as an `AVAssetReaderOutput` reads track data, the corresponding `AVAssetWriterInput` writes it. This covers steps 7, 8, and 9:

```swift
private func addIdentifier(
    _ identifier: String,
    fromVideoURL videoURL: URL,
    to destinationURL: URL,
    progress: ((Float) -> Void)? = nil
) async throws -> URL {
    // ...

    async let writingVideoFinished: Bool = withCheckedThrowingContinuation { continuation in
        Task {
            videoReader.startReading()
            var currentFrameCount = 0
            videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "videoWriterInputQueue")) {
                while videoWriterInput.isReadyForMoreMediaData {
                    if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
                        currentFrameCount += 1
                        if let progress {
                            let progressValue = min(Float(currentFrameCount) / Float(frameCount), 1.0)
                            // Report progress on the main actor
                            Task { @MainActor in
                                progress(progressValue)
                            }
                        }
                        if !videoWriterInput.append(sampleBuffer) {
                            videoReader.cancelReading()
                            continuation.resume(throwing: LivePhotosAssembleError.writingVideoFailed)
                            return
                        }
                    } else {
                        videoWriterInput.markAsFinished()
                        continuation.resume(returning: true)
                        return
                    }
                }
            }
        }
    }

    async let writingAudioFinished: Bool = withCheckedThrowingContinuation { continuation in
        Task {
            audioReader.startReading()
            audioWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "audioWriterInputQueue")) {
                while audioWriterInput.isReadyForMoreMediaData {
                    if let sampleBuffer = audioReaderOutput.copyNextSampleBuffer() {
                        if !audioWriterInput.append(sampleBuffer) {
                            audioReader.cancelReading()
                            continuation.resume(throwing: LivePhotosAssembleError.writingAudioFailed)
                            return
                        }
                    } else {
                        audioWriterInput.markAsFinished()
                        continuation.resume(returning: true)
                        return
                    }
                }
            }
        }
    }

    // Wait for both the video and the audio writing to finish
    _ = try await (writingVideoFinished, writingAudioFinished)

    // ...
}
```

Finally, we wait for the `AVAssetWriter` to reach the completed state, and the video is created. This covers step 10:

```swift
private func addIdentifier(
    _ identifier: String,
    fromVideoURL videoURL: URL,
    to destinationURL: URL,
    progress: ((Float) -> Void)? = nil
) async throws -> URL {
    // ...

    await assetWriter.finishWriting()
    return destinationURL
}
```

The MOV video file with its special metadata is now complete. Refer back to "Creating a Live Photo from a photo and a video" for how the photo and the video are combined.
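That combination — step 3 of `assemble(photoURL:videoURL:progress:)`, which the skeleton above left as a comment — can be bridged with a continuation just like the other callbacks. A minimal sketch using `PHLivePhoto.request(withResourceFileURLs:placeholderImage:targetSize:contentMode:resultHandler:)`; the error case name is hypothetical:

```swift
// 3: Create a `PHLivePhoto` from the paired photo and video URLs.
return try await withCheckedThrowingContinuation { continuation in
    PHLivePhoto.request(
        withResourceFileURLs: [pairedPhotoURL, pairedVideoURL],
        placeholderImage: nil,
        targetSize: .zero,
        contentMode: .aspectFit
    ) { livePhoto, info in
        // The handler can fire first with a degraded (preview) Live Photo;
        // wait for the final, non-degraded result before resuming.
        if (info[PHLivePhotoInfoIsDegradedKey] as? Bool) == true { return }
        if let livePhoto {
            continuation.resume(returning: livePhoto)
        } else {
            continuation.resume(throwing: LivePhotosAssembleError.requestLivePhotoFailed) // hypothetical error case
        }
    }
}
```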
"Saved successfully" : "An error occurred") 818 | }) 819 | } 820 | ``` 821 | 822 | 823 | 824 | 825 | 826 | ## 参考资料 827 | 828 | [1] [Apple Introduces iPhone 6s & iPhone 6s Plus](https://www.apple.com/newsroom/2015/09/09Apple-Introduces-iPhone-6s-iPhone-6s-Plus/) 829 | 830 | [2] [Take and edit Live Photos](https://support.apple.com/en-us/HT207310) 831 | 832 | [3] [What is the “Maker Apple” Metadata in iPhone Photos?](https://photoinvestigator.co/blog/the-mystery-of-maker-apple-metadata/) 833 | 834 | [4] [Displaying Live Photos](https://developer.apple.com/documentation/photokit/displaying_live_photos) 835 | 836 | [5] [How to make Live Photo and save it in photo library in iOS.](https://prafullkumar77.medium.com/how-to-make-live-photo-and-save-it-in-photo-library-in-ios-5255cdc2f15d) 837 | 838 | [6] [LimitPoint LivePhoto](https://github.com/LimitPoint/LivePhoto) --------------------------------------------------------------------------------