├── .gitignore ├── LICENSE ├── MergeVideos.xcodeproj ├── project.pbxproj └── xcshareddata │ └── xcschemes │ └── MergeVideos.xcscheme ├── MergeVideos ├── AppDelegate.swift ├── Assets.xcassets │ └── AppIcon.appiconset │ │ └── Contents.json ├── Base.lproj │ ├── LaunchScreen.storyboard │ └── Main.storyboard ├── Info.plist ├── Sample video │ ├── black.mov │ ├── landscape.MOV │ ├── movie1.mov │ ├── movie2.mov │ ├── portrait.MOV │ ├── sample.mp3 │ └── silence.mp3 ├── VideoManager │ ├── KVExtensions.swift │ └── KVVideoManager.swift └── ViewController.swift ├── Podfile └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | # Mac OS X Finder 2 | .DS_Store 3 | 4 | # Ignore pod folder 5 | .Podfile.swp 6 | Podfile.lock 7 | Pods/ 8 | 9 | # Xcode 10 | xcuserdata/ 11 | DerivedData/ -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Khoa Vo 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /MergeVideos.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 48; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 926A9B69B2331F99C8C9E1C6 /* Pods_MergeVideos.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 226F5B5B88298E15EBD89684 /* Pods_MergeVideos.framework */; }; 11 | 9F21A12124B2E97F0099BEE2 /* landscape.MOV in Resources */ = {isa = PBXBuildFile; fileRef = 9F21A11F24B2E97F0099BEE2 /* landscape.MOV */; }; 12 | 9F21A12224B2E97F0099BEE2 /* portrait.MOV in Resources */ = {isa = PBXBuildFile; fileRef = 9F21A12024B2E97F0099BEE2 /* portrait.MOV */; }; 13 | 9F21A12424B2EC0C0099BEE2 /* sample.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 9F21A12324B2EC0C0099BEE2 /* sample.mp3 */; }; 14 | 9F52A872271F216300174122 /* KVExtensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9F52A871271F216300174122 /* KVExtensions.swift */; }; 15 | 9FB5855D1FEA2F2C0082F0CE /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9FB5855C1FEA2F2C0082F0CE /* AppDelegate.swift */; }; 16 | 9FB5855F1FEA2F2C0082F0CE /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9FB5855E1FEA2F2C0082F0CE /* ViewController.swift */; }; 17 | 9FB585621FEA2F2C0082F0CE /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 9FB585601FEA2F2C0082F0CE /* Main.storyboard */; }; 18 | 9FB585641FEA2F2C0082F0CE /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 9FB585631FEA2F2C0082F0CE /* Assets.xcassets */; }; 19 | 9FB585671FEA2F2C0082F0CE /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 9FB585651FEA2F2C0082F0CE /* LaunchScreen.storyboard */; }; 20 | 9FB5856F1FEA2F810082F0CE /* KVVideoManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9FB5856E1FEA2F800082F0CE /* KVVideoManager.swift */; }; 21 | 9FB585741FEA32AB0082F0CE /* movie1.mov in Resources */ = {isa = PBXBuildFile; fileRef = 9FB585721FEA32AB0082F0CE /* movie1.mov */; }; 22 | 9FB585751FEA32AB0082F0CE /* movie2.mov in Resources */ = {isa = PBXBuildFile; fileRef = 9FB585731FEA32AB0082F0CE /* movie2.mov */; }; 23 | 9FCC06E92022C4280078942F /* silence.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 9FCC06E82022C4270078942F /* silence.mp3 */; }; 24 | 9FE48B4D1FEB881900DBBA88 /* black.mov in Resources */ = {isa = PBXBuildFile; fileRef = 9FE48B4C1FEB881900DBBA88 /* black.mov */; }; 25 | /* End PBXBuildFile section */ 26 | 27 | /* Begin PBXFileReference section */ 28 | 120D421B11D234A90309D709 /* Pods-MergeVideos.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-MergeVideos.release.xcconfig"; path = "Pods/Target Support Files/Pods-MergeVideos/Pods-MergeVideos.release.xcconfig"; sourceTree = ""; }; 29 | 226F5B5B88298E15EBD89684 /* Pods_MergeVideos.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_MergeVideos.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 30 | 461AE80AAB042A2C5866C10C /* Pods-MergeVideos.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-MergeVideos.debug.xcconfig"; path = "Pods/Target Support Files/Pods-MergeVideos/Pods-MergeVideos.debug.xcconfig"; sourceTree = ""; }; 31 | 9F21A11F24B2E97F0099BEE2 /* landscape.MOV */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = landscape.MOV; sourceTree = ""; }; 32 | 9F21A12024B2E97F0099BEE2 /* portrait.MOV */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = 
portrait.MOV; sourceTree = ""; }; 33 | 9F21A12324B2EC0C0099BEE2 /* sample.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = sample.mp3; sourceTree = ""; }; 34 | 9F52A871271F216300174122 /* KVExtensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = KVExtensions.swift; sourceTree = ""; }; 35 | 9FB585591FEA2F2C0082F0CE /* MergeVideos.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = MergeVideos.app; sourceTree = BUILT_PRODUCTS_DIR; }; 36 | 9FB5855C1FEA2F2C0082F0CE /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 37 | 9FB5855E1FEA2F2C0082F0CE /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; 38 | 9FB585611FEA2F2C0082F0CE /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 39 | 9FB585631FEA2F2C0082F0CE /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 40 | 9FB585661FEA2F2C0082F0CE /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 41 | 9FB585681FEA2F2C0082F0CE /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 42 | 9FB5856E1FEA2F800082F0CE /* KVVideoManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = KVVideoManager.swift; sourceTree = ""; }; 43 | 9FB585721FEA32AB0082F0CE /* movie1.mov */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = movie1.mov; sourceTree = ""; }; 44 | 9FB585731FEA32AB0082F0CE /* movie2.mov */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = movie2.mov; sourceTree = ""; }; 45 | 9FCC06E82022C4270078942F /* silence.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = silence.mp3; sourceTree = ""; }; 46 | 9FE48B4C1FEB881900DBBA88 /* black.mov */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = black.mov; sourceTree = ""; }; 47 | /* End PBXFileReference section */ 48 | 49 | /* Begin PBXFrameworksBuildPhase section */ 50 | 9FB585561FEA2F2C0082F0CE /* Frameworks */ = { 51 | isa = PBXFrameworksBuildPhase; 52 | buildActionMask = 2147483647; 53 | files = ( 54 | 926A9B69B2331F99C8C9E1C6 /* Pods_MergeVideos.framework in Frameworks */, 55 | ); 56 | runOnlyForDeploymentPostprocessing = 0; 57 | }; 58 | /* End PBXFrameworksBuildPhase section */ 59 | 60 | /* Begin PBXGroup section */ 61 | 09E07E6CEECD83F3EC350656 /* Frameworks */ = { 62 | isa = PBXGroup; 63 | children = ( 64 | 226F5B5B88298E15EBD89684 /* Pods_MergeVideos.framework */, 65 | ); 66 | name = Frameworks; 67 | sourceTree = ""; 68 | }; 69 | 9FB585501FEA2F2B0082F0CE = { 70 | isa = PBXGroup; 71 | children = ( 72 | 9FB5855B1FEA2F2C0082F0CE /* MergeVideos */, 73 | 9FB5855A1FEA2F2C0082F0CE /* Products */, 74 | E3270B90AA6EBEA9092035D1 /* Pods */, 75 | 09E07E6CEECD83F3EC350656 /* Frameworks */, 76 | ); 77 | sourceTree = ""; 78 | }; 79 | 9FB5855A1FEA2F2C0082F0CE /* Products */ = { 80 | isa = PBXGroup; 81 | children = ( 82 | 9FB585591FEA2F2C0082F0CE /* MergeVideos.app */, 83 | ); 84 | name = Products; 85 | sourceTree = ""; 86 | }; 87 | 9FB5855B1FEA2F2C0082F0CE /* MergeVideos */ = { 88 | isa = PBXGroup; 
89 | children = ( 90 | 9FB585761FEA32CC0082F0CE /* VideoManager */, 91 | 9FC599CC1FEBE66600CA354C /* Sample video */, 92 | 9FB5855C1FEA2F2C0082F0CE /* AppDelegate.swift */, 93 | 9FB5855E1FEA2F2C0082F0CE /* ViewController.swift */, 94 | 9FB585601FEA2F2C0082F0CE /* Main.storyboard */, 95 | 9FB585631FEA2F2C0082F0CE /* Assets.xcassets */, 96 | 9FB585651FEA2F2C0082F0CE /* LaunchScreen.storyboard */, 97 | 9FB585681FEA2F2C0082F0CE /* Info.plist */, 98 | ); 99 | path = MergeVideos; 100 | sourceTree = ""; 101 | }; 102 | 9FB585761FEA32CC0082F0CE /* VideoManager */ = { 103 | isa = PBXGroup; 104 | children = ( 105 | 9FB5856E1FEA2F800082F0CE /* KVVideoManager.swift */, 106 | 9F52A871271F216300174122 /* KVExtensions.swift */, 107 | ); 108 | path = VideoManager; 109 | sourceTree = ""; 110 | }; 111 | 9FC599CC1FEBE66600CA354C /* Sample video */ = { 112 | isa = PBXGroup; 113 | children = ( 114 | 9F21A12324B2EC0C0099BEE2 /* sample.mp3 */, 115 | 9FCC06E82022C4270078942F /* silence.mp3 */, 116 | 9FB585721FEA32AB0082F0CE /* movie1.mov */, 117 | 9FB585731FEA32AB0082F0CE /* movie2.mov */, 118 | 9FE48B4C1FEB881900DBBA88 /* black.mov */, 119 | 9F21A11F24B2E97F0099BEE2 /* landscape.MOV */, 120 | 9F21A12024B2E97F0099BEE2 /* portrait.MOV */, 121 | ); 122 | path = "Sample video"; 123 | sourceTree = ""; 124 | }; 125 | E3270B90AA6EBEA9092035D1 /* Pods */ = { 126 | isa = PBXGroup; 127 | children = ( 128 | 461AE80AAB042A2C5866C10C /* Pods-MergeVideos.debug.xcconfig */, 129 | 120D421B11D234A90309D709 /* Pods-MergeVideos.release.xcconfig */, 130 | ); 131 | name = Pods; 132 | sourceTree = ""; 133 | }; 134 | /* End PBXGroup section */ 135 | 136 | /* Begin PBXNativeTarget section */ 137 | 9FB585581FEA2F2C0082F0CE /* MergeVideos */ = { 138 | isa = PBXNativeTarget; 139 | buildConfigurationList = 9FB5856B1FEA2F2C0082F0CE /* Build configuration list for PBXNativeTarget "MergeVideos" */; 140 | buildPhases = ( 141 | B6CD67D0F3C88CA3F855ECB8 /* [CP] Check Pods Manifest.lock */, 142 | 9FB585551FEA2F2C0082F0CE /* Sources */, 143 | 9FB585561FEA2F2C0082F0CE /* Frameworks */, 144 | 9FB585571FEA2F2C0082F0CE /* Resources */, 145 | 3B6815DE4EC4279664DA34E5 /* [CP] Embed Pods Frameworks */, 146 | ); 147 | buildRules = ( 148 | ); 149 | dependencies = ( 150 | ); 151 | name = MergeVideos; 152 | productName = MergeVideos; 153 | productReference = 9FB585591FEA2F2C0082F0CE /* MergeVideos.app */; 154 | productType = "com.apple.product-type.application"; 155 | }; 156 | /* End PBXNativeTarget section */ 157 | 158 | /* Begin PBXProject section */ 159 | 9FB585511FEA2F2B0082F0CE /* Project object */ = { 160 | isa = PBXProject; 161 | attributes = { 162 | CLASSPREFIX = KV; 163 | LastSwiftUpdateCheck = 0920; 164 | LastUpgradeCheck = 0920; 165 | ORGANIZATIONNAME = "Khoa Vo"; 166 | TargetAttributes = { 167 | 9FB585581FEA2F2C0082F0CE = { 168 | CreatedOnToolsVersion = 9.2; 169 | ProvisioningStyle = Manual; 170 | }; 171 | }; 172 | }; 173 | buildConfigurationList = 9FB585541FEA2F2B0082F0CE /* Build configuration list for PBXProject "MergeVideos" */; 174 | compatibilityVersion = "Xcode 8.0"; 175 | developmentRegion = en; 176 | hasScannedForEncodings = 0; 177 | knownRegions = ( 178 | en, 179 | Base, 180 | ); 181 | mainGroup = 9FB585501FEA2F2B0082F0CE; 182 | productRefGroup = 9FB5855A1FEA2F2C0082F0CE /* Products */; 183 | projectDirPath = ""; 184 | projectRoot = ""; 185 | targets = ( 186 | 9FB585581FEA2F2C0082F0CE /* MergeVideos */, 187 | ); 188 | }; 189 | /* End PBXProject section */ 190 | 191 | /* Begin PBXResourcesBuildPhase section */ 192 | 
9FB585571FEA2F2C0082F0CE /* Resources */ = { 193 | isa = PBXResourcesBuildPhase; 194 | buildActionMask = 2147483647; 195 | files = ( 196 | 9F21A12124B2E97F0099BEE2 /* landscape.MOV in Resources */, 197 | 9FB585671FEA2F2C0082F0CE /* LaunchScreen.storyboard in Resources */, 198 | 9FB585751FEA32AB0082F0CE /* movie2.mov in Resources */, 199 | 9F21A12224B2E97F0099BEE2 /* portrait.MOV in Resources */, 200 | 9F21A12424B2EC0C0099BEE2 /* sample.mp3 in Resources */, 201 | 9FCC06E92022C4280078942F /* silence.mp3 in Resources */, 202 | 9FE48B4D1FEB881900DBBA88 /* black.mov in Resources */, 203 | 9FB585641FEA2F2C0082F0CE /* Assets.xcassets in Resources */, 204 | 9FB585621FEA2F2C0082F0CE /* Main.storyboard in Resources */, 205 | 9FB585741FEA32AB0082F0CE /* movie1.mov in Resources */, 206 | ); 207 | runOnlyForDeploymentPostprocessing = 0; 208 | }; 209 | /* End PBXResourcesBuildPhase section */ 210 | 211 | /* Begin PBXShellScriptBuildPhase section */ 212 | 3B6815DE4EC4279664DA34E5 /* [CP] Embed Pods Frameworks */ = { 213 | isa = PBXShellScriptBuildPhase; 214 | buildActionMask = 2147483647; 215 | files = ( 216 | ); 217 | inputPaths = ( 218 | "${PODS_ROOT}/Target Support Files/Pods-MergeVideos/Pods-MergeVideos-frameworks.sh", 219 | "${BUILT_PRODUCTS_DIR}/CropViewController/CropViewController.framework", 220 | "${BUILT_PRODUCTS_DIR}/DKCamera/DKCamera.framework", 221 | "${BUILT_PRODUCTS_DIR}/DKImagePickerController/DKImagePickerController.framework", 222 | "${BUILT_PRODUCTS_DIR}/DKPhotoGallery/DKPhotoGallery.framework", 223 | "${BUILT_PRODUCTS_DIR}/SDWebImage/SDWebImage.framework", 224 | "${BUILT_PRODUCTS_DIR}/SwiftyGif/SwiftyGif.framework", 225 | ); 226 | name = "[CP] Embed Pods Frameworks"; 227 | outputPaths = ( 228 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/CropViewController.framework", 229 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/DKCamera.framework", 230 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/DKImagePickerController.framework", 231 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/DKPhotoGallery.framework", 232 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/SDWebImage.framework", 233 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/SwiftyGif.framework", 234 | ); 235 | runOnlyForDeploymentPostprocessing = 0; 236 | shellPath = /bin/sh; 237 | shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-MergeVideos/Pods-MergeVideos-frameworks.sh\"\n"; 238 | showEnvVarsInLog = 0; 239 | }; 240 | B6CD67D0F3C88CA3F855ECB8 /* [CP] Check Pods Manifest.lock */ = { 241 | isa = PBXShellScriptBuildPhase; 242 | buildActionMask = 2147483647; 243 | files = ( 244 | ); 245 | inputPaths = ( 246 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock", 247 | "${PODS_ROOT}/Manifest.lock", 248 | ); 249 | name = "[CP] Check Pods Manifest.lock"; 250 | outputPaths = ( 251 | "$(DERIVED_FILE_DIR)/Pods-MergeVideos-checkManifestLockResult.txt", 252 | ); 253 | runOnlyForDeploymentPostprocessing = 0; 254 | shellPath = /bin/sh; 255 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; 256 | showEnvVarsInLog = 0; 257 | }; 258 | /* End PBXShellScriptBuildPhase section */ 259 | 260 | /* Begin PBXSourcesBuildPhase section */ 261 | 9FB585551FEA2F2C0082F0CE /* Sources */ = { 262 | isa = PBXSourcesBuildPhase; 263 | buildActionMask = 2147483647; 264 | files = ( 265 | 9FB5855F1FEA2F2C0082F0CE /* ViewController.swift in Sources */, 266 | 9FB5856F1FEA2F810082F0CE /* KVVideoManager.swift in Sources */, 267 | 9F52A872271F216300174122 /* KVExtensions.swift in Sources */, 268 | 9FB5855D1FEA2F2C0082F0CE /* AppDelegate.swift in Sources */, 269 | ); 270 | runOnlyForDeploymentPostprocessing = 0; 271 | }; 272 | /* End PBXSourcesBuildPhase section */ 273 | 274 | /* Begin PBXVariantGroup section */ 275 | 9FB585601FEA2F2C0082F0CE /* Main.storyboard */ = { 276 | isa = PBXVariantGroup; 277 | children = ( 278 | 9FB585611FEA2F2C0082F0CE /* Base */, 279 | ); 280 | name = Main.storyboard; 281 | sourceTree = ""; 282 | }; 283 | 9FB585651FEA2F2C0082F0CE /* LaunchScreen.storyboard */ = { 284 | isa = PBXVariantGroup; 285 | children = ( 286 | 9FB585661FEA2F2C0082F0CE /* Base */, 287 | ); 288 | name = LaunchScreen.storyboard; 289 | sourceTree = ""; 290 | }; 291 | /* End PBXVariantGroup section */ 292 | 293 | /* Begin XCBuildConfiguration section */ 294 | 9FB585691FEA2F2C0082F0CE /* Debug */ = { 295 | isa = XCBuildConfiguration; 296 | buildSettings = { 297 | ALWAYS_SEARCH_USER_PATHS = NO; 298 | CLANG_ANALYZER_NONNULL = YES; 299 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 300 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 301 | CLANG_CXX_LIBRARY = "libc++"; 302 | CLANG_ENABLE_MODULES = YES; 303 | CLANG_ENABLE_OBJC_ARC = YES; 304 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 305 | CLANG_WARN_BOOL_CONVERSION = YES; 306 | CLANG_WARN_COMMA = YES; 307 | CLANG_WARN_CONSTANT_CONVERSION = YES; 308 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 309 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 310 | CLANG_WARN_EMPTY_BODY = YES; 311 | CLANG_WARN_ENUM_CONVERSION = YES; 312 | CLANG_WARN_INFINITE_RECURSION = YES; 313 | CLANG_WARN_INT_CONVERSION = YES; 314 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 315 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 316 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 317 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 318 | CLANG_WARN_STRICT_PROTOTYPES = YES; 319 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 320 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 321 | CLANG_WARN_UNREACHABLE_CODE = YES; 322 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 323 | CODE_SIGN_IDENTITY = "iPhone Developer"; 324 | COPY_PHASE_STRIP = NO; 325 | DEBUG_INFORMATION_FORMAT = dwarf; 326 | ENABLE_STRICT_OBJC_MSGSEND = YES; 327 | ENABLE_TESTABILITY = YES; 328 | GCC_C_LANGUAGE_STANDARD = gnu11; 329 | GCC_DYNAMIC_NO_PIC = NO; 330 | GCC_NO_COMMON_BLOCKS = YES; 331 | GCC_OPTIMIZATION_LEVEL = 0; 332 | GCC_PREPROCESSOR_DEFINITIONS = ( 333 | "DEBUG=1", 334 | "$(inherited)", 335 | ); 336 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 337 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 338 | GCC_WARN_UNDECLARED_SELECTOR = YES; 339 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 340 | GCC_WARN_UNUSED_FUNCTION = YES; 341 | GCC_WARN_UNUSED_VARIABLE = YES; 342 | IPHONEOS_DEPLOYMENT_TARGET = 11.2; 343 | MTL_ENABLE_DEBUG_INFO = YES; 344 | ONLY_ACTIVE_ARCH = YES; 345 | SDKROOT = iphoneos; 346 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = 
DEBUG; 347 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 348 | }; 349 | name = Debug; 350 | }; 351 | 9FB5856A1FEA2F2C0082F0CE /* Release */ = { 352 | isa = XCBuildConfiguration; 353 | buildSettings = { 354 | ALWAYS_SEARCH_USER_PATHS = NO; 355 | CLANG_ANALYZER_NONNULL = YES; 356 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 357 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 358 | CLANG_CXX_LIBRARY = "libc++"; 359 | CLANG_ENABLE_MODULES = YES; 360 | CLANG_ENABLE_OBJC_ARC = YES; 361 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 362 | CLANG_WARN_BOOL_CONVERSION = YES; 363 | CLANG_WARN_COMMA = YES; 364 | CLANG_WARN_CONSTANT_CONVERSION = YES; 365 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 366 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 367 | CLANG_WARN_EMPTY_BODY = YES; 368 | CLANG_WARN_ENUM_CONVERSION = YES; 369 | CLANG_WARN_INFINITE_RECURSION = YES; 370 | CLANG_WARN_INT_CONVERSION = YES; 371 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 372 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 373 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 374 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 375 | CLANG_WARN_STRICT_PROTOTYPES = YES; 376 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 377 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 378 | CLANG_WARN_UNREACHABLE_CODE = YES; 379 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 380 | CODE_SIGN_IDENTITY = "iPhone Developer"; 381 | COPY_PHASE_STRIP = NO; 382 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 383 | ENABLE_NS_ASSERTIONS = NO; 384 | ENABLE_STRICT_OBJC_MSGSEND = YES; 385 | GCC_C_LANGUAGE_STANDARD = gnu11; 386 | GCC_NO_COMMON_BLOCKS = YES; 387 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 388 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 389 | GCC_WARN_UNDECLARED_SELECTOR = YES; 390 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 391 | GCC_WARN_UNUSED_FUNCTION = YES; 392 | GCC_WARN_UNUSED_VARIABLE = YES; 393 | IPHONEOS_DEPLOYMENT_TARGET = 11.2; 394 | MTL_ENABLE_DEBUG_INFO = NO; 395 | SDKROOT = iphoneos; 396 | SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; 397 | VALIDATE_PRODUCT = YES; 398 | }; 399 | name = Release; 400 | }; 401 | 9FB5856C1FEA2F2C0082F0CE /* Debug */ = { 402 | isa = XCBuildConfiguration; 403 | baseConfigurationReference = 461AE80AAB042A2C5866C10C /* Pods-MergeVideos.debug.xcconfig */; 404 | buildSettings = { 405 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 406 | CODE_SIGN_IDENTITY = "Apple Development"; 407 | CODE_SIGN_STYLE = Manual; 408 | DEVELOPMENT_TEAM = ""; 409 | INFOPLIST_FILE = MergeVideos/Info.plist; 410 | IPHONEOS_DEPLOYMENT_TARGET = 13.0; 411 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 412 | PRODUCT_BUNDLE_IDENTIFIER = yuno.MergeVideos; 413 | PRODUCT_NAME = "$(TARGET_NAME)"; 414 | PROVISIONING_PROFILE = "9827d264-3cdb-4f7c-ab3e-20ea79d7a9ed"; 415 | PROVISIONING_PROFILE_SPECIFIER = ""; 416 | SWIFT_VERSION = 5.0; 417 | TARGETED_DEVICE_FAMILY = 1; 418 | }; 419 | name = Debug; 420 | }; 421 | 9FB5856D1FEA2F2C0082F0CE /* Release */ = { 422 | isa = XCBuildConfiguration; 423 | baseConfigurationReference = 120D421B11D234A90309D709 /* Pods-MergeVideos.release.xcconfig */; 424 | buildSettings = { 425 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 426 | CODE_SIGN_IDENTITY = "Apple Development"; 427 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Distribution"; 428 | CODE_SIGN_STYLE = Manual; 429 | DEVELOPMENT_TEAM = ""; 430 | INFOPLIST_FILE = MergeVideos/Info.plist; 431 | IPHONEOS_DEPLOYMENT_TARGET = 13.0; 432 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 433 | PRODUCT_BUNDLE_IDENTIFIER = 
yuno.MergeVideos; 434 | PRODUCT_NAME = "$(TARGET_NAME)"; 435 | PROVISIONING_PROFILE = "5f5c5d9d-fe2c-47a4-b4a7-b70cbdb1357e"; 436 | PROVISIONING_PROFILE_SPECIFIER = ""; 437 | SWIFT_VERSION = 5.0; 438 | TARGETED_DEVICE_FAMILY = 1; 439 | }; 440 | name = Release; 441 | }; 442 | /* End XCBuildConfiguration section */ 443 | 444 | /* Begin XCConfigurationList section */ 445 | 9FB585541FEA2F2B0082F0CE /* Build configuration list for PBXProject "MergeVideos" */ = { 446 | isa = XCConfigurationList; 447 | buildConfigurations = ( 448 | 9FB585691FEA2F2C0082F0CE /* Debug */, 449 | 9FB5856A1FEA2F2C0082F0CE /* Release */, 450 | ); 451 | defaultConfigurationIsVisible = 0; 452 | defaultConfigurationName = Release; 453 | }; 454 | 9FB5856B1FEA2F2C0082F0CE /* Build configuration list for PBXNativeTarget "MergeVideos" */ = { 455 | isa = XCConfigurationList; 456 | buildConfigurations = ( 457 | 9FB5856C1FEA2F2C0082F0CE /* Debug */, 458 | 9FB5856D1FEA2F2C0082F0CE /* Release */, 459 | ); 460 | defaultConfigurationIsVisible = 0; 461 | defaultConfigurationName = Release; 462 | }; 463 | /* End XCConfigurationList section */ 464 | }; 465 | rootObject = 9FB585511FEA2F2B0082F0CE /* Project object */; 466 | } 467 |
-------------------------------------------------------------------------------- /MergeVideos.xcodeproj/xcshareddata/xcschemes/MergeVideos.xcscheme: -------------------------------------------------------------------------------- [Shared Xcode scheme for the MergeVideos target; the XML markup was not preserved in this dump.]
-------------------------------------------------------------------------------- /MergeVideos/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.swift 3 | // MergeVideos 4 | // 5 | // Created by Khoa Vo on 12/20/17. 6 | // Copyright © 2017 Khoa Vo. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | @UIApplicationMain 12 | class AppDelegate: UIResponder, UIApplicationDelegate { 13 | 14 | var window: UIWindow? 15 | 16 | 17 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { 18 | // Override point for customization after application launch. 19 | return true 20 | } 21 | 22 | func applicationWillResignActive(_ application: UIApplication) { 23 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 24 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 25 | } 26 | 27 | func applicationDidEnterBackground(_ application: UIApplication) { 28 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 29 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 30 | } 31 | 32 | func applicationWillEnterForeground(_ application: UIApplication) { 33 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
34 | } 35 | 36 | func applicationDidBecomeActive(_ application: UIApplication) { 37 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 38 | } 39 | 40 | func applicationWillTerminate(_ application: UIApplication) { 41 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 42 | } 43 | 44 | 45 | } 46 | 47 |
-------------------------------------------------------------------------------- /MergeVideos/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | } 88 | ], 89 | "info" : { 90 | "version" : 1, 91 | "author" : "xcode" 92 | } 93 | }
-------------------------------------------------------------------------------- /MergeVideos/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- [Launch screen storyboard; the XML markup was not preserved in this dump.]
-------------------------------------------------------------------------------- /MergeVideos/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- [Main storyboard; the XML markup was not preserved in this dump. It contains the single scene whose buttons, activity indicator, and processing label are wired to the outlets and actions in ViewController.swift.]
-------------------------------------------------------------------------------- /MergeVideos/Info.plist: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8"?> 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> 3 | <plist version="1.0"> 4 | <dict> 5 | <key>CFBundleDevelopmentRegion</key> 6 | <string>$(DEVELOPMENT_LANGUAGE)</string> 7 | <key>CFBundleExecutable</key> 8 | <string>$(EXECUTABLE_NAME)</string> 9 | <key>CFBundleIdentifier</key> 10 | <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string> 11 | <key>CFBundleInfoDictionaryVersion</key> 12 | <string>6.0</string> 13 | <key>CFBundleName</key> 14 | <string>$(PRODUCT_NAME)</string> 15 | <key>CFBundlePackageType</key> 16 | <string>APPL</string> 17 | <key>CFBundleShortVersionString</key> 18 | <string>1.0</string> 19 | <key>CFBundleVersion</key> 20 | <string>1</string> 21 | <key>LSRequiresIPhoneOS</key> 22 | <true/> 23 | <key>NSPhotoLibraryUsageDescription</key> 24 | <string>Access Photo Library</string> 25 | <key>UILaunchStoryboardName</key> 26 | <string>LaunchScreen</string> 27 | <key>UIMainStoryboardFile</key> 28 | <string>Main</string> 29 | <key>UIRequiredDeviceCapabilities</key> 30 | <array> 31 | <string>armv7</string> 32 | </array> 33 | <key>UISupportedInterfaceOrientations</key> 34 | <array> 35 | <string>UIInterfaceOrientationPortrait</string> 36 | </array> 37 | <key>UISupportedInterfaceOrientations~ipad</key> 38 | <array> 39 | <string>UIInterfaceOrientationPortrait</string> 40 | <string>UIInterfaceOrientationPortraitUpsideDown</string> 41 | <string>UIInterfaceOrientationLandscapeLeft</string> 42 | <string>UIInterfaceOrientationLandscapeRight</string> 43 | </array> 44 | </dict> 45 | </plist> 46 |
-------------------------------------------------------------------------------- /MergeVideos/Sample video/black.mov: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/khoavd-dev/MergeVideos/82d90d4f17d23f64ae0d33be168943204dc666a5/MergeVideos/Sample video/black.mov
-------------------------------------------------------------------------------- /MergeVideos/Sample video/landscape.MOV: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/khoavd-dev/MergeVideos/82d90d4f17d23f64ae0d33be168943204dc666a5/MergeVideos/Sample video/landscape.MOV
-------------------------------------------------------------------------------- /MergeVideos/Sample video/movie1.mov: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/khoavd-dev/MergeVideos/82d90d4f17d23f64ae0d33be168943204dc666a5/MergeVideos/Sample video/movie1.mov
-------------------------------------------------------------------------------- /MergeVideos/Sample video/movie2.mov: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/khoavd-dev/MergeVideos/82d90d4f17d23f64ae0d33be168943204dc666a5/MergeVideos/Sample video/movie2.mov
-------------------------------------------------------------------------------- /MergeVideos/Sample video/portrait.MOV: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/khoavd-dev/MergeVideos/82d90d4f17d23f64ae0d33be168943204dc666a5/MergeVideos/Sample video/portrait.MOV
-------------------------------------------------------------------------------- /MergeVideos/Sample video/sample.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/khoavd-dev/MergeVideos/82d90d4f17d23f64ae0d33be168943204dc666a5/MergeVideos/Sample video/sample.mp3
-------------------------------------------------------------------------------- /MergeVideos/Sample video/silence.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/khoavd-dev/MergeVideos/82d90d4f17d23f64ae0d33be168943204dc666a5/MergeVideos/Sample video/silence.mp3
-------------------------------------------------------------------------------- /MergeVideos/VideoManager/KVExtensions.swift: -------------------------------------------------------------------------------- 1 | // 2 | // KVExtensions.swift 3 | // MergeVideos 4 | // 5 | // Created by Khoa Vo on 19/10/2021. 6 | // Copyright © 2021 Khoa Vo. All rights reserved.
7 | // 8 | 9 | import UIKit 10 | import AVKit 11 | 12 | extension Double { 13 | func toCMTime() -> CMTime { 14 | return CMTime(seconds: self, preferredTimescale: CMTimeScale(NSEC_PER_SEC)) 15 | } 16 | } 17 | 18 | extension FileManager { 19 | func removeItemIfExisted(_ url:URL) -> Void { 20 | if FileManager.default.fileExists(atPath: url.path) { 21 | do { 22 | try FileManager.default.removeItem(atPath: url.path) 23 | } 24 | catch { 25 | print("Failed to delete file") 26 | } 27 | } 28 | } 29 | } 30 | 31 | extension AVAssetTrack { 32 | var fixedPreferredTransform: CGAffineTransform { 33 | var newT = preferredTransform 34 | switch [newT.a, newT.b, newT.c, newT.d] { 35 | case [1, 0, 0, 1]: 36 | newT.tx = 0 37 | newT.ty = 0 38 | case [1, 0, 0, -1]: 39 | newT.tx = 0 40 | newT.ty = naturalSize.height 41 | case [-1, 0, 0, 1]: 42 | newT.tx = naturalSize.width 43 | newT.ty = 0 44 | case [-1, 0, 0, -1]: 45 | newT.tx = naturalSize.width 46 | newT.ty = naturalSize.height 47 | case [0, -1, 1, 0]: 48 | newT.tx = 0 49 | newT.ty = naturalSize.width 50 | case [0, 1, -1, 0]: 51 | newT.tx = naturalSize.height 52 | newT.ty = 0 53 | case [0, 1, 1, 0]: 54 | newT.tx = 0 55 | newT.ty = 0 56 | case [0, -1, -1, 0]: 57 | newT.tx = naturalSize.height 58 | newT.ty = naturalSize.width 59 | default: 60 | break 61 | } 62 | return newT 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /MergeVideos/VideoManager/KVVideoManager.swift: -------------------------------------------------------------------------------- 1 | // 2 | // KVVideoManager.swift 3 | // MergeVideos 4 | // 5 | // Created by Khoa Vo on 12/20/17. 6 | // Copyright © 2017 Khoa Vo. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import MediaPlayer 11 | import MobileCoreServices 12 | import AVKit 13 | 14 | struct VideoData { 15 | var index:Int? 16 | var image:UIImage? 17 | var asset:AVAsset? 18 | var isVideo = false 19 | } 20 | 21 | struct TextData { 22 | var text = "" 23 | var fontSize:CGFloat = 40 24 | var textColor = UIColor.red 25 | var showTime:CGFloat = 0 26 | var endTime:CGFloat = 0 27 | var textFrame = CGRect(x: 0, y: 0, width: 500, height: 500) 28 | } 29 | 30 | class KVVideoManager { 31 | static let shared = KVVideoManager() 32 | 33 | let defaultSize = CGSize(width: 720, height: 1280) // Default video size 34 | var videoDuration = 30.0 // Duration of output video when merging videos & images 35 | var imageDuration = 5.0 // Duration of each image 36 | 37 | 38 | typealias Completion = (URL?, Error?) 
-> Void 39 | 40 | // 41 | // Merge array videos 42 | // 43 | func merge(arrayVideos:[AVAsset], completion:@escaping Completion) -> Void { 44 | doMerge(arrayVideos: arrayVideos, animation: false, completion: completion) 45 | } 46 | 47 | // 48 | // Merge array videos with transition animation 49 | // 50 | func mergeWithAnimation(arrayVideos:[AVAsset], completion:@escaping Completion) -> Void { 51 | doMerge(arrayVideos: arrayVideos, animation: true, completion: completion) 52 | } 53 | 54 | // 55 | // Add background music to video 56 | // 57 | func merge(video:AVAsset, withBackgroundMusic music:AVAsset, completion:@escaping Completion) -> Void { 58 | // Init composition 59 | let mixComposition = AVMutableComposition() 60 | var arrayLayerInstructions:[AVMutableVideoCompositionLayerInstruction] = [] 61 | 62 | // Get video track 63 | guard let videoTrack = video.tracks(withMediaType: AVMediaType.video).first else { 64 | completion(nil, nil) 65 | return 66 | } 67 | 68 | // Get audio track 69 | var audioTrack:AVAssetTrack? 70 | if music.tracks(withMediaType: AVMediaType.audio).count > 0 { 71 | audioTrack = music.tracks(withMediaType: AVMediaType.audio).first 72 | } 73 | 74 | // Init video & audio composition track 75 | let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, 76 | preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 77 | 78 | let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, 79 | preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 80 | 81 | let startTime = CMTime.zero 82 | let duration = video.duration 83 | var insertTime = CMTime.zero 84 | 85 | do { 86 | // Add video track to video composition at specific time 87 | try videoCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: startTime, duration: duration), 88 | of: videoTrack, 89 | at: insertTime) 90 | 91 | // Add audio track to audio composition at specific time 92 | if let audioTrack = audioTrack { 93 | let audioDuration = music.duration > video.duration ? 
video.duration : music.duration 94 | try audioCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: startTime, duration: audioDuration), 95 | of: audioTrack, 96 | at: insertTime) 97 | } 98 | 99 | // Add instruction for video track 100 | if let videoCompositionTrack = videoCompositionTrack { 101 | let layerInstruction = videoCompositionInstructionForTrack(track: videoCompositionTrack, asset: video, targetSize: defaultSize) 102 | arrayLayerInstructions.append(layerInstruction) 103 | } 104 | 105 | // Increase the insert time 106 | insertTime = CMTimeAdd(insertTime, duration) 107 | } catch { 108 | print("Load track error") 109 | completion(nil, nil) 110 | } 111 | 112 | let path = NSTemporaryDirectory().appending("mergedVideo.mp4") 113 | let exportURL = URL(fileURLWithPath: path) 114 | 115 | // Check exist and remove old file 116 | FileManager.default.removeItemIfExisted(exportURL) 117 | 118 | // Main video composition instruction 119 | let mainInstruction = AVMutableVideoCompositionInstruction() 120 | mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: insertTime) 121 | mainInstruction.layerInstructions = arrayLayerInstructions 122 | 123 | // Main video composition 124 | let mainComposition = AVMutableVideoComposition() 125 | mainComposition.instructions = [mainInstruction] 126 | mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30) 127 | mainComposition.renderSize = defaultSize 128 | 129 | // Init exporter 130 | let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) 131 | exporter?.outputURL = exportURL 132 | exporter?.outputFileType = AVFileType.mp4 133 | exporter?.shouldOptimizeForNetworkUse = true 134 | exporter?.videoComposition = mainComposition 135 | 136 | // Do export 137 | exporter?.exportAsynchronously(completionHandler: { 138 | DispatchQueue.main.async { 139 | self.exportDidFinish(exporter: exporter, videoURL: exportURL, completion: completion) 140 | } 141 | }) 142 | } 143 | 144 | private func doMerge(arrayVideos:[AVAsset], animation:Bool, completion:@escaping Completion) -> Void { 145 | var insertTime = CMTime.zero 146 | var arrayLayerInstructions:[AVMutableVideoCompositionLayerInstruction] = [] 147 | 148 | // Silence sound (in case video has no sound track) 149 | guard let silenceURL = Bundle.main.url(forResource: "silence", withExtension: "mp3") else { 150 | print("Missing resource") 151 | completion(nil, nil) 152 | return 153 | } 154 | 155 | let silenceAsset = AVAsset(url:silenceURL) 156 | let silenceSoundTrack = silenceAsset.tracks(withMediaType: AVMediaType.audio).first 157 | 158 | // Init composition 159 | let mixComposition = AVMutableComposition() 160 | 161 | for videoAsset in arrayVideos { 162 | // Get video track 163 | guard let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first else { continue } 164 | 165 | // Get audio track 166 | var audioTrack:AVAssetTrack? 
167 | if videoAsset.tracks(withMediaType: AVMediaType.audio).count > 0 { 168 | audioTrack = videoAsset.tracks(withMediaType: AVMediaType.audio).first 169 | } 170 | else { 171 | audioTrack = silenceSoundTrack 172 | } 173 | 174 | // Init video & audio composition track 175 | let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, 176 | preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 177 | 178 | let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, 179 | preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 180 | 181 | do { 182 | let startTime = CMTime.zero 183 | let duration = videoAsset.duration 184 | 185 | // Add video track to video composition at specific time 186 | try videoCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: startTime, duration: duration), 187 | of: videoTrack, 188 | at: insertTime) 189 | 190 | // Add audio track to audio composition at specific time 191 | if let audioTrack = audioTrack { 192 | try audioCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: startTime, duration: duration), 193 | of: audioTrack, 194 | at: insertTime) 195 | } 196 | 197 | // Add instruction for video track 198 | if let videoCompositionTrack = videoCompositionTrack { 199 | let layerInstruction = videoCompositionInstructionForTrack(track: videoCompositionTrack, asset: videoAsset, targetSize: defaultSize) 200 | 201 | // Hide video track before changing to new track 202 | let endTime = CMTimeAdd(insertTime, duration) 203 | 204 | if animation { 205 | let durationAnimation = 1.0.toCMTime() 206 | 207 | layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: CMTimeRange(start: endTime, duration: durationAnimation)) 208 | } 209 | else { 210 | layerInstruction.setOpacity(0, at: endTime) 211 | } 212 | 213 | arrayLayerInstructions.append(layerInstruction) 214 | } 215 | 216 | // Increase the insert time 217 | insertTime = CMTimeAdd(insertTime, duration) 218 | } 219 | catch { 220 | print("Load track error") 221 | } 222 | } 223 | 224 | // Main video composition instruction 225 | let mainInstruction = AVMutableVideoCompositionInstruction() 226 | mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: insertTime) 227 | mainInstruction.layerInstructions = arrayLayerInstructions 228 | 229 | // Main video composition 230 | let mainComposition = AVMutableVideoComposition() 231 | mainComposition.instructions = [mainInstruction] 232 | mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30) 233 | mainComposition.renderSize = defaultSize 234 | 235 | // Export to file 236 | let path = NSTemporaryDirectory().appending("mergedVideo.mp4") 237 | let exportURL = URL(fileURLWithPath: path) 238 | 239 | // Remove file if existed 240 | FileManager.default.removeItemIfExisted(exportURL) 241 | 242 | // Init exporter 243 | let exporter = AVAssetExportSession.init(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) 244 | exporter?.outputURL = exportURL 245 | exporter?.outputFileType = AVFileType.mp4 246 | exporter?.shouldOptimizeForNetworkUse = true 247 | exporter?.videoComposition = mainComposition 248 | 249 | // Do export 250 | exporter?.exportAsynchronously(completionHandler: { 251 | DispatchQueue.main.async { 252 | self.exportDidFinish(exporter: exporter, videoURL: exportURL, completion: completion) 253 | } 254 | }) 255 | } 256 | 257 | // 258 | // Merge videos & images 259 | // 260 | func makeVideoFrom(data:[VideoData], textData:[TextData]?, 
completion:@escaping Completion) -> Void { 261 | var insertTime = CMTime.zero 262 | var arrayLayerInstructions:[AVMutableVideoCompositionLayerInstruction] = [] 263 | var arrayLayerImages:[CALayer] = [] 264 | 265 | // Black background video 266 | guard let bgVideoURL = Bundle.main.url(forResource: "black", withExtension: "mov") else { 267 | print("Need black background video !") 268 | completion(nil,nil) 269 | return 270 | } 271 | 272 | let bgVideoAsset = AVAsset(url: bgVideoURL) 273 | guard let bgVideoTrack = bgVideoAsset.tracks(withMediaType: AVMediaType.video).first else { 274 | print("Need black background video !") 275 | completion(nil,nil) 276 | return 277 | } 278 | 279 | // Silence sound (in case video has no sound track) 280 | guard let silenceURL = Bundle.main.url(forResource: "silence", withExtension: "mp3") else { 281 | print("Missing resource") 282 | completion(nil, nil) 283 | return 284 | } 285 | 286 | let silenceAsset = AVAsset(url:silenceURL) 287 | let silenceSoundTrack = silenceAsset.tracks(withMediaType: AVMediaType.audio).first 288 | 289 | // Init composition 290 | let mixComposition = AVMutableComposition() 291 | 292 | // Merge 293 | for videoData in data { 294 | if videoData.isVideo { 295 | guard let videoAsset = videoData.asset else { continue } 296 | 297 | // Get video track 298 | guard let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first else { continue } 299 | 300 | // Get audio track 301 | var audioTrack:AVAssetTrack? 302 | if videoAsset.tracks(withMediaType: AVMediaType.audio).count > 0 { 303 | audioTrack = videoAsset.tracks(withMediaType: AVMediaType.audio).first 304 | } 305 | else { 306 | audioTrack = silenceSoundTrack 307 | } 308 | 309 | // Init video & audio composition track 310 | let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, 311 | preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 312 | 313 | let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, 314 | preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 315 | 316 | do { 317 | let startTime = CMTime.zero 318 | let duration = videoAsset.duration 319 | 320 | // Add video track to video composition at specific time 321 | try videoCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: startTime, duration: duration), 322 | of: videoTrack, 323 | at: insertTime) 324 | 325 | // Add audio track to audio composition at specific time 326 | if let audioTrack = audioTrack { 327 | try audioCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: startTime, duration: duration), 328 | of: audioTrack, 329 | at: insertTime) 330 | } 331 | 332 | // Add instruction for video track 333 | if let videoCompositionTrack = videoCompositionTrack { 334 | let layerInstruction = videoCompositionInstructionForTrack(track: videoCompositionTrack, asset: videoAsset, targetSize: defaultSize) 335 | 336 | // Hide video track before changing to new track 337 | let endTime = CMTimeAdd(insertTime, duration) 338 | let durationAnimation = 1.0.toCMTime() 339 | 340 | layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: CMTimeRange.init(start: endTime, duration: durationAnimation)) 341 | 342 | arrayLayerInstructions.append(layerInstruction) 343 | } 344 | 345 | // Increase the insert time 346 | insertTime = CMTimeAdd(insertTime, duration) 347 | } 348 | catch { 349 | print("Load track error") 350 | } 351 | } 352 | else { // Image 353 | let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: 
AVMediaType.video, 354 | preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 355 | 356 | let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, 357 | preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 358 | 359 | let itemDuration = imageDuration.toCMTime() 360 | 361 | do { 362 | try videoCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: itemDuration), 363 | of: bgVideoTrack, 364 | at: insertTime) 365 | 366 | // Add audio track to audio composition at specific time 367 | if let audioTrack = silenceSoundTrack { 368 | try audioCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: itemDuration), 369 | of: audioTrack, 370 | at: insertTime) 371 | } 372 | } 373 | catch { 374 | print("Load background track error") 375 | } 376 | 377 | // Create Image layer 378 | guard let image = videoData.image else { continue } 379 | 380 | let imageLayer = CALayer() 381 | imageLayer.frame = CGRect.init(origin: CGPoint.zero, size: defaultSize) 382 | imageLayer.contents = image.cgImage 383 | imageLayer.opacity = 0 384 | imageLayer.contentsGravity = CALayerContentsGravity.resizeAspectFill 385 | 386 | setOrientation(image: image, onLayer: imageLayer, outputSize: defaultSize) 387 | 388 | // Add Fade in & Fade out animation 389 | let fadeInAnimation = CABasicAnimation.init(keyPath: "opacity") 390 | fadeInAnimation.duration = 1 391 | fadeInAnimation.fromValue = NSNumber(value: 0) 392 | fadeInAnimation.toValue = NSNumber(value: 1) 393 | fadeInAnimation.isRemovedOnCompletion = false 394 | fadeInAnimation.beginTime = insertTime.seconds == 0 ? 0.05: insertTime.seconds 395 | fadeInAnimation.fillMode = CAMediaTimingFillMode.forwards 396 | imageLayer.add(fadeInAnimation, forKey: "opacityIN") 397 | 398 | let fadeOutAnimation = CABasicAnimation.init(keyPath: "opacity") 399 | fadeOutAnimation.duration = 1 400 | fadeOutAnimation.fromValue = NSNumber(value: 1) 401 | fadeOutAnimation.toValue = NSNumber(value: 0) 402 | fadeOutAnimation.isRemovedOnCompletion = false 403 | fadeOutAnimation.beginTime = CMTimeAdd(insertTime, itemDuration).seconds 404 | fadeOutAnimation.fillMode = CAMediaTimingFillMode.forwards 405 | imageLayer.add(fadeOutAnimation, forKey: "opacityOUT") 406 | 407 | arrayLayerImages.append(imageLayer) 408 | 409 | // Increase the insert time 410 | insertTime = CMTimeAdd(insertTime, itemDuration) 411 | } 412 | } 413 | 414 | // Init Video layer 415 | let videoLayer = CALayer() 416 | videoLayer.frame = CGRect(x: 0, y: 0, width: defaultSize.width, height: defaultSize.height) 417 | 418 | let parentlayer = CALayer() 419 | parentlayer.frame = CGRect(x: 0, y: 0, width: defaultSize.width, height: defaultSize.height) 420 | parentlayer.addSublayer(videoLayer) 421 | 422 | // Add Image layers 423 | for layer in arrayLayerImages { 424 | parentlayer.addSublayer(layer) 425 | } 426 | 427 | // Add Text layer 428 | if let textData = textData { 429 | for aTextData in textData { 430 | let textLayer = makeTextLayer(string: aTextData.text, 431 | fontSize: aTextData.fontSize, 432 | textColor: UIColor.green, 433 | frame: aTextData.textFrame, 434 | showTime: aTextData.showTime, 435 | hideTime: aTextData.endTime) 436 | parentlayer.addSublayer(textLayer) 437 | } 438 | } 439 | 440 | // Main video composition instruction 441 | let mainInstruction = AVMutableVideoCompositionInstruction() 442 | mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: insertTime) 443 | mainInstruction.layerInstructions = arrayLayerInstructions 444 | 445 | 
// Main video composition 446 | let mainComposition = AVMutableVideoComposition() 447 | mainComposition.instructions = [mainInstruction] 448 | mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30) 449 | mainComposition.renderSize = defaultSize 450 | mainComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentlayer) 451 | 452 | // Export to file 453 | let path = NSTemporaryDirectory().appending("mergedVideo.mp4") 454 | let exportURL = URL.init(fileURLWithPath: path) 455 | 456 | // Remove file if existed 457 | FileManager.default.removeItemIfExisted(exportURL) 458 | 459 | let exporter = AVAssetExportSession.init(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) 460 | exporter?.outputURL = exportURL 461 | exporter?.outputFileType = AVFileType.mp4 462 | exporter?.shouldOptimizeForNetworkUse = true 463 | exporter?.videoComposition = mainComposition 464 | 465 | // Do export 466 | exporter?.exportAsynchronously(completionHandler: { 467 | DispatchQueue.main.async { 468 | self.exportDidFinish(exporter: exporter, videoURL: exportURL, completion: completion) 469 | } 470 | }) 471 | } 472 | } 473 | 474 | // MARK:- Private methods 475 | extension KVVideoManager { 476 | private func videoCompositionInstructionForTrack(track: AVCompositionTrack?, asset: AVAsset, targetSize: CGSize) -> AVMutableVideoCompositionLayerInstruction { 477 | guard let track = track else { 478 | return AVMutableVideoCompositionLayerInstruction() 479 | } 480 | 481 | let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track) 482 | let assetTrack = asset.tracks(withMediaType: AVMediaType.video)[0] 483 | 484 | let transform = assetTrack.fixedPreferredTransform 485 | let assetInfo = orientationFromTransform(transform) 486 | 487 | var scaleToFitRatio = targetSize.width / assetTrack.naturalSize.width 488 | if assetInfo.isPortrait { 489 | // Scale to fit target size 490 | scaleToFitRatio = targetSize.width / assetTrack.naturalSize.height 491 | let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio) 492 | 493 | // Align center Y 494 | let newY = targetSize.height/2 - (assetTrack.naturalSize.width * scaleToFitRatio)/2 495 | let moveCenterFactor = CGAffineTransform(translationX: 0, y: newY) 496 | 497 | let finalTransform = transform.concatenating(scaleFactor).concatenating(moveCenterFactor) 498 | 499 | instruction.setTransform(finalTransform, at: .zero) 500 | } else { 501 | // Scale to fit target size 502 | let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio) 503 | 504 | // Align center Y 505 | let newY = targetSize.height/2 - (assetTrack.naturalSize.height * scaleToFitRatio)/2 506 | let moveCenterFactor = CGAffineTransform(translationX: 0, y: newY) 507 | 508 | let finalTransform = transform.concatenating(scaleFactor).concatenating(moveCenterFactor) 509 | 510 | instruction.setTransform(finalTransform, at: .zero) 511 | } 512 | 513 | return instruction 514 | } 515 | 516 | private func orientationFromTransform(_ transform: CGAffineTransform) -> (orientation: UIImage.Orientation, isPortrait: Bool) { 517 | var assetOrientation = UIImage.Orientation.up 518 | var isPortrait = false 519 | 520 | switch [transform.a, transform.b, transform.c, transform.d] { 521 | case [0.0, 1.0, -1.0, 0.0]: 522 | assetOrientation = .right 523 | isPortrait = true 524 | 525 | case [0.0, -1.0, 1.0, 0.0]: 526 | assetOrientation = .left 527 | isPortrait = true 528 | 529 | case [1.0, 0.0, 0.0, 1.0]: 530 | 
assetOrientation = .up 531 | 532 | case [-1.0, 0.0, 0.0, -1.0]: 533 | assetOrientation = .down 534 | 535 | default: 536 | break 537 | } 538 | 539 | return (assetOrientation, isPortrait) 540 | } 541 | 542 | private func setOrientation(image:UIImage?, onLayer:CALayer, outputSize:CGSize) -> Void { 543 | guard let image = image else { return } 544 | 545 | if image.imageOrientation == UIImage.Orientation.up { 546 | // Do nothing 547 | } 548 | else if image.imageOrientation == UIImage.Orientation.left { 549 | let rotate = CGAffineTransform(rotationAngle: .pi/2) 550 | onLayer.setAffineTransform(rotate) 551 | } 552 | else if image.imageOrientation == UIImage.Orientation.down { 553 | let rotate = CGAffineTransform(rotationAngle: .pi) 554 | onLayer.setAffineTransform(rotate) 555 | } 556 | else if image.imageOrientation == UIImage.Orientation.right { 557 | let rotate = CGAffineTransform(rotationAngle: -.pi/2) 558 | onLayer.setAffineTransform(rotate) 559 | } 560 | } 561 | 562 | private func exportDidFinish(exporter:AVAssetExportSession?, videoURL:URL, completion:@escaping Completion) -> Void { 563 | if exporter?.status == AVAssetExportSession.Status.completed { 564 | print("Exported file: \(videoURL.absoluteString)") 565 | completion(videoURL,nil) 566 | } 567 | else if exporter?.status == AVAssetExportSession.Status.failed { 568 | completion(videoURL,exporter?.error) 569 | } 570 | } 571 | 572 | private func makeTextLayer(string:String, fontSize:CGFloat, textColor:UIColor, frame:CGRect, showTime:CGFloat, hideTime:CGFloat) -> CXETextLayer { 573 | let textLayer = CXETextLayer() 574 | textLayer.string = string 575 | textLayer.fontSize = fontSize 576 | textLayer.foregroundColor = textColor.cgColor 577 | textLayer.alignmentMode = CATextLayerAlignmentMode.center 578 | textLayer.opacity = 0 579 | textLayer.frame = frame 580 | 581 | 582 | let fadeInAnimation = CABasicAnimation.init(keyPath: "opacity") 583 | fadeInAnimation.duration = 0.5 584 | fadeInAnimation.fromValue = NSNumber(value: 0) 585 | fadeInAnimation.toValue = NSNumber(value: 1) 586 | fadeInAnimation.isRemovedOnCompletion = false 587 | fadeInAnimation.beginTime = CFTimeInterval(showTime) 588 | fadeInAnimation.fillMode = CAMediaTimingFillMode.forwards 589 | 590 | textLayer.add(fadeInAnimation, forKey: "textOpacityIN") 591 | 592 | if hideTime > 0 { 593 | let fadeOutAnimation = CABasicAnimation.init(keyPath: "opacity") 594 | fadeOutAnimation.duration = 1 595 | fadeOutAnimation.fromValue = NSNumber(value: 1) 596 | fadeOutAnimation.toValue = NSNumber(value: 0) 597 | fadeOutAnimation.isRemovedOnCompletion = false 598 | fadeOutAnimation.beginTime = CFTimeInterval(hideTime) 599 | fadeOutAnimation.fillMode = CAMediaTimingFillMode.forwards 600 | 601 | textLayer.add(fadeOutAnimation, forKey: "textOpacityOUT") 602 | } 603 | 604 | return textLayer 605 | } 606 | } 607 | 608 | class CXETextLayer : CATextLayer { 609 | 610 | override init() { 611 | super.init() 612 | } 613 | 614 | override init(layer: Any) { 615 | super.init(layer: layer) 616 | } 617 | 618 | required init(coder aDecoder: NSCoder) { 619 | super.init(layer: aDecoder) 620 | } 621 | 622 | override func draw(in ctx: CGContext) { 623 | let height = self.bounds.size.height 624 | let fontSize = self.fontSize 625 | let yDiff = (height-fontSize)/2 - fontSize/10 626 | 627 | ctx.saveGState() 628 | ctx.translateBy(x: 0.0, y: yDiff) 629 | super.draw(in: ctx) 630 | ctx.restoreGState() 631 | } 632 | } 633 | 634 | 635 | -------------------------------------------------------------------------------- 
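A minimal usage sketch (not a file in the repository): given the KVVideoManager API above and the bundled sample clips, merging two videos from code in the app target could look like the snippet below. The helper name mergeSampleClips is illustrative; the resource names mirror the "Sample video" folder, and error handling is reduced to printing.

import AVFoundation

func mergeSampleClips() {
    // Load two of the bundled sample clips (both are in the target's Resources build phase).
    guard let url1 = Bundle.main.url(forResource: "movie1", withExtension: "mov"),
          let url2 = Bundle.main.url(forResource: "movie2", withExtension: "mov") else {
        print("Sample videos are missing from the bundle")
        return
    }

    // KVVideoManager.Completion is (URL?, Error?) -> Void and is invoked on the main queue
    // once AVAssetExportSession has finished writing <tmp>/mergedVideo.mp4.
    KVVideoManager.shared.merge(arrayVideos: [AVAsset(url: url1), AVAsset(url: url2)]) { outputURL, error in
        if let error = error {
            print("Merge failed: \(error.localizedDescription)")
        } else if let outputURL = outputURL {
            print("Merged video written to \(outputURL.path)")
        }
    }
}

ViewController.swift (the next file) drives the same API from its button actions, additionally hopping to a background queue while the composition is assembled.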
/MergeVideos/ViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.swift 3 | // MergeVideos 4 | // 5 | // Created by Khoa Vo on 12/20/17. 6 | // Copyright © 2017 Khoa Vo. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import AVKit 11 | import DKImagePickerController 12 | 13 | class ViewController: UIViewController { 14 | @IBOutlet weak var indicatorView: UIActivityIndicatorView! 15 | @IBOutlet weak var labelProcessing: UILabel! 16 | @IBOutlet weak var buttonMergeVideosImages: UIButton! 17 | 18 | override func viewDidLoad() { 19 | super.viewDidLoad() 20 | 21 | indicatorView.isHidden = true 22 | labelProcessing.isHidden = true 23 | } 24 | 25 | override func didReceiveMemoryWarning() { 26 | super.didReceiveMemoryWarning() 27 | // Dispose of any resources that can be recreated. 28 | } 29 | 30 | func showProcessing(isShow: Bool) { 31 | if isShow { 32 | indicatorView.isHidden = false 33 | indicatorView.startAnimating() 34 | labelProcessing.isHidden = false 35 | } else { 36 | indicatorView.isHidden = true 37 | indicatorView.stopAnimating() 38 | labelProcessing.isHidden = true 39 | } 40 | } 41 | 42 | @IBAction func actionMergeTwoVideos(_ sender: Any) { 43 | guard let portraitURL = Bundle.main.url(forResource: "portrait", withExtension: "MOV"), 44 | let landscapeURL = Bundle.main.url(forResource: "landscape", withExtension: "MOV") 45 | else { return } 46 | 47 | let portraitAsset = AVAsset(url: portraitURL) 48 | let landscapeAsset = AVAsset(url: landscapeURL) 49 | 50 | showProcessing(isShow: true) 51 | 52 | DispatchQueue.global().async { 53 | KVVideoManager.shared.merge(arrayVideos: [portraitAsset, landscapeAsset]) {[weak self] (outputURL, error) in 54 | guard let `self` = self else { return } 55 | 56 | DispatchQueue.main.async { 57 | self.showProcessing(isShow: false) 58 | 59 | if let error = error { 60 | print("Error:\(error.localizedDescription)") 61 | } 62 | else if let url = outputURL { 63 | self.openPreviewScreen(url) 64 | } 65 | } 66 | } 67 | } 68 | } 69 | 70 | @IBAction func actionMergeTwoVideosWithAnimation(_ sender: Any) { 71 | guard let videoURL1 = Bundle.main.url(forResource: "movie1", withExtension: "mov"), 72 | let videoURL2 = Bundle.main.url(forResource: "movie2", withExtension: "mov") 73 | else { return } 74 | 75 | let videoAsset1 = AVAsset(url: videoURL1) 76 | let videoAsset2 = AVAsset(url: videoURL2) 77 | 78 | showProcessing(isShow: true) 79 | 80 | DispatchQueue.global().async { 81 | KVVideoManager.shared.mergeWithAnimation(arrayVideos: [videoAsset1, videoAsset2]) { [weak self] (outputURL, error) in 82 | guard let `self` = self else { return } 83 | 84 | DispatchQueue.main.async { 85 | self.showProcessing(isShow: false) 86 | 87 | if let error = error { 88 | print("Error:\(error.localizedDescription)") 89 | } 90 | else if let url = outputURL { 91 | self.openPreviewScreen(url) 92 | } 93 | } 94 | } 95 | } 96 | } 97 | 98 | @IBAction func actionButtonAddMusic(_ sender: Any) { 99 | guard let videoURL = Bundle.main.url(forResource: "portrait", withExtension: "MOV"), 100 | let musicURL = Bundle.main.url(forResource: "sample", withExtension: "mp3") 101 | else { return } 102 | 103 | let videoAsset = AVAsset(url: videoURL) 104 | let musicAsset = AVAsset(url: musicURL) 105 | 106 | showProcessing(isShow: true) 107 | 108 | DispatchQueue.global().async { 109 | KVVideoManager.shared.merge(video: videoAsset, withBackgroundMusic: musicAsset) {[weak self] (outputURL, error) in 110 | guard let `self` = self else { 
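// The view controller was deallocated while merging, so there is nothing left to update.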
return } 111 | 112 | DispatchQueue.main.async { 113 | self.showProcessing(isShow: false) 114 | 115 | if let error = error { 116 | print("Error:\(error.localizedDescription)") 117 | } 118 | else if let url = outputURL { 119 | self.openPreviewScreen(url) 120 | } 121 | } 122 | } 123 | } 124 | } 125 | 126 | 127 | @IBAction func actionButtonMergeVideosAndImages(_ sender: Any) { 128 | let picker = DKImagePickerController() 129 | picker.assetType = .allAssets 130 | picker.showsEmptyAlbums = false 131 | picker.showsCancelButton = true 132 | picker.allowsLandscape = false 133 | picker.maxSelectableCount = 6 134 | picker.modalPresentationStyle = .fullScreen 135 | 136 | picker.didSelectAssets = {[weak self] (assets: [DKAsset]) in 137 | guard let `self` = self, assets.count > 0 else {return} 138 | self.preprocess(assets: assets) 139 | } 140 | 141 | present(picker, animated: true, completion: nil) 142 | } 143 | 144 | private func openPreviewScreen(_ videoURL:URL) -> Void { 145 | let player = AVPlayer(url: videoURL) 146 | let playerController = AVPlayerViewController() 147 | playerController.player = player 148 | playerController.modalPresentationStyle = .fullScreen 149 | 150 | present(playerController, animated: true, completion: { 151 | player.play() 152 | }) 153 | } 154 | 155 | private func preprocess(assets: [DKAsset]) { 156 | var arrayAsset:[VideoData] = [] 157 | 158 | var index = 0 159 | let group = DispatchGroup() 160 | 161 | assets.forEach { (asset) in 162 | var videoData = VideoData() 163 | videoData.index = index 164 | index += 1 165 | 166 | if asset.type == .video { 167 | videoData.isVideo = true 168 | 169 | group.enter() 170 | asset.fetchAVAsset { (avAsset, info) in 171 | guard let avAsset = avAsset else { 172 | group.leave() 173 | return 174 | } 175 | 176 | videoData.asset = avAsset 177 | arrayAsset.append(videoData) 178 | group.leave() 179 | } 180 | } 181 | else { 182 | group.enter() 183 | asset.fetchOriginalImage { (image, info) in 184 | guard let image = image else { 185 | group.leave() 186 | return 187 | } 188 | 189 | videoData.image = image 190 | arrayAsset.append(videoData) 191 | group.leave() 192 | } 193 | } 194 | } 195 | 196 | group.notify(queue: .main) { 197 | self.mergeVideosAndImages(arrayData: arrayAsset) 198 | } 199 | } 200 | 201 | private func mergeVideosAndImages(arrayData: [VideoData]) { 202 | showProcessing(isShow: true) 203 | 204 | let textData = TextData(text: "HELLO WORLD", 205 | fontSize: 50, 206 | textColor: UIColor.green, 207 | showTime: 3, 208 | endTime: 5, 209 | textFrame: CGRect(x: 0, y: 0, width: 400, height: 300)) 210 | 211 | DispatchQueue.global().async { 212 | KVVideoManager.shared.makeVideoFrom(data: arrayData, textData: [textData], completion: {[weak self] (outputURL, error) in 213 | guard let `self` = self else { return } 214 | 215 | DispatchQueue.main.async { 216 | self.showProcessing(isShow: false) 217 | 218 | if let error = error { 219 | print("Error:\(error.localizedDescription)") 220 | } else if let url = outputURL { 221 | self.openPreviewScreen(url) 222 | } 223 | } 224 | }) 225 | } 226 | } 227 | } 228 | 229 | -------------------------------------------------------------------------------- /Podfile: -------------------------------------------------------------------------------- 1 | # Uncomment the next line to define a global platform for your project 2 | platform :ios, '11.0' 3 | 4 | 5 | target 'MergeVideos' do 6 | # Comment the next line if you're not using Swift and don't want to use dynamic frameworks 7 | use_frameworks! 
8 | 9 | # Pods for MergeVideos 10 | 11 | pod 'DKImagePickerController' 12 | 13 | end 14 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # MergeVideos 2 | This is a sample implementation for merging multiple videos and/or images using AVFoundation, with orientation issues fixed. 3 | 4 | ## Features 5 | - Merge videos. 6 | - Merge videos with transition animation. 7 | - Add background music to a video. 8 | - Merge videos and images with transition animation. 9 | - Add text with fade-in/fade-out animation to a video. 10 | 11 | ## Requirements 12 | - iOS 13.0+ 13 | - Xcode 13.0+ 14 | - Swift 5+ 15 | 16 | ## Updates 17 | ### 20/10/2021 18 | - Update source code to run on Xcode 13 and Swift 5. 19 | - Refactor code. 20 | - Fix issue: merging videos sometimes shows a black screen. (updated the `videoCompositionInstructionForTrack` function) 21 | 22 | ## Usage 23 | Drag the files in the `VideoManager` folder into your project. 24 | 25 | Please refer to the sample project `MergeVideos` for more details. (Don't forget to run `pod install` before opening the project.) 26 | 27 | - Merge videos 28 | ```swift 29 | let videoAsset1 = AVAsset(url: urlVideo1) 30 | let videoAsset2 = AVAsset(url: urlVideo2) 31 | 32 | KVVideoManager.shared.merge(arrayVideos: [videoAsset1, videoAsset2]) { (outputURL, error) in 33 | if let error = error { 34 | print("Error:\(error.localizedDescription)") 35 | } 36 | else { 37 | if let url = outputURL { 38 | print("Output video file:\(url)") 39 | } 40 | } 41 | } 42 | ``` 43 | - Merge videos with transition animation 44 | ```swift 45 | let videoAsset1 = AVAsset(url: urlVideo1) 46 | let videoAsset2 = AVAsset(url: urlVideo2) 47 | 48 | KVVideoManager.shared.mergeWithAnimation(arrayVideos: [videoAsset1, videoAsset2]) { (outputURL, error) in 49 | if let error = error { 50 | print("Error:\(error.localizedDescription)") 51 | } 52 | else { 53 | if let url = outputURL { 54 | print("Output video file:\(url)") 55 | } 56 | } 57 | } 58 | ``` 59 | - Add background music to a video 60 | ```swift 61 | let videoAsset = AVAsset(url: urlVideo) 62 | let musicAsset = AVAsset(url: urlMusic) 63 | 64 | KVVideoManager.shared.merge(video: videoAsset, withBackgroundMusic: musicAsset) { (outputURL, error) in 65 | if let error = error { 66 | print("Error:\(error.localizedDescription)") 67 | } 68 | else { 69 | if let url = outputURL { 70 | print("Output video file:\(url)") 71 | } 72 | } 73 | } 74 | ``` 75 | - Merge videos, images, and text with transition animation 76 | ```swift 77 | let videoData = VideoData() 78 | videoData.isVideo = true 79 | videoData.asset = AVAsset(url: urlVideo) 80 | 81 | let imageData = VideoData() 82 | imageData.isVideo = false 83 | imageData.image = UIImage(named: "sample-image") 84 | 85 | let textData = TextData(text: "HELLO WORLD", 86 | fontSize: 50, 87 | textColor: UIColor.green, 88 | showTime: 3, 89 | endTime: 5, 90 | textFrame: CGRect(x: 0, y: 0, width: 400, height: 300)) 91 | 92 | KVVideoManager.shared.makeVideoFrom(data: [videoData, imageData], textData: [textData]) { (outputURL, error) in 93 | if let error = error { 94 | print("Error:\(error.localizedDescription)") 95 | } 96 | else { 97 | if let url = outputURL { 98 | print("Output video file:\(url)") 99 | } 100 | } 101 | } 102 | ``` 103 | ## Note 104 | This is a sample implementation to demonstrate the functions in AVFoundation with just some simple animations, but you get the idea!
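For instance, the text fade produced by `makeTextLayer` is nothing more than a pair of `CABasicAnimation`s added to a layer that lives inside the parent layer handed to `AVVideoCompositionCoreAnimationTool`, and the same idea extends to any overlay. The sketch below is illustrative only; `overlayLayer` and `showTime` are placeholder names and values, not part of this project.

```swift
import AVFoundation
import QuartzCore

// Placeholder overlay: in practice this would be added as a sublayer of the
// parent layer used with AVVideoCompositionCoreAnimationTool.
let overlayLayer = CALayer()
overlayLayer.opacity = 0

// Seconds into the video at which the overlay should appear (placeholder value).
let showTime: CFTimeInterval = 2.0

// Fade the overlay in. AVCoreAnimationBeginTimeAtZero marks the start of the
// video timeline (a plain 0 would be interpreted as "now" by Core Animation).
let fade = CABasicAnimation(keyPath: "opacity")
fade.fromValue = 0
fade.toValue = 1
fade.duration = 0.5
fade.beginTime = AVCoreAnimationBeginTimeAtZero + showTime
fade.fillMode = .forwards
fade.isRemovedOnCompletion = false

// Zoom the overlay from 80% to full size over the same half second.
let zoom = CABasicAnimation(keyPath: "transform.scale")
zoom.fromValue = 0.8
zoom.toValue = 1.0
zoom.duration = 0.5
zoom.beginTime = AVCoreAnimationBeginTimeAtZero + showTime
zoom.fillMode = .forwards
zoom.isRemovedOnCompletion = false

overlayLayer.add(fade, forKey: "overlayFadeIn")
overlayLayer.add(zoom, forKey: "overlayZoomIn")
```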
105 | 106 | More elaborate transition animations and text effects can be built the same way with Core Animation. 107 | 108 | --------------------------------------------------------------------------------