├── README.md ├── SFMixer.xcodeproj ├── project.pbxproj ├── project.xcworkspace │ └── contents.xcworkspacedata └── xcuserdata │ └── chenwanfei.xcuserdatad │ └── xcschemes │ ├── SFMixer.xcscheme │ └── xcschememanagement.plist ├── SFMixer ├── AppDelegate.swift ├── Assets.xcassets │ ├── AppIcon.appiconset │ │ ├── Contents.json │ │ ├── icon@2x.png │ │ └── icon@3x.png │ └── Contents.json ├── Base.lproj │ ├── LaunchScreen.storyboard │ └── Main.storyboard ├── Info.plist ├── SFMixer │ ├── PanDirectionGestureRecognizer.swift │ ├── SFAudioBus.swift │ ├── SFAudioBusContainerView.swift │ ├── SFAudioBusView.swift │ ├── SFCommon.swift │ ├── SFMixerViewController.storyboard │ ├── SFMixerViewController.swift │ ├── SFPositionFlagView.swift │ ├── audio_2.mp3 │ ├── dashedFlag@2x.png │ ├── dashedFlag@3x.png │ ├── playBtn@2x.png │ ├── playBtn@3x.png │ ├── plus@2x.png │ ├── plus@3x.png │ ├── popover@2x.png │ ├── popover@3x.png │ ├── positionFlag@2x.png │ ├── positionFlag@3x.png │ ├── stopBtn@2x.png │ └── stopBtn@3x.png ├── ThirdParty │ └── SFAudioWaveformHelper.swift ├── ViewController.swift ├── audio_0.mp3 ├── audio_1.mp3 ├── audio_3.mp3 └── audio_4.mp3 └── s1.png /README.md: -------------------------------------------------------------------------------- 1 | ## SFMixer 2 | 3 | 4 | This project illustrates how to use AVFoundation to build a simple audio mixer. 5 | 6 | ![ScreenShot](https://raw.github.com/JagieChen/SFMixer/master/s1.png) 7 | 8 | 9 | 10 | ## Features 11 | 12 | * Using Swift 3; 13 | * Panning an audio bus vertically to increase/decrease its volumn; 14 | * Panning an audio bus horizentally to position it; 15 | * Taping an audio bus to disable/enable it; 16 | * Long pressing an audio bus to remove it; 17 | * You can clip the final mixed audio by draging the starting/ending line; 18 | * Using AVAudioMix to preview the final audio and Using AVAssetExportSession to export it; 19 | * Tapping the "Trim" button to trim the final audio automatically; 20 | * Please use a real device to test the exporting feature. 21 | 22 | 23 | ## License 24 | 25 | This code is distributed under the terms and conditions of the [MIT license](LICENSE). 26 | 27 | 28 | -------------------------------------------------------------------------------- /SFMixer.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 46; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 7B0C3B051E7562460016837F /* SFAudioWaveformHelper.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B0C3B041E7562460016837F /* SFAudioWaveformHelper.swift */; }; 11 | 7B0C3B071E7648020016837F /* PanDirectionGestureRecognizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B0C3B061E7648020016837F /* PanDirectionGestureRecognizer.swift */; }; 12 | 7B0CB9791E726CA2003DC1DB /* SFCommon.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B0CB9781E726CA2003DC1DB /* SFCommon.swift */; }; 13 | 7B281F561E7F714F003F8630 /* s1.png in Resources */ = {isa = PBXBuildFile; fileRef = 7B281F541E7F714F003F8630 /* s1.png */; }; 14 | 7B281F571E7F714F003F8630 /* README.md in Sources */ = {isa = PBXBuildFile; fileRef = 7B281F551E7F714F003F8630 /* README.md */; }; 15 | 7B2886A01E7B9018006E6C5F /* audio_3.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 7B28869F1E7B9018006E6C5F /* audio_3.mp3 */; }; 16 | 7B2886A21E7B9041006E6C5F /* audio_4.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 7B2886A11E7B9041006E6C5F /* audio_4.mp3 */; }; 17 | 7B2DA0001E71127200B8126C /* SFAudioBusContainerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B2D9FFF1E71127200B8126C /* SFAudioBusContainerView.swift */; }; 18 | 7B51528E1E723F5600208B4A /* SFAudioBusView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B51528D1E723F5600208B4A /* SFAudioBusView.swift */; }; 19 | 7B5152901E723FB200208B4A /* SFAudioBus.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B51528F1E723FB200208B4A /* SFAudioBus.swift */; }; 20 | 7B5152931E7258E200208B4A /* popover@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 7B5152911E7258E200208B4A /* popover@2x.png */; }; 21 | 7B5152941E7258E200208B4A /* popover@3x.png in Resources */ = {isa = PBXBuildFile; fileRef = 7B5152921E7258E200208B4A /* popover@3x.png */; }; 22 | 7B77CFC01E78F783005B69D3 /* dashedFlag@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 7B77CFBE1E78F783005B69D3 /* dashedFlag@2x.png */; }; 23 | 7B77CFC11E78F783005B69D3 /* dashedFlag@3x.png in Resources */ = {isa = PBXBuildFile; fileRef = 7B77CFBF1E78F783005B69D3 /* dashedFlag@3x.png */; }; 24 | 7B77CFC41E7A66B4005B69D3 /* stopBtn@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 7B77CFC21E7A66B4005B69D3 /* stopBtn@2x.png */; }; 25 | 7B77CFC51E7A66B4005B69D3 /* stopBtn@3x.png in Resources */ = {isa = PBXBuildFile; fileRef = 7B77CFC31E7A66B4005B69D3 /* stopBtn@3x.png */; }; 26 | 7B8870891E6E6B640031FDA4 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B8870881E6E6B640031FDA4 /* AppDelegate.swift */; }; 27 | 7B88708B1E6E6B640031FDA4 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B88708A1E6E6B640031FDA4 /* ViewController.swift */; }; 28 | 7B88708E1E6E6B640031FDA4 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7B88708C1E6E6B640031FDA4 /* Main.storyboard */; }; 29 | 7B8870901E6E6B640031FDA4 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 7B88708F1E6E6B640031FDA4 /* Assets.xcassets */; }; 30 | 7B8870931E6E6B640031FDA4 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7B8870911E6E6B640031FDA4 /* LaunchScreen.storyboard */; }; 31 | 7B88709D1E6E6DDE0031FDA4 /* SFMixerViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B88709B1E6E6DDE0031FDA4 /* SFMixerViewController.swift */; }; 32 | 7B8870A01E6E72A10031FDA4 /* 
SFMixerViewController.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7B88709F1E6E72A10031FDA4 /* SFMixerViewController.storyboard */; }; 33 | 7B8870A51E6E7A390031FDA4 /* playBtn@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 7B8870A11E6E7A390031FDA4 /* playBtn@2x.png */; }; 34 | 7B8870A61E6E7A390031FDA4 /* playBtn@3x.png in Resources */ = {isa = PBXBuildFile; fileRef = 7B8870A21E6E7A390031FDA4 /* playBtn@3x.png */; }; 35 | 7B8870A71E6E7A390031FDA4 /* plus@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 7B8870A31E6E7A390031FDA4 /* plus@2x.png */; }; 36 | 7B8870A81E6E7A390031FDA4 /* plus@3x.png in Resources */ = {isa = PBXBuildFile; fileRef = 7B8870A41E6E7A390031FDA4 /* plus@3x.png */; }; 37 | 7B8870AB1E6E7EBB0031FDA4 /* positionFlag@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 7B8870A91E6E7EBB0031FDA4 /* positionFlag@2x.png */; }; 38 | 7B8870AC1E6E7EBB0031FDA4 /* positionFlag@3x.png in Resources */ = {isa = PBXBuildFile; fileRef = 7B8870AA1E6E7EBB0031FDA4 /* positionFlag@3x.png */; }; 39 | 7B8870AE1E6E8A4E0031FDA4 /* SFPositionFlagView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B8870AD1E6E8A4E0031FDA4 /* SFPositionFlagView.swift */; }; 40 | 7B89B0761E744F7600F6DFFB /* audio_2.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 7B89B0751E744F7600F6DFFB /* audio_2.mp3 */; }; 41 | 7B89B0A41E754CCE00F6DFFB /* audio_0.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 7B89B0A31E754CCE00F6DFFB /* audio_0.mp3 */; }; 42 | 7B89B0A61E754D1100F6DFFB /* audio_1.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 7B89B0A51E754D1100F6DFFB /* audio_1.mp3 */; }; 43 | /* End PBXBuildFile section */ 44 | 45 | /* Begin PBXFileReference section */ 46 | 7B0C3B041E7562460016837F /* SFAudioWaveformHelper.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SFAudioWaveformHelper.swift; sourceTree = ""; }; 47 | 7B0C3B061E7648020016837F /* PanDirectionGestureRecognizer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PanDirectionGestureRecognizer.swift; sourceTree = ""; }; 48 | 7B0CB9781E726CA2003DC1DB /* SFCommon.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SFCommon.swift; sourceTree = ""; }; 49 | 7B281F541E7F714F003F8630 /* s1.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = s1.png; sourceTree = ""; }; 50 | 7B281F551E7F714F003F8630 /* README.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; path = README.md; sourceTree = ""; }; 51 | 7B28869F1E7B9018006E6C5F /* audio_3.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = audio_3.mp3; sourceTree = ""; }; 52 | 7B2886A11E7B9041006E6C5F /* audio_4.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = audio_4.mp3; sourceTree = ""; }; 53 | 7B2D9FFF1E71127200B8126C /* SFAudioBusContainerView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SFAudioBusContainerView.swift; sourceTree = ""; }; 54 | 7B51528D1E723F5600208B4A /* SFAudioBusView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SFAudioBusView.swift; sourceTree = ""; }; 55 | 7B51528F1E723FB200208B4A /* SFAudioBus.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SFAudioBus.swift; sourceTree = ""; }; 56 | 7B5152911E7258E200208B4A /* popover@2x.png */ = {isa = 
PBXFileReference; lastKnownFileType = image.png; path = "popover@2x.png"; sourceTree = ""; }; 57 | 7B5152921E7258E200208B4A /* popover@3x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "popover@3x.png"; sourceTree = ""; }; 58 | 7B77CFBE1E78F783005B69D3 /* dashedFlag@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "dashedFlag@2x.png"; sourceTree = ""; }; 59 | 7B77CFBF1E78F783005B69D3 /* dashedFlag@3x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "dashedFlag@3x.png"; sourceTree = ""; }; 60 | 7B77CFC21E7A66B4005B69D3 /* stopBtn@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "stopBtn@2x.png"; sourceTree = ""; }; 61 | 7B77CFC31E7A66B4005B69D3 /* stopBtn@3x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "stopBtn@3x.png"; sourceTree = ""; }; 62 | 7B8870851E6E6B640031FDA4 /* SFMixer.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = SFMixer.app; sourceTree = BUILT_PRODUCTS_DIR; }; 63 | 7B8870881E6E6B640031FDA4 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 64 | 7B88708A1E6E6B640031FDA4 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; 65 | 7B88708D1E6E6B640031FDA4 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 66 | 7B88708F1E6E6B640031FDA4 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 67 | 7B8870921E6E6B640031FDA4 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 68 | 7B8870941E6E6B640031FDA4 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 69 | 7B88709B1E6E6DDE0031FDA4 /* SFMixerViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SFMixerViewController.swift; sourceTree = ""; }; 70 | 7B88709F1E6E72A10031FDA4 /* SFMixerViewController.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; path = SFMixerViewController.storyboard; sourceTree = ""; }; 71 | 7B8870A11E6E7A390031FDA4 /* playBtn@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "playBtn@2x.png"; sourceTree = ""; }; 72 | 7B8870A21E6E7A390031FDA4 /* playBtn@3x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "playBtn@3x.png"; sourceTree = ""; }; 73 | 7B8870A31E6E7A390031FDA4 /* plus@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "plus@2x.png"; sourceTree = ""; }; 74 | 7B8870A41E6E7A390031FDA4 /* plus@3x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "plus@3x.png"; sourceTree = ""; }; 75 | 7B8870A91E6E7EBB0031FDA4 /* positionFlag@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "positionFlag@2x.png"; sourceTree = ""; }; 76 | 7B8870AA1E6E7EBB0031FDA4 /* positionFlag@3x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "positionFlag@3x.png"; sourceTree = ""; }; 77 | 7B8870AD1E6E8A4E0031FDA4 /* SFPositionFlagView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = 
sourcecode.swift; path = SFPositionFlagView.swift; sourceTree = ""; }; 78 | 7B89B0751E744F7600F6DFFB /* audio_2.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; name = audio_2.mp3; path = SFMixer/audio_2.mp3; sourceTree = ""; }; 79 | 7B89B0A31E754CCE00F6DFFB /* audio_0.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = audio_0.mp3; sourceTree = ""; }; 80 | 7B89B0A51E754D1100F6DFFB /* audio_1.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = audio_1.mp3; sourceTree = ""; }; 81 | /* End PBXFileReference section */ 82 | 83 | /* Begin PBXFrameworksBuildPhase section */ 84 | 7B8870821E6E6B640031FDA4 /* Frameworks */ = { 85 | isa = PBXFrameworksBuildPhase; 86 | buildActionMask = 2147483647; 87 | files = ( 88 | ); 89 | runOnlyForDeploymentPostprocessing = 0; 90 | }; 91 | /* End PBXFrameworksBuildPhase section */ 92 | 93 | /* Begin PBXGroup section */ 94 | 7B88707C1E6E6B640031FDA4 = { 95 | isa = PBXGroup; 96 | children = ( 97 | 7B281F541E7F714F003F8630 /* s1.png */, 98 | 7B281F551E7F714F003F8630 /* README.md */, 99 | 7B8870871E6E6B640031FDA4 /* SFMixer */, 100 | 7B8870861E6E6B640031FDA4 /* Products */, 101 | ); 102 | sourceTree = ""; 103 | }; 104 | 7B8870861E6E6B640031FDA4 /* Products */ = { 105 | isa = PBXGroup; 106 | children = ( 107 | 7B8870851E6E6B640031FDA4 /* SFMixer.app */, 108 | ); 109 | name = Products; 110 | sourceTree = ""; 111 | }; 112 | 7B8870871E6E6B640031FDA4 /* SFMixer */ = { 113 | isa = PBXGroup; 114 | children = ( 115 | 7B89B04E1E73AA4400F6DFFB /* ThirdParty */, 116 | 7B88709A1E6E6DA60031FDA4 /* SFMixer */, 117 | 7B8870881E6E6B640031FDA4 /* AppDelegate.swift */, 118 | 7B88708A1E6E6B640031FDA4 /* ViewController.swift */, 119 | 7B2886A11E7B9041006E6C5F /* audio_4.mp3 */, 120 | 7B89B0A31E754CCE00F6DFFB /* audio_0.mp3 */, 121 | 7B28869F1E7B9018006E6C5F /* audio_3.mp3 */, 122 | 7B89B0A51E754D1100F6DFFB /* audio_1.mp3 */, 123 | 7B89B0751E744F7600F6DFFB /* audio_2.mp3 */, 124 | 7B88708C1E6E6B640031FDA4 /* Main.storyboard */, 125 | 7B88708F1E6E6B640031FDA4 /* Assets.xcassets */, 126 | 7B8870911E6E6B640031FDA4 /* LaunchScreen.storyboard */, 127 | 7B8870941E6E6B640031FDA4 /* Info.plist */, 128 | ); 129 | path = SFMixer; 130 | sourceTree = ""; 131 | }; 132 | 7B88709A1E6E6DA60031FDA4 /* SFMixer */ = { 133 | isa = PBXGroup; 134 | children = ( 135 | 7B77CFBE1E78F783005B69D3 /* dashedFlag@2x.png */, 136 | 7B77CFC21E7A66B4005B69D3 /* stopBtn@2x.png */, 137 | 7B77CFC31E7A66B4005B69D3 /* stopBtn@3x.png */, 138 | 7B77CFBF1E78F783005B69D3 /* dashedFlag@3x.png */, 139 | 7B8870A91E6E7EBB0031FDA4 /* positionFlag@2x.png */, 140 | 7B8870AA1E6E7EBB0031FDA4 /* positionFlag@3x.png */, 141 | 7B5152911E7258E200208B4A /* popover@2x.png */, 142 | 7B5152921E7258E200208B4A /* popover@3x.png */, 143 | 7B8870A11E6E7A390031FDA4 /* playBtn@2x.png */, 144 | 7B8870A21E6E7A390031FDA4 /* playBtn@3x.png */, 145 | 7B8870A31E6E7A390031FDA4 /* plus@2x.png */, 146 | 7B8870A41E6E7A390031FDA4 /* plus@3x.png */, 147 | 7B88709B1E6E6DDE0031FDA4 /* SFMixerViewController.swift */, 148 | 7B88709F1E6E72A10031FDA4 /* SFMixerViewController.storyboard */, 149 | 7B8870AD1E6E8A4E0031FDA4 /* SFPositionFlagView.swift */, 150 | 7B2D9FFF1E71127200B8126C /* SFAudioBusContainerView.swift */, 151 | 7B51528D1E723F5600208B4A /* SFAudioBusView.swift */, 152 | 7B51528F1E723FB200208B4A /* SFAudioBus.swift */, 153 | 7B0CB9781E726CA2003DC1DB /* SFCommon.swift */, 154 | 7B0C3B061E7648020016837F /* PanDirectionGestureRecognizer.swift */, 155 | ); 156 | path = SFMixer; 157 | sourceTree = ""; 158 | 
}; 159 | 7B89B04E1E73AA4400F6DFFB /* ThirdParty */ = { 160 | isa = PBXGroup; 161 | children = ( 162 | 7B0C3B041E7562460016837F /* SFAudioWaveformHelper.swift */, 163 | ); 164 | path = ThirdParty; 165 | sourceTree = ""; 166 | }; 167 | /* End PBXGroup section */ 168 | 169 | /* Begin PBXNativeTarget section */ 170 | 7B8870841E6E6B640031FDA4 /* SFMixer */ = { 171 | isa = PBXNativeTarget; 172 | buildConfigurationList = 7B8870971E6E6B640031FDA4 /* Build configuration list for PBXNativeTarget "SFMixer" */; 173 | buildPhases = ( 174 | 7B8870811E6E6B640031FDA4 /* Sources */, 175 | 7B8870821E6E6B640031FDA4 /* Frameworks */, 176 | 7B8870831E6E6B640031FDA4 /* Resources */, 177 | ); 178 | buildRules = ( 179 | ); 180 | dependencies = ( 181 | ); 182 | name = SFMixer; 183 | productName = SFMixer; 184 | productReference = 7B8870851E6E6B640031FDA4 /* SFMixer.app */; 185 | productType = "com.apple.product-type.application"; 186 | }; 187 | /* End PBXNativeTarget section */ 188 | 189 | /* Begin PBXProject section */ 190 | 7B88707D1E6E6B640031FDA4 /* Project object */ = { 191 | isa = PBXProject; 192 | attributes = { 193 | LastSwiftUpdateCheck = 0820; 194 | LastUpgradeCheck = 0820; 195 | ORGANIZATIONNAME = SwordFish; 196 | TargetAttributes = { 197 | 7B8870841E6E6B640031FDA4 = { 198 | CreatedOnToolsVersion = 8.2; 199 | DevelopmentTeam = ZP39L9XLVC; 200 | ProvisioningStyle = Automatic; 201 | }; 202 | }; 203 | }; 204 | buildConfigurationList = 7B8870801E6E6B640031FDA4 /* Build configuration list for PBXProject "SFMixer" */; 205 | compatibilityVersion = "Xcode 3.2"; 206 | developmentRegion = English; 207 | hasScannedForEncodings = 0; 208 | knownRegions = ( 209 | en, 210 | Base, 211 | ); 212 | mainGroup = 7B88707C1E6E6B640031FDA4; 213 | productRefGroup = 7B8870861E6E6B640031FDA4 /* Products */; 214 | projectDirPath = ""; 215 | projectRoot = ""; 216 | targets = ( 217 | 7B8870841E6E6B640031FDA4 /* SFMixer */, 218 | ); 219 | }; 220 | /* End PBXProject section */ 221 | 222 | /* Begin PBXResourcesBuildPhase section */ 223 | 7B8870831E6E6B640031FDA4 /* Resources */ = { 224 | isa = PBXResourcesBuildPhase; 225 | buildActionMask = 2147483647; 226 | files = ( 227 | 7B8870A01E6E72A10031FDA4 /* SFMixerViewController.storyboard in Resources */, 228 | 7B8870A51E6E7A390031FDA4 /* playBtn@2x.png in Resources */, 229 | 7B89B0761E744F7600F6DFFB /* audio_2.mp3 in Resources */, 230 | 7B77CFC01E78F783005B69D3 /* dashedFlag@2x.png in Resources */, 231 | 7B8870931E6E6B640031FDA4 /* LaunchScreen.storyboard in Resources */, 232 | 7B8870AB1E6E7EBB0031FDA4 /* positionFlag@2x.png in Resources */, 233 | 7B8870AC1E6E7EBB0031FDA4 /* positionFlag@3x.png in Resources */, 234 | 7B8870A71E6E7A390031FDA4 /* plus@2x.png in Resources */, 235 | 7B8870901E6E6B640031FDA4 /* Assets.xcassets in Resources */, 236 | 7B8870A81E6E7A390031FDA4 /* plus@3x.png in Resources */, 237 | 7B5152931E7258E200208B4A /* popover@2x.png in Resources */, 238 | 7B281F561E7F714F003F8630 /* s1.png in Resources */, 239 | 7B8870A61E6E7A390031FDA4 /* playBtn@3x.png in Resources */, 240 | 7B89B0A41E754CCE00F6DFFB /* audio_0.mp3 in Resources */, 241 | 7B89B0A61E754D1100F6DFFB /* audio_1.mp3 in Resources */, 242 | 7B77CFC11E78F783005B69D3 /* dashedFlag@3x.png in Resources */, 243 | 7B2886A21E7B9041006E6C5F /* audio_4.mp3 in Resources */, 244 | 7B2886A01E7B9018006E6C5F /* audio_3.mp3 in Resources */, 245 | 7B77CFC41E7A66B4005B69D3 /* stopBtn@2x.png in Resources */, 246 | 7B77CFC51E7A66B4005B69D3 /* stopBtn@3x.png in Resources */, 247 | 7B88708E1E6E6B640031FDA4 /* Main.storyboard in 
Resources */, 248 | 7B5152941E7258E200208B4A /* popover@3x.png in Resources */, 249 | ); 250 | runOnlyForDeploymentPostprocessing = 0; 251 | }; 252 | /* End PBXResourcesBuildPhase section */ 253 | 254 | /* Begin PBXSourcesBuildPhase section */ 255 | 7B8870811E6E6B640031FDA4 /* Sources */ = { 256 | isa = PBXSourcesBuildPhase; 257 | buildActionMask = 2147483647; 258 | files = ( 259 | 7B2DA0001E71127200B8126C /* SFAudioBusContainerView.swift in Sources */, 260 | 7B88709D1E6E6DDE0031FDA4 /* SFMixerViewController.swift in Sources */, 261 | 7B88708B1E6E6B640031FDA4 /* ViewController.swift in Sources */, 262 | 7B0C3B051E7562460016837F /* SFAudioWaveformHelper.swift in Sources */, 263 | 7B0CB9791E726CA2003DC1DB /* SFCommon.swift in Sources */, 264 | 7B5152901E723FB200208B4A /* SFAudioBus.swift in Sources */, 265 | 7B8870AE1E6E8A4E0031FDA4 /* SFPositionFlagView.swift in Sources */, 266 | 7B0C3B071E7648020016837F /* PanDirectionGestureRecognizer.swift in Sources */, 267 | 7B8870891E6E6B640031FDA4 /* AppDelegate.swift in Sources */, 268 | 7B281F571E7F714F003F8630 /* README.md in Sources */, 269 | 7B51528E1E723F5600208B4A /* SFAudioBusView.swift in Sources */, 270 | ); 271 | runOnlyForDeploymentPostprocessing = 0; 272 | }; 273 | /* End PBXSourcesBuildPhase section */ 274 | 275 | /* Begin PBXVariantGroup section */ 276 | 7B88708C1E6E6B640031FDA4 /* Main.storyboard */ = { 277 | isa = PBXVariantGroup; 278 | children = ( 279 | 7B88708D1E6E6B640031FDA4 /* Base */, 280 | ); 281 | name = Main.storyboard; 282 | sourceTree = ""; 283 | }; 284 | 7B8870911E6E6B640031FDA4 /* LaunchScreen.storyboard */ = { 285 | isa = PBXVariantGroup; 286 | children = ( 287 | 7B8870921E6E6B640031FDA4 /* Base */, 288 | ); 289 | name = LaunchScreen.storyboard; 290 | sourceTree = ""; 291 | }; 292 | /* End PBXVariantGroup section */ 293 | 294 | /* Begin XCBuildConfiguration section */ 295 | 7B8870951E6E6B640031FDA4 /* Debug */ = { 296 | isa = XCBuildConfiguration; 297 | buildSettings = { 298 | ALWAYS_SEARCH_USER_PATHS = NO; 299 | CLANG_ANALYZER_NONNULL = YES; 300 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 301 | CLANG_CXX_LIBRARY = "libc++"; 302 | CLANG_ENABLE_MODULES = YES; 303 | CLANG_ENABLE_OBJC_ARC = YES; 304 | CLANG_WARN_BOOL_CONVERSION = YES; 305 | CLANG_WARN_CONSTANT_CONVERSION = YES; 306 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 307 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 308 | CLANG_WARN_EMPTY_BODY = YES; 309 | CLANG_WARN_ENUM_CONVERSION = YES; 310 | CLANG_WARN_INFINITE_RECURSION = YES; 311 | CLANG_WARN_INT_CONVERSION = YES; 312 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 313 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 314 | CLANG_WARN_UNREACHABLE_CODE = YES; 315 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 316 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 317 | COPY_PHASE_STRIP = NO; 318 | DEBUG_INFORMATION_FORMAT = dwarf; 319 | ENABLE_STRICT_OBJC_MSGSEND = YES; 320 | ENABLE_TESTABILITY = YES; 321 | GCC_C_LANGUAGE_STANDARD = gnu99; 322 | GCC_DYNAMIC_NO_PIC = NO; 323 | GCC_NO_COMMON_BLOCKS = YES; 324 | GCC_OPTIMIZATION_LEVEL = 0; 325 | GCC_PREPROCESSOR_DEFINITIONS = ( 326 | "DEBUG=1", 327 | "$(inherited)", 328 | ); 329 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 330 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 331 | GCC_WARN_UNDECLARED_SELECTOR = YES; 332 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 333 | GCC_WARN_UNUSED_FUNCTION = YES; 334 | GCC_WARN_UNUSED_VARIABLE = YES; 335 | IPHONEOS_DEPLOYMENT_TARGET = 10.2; 336 | MTL_ENABLE_DEBUG_INFO = YES; 337 | ONLY_ACTIVE_ARCH = YES; 338 | SDKROOT = iphoneos; 339 | 
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; 340 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 341 | }; 342 | name = Debug; 343 | }; 344 | 7B8870961E6E6B640031FDA4 /* Release */ = { 345 | isa = XCBuildConfiguration; 346 | buildSettings = { 347 | ALWAYS_SEARCH_USER_PATHS = NO; 348 | CLANG_ANALYZER_NONNULL = YES; 349 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 350 | CLANG_CXX_LIBRARY = "libc++"; 351 | CLANG_ENABLE_MODULES = YES; 352 | CLANG_ENABLE_OBJC_ARC = YES; 353 | CLANG_WARN_BOOL_CONVERSION = YES; 354 | CLANG_WARN_CONSTANT_CONVERSION = YES; 355 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 356 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 357 | CLANG_WARN_EMPTY_BODY = YES; 358 | CLANG_WARN_ENUM_CONVERSION = YES; 359 | CLANG_WARN_INFINITE_RECURSION = YES; 360 | CLANG_WARN_INT_CONVERSION = YES; 361 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 362 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 363 | CLANG_WARN_UNREACHABLE_CODE = YES; 364 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 365 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 366 | COPY_PHASE_STRIP = NO; 367 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 368 | ENABLE_NS_ASSERTIONS = NO; 369 | ENABLE_STRICT_OBJC_MSGSEND = YES; 370 | GCC_C_LANGUAGE_STANDARD = gnu99; 371 | GCC_NO_COMMON_BLOCKS = YES; 372 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 373 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 374 | GCC_WARN_UNDECLARED_SELECTOR = YES; 375 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 376 | GCC_WARN_UNUSED_FUNCTION = YES; 377 | GCC_WARN_UNUSED_VARIABLE = YES; 378 | IPHONEOS_DEPLOYMENT_TARGET = 10.2; 379 | MTL_ENABLE_DEBUG_INFO = NO; 380 | SDKROOT = iphoneos; 381 | SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; 382 | VALIDATE_PRODUCT = YES; 383 | }; 384 | name = Release; 385 | }; 386 | 7B8870981E6E6B640031FDA4 /* Debug */ = { 387 | isa = XCBuildConfiguration; 388 | buildSettings = { 389 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 390 | DEVELOPMENT_TEAM = ZP39L9XLVC; 391 | FRAMEWORK_SEARCH_PATHS = ( 392 | "$(inherited)", 393 | "$(PROJECT_DIR)/SFMixer/ThirdParty", 394 | ); 395 | INFOPLIST_FILE = SFMixer/Info.plist; 396 | IPHONEOS_DEPLOYMENT_TARGET = 9.0; 397 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 398 | PRODUCT_BUNDLE_IDENTIFIER = com.swordfish.ios.SFMixer; 399 | PRODUCT_NAME = "$(TARGET_NAME)"; 400 | SWIFT_VERSION = 3.0; 401 | }; 402 | name = Debug; 403 | }; 404 | 7B8870991E6E6B640031FDA4 /* Release */ = { 405 | isa = XCBuildConfiguration; 406 | buildSettings = { 407 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 408 | DEVELOPMENT_TEAM = ZP39L9XLVC; 409 | FRAMEWORK_SEARCH_PATHS = ( 410 | "$(inherited)", 411 | "$(PROJECT_DIR)/SFMixer/ThirdParty", 412 | ); 413 | INFOPLIST_FILE = SFMixer/Info.plist; 414 | IPHONEOS_DEPLOYMENT_TARGET = 9.0; 415 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 416 | PRODUCT_BUNDLE_IDENTIFIER = com.swordfish.ios.SFMixer; 417 | PRODUCT_NAME = "$(TARGET_NAME)"; 418 | SWIFT_VERSION = 3.0; 419 | }; 420 | name = Release; 421 | }; 422 | /* End XCBuildConfiguration section */ 423 | 424 | /* Begin XCConfigurationList section */ 425 | 7B8870801E6E6B640031FDA4 /* Build configuration list for PBXProject "SFMixer" */ = { 426 | isa = XCConfigurationList; 427 | buildConfigurations = ( 428 | 7B8870951E6E6B640031FDA4 /* Debug */, 429 | 7B8870961E6E6B640031FDA4 /* Release */, 430 | ); 431 | defaultConfigurationIsVisible = 0; 432 | defaultConfigurationName = Release; 433 | }; 434 | 7B8870971E6E6B640031FDA4 /* Build configuration list for PBXNativeTarget "SFMixer" */ = { 435 | isa = 
XCConfigurationList; 436 | buildConfigurations = ( 437 | 7B8870981E6E6B640031FDA4 /* Debug */, 438 | 7B8870991E6E6B640031FDA4 /* Release */, 439 | ); 440 | defaultConfigurationIsVisible = 0; 441 | defaultConfigurationName = Release; 442 | }; 443 | /* End XCConfigurationList section */ 444 | }; 445 | rootObject = 7B88707D1E6E6B640031FDA4 /* Project object */; 446 | } 447 | -------------------------------------------------------------------------------- /SFMixer.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /SFMixer.xcodeproj/xcuserdata/chenwanfei.xcuserdatad/xcschemes/SFMixer.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 24 | 25 | 30 | 31 | 32 | 33 | 39 | 40 | 41 | 42 | 43 | 44 | 54 | 56 | 62 | 63 | 64 | 65 | 69 | 70 | 71 | 72 | 73 | 74 | 80 | 82 | 88 | 89 | 90 | 91 | 93 | 94 | 97 | 98 | 99 | -------------------------------------------------------------------------------- /SFMixer.xcodeproj/xcuserdata/chenwanfei.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | SFMixer.xcscheme 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | SuppressBuildableAutocreation 14 | 15 | 7B8870841E6E6B640031FDA4 16 | 17 | primary 18 | 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /SFMixer/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.swift 3 | // SFMixer 4 | // 5 | // Created by CHENWANFEI on 07/03/2017. 6 | // Copyright © 2017 SwordFish. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | @UIApplicationMain 12 | class AppDelegate: UIResponder, UIApplicationDelegate { 13 | 14 | var window: UIWindow? 15 | 16 | 17 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool { 18 | // Override point for customization after application launch. 19 | 20 | return true 21 | } 22 | 23 | func applicationWillResignActive(_ application: UIApplication) { 24 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 25 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 26 | } 27 | 28 | func applicationDidEnterBackground(_ application: UIApplication) { 29 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 30 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 31 | } 32 | 33 | func applicationWillEnterForeground(_ application: UIApplication) { 34 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 
35 | } 36 | 37 | func applicationDidBecomeActive(_ application: UIApplication) { 38 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 39 | } 40 | 41 | func applicationWillTerminate(_ application: UIApplication) { 42 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 43 | } 44 | 45 | 46 | } 47 | 48 | -------------------------------------------------------------------------------- /SFMixer/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "size" : "60x60", 35 | "idiom" : "iphone", 36 | "filename" : "icon@2x.png", 37 | "scale" : "2x" 38 | }, 39 | { 40 | "size" : "60x60", 41 | "idiom" : "iphone", 42 | "filename" : "icon@3x.png", 43 | "scale" : "3x" 44 | } 45 | ], 46 | "info" : { 47 | "version" : 1, 48 | "author" : "xcode" 49 | } 50 | } -------------------------------------------------------------------------------- /SFMixer/Assets.xcassets/AppIcon.appiconset/icon@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/Assets.xcassets/AppIcon.appiconset/icon@2x.png -------------------------------------------------------------------------------- /SFMixer/Assets.xcassets/AppIcon.appiconset/icon@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/Assets.xcassets/AppIcon.appiconset/icon@3x.png -------------------------------------------------------------------------------- /SFMixer/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /SFMixer/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 29 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | -------------------------------------------------------------------------------- /SFMixer/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /SFMixer/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 
CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | UILaunchStoryboardName 24 | LaunchScreen 25 | UIMainStoryboardFile 26 | Main 27 | UIRequiredDeviceCapabilities 28 | 29 | armv7 30 | 31 | UISupportedInterfaceOrientations 32 | 33 | UIInterfaceOrientationPortrait 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /SFMixer/SFMixer/PanDirectionGestureRecognizer.swift: -------------------------------------------------------------------------------- 1 | /** 2 | from:http://stackoverflow.com/questions/7100884/uipangesturerecognizer-only-vertical-or-horizontal 3 | */ 4 | 5 | import UIKit.UIGestureRecognizerSubclass 6 | 7 | enum PanDirection { 8 | case vertical 9 | case horizontal 10 | } 11 | 12 | class PanDirectionGestureRecognizer: UIPanGestureRecognizer { 13 | 14 | let direction: PanDirection 15 | 16 | init(direction: PanDirection, target: AnyObject, action: Selector) { 17 | self.direction = direction 18 | super.init(target: target, action: action) 19 | } 20 | 21 | override func touchesMoved(_ touches: Set, with event: UIEvent) { 22 | super.touchesMoved(touches, with: event) 23 | 24 | if state == .began { 25 | let vel = velocity(in: view) 26 | switch direction { 27 | case .horizontal where fabs(vel.y) > fabs(vel.x): 28 | state = .cancelled 29 | case .vertical where fabs(vel.x) > fabs(vel.y): 30 | state = .cancelled 31 | default: 32 | break 33 | } 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /SFMixer/SFMixer/SFAudioBus.swift: -------------------------------------------------------------------------------- 1 | // 2 | // SFAudioBus.swift 3 | // SFMixer 4 | // 5 | // Created by CHENWANFEI on 10/03/2017. 6 | // Copyright © 2017 SwordFish. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | struct SFAudioBus:Equatable { 12 | var url:URL 13 | var name:String; 14 | var delayTime=TimeInterval(0) 15 | var accurateDuration = TimeInterval(0); 16 | var volumn = Float(1); 17 | var mute = false; 18 | 19 | var waveformImage:UIImage? 20 | 21 | init(url:URL,name:String) { 22 | self.url = url; 23 | self.name = name; 24 | } 25 | 26 | } 27 | 28 | func ==(lhs: SFAudioBus, rhs: SFAudioBus) -> Bool { 29 | return lhs.url.absoluteString == (rhs.url.absoluteString); 30 | } 31 | -------------------------------------------------------------------------------- /SFMixer/SFMixer/SFAudioBusContainerView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // SFAudioBusContainerView.swift 3 | // SFMixer 4 | // 5 | // Created by CHENWANFEI on 09/03/2017. 6 | // Copyright © 2017 SwordFish. All rights reserved. 
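//
// Container view for the audio bus rows. It draws the time ruler and grid,
// overlays an SFAudioBusContainerClipMaskLayer whose `clipMargins` mark the
// trimmed start/end of the mix, previews the result with AVPlayer plus an
// AVMutableAudioMix, and exports it with AVAssetExportSession (Apple M4A preset).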
7 | // 8 | 9 | import UIKit 10 | import CoreGraphics 11 | import AVFoundation 12 | 13 | 14 | 15 | private class SFAudioBusContainerClipMaskLayer:CALayer{ 16 | 17 | 18 | var clipAreaColor:UIColor!; 19 | 20 | @NSManaged var clipMargins:CGPoint; 21 | 22 | class override func needsDisplay(forKey key:String) -> Bool { 23 | if key == "clipMargins" { 24 | return true 25 | } else { 26 | return super.needsDisplay(forKey: key) 27 | } 28 | } 29 | 30 | 31 | 32 | override func draw(in ctx: CGContext) { 33 | let context = ctx 34 | context.addRect(CGRect(x: 0, y: 0, width: clipMargins.x, height: self.bounds.height)) 35 | context.addRect(CGRect(x: self.bounds.width - clipMargins.y, y: 0, width: clipMargins.y, height: self.bounds.height)) 36 | context.setFillColor((clipAreaColor.withAlphaComponent(0.4).cgColor)); 37 | context.fillPath(); 38 | 39 | context.addLines(between: [CGPoint(x:clipMargins.x,y: 0),CGPoint(x:clipMargins.x,y:self.bounds.height)]); 40 | context.addLines(between: [CGPoint(x:self.bounds.width - clipMargins.y,y: 0),CGPoint(x:self.bounds.width - clipMargins.y,y:self.bounds.height)]); 41 | context.setStrokeColor(clipAreaColor.cgColor); 42 | context.strokePath(); 43 | } 44 | 45 | 46 | 47 | 48 | } 49 | 50 | 51 | class SFAudioBusContainerView: UIView { 52 | 53 | 54 | 55 | /* 56 | // Only override draw() if you perform custom drawing. 57 | // An empty implementation adversely affects performance during animation. 58 | override func draw(_ rect: CGRect) { 59 | // Drawing code 60 | } 61 | */ 62 | 63 | @IBInspectable var maxSeconds:Int = 120; 64 | @IBInspectable private var bigTickSeconds:Int = 20; 65 | @IBInspectable private var tickColor:UIColor = UIColor.white; 66 | @IBInspectable private var tickFont:UIFont = UIFont.systemFont(ofSize: 10); 67 | @IBInspectable private var tickMarkAreaHeight = CGFloat(25); 68 | @IBInspectable private var maxAudioRow:Int = 5; 69 | @IBInspectable fileprivate var clipAreaColor:UIColor = UIColor.init(red: 0, green: 122.0/255, blue: 1, alpha: 1); 70 | 71 | 72 | 73 | private weak var clipMaskLayer:SFAudioBusContainerClipMaskLayer! 74 | 75 | private var avPlayer:AVPlayer? 76 | 77 | 78 | private var playingBeginTime:TimeInterval? 79 | 80 | 81 | 82 | override func willMove(toSuperview newSuperview: UIView?) { 83 | if newSuperview != nil { 84 | 85 | 86 | 87 | 88 | 89 | //add mask layer 90 | let maskLayer = SFAudioBusContainerClipMaskLayer(); 91 | 92 | maskLayer.needsDisplayOnBoundsChange = true; 93 | maskLayer.zPosition = CGFloat(MAXFLOAT); 94 | maskLayer.clipAreaColor = self.clipAreaColor; 95 | maskLayer.clipMargins = CGPoint(x: 0, y: 0); 96 | 97 | self.layer.addSublayer(maskLayer); 98 | self.clipMaskLayer = maskLayer; 99 | }else{ 100 | stopPreview() 101 | } 102 | } 103 | 104 | 105 | 106 | 107 | private var audioRowHeight:CGFloat{ 108 | let topY = tickMarkAreaHeight; 109 | let rowHeiht = ( self.bounds.height - topY) / CGFloat(self.maxAudioRow); 110 | return rowHeiht; 111 | } 112 | 113 | 114 | 115 | 116 | override func draw(_ rect: CGRect) { 117 | // Drawing code 118 | drawRule(); 119 | drawGrid() 120 | 121 | } 122 | 123 | 124 | 125 | 126 | 127 | 128 | private func drawGrid(){ 129 | let totalWidth = self.bounds.width; 130 | var topY = self.tickFont.lineHeight + 2; 131 | let context = UIGraphicsGetCurrentContext() 132 | let gridGap = self.bigTickSeconds / 2; 133 | let numVerticalLine = self.maxSeconds / gridGap; 134 | 135 | let gap = totalWidth / CGFloat(numVerticalLine); 136 | 137 | 138 | for i in 1.. 
Int in 238 | if v is SFAudioBusView { 239 | return sum + 1; 240 | }else{ 241 | return sum; 242 | } 243 | } 244 | 245 | return buses; 246 | } 247 | 248 | deinit { 249 | print("--------\(self) is recycled-----------"); 250 | } 251 | 252 | // MARK:Public Area 253 | 254 | var onNumOfBusesChanged:((Void)->Void)? 255 | 256 | private var previewDidFinish:((Void)->Void)? 257 | 258 | weak var parentVC:SFMixerViewController? 259 | 260 | var clipMargins:CGPoint{ 261 | set{ 262 | // self.clipMaskLayer is an instance of SFAudioBusContainerClipMaskLayer 263 | // change the property animately 264 | self.clipMaskLayer.clipMargins = newValue; 265 | } 266 | get{ 267 | let p = self.clipMaskLayer.clipMargins; 268 | return p; 269 | } 270 | } 271 | 272 | 273 | 274 | 275 | var canAddAudioBus:Bool{ 276 | return self.numOfBuses < self.maxAudioRow; 277 | } 278 | 279 | func clippedDurationOfNewMargin(_ newMargin:CGPoint) -> Int{ 280 | let duration = ( CGFloat(self.maxSeconds) * (self.bounds.width - newMargin.x - newMargin.y) / self.bounds.width ).rounded(); 281 | return Int(duration); 282 | } 283 | 284 | 285 | func addAudioBus(audioBus:SFAudioBus) -> Void { 286 | if self.canAddAudioBus{ 287 | let busView = SFAudioBusView(audioBus: audioBus, frame: CGRect(x:0,y:0,width:self.bounds.width,height:audioRowHeight),maxDuration:CGFloat(self.maxSeconds)) 288 | self.addSubview(busView); 289 | onNumOfBusesChanged?(); 290 | } 291 | 292 | } 293 | 294 | 295 | func prepareRemoveAudioBusView(targetView:SFAudioBusView){ 296 | let ac = UIAlertController(title: "Warnning" ,message: "Are you sure to reomve [\(targetView.audioBus.name)]", preferredStyle: UIAlertControllerStyle.alert); 297 | 298 | ac.addAction(UIAlertAction(title: "Cancel", style: UIAlertActionStyle.cancel, handler: nil)); 299 | ac.addAction(UIAlertAction(title: "Ok", style: UIAlertActionStyle.default, handler: { [weak self] (_) in 300 | targetView.removeFromSuperview(); 301 | self?.onNumOfBusesChanged?(); 302 | })); 303 | self.parentVC?.present(ac, animated: true, completion: nil); 304 | } 305 | 306 | 307 | private func addAudioBus(_ audioBus:SFAudioBus,composition:AVMutableComposition) -> AVAudioMixInputParameters?{ 308 | let asset = AVURLAsset(url: audioBus.url); 309 | let track = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid); 310 | 311 | 312 | 313 | let startTime = CMTime(seconds: 0, preferredTimescale: 1); 314 | let endTime = CMTime(seconds: audioBus.accurateDuration, preferredTimescale: 1); 315 | 316 | let trackMixParameters = AVMutableAudioMixInputParameters(track: track); 317 | trackMixParameters.setVolume(audioBus.volumn, at: startTime); 318 | 319 | 320 | let sourceTrack = asset.tracks(withMediaType: AVMediaTypeAudio).first! 
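// Each bus becomes its own mutable audio track in the composition.
// `timeRange` spans the whole clip (start is 0, so passing `endTime` as the
// duration works out), and `delayTime` is where the clip is inserted on the
// mix timeline. A negative delay (waveform dragged past the left edge) instead
// trims the head of the source by that amount and inserts it at time zero.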
321 | 322 | var timeRange = CMTimeRange(start: startTime, duration: endTime); 323 | var delayTime = CMTime(seconds: audioBus.delayTime, preferredTimescale: 1); 324 | 325 | if audioBus.delayTime < 0{ 326 | timeRange.start = CMTime(seconds: -audioBus.delayTime, preferredTimescale: 1); 327 | delayTime = startTime; 328 | } 329 | 330 | 331 | 332 | do { 333 | try track.insertTimeRange(timeRange, of: sourceTrack, at:delayTime); 334 | }catch{ 335 | print(error); 336 | return nil; 337 | } 338 | 339 | 340 | 341 | return trackMixParameters; 342 | 343 | } 344 | 345 | private func createAudioMixerAndComposition() -> (AVAudioMix,AVComposition)?{ 346 | let composition = AVMutableComposition(); 347 | var inputParams = [AVAudioMixInputParameters](); 348 | for v in self.subviews{ 349 | if let busView = v as? SFAudioBusView{ 350 | 351 | if busView.audioBus.mute == false{ 352 | 353 | if let param = self.addAudioBus(busView.audioBus, composition: composition){ 354 | inputParams.append(param); 355 | }else{ 356 | return nil; 357 | } 358 | 359 | } 360 | 361 | 362 | } 363 | } 364 | 365 | 366 | let audioMixer = AVMutableAudioMix(); 367 | audioMixer.inputParameters = inputParams; 368 | 369 | 370 | return (audioMixer,composition); 371 | 372 | } 373 | 374 | func saveOutput(completion:@escaping (String?) -> Void){ 375 | 376 | if let (audioMixer,composition) = self.createAudioMixerAndComposition() { 377 | 378 | let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)! 379 | exporter.audioMix = audioMixer; 380 | exporter.outputFileType = "com.apple.m4a-audio"; 381 | 382 | let fileName = "SFMixer.m4a"; 383 | let finalPath = (NSTemporaryDirectory() as NSString).appendingPathComponent(fileName) 384 | 385 | 386 | 387 | print(finalPath); 388 | 389 | let outputURL = URL(fileURLWithPath: finalPath); 390 | 391 | try? FileManager.default.removeItem(at: outputURL); 392 | exporter.outputURL = outputURL; 393 | 394 | let range = self.clipedMixRange; 395 | exporter.timeRange = CMTimeRange(start: CMTime(seconds: range.0, preferredTimescale: 1), duration: CMTime(seconds: range.1, preferredTimescale: 1)) 396 | 397 | exporter.exportAsynchronously { 398 | 399 | let status = exporter.status; 400 | if status == .completed{ 401 | DispatchQueue.main.async { 402 | completion(finalPath); 403 | } 404 | }else if(status == .failed){ 405 | DispatchQueue.main.async { 406 | completion(nil); 407 | } 408 | } 409 | }; 410 | }else{ 411 | completion(nil); 412 | } 413 | 414 | } 415 | 416 | 417 | 418 | 419 | var clipedMixRange:(TimeInterval,TimeInterval){ 420 | let leftClip = TimeInterval(self.maxSeconds) * TimeInterval( self.clipMargins.x / self.bounds.width); 421 | let rightClip = TimeInterval(self.maxSeconds) * TimeInterval( self.clipMargins.y / self.bounds.width); 422 | let duration = TimeInterval(self.maxSeconds) - leftClip - rightClip; 423 | return(leftClip,duration); 424 | //return Range( 425 | } 426 | 427 | var distanceBetweenClipLine:CGFloat{ 428 | let distance = self.bounds.width - self.clipMargins.x - self.clipMargins.y; 429 | return distance; 430 | } 431 | 432 | 433 | 434 | 435 | var trimedClipMargins:CGPoint{ 436 | //self.clipMargins 437 | var minRightMargin = CGFloat(self.bounds.width); 438 | var minLeftMargin = CGFloat(self.bounds.width); 439 | for v in self.subviews{ 440 | if let busView = v as? 
SFAudioBusView{ 441 | 442 | if busView.audioBus.mute == false{ 443 | 444 | let rightMargin = busView.rightMargin; 445 | if rightMargin < minRightMargin { 446 | minRightMargin = rightMargin; 447 | } 448 | 449 | let leftMargin = busView.leftMargin; 450 | if leftMargin < minLeftMargin{ 451 | minLeftMargin = leftMargin; 452 | } 453 | } 454 | 455 | 456 | } 457 | } 458 | //minRightMargin -= HAIRLINE_WIDTH; 459 | minRightMargin = max(0, minRightMargin); 460 | minRightMargin = min(self.bounds.width, minRightMargin); 461 | 462 | //minLeftMargin -= HAIRLINE_WIDTH; 463 | minLeftMargin = max(0,minLeftMargin); 464 | minLeftMargin = min(self.bounds.width,minLeftMargin); 465 | 466 | 467 | 468 | 469 | 470 | let newClipMargin = CGPoint(x:minLeftMargin,y:minRightMargin); 471 | return newClipMargin; 472 | } 473 | 474 | func stopPreview(){ 475 | self.isUserInteractionEnabled = true; 476 | NSObject.cancelPreviousPerformRequests(withTarget: self); 477 | avPlayer?.pause(); 478 | avPlayer = nil; 479 | self.previewDidFinish?(); 480 | 481 | } 482 | 483 | func pausePreview(){ 484 | avPlayer?.pause(); 485 | NSObject.cancelPreviousPerformRequests(withTarget: self); 486 | } 487 | 488 | 489 | 490 | func resumePreviewAtPosition(seekTime:TimeInterval){ 491 | let startTime = self.clipedMixRange.0 + seekTime; 492 | self.avPlayer?.seek(to: CMTime(seconds: startTime, preferredTimescale: 1)); 493 | self.playingBeginTime = Date.timeIntervalSinceReferenceDate - seekTime; 494 | self.avPlayer?.play(); 495 | self.perform(#selector(self.stopPreview), with: nil, afterDelay: self.clipedMixRange.1 - seekTime); 496 | } 497 | 498 | 499 | 500 | var passedTimeSincePlaying:TimeInterval?{ 501 | if let beginTime = self.playingBeginTime { 502 | return Date.timeIntervalSinceReferenceDate - beginTime; 503 | } 504 | return nil; 505 | } 506 | 507 | func previewMixedAudio(completion: @escaping (Void)->Void){ 508 | 509 | self.previewDidFinish = completion; 510 | 511 | if let (audioMixer,composition) = self.createAudioMixerAndComposition() { 512 | 513 | let playItem = AVPlayerItem(asset: composition); 514 | playItem.audioMix = audioMixer; 515 | let avPlayer = AVPlayer(playerItem: playItem); 516 | 517 | self.avPlayer = avPlayer; 518 | 519 | avPlayer.play(); 520 | 521 | 522 | let range = self.clipedMixRange; 523 | avPlayer.seek(to: CMTime(seconds: range.0, preferredTimescale: 1)); 524 | self.playingBeginTime = Date.timeIntervalSinceReferenceDate; 525 | 526 | self.perform(#selector(self.stopPreview), with: nil, afterDelay: range.1); 527 | 528 | self.isUserInteractionEnabled = false; 529 | 530 | 531 | } 532 | 533 | 534 | 535 | 536 | 537 | } 538 | 539 | 540 | 541 | } 542 | -------------------------------------------------------------------------------- /SFMixer/SFMixer/SFAudioBusView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // SFAudioBusView.swift 3 | // SFMixer 4 | // 5 | // Created by CHENWANFEI on 10/03/2017. 6 | // Copyright © 2017 SwordFish. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | import MediaPlayer 12 | import AVFoundation 13 | 14 | class SFAudioBusView: UIView { 15 | 16 | 17 | /* 18 | // Only override draw() if you perform custom drawing. 19 | // An empty implementation adversely affects performance during animation. 20 | override func draw(_ rect: CGRect) { 21 | // Drawing code 22 | } 23 | */ 24 | 25 | var audioBus:SFAudioBus! 26 | 27 | private weak var nameLabel:UILabel! 28 | private weak var volumnLabel:UILabel! 29 | 30 | private weak var waveFormView:UIImageView! 
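// The waveform image view is the interactive part of a bus row: its width is
// proportional to the clip's duration relative to `maxDuration`, its horizontal
// offset determines `audioBus.delayTime`, and its height (adjusted by the
// vertical pan gesture) maps to `audioBus.volumn`.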
31 | 32 | private weak var indictor:UIActivityIndicatorView! 33 | 34 | private let bottomLineHeight = CGFloat(20) 35 | private let waveformImageVerticalMargin = CGFloat(8) 36 | private let maxDuration:CGFloat; 37 | 38 | private var lastPanLocation:CGPoint? 39 | 40 | 41 | private weak var leftSliderFlagView:UIView! 42 | private weak var rightSliderFlagView:UIView! 43 | 44 | 45 | 46 | 47 | init(audioBus: SFAudioBus,frame:CGRect,maxDuration:CGFloat) { 48 | self.audioBus = audioBus; 49 | self.maxDuration = maxDuration; 50 | super.init(frame: frame); 51 | 52 | 53 | 54 | 55 | 56 | let nameLabel = UILabel(); 57 | nameLabel.font = UIFont.systemFont(ofSize: 12.0); 58 | nameLabel.textColor = UIColor.white; 59 | nameLabel.translatesAutoresizingMaskIntoConstraints = false; 60 | self.addSubview(nameLabel); 61 | self.nameLabel = nameLabel; 62 | 63 | var c = NSLayoutConstraint(item: nameLabel, attribute: NSLayoutAttribute.leading, relatedBy: NSLayoutRelation.equal, toItem: self, attribute: NSLayoutAttribute.leading, multiplier: 1, constant: 8); 64 | self.addConstraint(c); 65 | 66 | c = NSLayoutConstraint(item: nameLabel, attribute: NSLayoutAttribute.bottom, relatedBy: NSLayoutRelation.equal, toItem: self, attribute: NSLayoutAttribute.bottom, multiplier: 1, constant: -4); 67 | self.addConstraint(c); 68 | 69 | 70 | let volumnLabel = UILabel(); 71 | volumnLabel.font = UIFont.systemFont(ofSize: 12.0); 72 | volumnLabel.textColor = UIColor.white; 73 | volumnLabel.translatesAutoresizingMaskIntoConstraints = false; 74 | self.addSubview(volumnLabel); 75 | self.volumnLabel = volumnLabel; 76 | 77 | c = NSLayoutConstraint(item: volumnLabel, attribute: NSLayoutAttribute.trailing, relatedBy: NSLayoutRelation.equal, toItem: self, attribute: NSLayoutAttribute.trailing, multiplier: 1, constant: -8); 78 | self.addConstraint(c); 79 | 80 | c = NSLayoutConstraint(item: volumnLabel, attribute: NSLayoutAttribute.bottom, relatedBy: NSLayoutRelation.equal, toItem: self, attribute: NSLayoutAttribute.bottom, multiplier: 1, constant: -4); 81 | self.addConstraint(c); 82 | 83 | 84 | 85 | //data 86 | 87 | let asset = AVURLAsset(url: self.audioBus.url, options: [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true as Bool)]) 88 | let audioDuration = asset.duration; 89 | self.audioBus.accurateDuration = CMTimeGetSeconds(audioDuration); 90 | 91 | 92 | 93 | 94 | 95 | //add waveformView 96 | 97 | let waveformFrame = CGRect(x: 0, y: waveformImageVerticalMargin, width: self.bounds.width * CGFloat( self.audioBus.accurateDuration) / self.maxDuration, height:waveformHeight); 98 | let waveFormView = UIImageView(frame: waveformFrame); 99 | waveFormView.tintColor = UIColor.white; 100 | //waveFormView.backgroundColor = UIColor.red; 101 | self.addSubview(waveFormView); 102 | self.waveFormView = waveFormView; 103 | waveFormView.center = CGPoint(x: self.bounds.width / 2, y: self.waveFormView.center.y); 104 | 105 | 106 | self.audioBus.delayTime = TimeInterval( self.maxDuration * self.waveFormView.frame.minX / self.bounds.width ); 107 | 108 | 109 | 110 | 111 | 112 | //add indicator 113 | let indicator = UIActivityIndicatorView(activityIndicatorStyle: UIActivityIndicatorViewStyle.white); 114 | indicator.translatesAutoresizingMaskIntoConstraints = false; 115 | self.waveFormView.addSubview(indicator); 116 | indicator.startAnimating(); 117 | self.indictor = indicator; 118 | 119 | c = NSLayoutConstraint(item: indicator, attribute: NSLayoutAttribute.centerX, relatedBy: NSLayoutRelation.equal, toItem: self.waveFormView, attribute: 
NSLayoutAttribute.centerX, multiplier: 1, constant: 0); 120 | self.waveFormView.addConstraint(c); 121 | 122 | c = NSLayoutConstraint(item: indicator, attribute: NSLayoutAttribute.centerY, relatedBy: NSLayoutRelation.equal, toItem: self.waveFormView, attribute: NSLayoutAttribute.centerY, multiplier: 1, constant: 0); 123 | self.waveFormView.addConstraint(c); 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | //add gesturs 135 | let longPressG = UILongPressGestureRecognizer(target: self, action: #selector(onLongPress(g:))) 136 | self.addGestureRecognizer(longPressG); 137 | 138 | 139 | let tap = UITapGestureRecognizer(target: self, action: #selector(onTap(g:))) 140 | self.addGestureRecognizer(tap); 141 | 142 | 143 | 144 | // 145 | let iDuration = Int(self.audioBus.accurateDuration); 146 | self.nameLabel.text = "\(self.audioBus.name) \(iDuration / 60):\(iDuration % 60)" 147 | self.volumnLabel.text = "Vol:\(Int(self.audioBus.volumn * 100))"; 148 | 149 | 150 | let scale = UIScreen.main.scale; 151 | 152 | generateWaveformImage(audioURL: self.audioBus.url, imageSizeInPixel: CGSize(width:self.bounds.width * scale,height:self.waveformHeight * scale), waveColor: UIColor.white) { [weak self](waveformImage) in 153 | 154 | guard let `self` = self else{ 155 | return; 156 | } 157 | 158 | if let image = waveformImage{ 159 | self.audioBus.waveformImage = waveformImage; 160 | self.waveFormView.image = image.withRenderingMode(UIImageRenderingMode.alwaysTemplate); 161 | self.indictor.removeFromSuperview(); 162 | 163 | //add pan gestures 164 | var pan = PanDirectionGestureRecognizer(direction:.horizontal, target: self, action: #selector(self.onPanWaveformHorizental(gesture:))); 165 | self.addGestureRecognizer(pan); 166 | 167 | pan = PanDirectionGestureRecognizer(direction:.vertical, target: self, action: #selector(self.onPanWaveformVertical(gesture:))); 168 | 169 | self.addGestureRecognizer(pan); 170 | 171 | }else{ 172 | self.nameLabel.text = "\(self.audioBus.name) loading fails"; 173 | self.nameLabel.textColor = UIColor.red; 174 | } 175 | } 176 | 177 | 178 | } 179 | 180 | 181 | dynamic private func onPanWaveformVertical(gesture:UIGestureRecognizer){ 182 | guard self.audioBus.waveformImage != nil else { 183 | return; 184 | } 185 | 186 | if gesture.state == .began{ 187 | self.lastPanLocation = gesture.location(in: self); 188 | }else if(gesture.state == .changed) { 189 | let thisPanLocation = gesture.location(in: self); 190 | let deltaY = thisPanLocation.y - lastPanLocation!.y; 191 | let delta = UIScreen.main.scale / 2; 192 | if deltaY > 0 { 193 | //down 194 | self.waveFormView.bounds = CGRect(x:0,y:0,width:self.waveFormView.bounds.width,height:max(self.waveFormView.bounds.height - delta,0)); 195 | 196 | }else{ 197 | //up 198 | self.waveFormView.bounds = CGRect(x:0,y:0,width:self.waveFormView.bounds.width,height:min(self.waveFormView.bounds.height + delta,waveformHeight)); 199 | } 200 | 201 | self.audioBus.volumn = Float(self.waveFormView.bounds.height / waveformHeight); 202 | 203 | let iVolumn = Int((100.0 * self.audioBus.volumn).rounded()) 204 | self.volumnLabel.text = "Vol:\(iVolumn)" 205 | 206 | 207 | 208 | self.lastPanLocation = thisPanLocation; 209 | } 210 | } 211 | 212 | 213 | 214 | 215 | dynamic private func onPanWaveformHorizental(gesture:UIGestureRecognizer){ 216 | 217 | guard self.audioBus.waveformImage != nil else { 218 | return; 219 | } 220 | 221 | if gesture.state == .began{ 222 | self.lastPanLocation = gesture.location(in: self); 223 | 224 | }else if(gesture.state == .changed){ 225 | 
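// A horizontal drag slides the waveform along the timeline: the pan delta is
// applied to the image view's center, the view is kept from being dragged
// completely off either end of the row, and popover flag views above the left
// and right edges show the current start/end times (mm:ss). The new
// `frame.minX` is then converted back into `audioBus.delayTime`.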
226 | 227 | 228 | let thisPanLocation = gesture.location(in: self); 229 | let deltaX = thisPanLocation.x - self.lastPanLocation!.x; 230 | self.waveFormView.center = CGPoint(x: self.waveFormView.center.x + deltaX, y: self.waveFormView.center.y); 231 | 232 | if self.waveFormView.frame.maxX < 0{ 233 | self.waveFormView.frame = CGRect(x: -self.waveFormView.bounds.width, y: self.waveFormView.frame.origin.y, width: self.waveFormView.frame.width, height: self.waveFormView.frame.height); 234 | } 235 | 236 | if self.waveFormView.frame.minX > self.bounds.width{ 237 | self.waveFormView.frame = CGRect(x: self.bounds.width, y: self.waveFormView.frame.origin.y, width: self.waveFormView.frame.width, height: self.waveFormView.frame.height); 238 | } 239 | 240 | if self.leftSliderFlagView == nil && self.rightSliderFlagView == nil { 241 | 242 | 243 | //add flagView 244 | 245 | let leftFlagView = createFlagView(); 246 | self.superview!.addSubview(leftFlagView); 247 | self.leftSliderFlagView = leftFlagView; 248 | 249 | let rightFlagView = createFlagView(); 250 | self.superview!.addSubview(rightFlagView); 251 | self.rightSliderFlagView = rightFlagView; 252 | 253 | 254 | } 255 | 256 | 257 | 258 | let leftValue = Int(self.maxDuration * self.waveFormView.frame.minX / self.bounds.width); 259 | let rightValue = Int(self.maxDuration * self.waveFormView.frame.maxX / self.bounds.width); 260 | 261 | let leftLabel = self.leftSliderFlagView.subviews.last as! UILabel; 262 | leftLabel.text = String(format:"\(leftValue / 60):%02d",leftValue % 60); 263 | 264 | let rightLabel = self.rightSliderFlagView.subviews.last as! UILabel; 265 | rightLabel.text = String(format:"\(rightValue / 60):%02d",rightValue % 60); 266 | 267 | 268 | let referenceFrame = self.convert(self.waveFormView.frame, to: self.superview!); 269 | 270 | self.leftSliderFlagView.center = CGPoint(x : referenceFrame.minX,y:referenceFrame.minY - self.leftSliderFlagView.bounds.height / 2); 271 | 272 | self.rightSliderFlagView.center = CGPoint(x : referenceFrame.maxX,y:referenceFrame.minY - self.rightSliderFlagView.bounds.height / 2); 273 | 274 | 275 | 276 | 277 | self.audioBus.delayTime = TimeInterval( self.maxDuration * self.waveFormView.frame.minX / self.bounds.width ); 278 | 279 | self.lastPanLocation = thisPanLocation; 280 | 281 | }else if gesture.state == .cancelled || gesture.state == .ended{ 282 | 283 | UIView.animate(withDuration: ANIMATION_DURATION, animations: { [weak self] in 284 | 285 | self?.leftSliderFlagView?.alpha = 0; 286 | self?.rightSliderFlagView?.alpha = 0; 287 | 288 | }, completion: { [weak self] (_) in 289 | 290 | self?.leftSliderFlagView?.removeFromSuperview(); 291 | self?.rightSliderFlagView?.removeFromSuperview(); 292 | }) 293 | 294 | } 295 | 296 | } 297 | 298 | 299 | 300 | 301 | 302 | private var waveformHeight:CGFloat{ 303 | return self.bounds.height - self.bottomLineHeight - waveformImageVerticalMargin * 2 304 | } 305 | 306 | 307 | 308 | 309 | 310 | 311 | 312 | private func createFlagView() -> UIView{ 313 | let flagView = UIView(frame: CGRect(x: 0, y: 0, width: 33.0, height: 24.0)) 314 | let imageView = UIImageView(frame: flagView.bounds); 315 | imageView.image = UIImage(named: "popover"); 316 | flagView.addSubview(imageView); 317 | 318 | let label = UILabel(frame: CGRect(x: 0, y: 0, width: 33.0, height: 21.0)); 319 | label.font = UIFont.systemFont(ofSize: 10.0); 320 | label.textAlignment = NSTextAlignment.center; 321 | label.textColor = UIColor.white; 322 | flagView.addSubview(label); 323 | 324 | return flagView; 325 | 326 | } 327 | 328 | 
329 |     dynamic private func onTap(g:UIGestureRecognizer){
330 |         if g.state == .ended{
331 | 
332 |             if self.audioBus.waveformImage != nil{
333 |                 self.audioBus.mute = !self.audioBus.mute;
334 |                 if self.audioBus.mute{
335 |                     self.nameLabel.textColor = UIColor.darkGray;
336 |                     self.volumnLabel.textColor = UIColor.darkGray;
337 |                     self.waveFormView.tintColor = UIColor.darkGray;
338 |                     if let panG = self.gestureRecognizers?.filter({ (g) -> Bool in
339 |                         g is UIPanGestureRecognizer
340 |                     }).first{
341 |                         panG.isEnabled = false;
342 |                     }
343 | 
344 |                 }else{
345 |                     self.nameLabel.textColor = UIColor.white;
346 |                     self.volumnLabel.textColor = UIColor.white;
347 |                     self.waveFormView.tintColor = UIColor.white;
348 | 
349 |                     if let panG = self.gestureRecognizers?.filter({ (g) -> Bool in
350 |                         g is UIPanGestureRecognizer
351 |                     }).first{
352 |                         panG.isEnabled = true;
353 |                     }
354 |                 }
355 |             }
356 | 
357 |         }
358 |     }
359 | 
360 | 
361 |     dynamic private func onLongPress(g:UIGestureRecognizer){
362 |         if g.state == .began{
363 |             if let containerView = self.superview as? SFAudioBusContainerView {
364 |                 containerView.prepareRemoveAudioBusView(targetView: self);
365 |             }
366 |         }
367 |     }
368 | 
369 | 
370 | 
371 |     required init?(coder aDecoder: NSCoder) {
372 |         fatalError("init(coder:) has not been implemented")
373 |     }
374 | 
375 | 
376 |     var rightMargin:CGFloat{
377 |         return self.bounds.width - self.waveFormView.frame.maxX;
378 |     }
379 |     var leftMargin:CGFloat{
380 |         return self.waveFormView.frame.minX;
381 |     }
382 | 
383 | }
384 | 
--------------------------------------------------------------------------------
/SFMixer/SFMixer/SFCommon.swift:
--------------------------------------------------------------------------------
 1 | //
 2 | // SFCommon.swift
 3 | // SFMixer
 4 | //
 5 | // Created by CHENWANFEI on 10/03/2017.
 6 | // Copyright © 2017 SwordFish. All rights reserved.
 7 | //
 8 | 
 9 | import UIKit
10 | let ANIMATION_DURATION = TimeInterval(0.3)
11 | let HAIRLINE_WIDTH = CGFloat(1.0) / UIScreen.main.scale;
12 | 
--------------------------------------------------------------------------------
/SFMixer/SFMixer/SFMixerViewController.storyboard:
--------------------------------------------------------------------------------
(Interface Builder storyboard XML; the markup is not preserved in this plain-text listing)
--------------------------------------------------------------------------------
/SFMixer/SFMixer/SFMixerViewController.swift:
--------------------------------------------------------------------------------
 1 | //
 2 | // SFMixerViewController.swift
 3 | // SFMixer
 4 | //
 5 | // Created by CHENWANFEI on 07/03/2017.
 6 | // Copyright © 2017 SwordFish. All rights reserved.
7 | // 8 | 9 | import UIKit 10 | 11 | 12 | class SFDashedLineLayer: CALayer { 13 | var lineColor = UIColor.red; 14 | 15 | override func draw(in ctx: CGContext) { 16 | //set the passed ctx in the stack's top 17 | UIGraphicsPushContext(ctx); 18 | 19 | 20 | let path = UIBezierPath() 21 | 22 | let p0 = CGPoint(x: self.bounds.width / 2, y: 0) 23 | let p1 = CGPoint(x: self.bounds.width / 2, y: self.bounds.height); 24 | path.move(to: p0); 25 | path.addLine(to: p1); 26 | 27 | let dashes: [ CGFloat ] = [ 3, 1 ] 28 | path.setLineDash(dashes, count: dashes.count, phase: 0.0) 29 | 30 | path.lineWidth = HAIRLINE_WIDTH 31 | 32 | lineColor.setStroke(); 33 | path.stroke() 34 | 35 | 36 | UIGraphicsPopContext(); 37 | 38 | } 39 | 40 | 41 | 42 | } 43 | 44 | class SFMixerViewController: UIViewController { 45 | 46 | @IBOutlet weak var addAudioBtn: UIButton! 47 | @IBOutlet weak var leftPositionFlagLeftMarginConstraint: NSLayoutConstraint! 48 | 49 | @IBOutlet weak var trimBtn: UIButton! 50 | @IBOutlet weak var rightPostionFlagRightMarginConstraint: NSLayoutConstraint! 51 | @IBOutlet weak var actionBtn: UIBarButtonItem! 52 | 53 | @IBOutlet weak var leftPositionFlagView: UIView! 54 | 55 | @IBOutlet weak var rightPositionFlagView: UIView! 56 | 57 | @IBOutlet weak var leftClipIndicator: UIView! 58 | 59 | @IBOutlet weak var rightClipIndicator: UIView! 60 | 61 | @IBOutlet weak var midClipIndicator: UIView! 62 | 63 | @IBOutlet weak var midPositionFlagView: SFPositionFlagView! 64 | 65 | @IBOutlet weak var movingLine:SFDashedLineLayer?; 66 | 67 | @IBOutlet weak var previewBtn: UIButton! 68 | 69 | private var hairLineWidth:CGFloat{ 70 | return CGFloat(1.0) / UIScreen.main.scale; 71 | } 72 | @IBOutlet weak var durationTitleLabel: UILabel! 73 | 74 | @IBOutlet weak var busContainerView: SFAudioBusContainerView! 75 | 76 | private var fingerX = CGFloat(0); 77 | 78 | private var halfFlagViewWidth = CGFloat(0); 79 | private var previewDisplayLink:CADisplayLink? 80 | 81 | 82 | 83 | override func viewDidLoad() { 84 | super.viewDidLoad() 85 | 86 | self.busContainerView.parentVC = self; 87 | self.busContainerView.onNumOfBusesChanged = { [weak self] in 88 | 89 | guard let `self` = self else { 90 | return; 91 | } 92 | 93 | self.addAudioBtn.isEnabled = self.busContainerView.canAddAudioBus; 94 | self.previewBtn.isEnabled = self.busContainerView.numOfBuses > 0; 95 | self.durationTitleLabel.isHidden = self.busContainerView.numOfBuses == 0; 96 | self.trimBtn.isEnabled = self.busContainerView.numOfBuses > 0; 97 | self.actionBtn.isEnabled = self.busContainerView.numOfBuses > 0; 98 | 99 | } 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | halfFlagViewWidth = self.rightPositionFlagView.bounds.width / 2 - hairLineWidth; 109 | 110 | self.leftPositionFlagLeftMarginConstraint.constant = -halfFlagViewWidth; 111 | 112 | self.rightPostionFlagRightMarginConstraint.constant = halfFlagViewWidth; 113 | 114 | // Do any additional setup after loading the view. 
115 | } 116 | 117 | 118 | @IBAction func onExportResult(_ sender: Any) { 119 | 120 | let totalDuration = self.busContainerView.clipedMixRange.1; 121 | if totalDuration <= 0 { 122 | 123 | let ac = UIAlertController(title: "Info", message: "The length of final duration is not long enough", preferredStyle: UIAlertControllerStyle.alert); 124 | ac.addAction(UIAlertAction(title: "Ok", style: UIAlertActionStyle.default, handler: nil)); 125 | self.present(ac, animated: true, completion: nil); 126 | 127 | return; 128 | } 129 | 130 | let alertController = UIAlertController(title: "", message: "Please wait\n\n\n", preferredStyle: .alert) 131 | 132 | let spinnerIndicator = UIActivityIndicatorView(activityIndicatorStyle: .whiteLarge) 133 | 134 | spinnerIndicator.center = CGPoint(x: 135.0, y: 65.5) 135 | spinnerIndicator.color = UIColor.black 136 | spinnerIndicator.startAnimating() 137 | 138 | alertController.view.addSubview(spinnerIndicator) 139 | self.present(alertController, animated: false, completion: nil) 140 | 141 | self.busContainerView.saveOutput(completion: { [weak self](path) in 142 | alertController.dismiss(animated: true, completion: nil); 143 | if let path = path{ 144 | 145 | let url = URL(fileURLWithPath: path); 146 | 147 | let objectsToShare = [url]; 148 | let activityVC = UIActivityViewController(activityItems: objectsToShare, applicationActivities: nil) 149 | self?.present(activityVC, animated: true, completion: nil) 150 | 151 | 152 | }else{ 153 | let ac = UIAlertController(title: "Error", message: "Something wrong happens", preferredStyle: UIAlertControllerStyle.alert); 154 | ac.addAction(UIAlertAction(title: "Ok", style: UIAlertActionStyle.default, handler: nil)); 155 | self?.present(ac, animated: true, completion: nil); 156 | 157 | } 158 | }); 159 | 160 | 161 | 162 | 163 | 164 | 165 | } 166 | 167 | 168 | @IBAction func onMidPosistionFlagViewPan(_ sender: Any) { 169 | if let g = sender as? UIPanGestureRecognizer{ 170 | if g.state == UIGestureRecognizerState.began{ 171 | fingerX = g.location(in: self.view).x; 172 | self.view.bringSubview(toFront: self.midPositionFlagView); 173 | self.midClipIndicator.isHidden = false; 174 | self.busContainerView.pausePreview(); 175 | self.previewDisplayLink?.isPaused = true; 176 | 177 | }else{ 178 | let newFingerX = g.location(in: self.view).x; 179 | 180 | let deltaX = newFingerX - fingerX; 181 | 182 | 183 | var newCenterX = self.midPositionFlagView.center.x + deltaX; 184 | 185 | if newCenterX < self.leftPositionFlagView.center.x { 186 | newCenterX = self.leftPositionFlagView.center.x 187 | } 188 | 189 | if newCenterX > self.rightPositionFlagView.center.x { 190 | newCenterX = self.rightPositionFlagView.center.x 191 | } 192 | 193 | self.midPositionFlagView.center.x = newCenterX; 194 | CATransaction.setDisableActions(true); 195 | self.movingLine!.position = CGPoint(x:newCenterX,y:self.movingLine!.position.y); 196 | self.midClipIndicator.center.x = newCenterX; 197 | 198 | let timePassed = CGFloat(self.busContainerView.clipedMixRange.1) * (newCenterX - self.leftPositionFlagView.center.x) / self.busContainerView.distanceBetweenClipLine; 199 | 200 | 201 | 202 | if let label = self.midClipIndicator.subviews.last as? 
UILabel { 203 | let seconds = Int(timePassed); 204 | label.text = String(format: "%d:%02d", seconds / 60,seconds % 60); 205 | } 206 | 207 | 208 | 209 | 210 | if g.state == .cancelled || g.state == .ended{ 211 | 212 | self.busContainerView.resumePreviewAtPosition(seekTime:TimeInterval(timePassed)); 213 | self.previewDisplayLink?.isPaused = false; 214 | 215 | } 216 | 217 | fingerX = newFingerX; 218 | 219 | } 220 | } 221 | 222 | } 223 | 224 | 225 | @IBAction func onLeftPositionFlagViewPan(_ sender: Any) { 226 | 227 | if let g = sender as? UIPanGestureRecognizer{ 228 | if g.state == UIGestureRecognizerState.began{ 229 | fingerX = g.location(in: self.view).x; 230 | self.view.bringSubview(toFront: self.leftPositionFlagView); 231 | self.leftClipIndicator.isHidden = false; 232 | }else{ 233 | let newFingerX = g.location(in: self.view).x; 234 | 235 | let deltaX = newFingerX - fingerX; 236 | 237 | 238 | var newConstant = self.leftPositionFlagLeftMarginConstraint.constant + deltaX; 239 | 240 | if newConstant < -halfFlagViewWidth { 241 | newConstant = -halfFlagViewWidth; 242 | } 243 | 244 | if self.busContainerView.frame.minX + newConstant > self.busContainerView.frame.maxX + self.rightPostionFlagRightMarginConstraint.constant - self.rightPositionFlagView.bounds.width{ 245 | newConstant = self.busContainerView.frame.maxX + self.rightPostionFlagRightMarginConstraint.constant - self.rightPositionFlagView.bounds.width - self.busContainerView.frame.minX ; 246 | } 247 | 248 | 249 | leftPositionFlagLeftMarginConstraint.constant = newConstant; 250 | 251 | self.busContainerView.clipMargins.x = newConstant + halfFlagViewWidth + hairLineWidth; 252 | 253 | fingerX = g.location(in: self.view).x; 254 | 255 | if let label = self.leftClipIndicator.subviews.last as? UILabel{ 256 | let passed = Float(self.busContainerView.maxSeconds) * Float(self.busContainerView.clipMargins.x) / Float(self.busContainerView.bounds.width); 257 | let passSeconds = Int(passed.rounded()); 258 | 259 | let s = String(format: "%d:%02d", passSeconds / 60, passSeconds % 60); 260 | label.text = s; 261 | } 262 | 263 | let clippedDuration = Int(self.busContainerView.clipedMixRange.1); 264 | self.durationTitleLabel.text = String(format: "%d:%02d", clippedDuration / 60, clippedDuration % 60); 265 | 266 | 267 | if g.state == .cancelled || g.state == .ended{ 268 | 269 | UIView.animate(withDuration: ANIMATION_DURATION, animations: { [weak self] in 270 | self?.leftClipIndicator.alpha = 0; 271 | }, completion: { [weak self] (_) in 272 | self?.leftClipIndicator.alpha = 1; 273 | self?.leftClipIndicator.isHidden = true; 274 | }) 275 | 276 | } 277 | 278 | } 279 | } 280 | 281 | } 282 | 283 | 284 | @IBAction func onRightPositionFlagViewPan(_ sender: Any) { 285 | 286 | if let g = sender as? 
UIPanGestureRecognizer{ 287 | if g.state == UIGestureRecognizerState.began{ 288 | fingerX = g.location(in: self.view).x; 289 | self.view.bringSubview(toFront: self.rightPositionFlagView); 290 | self.rightClipIndicator.isHidden = false; 291 | }else{ 292 | let newFingerX = g.location(in: self.view).x; 293 | 294 | let deltaX = newFingerX - fingerX; 295 | 296 | 297 | var newConstant = self.rightPostionFlagRightMarginConstraint.constant + deltaX; 298 | 299 | if newConstant > halfFlagViewWidth { 300 | newConstant = halfFlagViewWidth; 301 | } 302 | 303 | 304 | if self.busContainerView.frame.maxX + newConstant - self.rightPositionFlagView.bounds.width < self.busContainerView.frame.minX + self.leftPositionFlagLeftMarginConstraint.constant { 305 | newConstant = self.busContainerView.frame.minX + self.leftPositionFlagLeftMarginConstraint.constant + self.rightPositionFlagView.bounds.width - self.busContainerView.frame.maxX; 306 | } 307 | 308 | rightPostionFlagRightMarginConstraint.constant = newConstant; 309 | self.busContainerView.clipMargins.y = -newConstant + halfFlagViewWidth + hairLineWidth; 310 | 311 | fingerX = g.location(in: self.view).x; 312 | 313 | 314 | if let label = self.rightClipIndicator.subviews.last as? UILabel{ 315 | let passed = Float(self.busContainerView.maxSeconds) * Float(self.busContainerView.bounds.width - self.busContainerView.clipMargins.y) / Float(self.busContainerView.bounds.width); 316 | let passSeconds = Int(passed.rounded()); 317 | 318 | let s = String(format: "%d:%02d", passSeconds / 60, passSeconds % 60); 319 | label.text = s; 320 | } 321 | 322 | let clippedDuration = Int(self.busContainerView.clipedMixRange.1); 323 | self.durationTitleLabel.text = String(format: "%d:%02d", clippedDuration / 60, clippedDuration % 60); 324 | 325 | if g.state == .cancelled || g.state == .ended{ 326 | 327 | UIView.animate(withDuration: ANIMATION_DURATION, animations: { [weak self] in 328 | self?.rightClipIndicator.alpha = 0; 329 | }, completion: { [weak self] (_) in 330 | self?.rightClipIndicator.alpha = 1; 331 | self?.rightClipIndicator.isHidden = true; 332 | }) 333 | 334 | } 335 | 336 | } 337 | } 338 | 339 | } 340 | 341 | 342 | override func viewDidLayoutSubviews() { 343 | super.viewDidLayoutSubviews() 344 | let leftEdge = self.busContainerView.frame.minX; 345 | let rightEdge = self.busContainerView.frame.maxX; 346 | let topEdge = self.busContainerView.frame.minY - 2; 347 | 348 | self.leftClipIndicator.center = CGPoint(x: leftEdge + self.busContainerView.clipMargins.x, y:topEdge - self.leftClipIndicator.bounds.height / 2); 349 | self.rightClipIndicator.center = CGPoint(x: rightEdge - self.busContainerView.clipMargins.y, y:topEdge - self.rightClipIndicator.bounds.height / 2); 350 | 351 | 352 | 353 | } 354 | 355 | deinit { 356 | print("--------\(self) is recycled-----------"); 357 | } 358 | 359 | 360 | override func didReceiveMemoryWarning() { 361 | super.didReceiveMemoryWarning() 362 | // Dispose of any resources that can be recreated. 
363 | } 364 | @IBAction func onAddAudio(_ sender: Any) { 365 | if self.busContainerView.canAddAudioBus{ 366 | 367 | let no = self.busContainerView.numOfBuses; 368 | 369 | let url = Bundle.main.url(forResource: "audio_\(no)", withExtension: "mp3"); 370 | 371 | let audioBus = SFAudioBus(url: url!, name: "auido-\(no)"); 372 | self.busContainerView.addAudioBus(audioBus: audioBus); 373 | self.addAudioBtn.isEnabled = self.busContainerView.canAddAudioBus; 374 | } 375 | } 376 | 377 | 378 | 379 | 380 | private func onPreviewDidFinish(){ 381 | 382 | self.previewDisplayLink?.invalidate(); 383 | self.previewDisplayLink = nil; 384 | 385 | self.movingLine?.removeFromSuperlayer(); 386 | self.movingLine = nil; 387 | self.midPositionFlagView.isHidden = true; 388 | self.midClipIndicator.isHidden = true; 389 | 390 | self.addAudioBtn.isEnabled = true; 391 | self.previewBtn.isSelected = false; 392 | self.trimBtn.isEnabled = true; 393 | self.leftPositionFlagView.isUserInteractionEnabled = true; 394 | self.rightPositionFlagView.isUserInteractionEnabled = true; 395 | self.actionBtn.isEnabled = true; 396 | 397 | } 398 | 399 | func runTimedCode() { 400 | 401 | if let timePassed = self.busContainerView.passedTimeSincePlaying{ 402 | 403 | let distance = self.busContainerView.distanceBetweenClipLine; 404 | 405 | 406 | 407 | 408 | let delta = distance * CGFloat(timePassed) / CGFloat(self.busContainerView.clipedMixRange.1); 409 | 410 | let beginX = self.busContainerView.frame.minX + self.busContainerView.clipMargins.x; 411 | 412 | self.midClipIndicator.center = CGPoint(x:beginX + delta,y:self.midClipIndicator.center.y); 413 | self.midPositionFlagView.center = CGPoint(x:beginX + delta,y:self.midPositionFlagView.center.y); 414 | CATransaction.setDisableActions(true); 415 | self.movingLine?.position = CGPoint(x : beginX + delta,y : self.movingLine!.position.y); 416 | 417 | 418 | 419 | if let label = self.midClipIndicator.subviews.last as? 
UILabel { 420 | let seconds = Int(timePassed.rounded()) 421 | label.text = String(format: "%d:%02d", seconds / 60,seconds % 60); 422 | } 423 | 424 | } 425 | 426 | 427 | 428 | 429 | } 430 | 431 | 432 | @IBAction func onPreview(_ sender: Any) { 433 | 434 | 435 | if self.previewBtn.isSelected == false { 436 | let totalDuration = self.busContainerView.clipedMixRange.1; 437 | if totalDuration > 0 { 438 | 439 | self.addAudioBtn.isEnabled = false; 440 | self.previewBtn.isSelected = true; 441 | self.trimBtn.isEnabled = false; 442 | self.leftPositionFlagView.isUserInteractionEnabled = false; 443 | self.rightPositionFlagView.isUserInteractionEnabled = false; 444 | self.actionBtn.isEnabled = false; 445 | 446 | self.movingLine?.removeFromSuperlayer(); 447 | 448 | let line = SFDashedLineLayer() 449 | line.needsDisplayOnBoundsChange = true; 450 | line.frame = CGRect(x:self.leftPositionFlagView.frame.midX,y:self.busContainerView.frame.minY,width:1,height:self.busContainerView.bounds.height); 451 | self.view.layer.addSublayer(line); 452 | self.movingLine = line; 453 | 454 | 455 | self.midClipIndicator.isHidden = false; 456 | self.midClipIndicator.center = CGPoint(x:self.leftPositionFlagView.frame.midX,y:self.rightClipIndicator.center.y); 457 | self.midPositionFlagView.isHidden = false; 458 | self.midPositionFlagView.center = CGPoint(x:self.leftPositionFlagView.frame.midX,y:self.rightPositionFlagView.center.y); 459 | 460 | 461 | 462 | 463 | 464 | self.busContainerView.previewMixedAudio { [weak self] in 465 | 466 | self?.onPreviewDidFinish(); 467 | 468 | } 469 | 470 | let dpLink = CADisplayLink(target: self, selector: #selector(runTimedCode)) 471 | dpLink.frameInterval = 1 472 | 473 | dpLink.add(to: RunLoop.current, forMode: RunLoopMode.commonModes); 474 | self.previewDisplayLink = dpLink; 475 | 476 | 477 | }else{ 478 | 479 | let ac = UIAlertController(title: "Info", message: "The length of final duration is not long enough", preferredStyle: UIAlertControllerStyle.alert); 480 | ac.addAction(UIAlertAction(title: "Ok", style: UIAlertActionStyle.default, handler: nil)); 481 | self.present(ac, animated: true, completion: nil); 482 | } 483 | }else{ 484 | //stop 485 | self.busContainerView.stopPreview(); 486 | } 487 | 488 | 489 | 490 | 491 | 492 | 493 | 494 | 495 | } 496 | @IBAction func onTrim(_ sender: Any) { 497 | 498 | 499 | let trimedClipMargin = self.busContainerView.trimedClipMargins; 500 | self.rightPostionFlagRightMarginConstraint.constant = -(trimedClipMargin.y - halfFlagViewWidth); 501 | self.leftPositionFlagLeftMarginConstraint.constant = trimedClipMargin.x - halfFlagViewWidth; 502 | 503 | let clippedDuration = self.busContainerView.clippedDurationOfNewMargin(trimedClipMargin); 504 | 505 | self.durationTitleLabel.text = String(format: "%d:%02d", clippedDuration / 60, clippedDuration % 60); 506 | 507 | 508 | UIView.animate(withDuration: ANIMATION_DURATION, animations: { [weak self] in 509 | 510 | self?.busContainerView.clipMargins = trimedClipMargin; 511 | self?.view.layoutIfNeeded(); 512 | }) 513 | 514 | } 515 | 516 | 517 | 518 | 519 | @IBAction func dismissMixer(_ sender: Any) { 520 | self.busContainerView.stopPreview(); 521 | self.dismiss(animated: true, completion: nil); 522 | } 523 | 524 | 525 | 526 | /* 527 | // MARK: - Navigation 528 | 529 | // In a storyboard-based application, you will often want to do a little preparation before navigation 530 | override func prepare(for segue: UIStoryboardSegue, sender: Any?) { 531 | // Get the new view controller using segue.destinationViewController. 
532 | // Pass the selected object to the new view controller. 533 | } 534 | */ 535 | 536 | } 537 | -------------------------------------------------------------------------------- /SFMixer/SFMixer/SFPositionFlagView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // SFPositionFlagView.swift 3 | // SFMixer 4 | // 5 | // Created by CHENWANFEI on 07/03/2017. 6 | // Copyright © 2017 SwordFish. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | class SFPositionFlagView: UIView { 12 | 13 | 14 | 15 | override func draw(_ rect: CGRect) { 16 | let leftMargin = CGFloat(5.0); 17 | let rightMargin = CGFloat(5.0); 18 | let bottomMargin = CGFloat(5.0); 19 | 20 | let trangleHeight = CGFloat(10.0); 21 | let trangleBottom = rect.width - leftMargin - rightMargin; 22 | 23 | 24 | 25 | let p0 = CGPoint(x:rect.size.width / 2,y:rect.height - bottomMargin - trangleHeight); 26 | let p1 = CGPoint(x:p0.x - trangleBottom / 2, y: rect.height - bottomMargin ); 27 | let p2 = CGPoint(x:p0.x + trangleBottom / 2, y: rect.height - bottomMargin ); 28 | 29 | //let path = CGMutablePath(); 30 | let context = UIGraphicsGetCurrentContext(); 31 | context?.setLineWidth(HAIRLINE_WIDTH); 32 | context?.setFillColor(self.tintColor.cgColor); 33 | context?.setStrokeColor(self.tintColor.cgColor); 34 | context?.move(to: CGPoint(x: rect.size.width / 2, y: 0)); 35 | context?.addLine(to: p0); 36 | context?.strokePath(); 37 | //context?.closePath(); 38 | 39 | context?.move(to: p0); 40 | context?.addLine(to: p1); 41 | context?.addLine(to: p2); 42 | context?.fillPath(); 43 | 44 | 45 | 46 | 47 | } 48 | 49 | /* 50 | // Only override draw() if you perform custom drawing. 51 | // An empty implementation adversely affects performance during animation. 
52 | override func draw(_ rect: CGRect) { 53 | // Drawing code 54 | } 55 | */ 56 | 57 | } 58 | -------------------------------------------------------------------------------- /SFMixer/SFMixer/audio_2.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/SFMixer/audio_2.mp3 -------------------------------------------------------------------------------- /SFMixer/SFMixer/dashedFlag@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/SFMixer/dashedFlag@2x.png -------------------------------------------------------------------------------- /SFMixer/SFMixer/dashedFlag@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/SFMixer/dashedFlag@3x.png -------------------------------------------------------------------------------- /SFMixer/SFMixer/playBtn@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/SFMixer/playBtn@2x.png -------------------------------------------------------------------------------- /SFMixer/SFMixer/playBtn@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/SFMixer/playBtn@3x.png -------------------------------------------------------------------------------- /SFMixer/SFMixer/plus@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/SFMixer/plus@2x.png -------------------------------------------------------------------------------- /SFMixer/SFMixer/plus@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/SFMixer/plus@3x.png -------------------------------------------------------------------------------- /SFMixer/SFMixer/popover@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/SFMixer/popover@2x.png -------------------------------------------------------------------------------- /SFMixer/SFMixer/popover@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/SFMixer/popover@3x.png -------------------------------------------------------------------------------- /SFMixer/SFMixer/positionFlag@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/SFMixer/positionFlag@2x.png -------------------------------------------------------------------------------- /SFMixer/SFMixer/positionFlag@3x.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/SFMixer/positionFlag@3x.png -------------------------------------------------------------------------------- /SFMixer/SFMixer/stopBtn@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/SFMixer/stopBtn@2x.png -------------------------------------------------------------------------------- /SFMixer/SFMixer/stopBtn@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/SFMixer/stopBtn@3x.png -------------------------------------------------------------------------------- /SFMixer/ThirdParty/SFAudioWaveformHelper.swift: -------------------------------------------------------------------------------- 1 | // 2 | // SFAudioWaveformHelper.swift from :https://github.com/JagieChen/SFAudioWaveformHelper 3 | // 4 | // 5 | // Created by CHENWANFEI on 12/03/2017. 6 | // Copyright © 2017 SwordFish. All rights reserved. 7 | 8 | // references: 1. http://www.davidstarke.com/2015/04/waveforms.html 9 | // 2. https://github.com/fulldecent/FDWaveformView 10 | 11 | // 12 | 13 | import UIKit 14 | import MediaPlayer 15 | import AVFoundation 16 | import Accelerate 17 | 18 | fileprivate let noiseFloor: CGFloat = -50.0 19 | 20 | func generateWaveformImage(audioURL:URL, imageSizeInPixel:CGSize, waveColor:UIColor, completion:@escaping (_ waveformImage:UIImage?)->Void){ 21 | let asset = AVURLAsset(url: audioURL, options: [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true as Bool)]) 22 | 23 | 24 | guard let assetTrack = asset.tracks(withMediaType: AVMediaTypeAudio).first else { 25 | NSLog("FDWaveformView failed to load AVAssetTrack") 26 | callCompletion(image: nil,completion: completion); 27 | return 28 | } 29 | 30 | asset.loadValuesAsynchronously(forKeys: ["duration"]) { 31 | var error: NSError? 32 | let status = asset.statusOfValue(forKey: "duration", error: &error) 33 | switch status { 34 | case .loaded: 35 | if let audioFormatDesc = assetTrack.formatDescriptions.first { 36 | let item = audioFormatDesc as! CMAudioFormatDescription // TODO: Can this be safer? 37 | if let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(item) { 38 | let numOfTotalSamples = (asbd.pointee.mSampleRate) * Float64(asset.duration.value) / Float64(asset.duration.timescale) 39 | 40 | 41 | guard let reader = try? AVAssetReader(asset: asset) else { 42 | completion(nil); 43 | return 44 | } 45 | 46 | reader.timeRange = CMTimeRange(start: CMTime(value: Int64(0), timescale: asset.duration.timescale), duration: CMTime(value: Int64(numOfTotalSamples), timescale: asset.duration.timescale)) 47 | let outputSettingsDict: [String : Any] = [ 48 | AVFormatIDKey: Int(kAudioFormatLinearPCM), 49 | AVLinearPCMBitDepthKey: 16, 50 | AVLinearPCMIsBigEndianKey: false, 51 | AVLinearPCMIsFloatKey: false, 52 | AVLinearPCMIsNonInterleaved: false 53 | ] 54 | 55 | let readerOutput = AVAssetReaderTrackOutput(track: assetTrack, outputSettings: outputSettingsDict) 56 | readerOutput.alwaysCopiesSampleData = false 57 | reader.add(readerOutput) 58 | 59 | var channelCount = 1 60 | 61 | let formatDesc = assetTrack.formatDescriptions 62 | for item in formatDesc { 63 | guard let fmtDesc = CMAudioFormatDescriptionGetStreamBasicDescription(item as! 
CMAudioFormatDescription) else { continue } // TODO: Can the forced downcast in here be safer?
 64 |     channelCount = Int(fmtDesc.pointee.mChannelsPerFrame)
 65 | }
 66 | 
 67 | var sampleMax = noiseFloor
 68 | 
 69 | let widthInPixels = Int(imageSizeInPixel.width)
 70 | let samplesPerPixel = max(1, channelCount * Int(numOfTotalSamples) / widthInPixels)
 71 | let filter = [Float](repeating: 1.0 / Float(samplesPerPixel), count: samplesPerPixel)
 72 | 
 73 | var outputSamples = [CGFloat]()
 74 | var sampleBuffer = Data()
 75 | 
 76 | // 16-bit samples
 77 | reader.startReading()
 78 | 
 79 | while reader.status == .reading {
 80 |     guard let readSampleBuffer = readerOutput.copyNextSampleBuffer(),
 81 |         let readBuffer = CMSampleBufferGetDataBuffer(readSampleBuffer) else {
 82 |             break
 83 |     }
 84 | 
 85 |     // Append audio sample buffer into our current sample buffer
 86 |     var readBufferLength = 0
 87 |     var readBufferPointer: UnsafeMutablePointer<Int8>?
 88 |     CMBlockBufferGetDataPointer(readBuffer, 0, &readBufferLength, nil, &readBufferPointer)
 89 |     sampleBuffer.append(UnsafeBufferPointer(start: readBufferPointer, count: readBufferLength))
 90 |     CMSampleBufferInvalidate(readSampleBuffer)
 91 | 
 92 |     let totalSamples = sampleBuffer.count / MemoryLayout<Int16>.size
 93 |     let downSampledLength = totalSamples / samplesPerPixel
 94 |     let samplesToProcess = downSampledLength * samplesPerPixel
 95 | 
 96 |     guard samplesToProcess > 0 else { continue }
 97 | 
 98 |     processSamples(fromData: &sampleBuffer,
 99 |                    sampleMax: &sampleMax,
100 |                    outputSamples: &outputSamples,
101 |                    samplesToProcess: samplesToProcess,
102 |                    downSampledLength: downSampledLength,
103 |                    samplesPerPixel: samplesPerPixel,
104 |                    filter: filter)
105 | }
106 | 
107 | // Process the remaining samples at the end which didn't fit into samplesPerPixel
108 | let samplesToProcess = sampleBuffer.count / MemoryLayout<Int16>.size
109 | if samplesToProcess > 0 {
110 |     let downSampledLength = 1
111 |     let samplesPerPixel = samplesToProcess
112 | 
113 |     let filter = [Float](repeating: 1.0 / Float(samplesPerPixel), count: samplesPerPixel)
114 | 
115 |     processSamples(fromData: &sampleBuffer,
116 |                    sampleMax: &sampleMax,
117 |                    outputSamples: &outputSamples,
118 |                    samplesToProcess: samplesToProcess,
119 |                    downSampledLength: downSampledLength,
120 |                    samplesPerPixel: samplesPerPixel,
121 |                    filter: filter)
122 | }
123 | 
124 | if reader.status == .completed {
125 |     let image = plotLogGraph(outputSamples, maximumValue: sampleMax, zeroValue: noiseFloor, imageHeight: imageSizeInPixel.height, color:waveColor );
126 | 
127 |     callCompletion(image: image,completion: completion);
128 | } else {
129 |     callCompletion(image: nil,completion: completion);
130 | }
131 | 
132 | 
133 | }
134 | }
135 | case .failed, .cancelled, .loading, .unknown:
136 |     callCompletion(image: nil,completion: completion);
137 | }
138 | }
139 | 
140 | }
141 | 
142 | 
143 | private func callCompletion(image:UIImage?,completion: @escaping (_ waveformImage:UIImage?)->Void){
144 |     DispatchQueue.main.async {
145 |         completion(image);
146 |     }
147 | }
148 | 
149 | private func processSamples(fromData sampleBuffer: inout Data, sampleMax: inout CGFloat, outputSamples: inout [CGFloat], samplesToProcess: Int, downSampledLength: Int, samplesPerPixel: Int, filter: [Float]) {
150 |     sampleBuffer.withUnsafeBytes { (samples: UnsafePointer<Int16>) in
151 | 
152 |         var processingBuffer = [Float](repeating: 0.0, count: samplesToProcess)
153 | 
154 |         let sampleCount = vDSP_Length(samplesToProcess)
155 | 
156 |         //Convert 16-bit int samples to floats
157 |         vDSP_vflt16(samples, 1, &processingBuffer, 1, sampleCount)
158 | 
159 |         //Take the absolute values to get amplitude
160 |         vDSP_vabs(processingBuffer, 1, &processingBuffer, 1, sampleCount)
161 | 
162 |         //Convert to dB
163 |         var zero: Float = 32768.0
164 |         vDSP_vdbcon(processingBuffer, 1, &zero, &processingBuffer, 1, sampleCount, 1)
165 | 
166 |         //Clip to [noiseFloor, 0]
167 |         var ceil: Float = 0.0
168 |         var noiseFloorFloat = Float(noiseFloor)
169 |         vDSP_vclip(processingBuffer, 1, &noiseFloorFloat, &ceil, &processingBuffer, 1, sampleCount)
170 | 
171 |         //Downsample and average
172 |         var downSampledData = [Float](repeating: 0.0, count: downSampledLength)
173 |         vDSP_desamp(processingBuffer,
174 |                     vDSP_Stride(samplesPerPixel),
175 |                     filter, &downSampledData,
176 |                     vDSP_Length(downSampledLength),
177 |                     vDSP_Length(samplesPerPixel))
178 | 
179 |         let downSampledDataCG = downSampledData.map { (value: Float) -> CGFloat in
180 |             let element = CGFloat(value)
181 |             if element > sampleMax { sampleMax = element }
182 |             return element
183 |         }
184 | 
185 |         // Remove processed samples
186 |         sampleBuffer.removeFirst(samplesToProcess * MemoryLayout<Int16>.size)
187 |         outputSamples += downSampledDataCG
188 |     }
189 | }
190 | 
191 | private func plotLogGraph(_ samples: [CGFloat], maximumValue max: CGFloat, zeroValue min: CGFloat, imageHeight: CGFloat,color:UIColor) -> UIImage? {
192 |     let imageSize = CGSize(width: CGFloat(samples.count), height: imageHeight)
193 |     UIGraphicsBeginImageContext(imageSize)
194 |     guard let context = UIGraphicsGetCurrentContext() else {
195 |         return nil
196 |     }
197 | 
198 |     context.setAlpha(1.0)
199 |     context.setLineWidth(1.0)
200 |     context.setStrokeColor(color.cgColor)
201 | 
202 |     let sampleDrawingScale: CGFloat
203 |     if max == min {
204 |         sampleDrawingScale = 0
205 |     } else {
206 |         sampleDrawingScale = imageHeight / 2 / (max - min)
207 |     }
208 |     let verticalMiddle = imageHeight / 2
209 |     for (x, sample) in samples.enumerated() {
210 |         let height = (sample - min) * sampleDrawingScale
211 |         context.move(to: CGPoint(x: CGFloat(x), y: verticalMiddle - height))
212 |         context.addLine(to: CGPoint(x: CGFloat(x), y: verticalMiddle + height))
213 |         context.strokePath();
214 |     }
215 |     guard let image = UIGraphicsGetImageFromCurrentImageContext() else {
216 |         return nil;
217 |     }
218 | 
219 |     UIGraphicsEndImageContext()
220 |     return image;
221 | }
222 | 
223 | 
224 | 
--------------------------------------------------------------------------------
/SFMixer/ViewController.swift:
--------------------------------------------------------------------------------
 1 | //
 2 | // ViewController.swift
 3 | // SFMixer
 4 | //
 5 | // Created by CHENWANFEI on 07/03/2017.
 6 | // Copyright © 2017 SwordFish. All rights reserved.
 7 | //
 8 | 
 9 | import UIKit
10 | 
11 | class ViewController: UIViewController {
12 | 
13 |     override func viewDidLoad() {
14 |         super.viewDidLoad()
15 |         // Do any additional setup after loading the view, typically from a nib.
16 |     }
17 | 
18 |     override func didReceiveMemoryWarning() {
19 |         super.didReceiveMemoryWarning()
20 |         // Dispose of any resources that can be recreated.
21 | } 22 | 23 | @IBAction func showMixer(_ sender: Any) { 24 | 25 | 26 | 27 | let nc = UIStoryboard(name: "SFMixerViewController", bundle: nil).instantiateInitialViewController(); 28 | 29 | self.present(nc!, animated: true, completion: nil); 30 | } 31 | 32 | } 33 | 34 | -------------------------------------------------------------------------------- /SFMixer/audio_0.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/audio_0.mp3 -------------------------------------------------------------------------------- /SFMixer/audio_1.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/audio_1.mp3 -------------------------------------------------------------------------------- /SFMixer/audio_3.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/audio_3.mp3 -------------------------------------------------------------------------------- /SFMixer/audio_4.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/SFMixer/audio_4.mp3 -------------------------------------------------------------------------------- /s1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Heilum/SFMixer/7baadf4bf2ee05e7898a62e2c188867a100b678f/s1.png --------------------------------------------------------------------------------