├── AudioWaveformGraph.xcodeproj ├── project.pbxproj ├── project.xcworkspace │ ├── contents.xcworkspacedata │ ├── xcshareddata │ │ └── IDEWorkspaceChecks.plist │ └── xcuserdata │ │ └── jscalo.xcuserdatad │ │ └── UserInterfaceState.xcuserstate └── xcuserdata │ └── jscalo.xcuserdatad │ ├── xcdebugger │ └── Breakpoints_v2.xcbkptlist │ └── xcschemes │ └── xcschememanagement.plist ├── AudioWaveformGraph ├── AppDelegate.swift ├── Assets.xcassets │ ├── AccentColor.colorset │ │ └── Contents.json │ ├── AppIcon.appiconset │ │ └── Contents.json │ └── Contents.json ├── Audio & DSP │ ├── AudioDataFromFile.swift │ ├── TempiAudioContext.swift │ └── Tempo.swift ├── AudioWaveformGraph.entitlements ├── Base.lproj │ ├── LaunchScreen.storyboard │ └── Main.storyboard ├── Graph │ ├── AudioGraphView.swift │ ├── DataProvider.swift │ ├── RulerLayer.swift │ ├── ViewPort.swift │ └── WaveformLayer.swift ├── Info.plist ├── SceneDelegate.swift ├── Utilities │ ├── AutoLayout.swift │ ├── Logger.swift │ ├── MathUtilities.swift │ └── UIKitExtras.swift ├── ViewController.swift └── audio.mp3 ├── LICENSE └── README.md /AudioWaveformGraph.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 50; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | EF1512EB258BDA8000F064A6 /* UIKitExtras.swift in Sources */ = {isa = PBXBuildFile; fileRef = EF1512EA258BDA8000F064A6 /* UIKitExtras.swift */; }; 11 | EFC5909725840B1500E5D4B4 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFC5909625840B1500E5D4B4 /* AppDelegate.swift */; }; 12 | EFC5909925840B1500E5D4B4 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFC5909825840B1500E5D4B4 /* SceneDelegate.swift */; }; 13 | EFC5909B25840B1500E5D4B4 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFC5909A25840B1500E5D4B4 /* ViewController.swift */; }; 14 | EFC5909E25840B1500E5D4B4 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = EFC5909C25840B1500E5D4B4 /* Main.storyboard */; }; 15 | EFC590A025840B1700E5D4B4 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = EFC5909F25840B1700E5D4B4 /* Assets.xcassets */; }; 16 | EFC590A325840B1700E5D4B4 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = EFC590A125840B1700E5D4B4 /* LaunchScreen.storyboard */; }; 17 | EFC590AC25840B7500E5D4B4 /* audio.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = EFC590AB25840B7500E5D4B4 /* audio.mp3 */; }; 18 | EFC590B825840BFD00E5D4B4 /* WaveformLayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFC590B125840BFD00E5D4B4 /* WaveformLayer.swift */; }; 19 | EFC590B925840BFD00E5D4B4 /* AudioGraphView.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFC590B225840BFD00E5D4B4 /* AudioGraphView.swift */; }; 20 | EFC590BA25840BFD00E5D4B4 /* DataProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFC590B325840BFD00E5D4B4 /* DataProvider.swift */; }; 21 | EFC590BB25840BFD00E5D4B4 /* ViewPort.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFC590B425840BFD00E5D4B4 /* ViewPort.swift */; }; 22 | EFC590BE25840BFD00E5D4B4 /* 
RulerLayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFC590B725840BFD00E5D4B4 /* RulerLayer.swift */; }; 23 | EFC590C425840C1500E5D4B4 /* Logger.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFC590C325840C1500E5D4B4 /* Logger.swift */; }; 24 | EFC590D325840C2900E5D4B4 /* AudioDataFromFile.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFC590C825840C2900E5D4B4 /* AudioDataFromFile.swift */; }; 25 | EFC590D625840C2900E5D4B4 /* TempiAudioContext.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFC590CB25840C2900E5D4B4 /* TempiAudioContext.swift */; }; 26 | EFC590D825840C2900E5D4B4 /* Tempo.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFC590CD25840C2900E5D4B4 /* Tempo.swift */; }; 27 | EFC590EC2584119C00E5D4B4 /* AutoLayout.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFC590C025840C0A00E5D4B4 /* AutoLayout.swift */; }; 28 | EFC590F02584126500E5D4B4 /* MathUtilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFC590D025840C2900E5D4B4 /* MathUtilities.swift */; }; 29 | /* End PBXBuildFile section */ 30 | 31 | /* Begin PBXFileReference section */ 32 | EF1512EA258BDA8000F064A6 /* UIKitExtras.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UIKitExtras.swift; sourceTree = ""; }; 33 | EFC5909325840B1500E5D4B4 /* AudioWaveformGraph.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = AudioWaveformGraph.app; sourceTree = BUILT_PRODUCTS_DIR; }; 34 | EFC5909625840B1500E5D4B4 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 35 | EFC5909825840B1500E5D4B4 /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = ""; }; 36 | EFC5909A25840B1500E5D4B4 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; 37 | 
EFC5909D25840B1500E5D4B4 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 38 | EFC5909F25840B1700E5D4B4 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 39 | EFC590A225840B1700E5D4B4 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 40 | EFC590A425840B1700E5D4B4 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 41 | EFC590AB25840B7500E5D4B4 /* audio.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = audio.mp3; sourceTree = ""; }; 42 | EFC590AE25840BA500E5D4B4 /* AudioWaveformGraph.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = AudioWaveformGraph.entitlements; sourceTree = ""; }; 43 | EFC590B125840BFD00E5D4B4 /* WaveformLayer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = WaveformLayer.swift; sourceTree = ""; }; 44 | EFC590B225840BFD00E5D4B4 /* AudioGraphView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AudioGraphView.swift; sourceTree = ""; }; 45 | EFC590B325840BFD00E5D4B4 /* DataProvider.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DataProvider.swift; sourceTree = ""; }; 46 | EFC590B425840BFD00E5D4B4 /* ViewPort.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ViewPort.swift; sourceTree = ""; }; 47 | EFC590B725840BFD00E5D4B4 /* RulerLayer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RulerLayer.swift; sourceTree = ""; }; 48 | EFC590C025840C0A00E5D4B4 /* AutoLayout.swift */ = {isa = 
PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AutoLayout.swift; sourceTree = ""; }; 49 | EFC590C325840C1500E5D4B4 /* Logger.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Logger.swift; sourceTree = ""; }; 50 | EFC590C825840C2900E5D4B4 /* AudioDataFromFile.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AudioDataFromFile.swift; sourceTree = ""; }; 51 | EFC590CB25840C2900E5D4B4 /* TempiAudioContext.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = TempiAudioContext.swift; sourceTree = ""; }; 52 | EFC590CD25840C2900E5D4B4 /* Tempo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Tempo.swift; sourceTree = ""; }; 53 | EFC590D025840C2900E5D4B4 /* MathUtilities.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MathUtilities.swift; sourceTree = ""; }; 54 | /* End PBXFileReference section */ 55 | 56 | /* Begin PBXFrameworksBuildPhase section */ 57 | EFC5909025840B1500E5D4B4 /* Frameworks */ = { 58 | isa = PBXFrameworksBuildPhase; 59 | buildActionMask = 2147483647; 60 | files = ( 61 | ); 62 | runOnlyForDeploymentPostprocessing = 0; 63 | }; 64 | /* End PBXFrameworksBuildPhase section */ 65 | 66 | /* Begin PBXGroup section */ 67 | EFC5908A25840B1500E5D4B4 = { 68 | isa = PBXGroup; 69 | children = ( 70 | EFC5909525840B1500E5D4B4 /* AudioWaveformGraph */, 71 | EFC5909425840B1500E5D4B4 /* Products */, 72 | ); 73 | sourceTree = ""; 74 | }; 75 | EFC5909425840B1500E5D4B4 /* Products */ = { 76 | isa = PBXGroup; 77 | children = ( 78 | EFC5909325840B1500E5D4B4 /* AudioWaveformGraph.app */, 79 | ); 80 | name = Products; 81 | sourceTree = ""; 82 | }; 83 | EFC5909525840B1500E5D4B4 /* AudioWaveformGraph */ = { 84 | isa = PBXGroup; 85 | children = ( 86 | EFC590AE25840BA500E5D4B4 /* 
AudioWaveformGraph.entitlements */, 87 | EFC5909625840B1500E5D4B4 /* AppDelegate.swift */, 88 | EFC5909825840B1500E5D4B4 /* SceneDelegate.swift */, 89 | EFC5909A25840B1500E5D4B4 /* ViewController.swift */, 90 | EFC590B025840BFD00E5D4B4 /* Graph */, 91 | EFC590C625840C2900E5D4B4 /* Audio & DSP */, 92 | EFC590DE25840C3000E5D4B4 /* Utilities */, 93 | EFC5909C25840B1500E5D4B4 /* Main.storyboard */, 94 | EFC5909F25840B1700E5D4B4 /* Assets.xcassets */, 95 | EFC590A125840B1700E5D4B4 /* LaunchScreen.storyboard */, 96 | EFC590AB25840B7500E5D4B4 /* audio.mp3 */, 97 | EFC590A425840B1700E5D4B4 /* Info.plist */, 98 | ); 99 | path = AudioWaveformGraph; 100 | sourceTree = ""; 101 | }; 102 | EFC590B025840BFD00E5D4B4 /* Graph */ = { 103 | isa = PBXGroup; 104 | children = ( 105 | EFC590B225840BFD00E5D4B4 /* AudioGraphView.swift */, 106 | EFC590B125840BFD00E5D4B4 /* WaveformLayer.swift */, 107 | EFC590B725840BFD00E5D4B4 /* RulerLayer.swift */, 108 | EFC590B325840BFD00E5D4B4 /* DataProvider.swift */, 109 | EFC590B425840BFD00E5D4B4 /* ViewPort.swift */, 110 | ); 111 | path = Graph; 112 | sourceTree = ""; 113 | }; 114 | EFC590C625840C2900E5D4B4 /* Audio & DSP */ = { 115 | isa = PBXGroup; 116 | children = ( 117 | EFC590C825840C2900E5D4B4 /* AudioDataFromFile.swift */, 118 | EFC590CB25840C2900E5D4B4 /* TempiAudioContext.swift */, 119 | EFC590CD25840C2900E5D4B4 /* Tempo.swift */, 120 | ); 121 | path = "Audio & DSP"; 122 | sourceTree = ""; 123 | }; 124 | EFC590DE25840C3000E5D4B4 /* Utilities */ = { 125 | isa = PBXGroup; 126 | children = ( 127 | EFC590C025840C0A00E5D4B4 /* AutoLayout.swift */, 128 | EFC590C325840C1500E5D4B4 /* Logger.swift */, 129 | EFC590D025840C2900E5D4B4 /* MathUtilities.swift */, 130 | EF1512EA258BDA8000F064A6 /* UIKitExtras.swift */, 131 | ); 132 | path = Utilities; 133 | sourceTree = ""; 134 | }; 135 | /* End PBXGroup section */ 136 | 137 | /* Begin PBXNativeTarget section */ 138 | EFC5909225840B1500E5D4B4 /* AudioWaveformGraph */ = { 139 | isa = PBXNativeTarget; 140 | 
buildConfigurationList = EFC590A725840B1700E5D4B4 /* Build configuration list for PBXNativeTarget "AudioWaveformGraph" */; 141 | buildPhases = ( 142 | EFC5908F25840B1500E5D4B4 /* Sources */, 143 | EFC5909025840B1500E5D4B4 /* Frameworks */, 144 | EFC5909125840B1500E5D4B4 /* Resources */, 145 | ); 146 | buildRules = ( 147 | ); 148 | dependencies = ( 149 | ); 150 | name = AudioWaveformGraph; 151 | productName = AudioWaveformGraph; 152 | productReference = EFC5909325840B1500E5D4B4 /* AudioWaveformGraph.app */; 153 | productType = "com.apple.product-type.application"; 154 | }; 155 | /* End PBXNativeTarget section */ 156 | 157 | /* Begin PBXProject section */ 158 | EFC5908B25840B1500E5D4B4 /* Project object */ = { 159 | isa = PBXProject; 160 | attributes = { 161 | LastSwiftUpdateCheck = 1220; 162 | LastUpgradeCheck = 1220; 163 | TargetAttributes = { 164 | EFC5909225840B1500E5D4B4 = { 165 | CreatedOnToolsVersion = 12.2; 166 | }; 167 | }; 168 | }; 169 | buildConfigurationList = EFC5908E25840B1500E5D4B4 /* Build configuration list for PBXProject "AudioWaveformGraph" */; 170 | compatibilityVersion = "Xcode 9.3"; 171 | developmentRegion = en; 172 | hasScannedForEncodings = 0; 173 | knownRegions = ( 174 | en, 175 | Base, 176 | ); 177 | mainGroup = EFC5908A25840B1500E5D4B4; 178 | productRefGroup = EFC5909425840B1500E5D4B4 /* Products */; 179 | projectDirPath = ""; 180 | projectRoot = ""; 181 | targets = ( 182 | EFC5909225840B1500E5D4B4 /* AudioWaveformGraph */, 183 | ); 184 | }; 185 | /* End PBXProject section */ 186 | 187 | /* Begin PBXResourcesBuildPhase section */ 188 | EFC5909125840B1500E5D4B4 /* Resources */ = { 189 | isa = PBXResourcesBuildPhase; 190 | buildActionMask = 2147483647; 191 | files = ( 192 | EFC590A325840B1700E5D4B4 /* LaunchScreen.storyboard in Resources */, 193 | EFC590AC25840B7500E5D4B4 /* audio.mp3 in Resources */, 194 | EFC590A025840B1700E5D4B4 /* Assets.xcassets in Resources */, 195 | EFC5909E25840B1500E5D4B4 /* Main.storyboard in Resources */, 196 | ); 
197 | runOnlyForDeploymentPostprocessing = 0; 198 | }; 199 | /* End PBXResourcesBuildPhase section */ 200 | 201 | /* Begin PBXSourcesBuildPhase section */ 202 | EFC5908F25840B1500E5D4B4 /* Sources */ = { 203 | isa = PBXSourcesBuildPhase; 204 | buildActionMask = 2147483647; 205 | files = ( 206 | EFC590BA25840BFD00E5D4B4 /* DataProvider.swift in Sources */, 207 | EFC590BE25840BFD00E5D4B4 /* RulerLayer.swift in Sources */, 208 | EFC590EC2584119C00E5D4B4 /* AutoLayout.swift in Sources */, 209 | EFC590D825840C2900E5D4B4 /* Tempo.swift in Sources */, 210 | EFC590F02584126500E5D4B4 /* MathUtilities.swift in Sources */, 211 | EFC590B925840BFD00E5D4B4 /* AudioGraphView.swift in Sources */, 212 | EFC590D325840C2900E5D4B4 /* AudioDataFromFile.swift in Sources */, 213 | EFC5909B25840B1500E5D4B4 /* ViewController.swift in Sources */, 214 | EF1512EB258BDA8000F064A6 /* UIKitExtras.swift in Sources */, 215 | EFC5909725840B1500E5D4B4 /* AppDelegate.swift in Sources */, 216 | EFC590B825840BFD00E5D4B4 /* WaveformLayer.swift in Sources */, 217 | EFC590C425840C1500E5D4B4 /* Logger.swift in Sources */, 218 | EFC590BB25840BFD00E5D4B4 /* ViewPort.swift in Sources */, 219 | EFC5909925840B1500E5D4B4 /* SceneDelegate.swift in Sources */, 220 | EFC590D625840C2900E5D4B4 /* TempiAudioContext.swift in Sources */, 221 | ); 222 | runOnlyForDeploymentPostprocessing = 0; 223 | }; 224 | /* End PBXSourcesBuildPhase section */ 225 | 226 | /* Begin PBXVariantGroup section */ 227 | EFC5909C25840B1500E5D4B4 /* Main.storyboard */ = { 228 | isa = PBXVariantGroup; 229 | children = ( 230 | EFC5909D25840B1500E5D4B4 /* Base */, 231 | ); 232 | name = Main.storyboard; 233 | sourceTree = ""; 234 | }; 235 | EFC590A125840B1700E5D4B4 /* LaunchScreen.storyboard */ = { 236 | isa = PBXVariantGroup; 237 | children = ( 238 | EFC590A225840B1700E5D4B4 /* Base */, 239 | ); 240 | name = LaunchScreen.storyboard; 241 | sourceTree = ""; 242 | }; 243 | /* End PBXVariantGroup section */ 244 | 245 | /* Begin XCBuildConfiguration 
section */ 246 | EFC590A525840B1700E5D4B4 /* Debug */ = { 247 | isa = XCBuildConfiguration; 248 | buildSettings = { 249 | ALWAYS_SEARCH_USER_PATHS = NO; 250 | CLANG_ANALYZER_NONNULL = YES; 251 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 252 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 253 | CLANG_CXX_LIBRARY = "libc++"; 254 | CLANG_ENABLE_MODULES = YES; 255 | CLANG_ENABLE_OBJC_ARC = YES; 256 | CLANG_ENABLE_OBJC_WEAK = YES; 257 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 258 | CLANG_WARN_BOOL_CONVERSION = YES; 259 | CLANG_WARN_COMMA = YES; 260 | CLANG_WARN_CONSTANT_CONVERSION = YES; 261 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 262 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 263 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 264 | CLANG_WARN_EMPTY_BODY = YES; 265 | CLANG_WARN_ENUM_CONVERSION = YES; 266 | CLANG_WARN_INFINITE_RECURSION = YES; 267 | CLANG_WARN_INT_CONVERSION = YES; 268 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 269 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 270 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 271 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 272 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 273 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 274 | CLANG_WARN_STRICT_PROTOTYPES = YES; 275 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 276 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 277 | CLANG_WARN_UNREACHABLE_CODE = YES; 278 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 279 | COPY_PHASE_STRIP = NO; 280 | DEBUG_INFORMATION_FORMAT = dwarf; 281 | ENABLE_STRICT_OBJC_MSGSEND = YES; 282 | ENABLE_TESTABILITY = YES; 283 | GCC_C_LANGUAGE_STANDARD = gnu11; 284 | GCC_DYNAMIC_NO_PIC = NO; 285 | GCC_NO_COMMON_BLOCKS = YES; 286 | GCC_OPTIMIZATION_LEVEL = 0; 287 | GCC_PREPROCESSOR_DEFINITIONS = ( 288 | "DEBUG=1", 289 | "$(inherited)", 290 | ); 291 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 292 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 293 | GCC_WARN_UNDECLARED_SELECTOR = YES; 294 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 
295 | GCC_WARN_UNUSED_FUNCTION = YES; 296 | GCC_WARN_UNUSED_VARIABLE = YES; 297 | IPHONEOS_DEPLOYMENT_TARGET = 14.2; 298 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; 299 | MTL_FAST_MATH = YES; 300 | ONLY_ACTIVE_ARCH = YES; 301 | SDKROOT = iphoneos; 302 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; 303 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 304 | }; 305 | name = Debug; 306 | }; 307 | EFC590A625840B1700E5D4B4 /* Release */ = { 308 | isa = XCBuildConfiguration; 309 | buildSettings = { 310 | ALWAYS_SEARCH_USER_PATHS = NO; 311 | CLANG_ANALYZER_NONNULL = YES; 312 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 313 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 314 | CLANG_CXX_LIBRARY = "libc++"; 315 | CLANG_ENABLE_MODULES = YES; 316 | CLANG_ENABLE_OBJC_ARC = YES; 317 | CLANG_ENABLE_OBJC_WEAK = YES; 318 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 319 | CLANG_WARN_BOOL_CONVERSION = YES; 320 | CLANG_WARN_COMMA = YES; 321 | CLANG_WARN_CONSTANT_CONVERSION = YES; 322 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 323 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 324 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 325 | CLANG_WARN_EMPTY_BODY = YES; 326 | CLANG_WARN_ENUM_CONVERSION = YES; 327 | CLANG_WARN_INFINITE_RECURSION = YES; 328 | CLANG_WARN_INT_CONVERSION = YES; 329 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 330 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 331 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 332 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 333 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 334 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 335 | CLANG_WARN_STRICT_PROTOTYPES = YES; 336 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 337 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 338 | CLANG_WARN_UNREACHABLE_CODE = YES; 339 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 340 | COPY_PHASE_STRIP = NO; 341 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 342 | ENABLE_NS_ASSERTIONS = NO; 343 | ENABLE_STRICT_OBJC_MSGSEND = YES; 344 | GCC_C_LANGUAGE_STANDARD 
= gnu11; 345 | GCC_NO_COMMON_BLOCKS = YES; 346 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 347 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 348 | GCC_WARN_UNDECLARED_SELECTOR = YES; 349 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 350 | GCC_WARN_UNUSED_FUNCTION = YES; 351 | GCC_WARN_UNUSED_VARIABLE = YES; 352 | IPHONEOS_DEPLOYMENT_TARGET = 14.2; 353 | MTL_ENABLE_DEBUG_INFO = NO; 354 | MTL_FAST_MATH = YES; 355 | SDKROOT = iphoneos; 356 | SWIFT_COMPILATION_MODE = wholemodule; 357 | SWIFT_OPTIMIZATION_LEVEL = "-O"; 358 | VALIDATE_PRODUCT = YES; 359 | }; 360 | name = Release; 361 | }; 362 | EFC590A825840B1700E5D4B4 /* Debug */ = { 363 | isa = XCBuildConfiguration; 364 | buildSettings = { 365 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 366 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 367 | CODE_SIGN_ENTITLEMENTS = AudioWaveformGraph/AudioWaveformGraph.entitlements; 368 | CODE_SIGN_STYLE = Automatic; 369 | DEVELOPMENT_TEAM = 899AA7RGN5; 370 | INFOPLIST_FILE = AudioWaveformGraph/Info.plist; 371 | IPHONEOS_DEPLOYMENT_TARGET = 13.0; 372 | LD_RUNPATH_SEARCH_PATHS = ( 373 | "$(inherited)", 374 | "@executable_path/Frameworks", 375 | ); 376 | PRODUCT_BUNDLE_IDENTIFIER = com.madebywindmill.AudioWaveformGraph; 377 | PRODUCT_NAME = "$(TARGET_NAME)"; 378 | SUPPORTS_MACCATALYST = YES; 379 | SWIFT_VERSION = 5.0; 380 | TARGETED_DEVICE_FAMILY = "1,2"; 381 | }; 382 | name = Debug; 383 | }; 384 | EFC590A925840B1700E5D4B4 /* Release */ = { 385 | isa = XCBuildConfiguration; 386 | buildSettings = { 387 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 388 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 389 | CODE_SIGN_ENTITLEMENTS = AudioWaveformGraph/AudioWaveformGraph.entitlements; 390 | CODE_SIGN_STYLE = Automatic; 391 | DEVELOPMENT_TEAM = 899AA7RGN5; 392 | INFOPLIST_FILE = AudioWaveformGraph/Info.plist; 393 | IPHONEOS_DEPLOYMENT_TARGET = 13.0; 394 | LD_RUNPATH_SEARCH_PATHS = ( 395 | "$(inherited)", 396 | "@executable_path/Frameworks", 397 | ); 398 | 
PRODUCT_BUNDLE_IDENTIFIER = com.madebywindmill.AudioWaveformGraph; 399 | PRODUCT_NAME = "$(TARGET_NAME)"; 400 | SUPPORTS_MACCATALYST = YES; 401 | SWIFT_VERSION = 5.0; 402 | TARGETED_DEVICE_FAMILY = "1,2"; 403 | }; 404 | name = Release; 405 | }; 406 | /* End XCBuildConfiguration section */ 407 | 408 | /* Begin XCConfigurationList section */ 409 | EFC5908E25840B1500E5D4B4 /* Build configuration list for PBXProject "AudioWaveformGraph" */ = { 410 | isa = XCConfigurationList; 411 | buildConfigurations = ( 412 | EFC590A525840B1700E5D4B4 /* Debug */, 413 | EFC590A625840B1700E5D4B4 /* Release */, 414 | ); 415 | defaultConfigurationIsVisible = 0; 416 | defaultConfigurationName = Release; 417 | }; 418 | EFC590A725840B1700E5D4B4 /* Build configuration list for PBXNativeTarget "AudioWaveformGraph" */ = { 419 | isa = XCConfigurationList; 420 | buildConfigurations = ( 421 | EFC590A825840B1700E5D4B4 /* Debug */, 422 | EFC590A925840B1700E5D4B4 /* Release */, 423 | ); 424 | defaultConfigurationIsVisible = 0; 425 | defaultConfigurationName = Release; 426 | }; 427 | /* End XCConfigurationList section */ 428 | }; 429 | rootObject = EFC5908B25840B1500E5D4B4 /* Project object */; 430 | } 431 | -------------------------------------------------------------------------------- /AudioWaveformGraph.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /AudioWaveformGraph.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /AudioWaveformGraph.xcodeproj/project.xcworkspace/xcuserdata/jscalo.xcuserdatad/UserInterfaceState.xcuserstate: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/madebywindmill/AudioWaveformGraph/2a2c3d616a103007fe86d4fc85d70d3493ae7c78/AudioWaveformGraph.xcodeproj/project.xcworkspace/xcuserdata/jscalo.xcuserdatad/UserInterfaceState.xcuserstate -------------------------------------------------------------------------------- /AudioWaveformGraph.xcodeproj/xcuserdata/jscalo.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | -------------------------------------------------------------------------------- /AudioWaveformGraph.xcodeproj/xcuserdata/jscalo.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | AudioWaveformGraph.xcscheme_^#shared#^_ 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /AudioWaveformGraph/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.swift 3 | // AudioWaveformGraph 4 | // 5 | // Created by John Scalo on 12/11/20. 6 | // 7 | 8 | import UIKit 9 | 10 | @main 11 | class AppDelegate: UIResponder, UIApplicationDelegate { 12 | 13 | 14 | 15 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { 16 | // Override point for customization after application launch. 17 | return true 18 | } 19 | 20 | // MARK: UISceneSession Lifecycle 21 | 22 | func application(_ application: UIApplication, configurationForConnecting connectingSceneSession: UISceneSession, options: UIScene.ConnectionOptions) -> UISceneConfiguration { 23 | // Called when a new scene session is being created. 
24 | // Use this method to select a configuration to create the new scene with. 25 | return UISceneConfiguration(name: "Default Configuration", sessionRole: connectingSceneSession.role) 26 | } 27 | 28 | func application(_ application: UIApplication, didDiscardSceneSessions sceneSessions: Set) { 29 | // Called when the user discards a scene session. 30 | // If any sessions were discarded while the application was not running, this will be called shortly after application:didFinishLaunchingWithOptions. 31 | // Use this method to release any resources that were specific to the discarded scenes, as they will not return. 32 | } 33 | 34 | 35 | } 36 | 37 | -------------------------------------------------------------------------------- /AudioWaveformGraph/Assets.xcassets/AccentColor.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "idiom" : "universal" 5 | } 6 | ], 7 | "info" : { 8 | "author" : "xcode", 9 | "version" : 1 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /AudioWaveformGraph/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "scale" : "2x", 6 | "size" : "20x20" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "scale" : "3x", 11 | "size" : "20x20" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "scale" : "2x", 16 | "size" : "29x29" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "scale" : "3x", 21 | "size" : "29x29" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "scale" : "2x", 26 | "size" : "40x40" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "scale" : "3x", 31 | "size" : "40x40" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "scale" : "2x", 36 | "size" : "60x60" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "scale" : "3x", 41 | "size" : "60x60" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "scale" : "1x", 
46 | "size" : "20x20" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "scale" : "2x", 51 | "size" : "20x20" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "scale" : "1x", 56 | "size" : "29x29" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "scale" : "2x", 61 | "size" : "29x29" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "scale" : "1x", 66 | "size" : "40x40" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "scale" : "2x", 71 | "size" : "40x40" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "scale" : "1x", 76 | "size" : "76x76" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "scale" : "2x", 81 | "size" : "76x76" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "scale" : "2x", 86 | "size" : "83.5x83.5" 87 | }, 88 | { 89 | "idiom" : "ios-marketing", 90 | "scale" : "1x", 91 | "size" : "1024x1024" 92 | } 93 | ], 94 | "info" : { 95 | "author" : "xcode", 96 | "version" : 1 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /AudioWaveformGraph/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /AudioWaveformGraph/Audio & DSP/AudioDataFromFile.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AudioDataFromFile.swift 3 | // AudioWaveformGraph 4 | // 5 | // Created by John Scalo on 12/11/20. 6 | // 7 | 8 | import AVFoundation 9 | 10 | // A big hammer: return ALL audio data from a file. Probably best only for testing/demo. 11 | func AudioDataFromFile(url: URL, sampleRate: Float = 44100) -> [Float]? 
{ 12 | let avAsset: AVURLAsset = AVURLAsset(url: url) 13 | 14 | let assetReader: AVAssetReader 15 | do { 16 | assetReader = try AVAssetReader(asset: avAsset) 17 | } catch let e as NSError { 18 | Logger.log("*** AVAssetReader failed with \(e)") 19 | return nil 20 | } 21 | 22 | let settings: [String : AnyObject] = [ AVFormatIDKey : Int(kAudioFormatLinearPCM) as AnyObject, 23 | AVSampleRateKey : sampleRate as AnyObject, 24 | AVLinearPCMBitDepthKey : 32 as AnyObject, 25 | AVLinearPCMIsFloatKey : true as AnyObject, 26 | AVNumberOfChannelsKey : 1 as AnyObject ] 27 | 28 | let output: AVAssetReaderAudioMixOutput = AVAssetReaderAudioMixOutput(audioTracks: avAsset.tracks, audioSettings: settings) 29 | 30 | assetReader.add(output) 31 | 32 | if !assetReader.startReading() { 33 | Logger.log("assetReader.startReading() failed") 34 | return nil 35 | } 36 | 37 | var fileSamples: [Float] = [Float]() 38 | 39 | repeat { 40 | var status: OSStatus = 0 41 | guard let nextBuffer = output.copyNextSampleBuffer() else { 42 | break 43 | } 44 | 45 | let bufferSampleCnt = CMSampleBufferGetNumSamples(nextBuffer) 46 | 47 | var bufferList = AudioBufferList( 48 | mNumberBuffers: 1, 49 | mBuffers: AudioBuffer( 50 | mNumberChannels: 1, 51 | mDataByteSize: 4, 52 | mData: nil)) 53 | 54 | var blockBuffer: CMBlockBuffer? 55 | 56 | status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(nextBuffer, 57 | bufferListSizeNeededOut: nil, 58 | bufferListOut: &bufferList, 59 | bufferListSize: MemoryLayout.size, 60 | blockBufferAllocator: nil, 61 | blockBufferMemoryAllocator: nil, 62 | flags: kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, 63 | blockBufferOut: &blockBuffer) 64 | 65 | if status != 0 { 66 | Logger.log("*** CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer failed with error \(status)") 67 | break 68 | } 69 | 70 | // Move samples from mData into our native [Float] format. 
71 | let audioBuffer = AudioBuffer(mNumberChannels: bufferList.mBuffers.mNumberChannels, 72 | mDataByteSize: bufferList.mBuffers.mDataByteSize, 73 | mData: bufferList.mBuffers.mData) 74 | let data = UnsafeRawPointer(audioBuffer.mData) 75 | for i in 0.. ()) { 30 | 31 | let asset = AVURLAsset( 32 | url: audioURL, 33 | options: [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true as Bool)]) 34 | 35 | guard let assetTrack = asset.tracks(withMediaType: AVMediaType.audio).first else { 36 | NSLog("TempiAudioContext failed to load AVAssetTrack") 37 | completionHandler(nil) 38 | return 39 | } 40 | 41 | asset.loadValuesAsynchronously(forKeys: ["duration"]) { 42 | var error: NSError? 43 | let status = asset.statusOfValue(forKey: "duration", error: &error) 44 | switch status { 45 | case .loaded: 46 | guard 47 | let formatDescriptions = assetTrack.formatDescriptions as? [CMAudioFormatDescription], 48 | let audioFormatDesc = formatDescriptions.first, 49 | let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(audioFormatDesc) 50 | else { break } 51 | 52 | let sampleRate = asbd.pointee.mSampleRate 53 | let totalSamples = Int(sampleRate * Float64(asset.duration.value) / Float64(asset.duration.timescale)) 54 | 55 | let audioContext = TempiAudioContext( 56 | audioURL: audioURL, 57 | totalSamples: totalSamples, 58 | asset: asset, 59 | assetTrack: assetTrack) 60 | 61 | audioContext.sampleRate = sampleRate 62 | DispatchQueue.main.async { 63 | completionHandler(audioContext) 64 | } 65 | return 66 | case .failed, .cancelled, .loading, .unknown: 67 | Logger.log("*** TempiAudioContext could not load asset: \(error?.localizedDescription ?? "Unknown error")") 68 | @unknown default: 69 | assertionFailure() 70 | } 71 | DispatchQueue.main.async { 72 | completionHandler(nil) 73 | } 74 | } 75 | } 76 | 77 | func readSamples(completion: @escaping (_ samples: [Float]?) 
-> ()) {
        // Decode off the main queue, then hand the result back on main.
        DispatchQueue.global().async {
            let samples = self.readSamplesSync()
            DispatchQueue.main.async {
                completion(samples)
            }
        }
    }

    /// Synchronously decodes the asset's audio into mono 32-bit float linear PCM.
    ///
    /// - Returns: every decoded sample in order, or nil if the reader could not
    ///   be created or could not start. A mid-stream decode failure returns the
    ///   samples read so far.
    private func readSamplesSync() -> [Float]? {
        let assetReader: AVAssetReader
        do {
            assetReader = try AVAssetReader(asset: asset)
        } catch let e as NSError {
            NSLog("*** AVAssetReader failed with \(e)")
            return nil
        }

        // Mono float32 at the track's native sample rate.
        let settings: [String : Any] = [ AVFormatIDKey : Int(kAudioFormatLinearPCM),
                                         AVSampleRateKey : sampleRate,
                                         AVLinearPCMBitDepthKey : 32,
                                         AVLinearPCMIsFloatKey : true,
                                         AVNumberOfChannelsKey : 1 ]

        let output = AVAssetReaderAudioMixOutput(audioTracks: asset.tracks, audioSettings: settings)
        assetReader.add(output)

        if !assetReader.startReading() {
            Logger.log("*** assetReader.startReading() failed")
            return nil
        }

        var samples = [Float]()

        // copyNextSampleBuffer() returns nil when the stream is exhausted.
        while let nextBuffer = output.copyNextSampleBuffer() {
            let bufferSampleCnt = CMSampleBufferGetNumSamples(nextBuffer)

            var bufferList = AudioBufferList(
                mNumberBuffers: 1,
                mBuffers: AudioBuffer(
                    mNumberChannels: 1,
                    mDataByteSize: 4,
                    mData: nil))

            var blockBuffer: CMBlockBuffer?

            // The returned block buffer retains the sample data, so mData stays
            // valid while blockBuffer is alive (i.e. through this iteration).
            let status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
                nextBuffer,
                bufferListSizeNeededOut: nil,
                bufferListOut: &bufferList,
                bufferListSize: MemoryLayout<AudioBufferList>.size,
                blockBufferAllocator: nil,
                blockBufferMemoryAllocator: nil,
                flags: kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
                blockBufferOut: &blockBuffer)

            if status != noErr {
                Logger.log("*** CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer failed with error \(status)")
                // (Fixed) Stop the reader explicitly before bailing out mid-stream.
                assetReader.cancelReading()
                break
            }

            // Bulk-copy the decoded floats out of mData into our native [Float].
            // (Faster than loading one 4-byte sample at a time.)
            if let mData = bufferList.mBuffers.mData, bufferSampleCnt > 0 {
                let floats = mData.bindMemory(to: Float.self, capacity: bufferSampleCnt)
                samples.append(contentsOf: UnsafeBufferPointer(start: floats, count: bufferSampleCnt))
            }
        }

        return samples
    }
}
//
//  Tempo.swift
//  AudioWaveformGraph
//

import Foundation

/// A tempo in beats per minute, represented as a plain Double.
typealias Tempo = Double

extension Tempo {
    /// Converts the interval between two consecutive beats (in seconds)
    /// into a tempo in BPM. E.g. a 0.5 s beat interval is 120 BPM.
    static func from(_ timeInterval: TimeInterval) -> Tempo {
        return Tempo(60.0 / timeInterval)
    }

    /// The duration of one beat, in seconds, at this tempo.
    var timeInterval: Double {
        return 60.0 / self
    }
}
//
//  AudioGraphView.swift
//  AudioWaveformGraph
//
//  AudioGraphView: top-level view that manages interior layers and controls all aspects of the graph.
//

import UIKit

class AudioGraphView: UIView {

    /// Horizontal zoom factor, forwarded to the view port. 1.0 = fully zoomed out.
    var zoom: CGFloat {
        set (v) {
            viewPort.zoom = v
        }
        get {
            return viewPort.zoom
        }
    }

    var waveformColor: UIColor = #colorLiteral(red: 0, green: 0.8695636392, blue: 0.5598542094, alpha: 1) {
        didSet {
            waveformLayer.color = waveformColor
        }
    }

    /// Height, in points, of the time ruler strip at the top of the graph.
    var rulerHeight: CGFloat = 30 {
        didSet {
            update()
        }
    }

    var scrollView: UIScrollView!

    // MARK: - Private Vars

    private var viewPort: ViewPort! {
        didSet {
            scrollView.delegate = viewPort

            viewPort.onZoom { [weak self] in
                guard let self = self else { return }
                self.viewPortZoomed()
            }
            viewPort.onTranslate { [weak self] in
                guard let self = self else { return }
                self.viewPortTranslated()
            }
            // Now's a good time to do this since viewPort has `zoom` needed for sizing the scroll view
            updateScrollViewSize()
        }
    }
    private var dataProvider: DataProvider! {
        didSet {
            waveformLayer.dataProvider = dataProvider
            rulerLayer.dataProvider = dataProvider
            viewPort.dataProvider = dataProvider
        }
    }

    private var waveformLayer: WaveformLayer!
    private var rulerLayer: RulerLayer!
    private let logFPS = true
    private var lastBounds = CGRect.zero
    private var pauseUpdates = false

    override init(frame: CGRect) {
        super.init(frame: frame)
        commonInit()
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        commonInit()
    }

    /// One-time setup shared by both initializers: scroll view, view port,
    /// layers, pinch gesture, and data provider.
    private func commonInit() {
        lastBounds = bounds

        // (Fixed) forAutoLayout() already clears translatesAutoresizingMaskIntoConstraints,
        // so the redundant assignment was removed.
        scrollView = UIScrollView(frame: CGRect(x: 0, y: 0, width: bounds.width, height: bounds.height)).forAutoLayout()
        scrollView.showsHorizontalScrollIndicator = true
        scrollView.alwaysBounceHorizontal = true
        scrollView.bounces = true
        scrollView.contentSize = bounds.size
        scrollView.backgroundColor = UIColor.clear
        addSubview(scrollView)
        scrollView.constrainToSuperviewEdges()

        viewPort = ViewPort()
        viewPort.graphView = self
        viewPort.zoom = 10.0

        waveformLayer = WaveformLayer()
        waveformLayer.viewPort = viewPort

        rulerLayer = RulerLayer()
        rulerLayer.viewPort = viewPort

        let pinchGR = UIPinchGestureRecognizer(target: self, action: #selector(pinchGesture(_:)))
        self.addGestureRecognizer(pinchGR)

        dataProvider = DataProvider()
    }

    override func layoutSubviews() {
        super.layoutSubviews()

        if let window = window {
            viewPort.screenScale = window.screen.scale
        }

        // Keep the visible region stable across size changes (e.g. rotation).
        if lastBounds.width > 0 {
            let scale = bounds.width / lastBounds.width
            scrollView.recenterForScale(scale)
        }
        lastBounds = bounds

        updateScrollViewSize()

        update()
    }

    /// Redraws the ruler and waveform layers for the current view port state.
    /// Creates the summary samples on demand if they were invalidated (by zoom).
    func update() {

        if pauseUpdates {
            return
        }

        let startTime = CFAbsoluteTimeGetCurrent()

        if dataProvider == nil { return }

        if dataProvider.summarySamples == nil {
            dataProvider.summarize(targetSampleCnt: viewPort.xAxisUnits)
        }

        // (Fixed) No force unwrap of `samples` — bail out safely when there's nothing to draw.
        guard let samples = dataProvider.samples, !samples.isEmpty else {
            return
        }

        updateWaveformLayer()
        updateRulerLayer()

        if logFPS {
            // (Fixed) Guard against a zero elapsed time: Int(Double.infinity) traps in Swift.
            let elapsed = CFAbsoluteTimeGetCurrent() - startTime
            if elapsed > 0 {
                Logger.log("fps: \(Int(1.0 / elapsed))")
            }
        }
    }

    /// Supplies the raw audio samples to plot. The graph redraws on the next update().
    func setSamples(_ samples: [Float], sampleRate: Double) {
        dataProvider.samples = samples
        dataProvider.sampleRate = sampleRate
    }

    private func updateScrollViewSize() {
        scrollView.contentSize = CGSize(width: bounds.width * viewPort.zoom, height: bounds.height)
    }

    private func updateWaveformLayer() {
        if let waveformLayer = waveformLayer {
            if waveformLayer.superlayer == nil {
                layer.addSublayer(waveformLayer)
            }
            waveformLayer.frame = CGRect(x: 0, y: rulerHeight, width: bounds.width, height: bounds.height - rulerHeight)
            waveformLayer.update()
        }
    }

    private func updateRulerLayer() {
        if let rulerLayer = rulerLayer {
            if rulerLayer.superlayer == nil {
                layer.addSublayer(rulerLayer)
            }
            rulerLayer.frame = CGRect(x: 0, y: 0, width: bounds.width, height: rulerHeight)
            rulerLayer.backgroundColor = UIColor(white: 0.1, alpha: 1.0).cgColor
            rulerLayer.update()
        }
    }

    // MARK: - Zoom Gesture

    @objc func pinchGesture(_ gc: UIPinchGestureRecognizer) {
        switch gc.state {
        case .changed:
            // Recentering the scroll view and changing the zoom are both going to end up calling update(), so suppress the first one with pauseUpdates.
            pauseUpdates = true
            scrollView.recenterForScale(gc.scale)
            pauseUpdates = false
            let newScale = viewPort.zoom * gc.scale
            viewPort.zoom = max(newScale, 1.0)   // never zoom out past 1x
            gc.scale = 1.0
        default:
            break
        }
    }

    // MARK: - ViewPort Callbacks

    private func viewPortZoomed() {
        // nil out dataProvider's summarySamples so it can create them at the new scale
        dataProvider?.summarySamples = nil
        updateScrollViewSize()

        // Since CALayer's implicit animation duration is 0.25s, turn it off for this update to avoid chunkiness while zooming.
        CATransaction.begin()
        CATransaction.setDisableActions(true)
        update()
        CATransaction.commit()
    }

    private func viewPortTranslated() {
        // Since CALayer's implicit animation duration is 0.25s, turn it off for this update to avoid chunkiness while scrolling.
        CATransaction.begin()
        CATransaction.setDisableActions(true)
        update()
        CATransaction.commit()
    }

}

// ----------------------------------------------------------------------------
//  DataProvider.swift
//  AudioWaveformGraph
//
//  DataProvider: Centralized access for audio sample data.
//

import UIKit
import Accelerate

class DataProvider {
    // Raw full-resolution mono samples.
    var samples: [Float]?

    // Samples per second of `samples`.
    var sampleRate: Double = 0

    // Effective sample rate of `summarySamples`, derived from the decimation ratio.
    // NOTE(review): returns 0 (not nil) when data is missing; callers appear to
    // expect a numeric value, so that behavior is preserved.
    var summarySampleRate: Double? {
        guard let samples = samples, let summarySamples = summarySamples else { return 0 }
        let ratio = Double(samples.count) / Double(summarySamples.count)
        return sampleRate / ratio
    }

    // = `samples` with a moving window average to create a smaller set of displayable samples as a waveform.
    var summarySamples: [Float]?
    var summarySampleCnt: Int {
        get {
            return summarySamples?.count ?? 0
        }
    }

    var summarySampleMax: Float = 0
    var summarySampleMin: Float = 0

    // Width of the moving-average window, in raw samples.
    private let windowSize = 128

    // Total duration of the raw samples, in seconds.
    var duration: Double {
        guard let samples = samples else { return 0 }
        return Double(samples.count) / sampleRate
    }

    /// Decimates `samples` to roughly `targetSampleCnt` values using a
    /// moving-average window, storing the result (and its min/max) in
    /// `summarySamples` / `summarySampleMin` / `summarySampleMax`.
    func summarize(targetSampleCnt: Int) {
        guard let samples = samples else { return }
        let startTime = CFAbsoluteTimeGetCurrent()
        var workingAvgSamples = [Float]()

        // (Fixed) Integer math avoids Int(CGFloat.infinity) trapping when
        // targetSampleCnt is 0; result is the same floor division otherwise.
        let hopSize = max(samples.count / max(targetSampleCnt, 1), 1)

        var idx = 0
        while idx + windowSize < samples.count {
            let avg = fast_mean(samples, startIdx: idx, endIdx: idx + windowSize)
            workingAvgSamples.append(avg)
            idx += hopSize
        }

        // (Fixed) vDSP.maximum/minimum trap on an empty vector, which happens
        // whenever samples.count <= windowSize.
        guard !workingAvgSamples.isEmpty else {
            summarySampleMax = 0
            summarySampleMin = 0
            summarySamples = workingAvgSamples
            return
        }

        summarySampleMax = vDSP.maximum(workingAvgSamples)
        summarySampleMin = vDSP.minimum(workingAvgSamples)

        summarySamples = workingAvgSamples

        // (Fixed) Guard against a zero elapsed time: Int(Double.infinity) traps.
        let elapsed = CFAbsoluteTimeGetCurrent() - startTime
        if elapsed > 0 {
            let fps = Int(1.0 / elapsed)
            if fps < 60 {
                Logger.log("*** bad performance: \(fps)fps")
            }
        }
    }

    /// Returns the summary sample nearest to `time` (seconds), or 0 when no
    /// summary exists. The index is clamped into range at both ends.
    func sample(at time: Double) -> Float {
        guard let summarySamples = summarySamples, !summarySamples.isEmpty, duration > 0 else { return 0 }
        var summarySampleIdx = Int(time / duration * Double(summarySamples.count))

        // fail safe — clamp both below and above (a negative `time` previously trapped)
        summarySampleIdx = min(max(summarySampleIdx, 0), summarySamples.count - 1)

        return summarySamples[summarySampleIdx]
    }
}
8 | // 9 | 10 | import UIKit 11 | import Accelerate 12 | 13 | class RulerLayer: CALayer { 14 | 15 | // weak so as not to create circular refs 16 | weak var dataProvider: DataProvider! 17 | weak var viewPort: ViewPort! 18 | 19 | private var majorTickLayers = [CALayer]() 20 | private var minorTickLayers = [CALayer]() 21 | 22 | func update() { 23 | prepare() 24 | updateMinorTicks() 25 | updateMajorTicks() 26 | } 27 | 28 | func prepare() { 29 | let totalDur = dataProvider.duration 30 | let visibleDur = totalDur / Double(viewPort.zoom) 31 | let oneSecWidth = CGFloat(1.0 / visibleDur) * viewPort.visibleWidth 32 | let numSecs = Int(viewPort.visibleWidth / oneSecWidth) + 1 33 | let numTenths = numSecs * 10 + 1 34 | 35 | if canDrawMinor() { 36 | let cnt = minorTickLayers.count 37 | if cnt < numTenths { 38 | for _ in 0.. Bool { 110 | let totalDur = dataProvider.duration 111 | let visibleDur = totalDur / Double(viewPort.zoom) 112 | let minorCnt = ceil(Double(viewPort.visibleWidth) / visibleDur * 10.0) 113 | 114 | return minorCnt / visibleDur > 3 115 | } 116 | 117 | } 118 | -------------------------------------------------------------------------------- /AudioWaveformGraph/Graph/ViewPort.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ViewPort.swift 3 | // AudioWaveformGraph 4 | // 5 | // Created by John Scalo on 12/11/20. 6 | // 7 | // ViewPort: manages the view's basic geometry, translation, and scale. 8 | // 9 | 10 | import UIKit 11 | 12 | class ViewPort: NSObject, UIScrollViewDelegate { 13 | 14 | typealias ZoomNotifyBlock = ()->() 15 | typealias TranslateNotifyBlock = ()->() 16 | 17 | // weak so as not to create circular refs 18 | weak var graphView: AudioGraphView! 19 | weak var dataProvider: DataProvider! 
//
//  ViewPort.swift
//  AudioWaveformGraph
//
//  ViewPort: manages the view's basic geometry, translation, and scale.
//

import UIKit

class ViewPort: NSObject, UIScrollViewDelegate {

    typealias ZoomNotifyBlock = () -> ()
    typealias TranslateNotifyBlock = () -> ()

    // weak so as not to create circular refs
    weak var graphView: AudioGraphView!
    weak var dataProvider: DataProvider!

    /// Horizontal translation in points; every change notifies the translate observers.
    var xTrans: CGFloat = 0 {
        didSet {
            for notify in translateObserverBlocks {
                notify()
            }
        }
    }

    /// First x-axis unit (device pixel) currently visible.
    var startingXUnit: Int {
        Int(-xTrans * screenScale)
    }

    /// Zoom factor; every change notifies the zoom observers.
    var zoom: CGFloat = 1 {
        didSet {
            for notify in zoomObserverBlocks {
                notify()
            }
        }
    }

    var visibleWidth: CGFloat {
        graphView.bounds.width
    }
    var visibleHeight: CGFloat {
        graphView.bounds.height
    }

    var screenScale: CGFloat = 1

    /// Plot points across the visible area — one per device pixel, capped at the sample count.
    var visibleXAxisUnits: Int {
        guard let allSamples = dataProvider.samples else { return 0 }
        let pixelWidth = Int(graphView.bounds.width * screenScale)
        return min(pixelWidth, allSamples.count)
    }

    // The total x plot points across the entire view port (visible and non-visible).
    // Since we plot one sample per pixel, this is basically just boundsWidth * zoom.
    var xAxisUnits: Int {
        guard let allSamples = dataProvider.samples else { return 0 }
        let zoomedUnits = Int(CGFloat(visibleXAxisUnits) * zoom)
        return min(zoomedUnits, allSamples.count)
    }

    private var zoomObserverBlocks = [ZoomNotifyBlock]()
    private var translateObserverBlocks = [TranslateNotifyBlock]()

    // MARK: -

    /// Registers a block to run whenever `zoom` changes.
    func onZoom(_ block: @escaping ZoomNotifyBlock) {
        zoomObserverBlocks.append(block)
    }

    /// Registers a block to run whenever `xTrans` changes.
    func onTranslate(_ block: @escaping TranslateNotifyBlock) {
        translateObserverBlocks.append(block)
    }

    // MARK: - UIScrollViewDelegate

    func scrollViewDidScroll(_ scrollView: UIScrollView) {
        xTrans = -scrollView.contentOffset.x
    }

}
8 | // 9 | 10 | import UIKit 11 | import Accelerate 12 | 13 | class WaveformLayer: CALayer { 14 | 15 | var color = #colorLiteral(red: 0, green: 0.8695636392, blue: 0.5598542094, alpha: 1) 16 | 17 | // weak so as not to create circular refs 18 | weak var dataProvider: DataProvider! 19 | weak var viewPort: ViewPort! 20 | 21 | private var xAxisUnits: Int = 0 22 | private var layers = [CALayer]() 23 | private let midlineLayer = CALayer() 24 | 25 | private var firstSampleToPlotIdx: Int { 26 | get { 27 | let idx = Int(-viewPort.xTrans * viewPort.screenScale) 28 | return (0.. 0 { 87 | // Special case: the user is rubberbanding while scrolling. 88 | xPos = viewPort.xTrans 89 | } 90 | 91 | while idx < cnt && xPos < bounds.maxX && idx < layers.count { 92 | let time = startTime + Double(xPos) / Double(viewPort.visibleWidth) * visibleDur 93 | let sample = dataProvider.sample(at: time) 94 | let layer = layers[idx] 95 | layer.isHidden = false 96 | 97 | let y = CGFloat(sample) * yScalingFactor 98 | layer.frame = CGRect(x: xPos, y: yMidline - y, width: 1, height: 2 * y) 99 | 100 | xPos += 1.0 / viewPort.screenScale 101 | idx += 1 102 | } 103 | } 104 | 105 | 106 | private func createLayers() { 107 | // Creating new layers and adding them is expensive so only ever make new ones, don't destroy unneeded ones. (Unused layers will be hidden.) 108 | let lineCnt = samplesToPlotInVisibleCnt 109 | let currentCnt = layers.count 110 | if currentCnt < lineCnt { 111 | for _ in 0.. 
2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | $(PRODUCT_BUNDLE_PACKAGE_TYPE) 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | UIApplicationSceneManifest 24 | 25 | UIApplicationSupportsMultipleScenes 26 | 27 | UISceneConfigurations 28 | 29 | UIWindowSceneSessionRoleApplication 30 | 31 | 32 | UISceneConfigurationName 33 | Default Configuration 34 | UISceneDelegateClassName 35 | $(PRODUCT_MODULE_NAME).SceneDelegate 36 | UISceneStoryboardFile 37 | Main 38 | 39 | 40 | 41 | 42 | UIApplicationSupportsIndirectInputEvents 43 | 44 | UILaunchStoryboardName 45 | LaunchScreen 46 | UIMainStoryboardFile 47 | Main 48 | UIRequiredDeviceCapabilities 49 | 50 | armv7 51 | 52 | UISupportedInterfaceOrientations 53 | 54 | UIInterfaceOrientationLandscapeLeft 55 | UIInterfaceOrientationLandscapeRight 56 | 57 | UISupportedInterfaceOrientations~ipad 58 | 59 | UIInterfaceOrientationPortrait 60 | UIInterfaceOrientationPortraitUpsideDown 61 | UIInterfaceOrientationLandscapeLeft 62 | UIInterfaceOrientationLandscapeRight 63 | 64 | 65 | 66 | -------------------------------------------------------------------------------- /AudioWaveformGraph/SceneDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // SceneDelegate.swift 3 | // AudioWaveformGraph 4 | // 5 | // Created by John Scalo on 12/11/20. 6 | // 7 | 8 | import UIKit 9 | 10 | class SceneDelegate: UIResponder, UIWindowSceneDelegate { 11 | 12 | var window: UIWindow? 
13 | 14 | 15 | func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) { 16 | // Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`. 17 | // If using a storyboard, the `window` property will automatically be initialized and attached to the scene. 18 | // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead). 19 | guard let _ = (scene as? UIWindowScene) else { return } 20 | } 21 | 22 | func sceneDidDisconnect(_ scene: UIScene) { 23 | // Called as the scene is being released by the system. 24 | // This occurs shortly after the scene enters the background, or when its session is discarded. 25 | // Release any resources associated with this scene that can be re-created the next time the scene connects. 26 | // The scene may re-connect later, as its session was not necessarily discarded (see `application:didDiscardSceneSessions` instead). 27 | } 28 | 29 | func sceneDidBecomeActive(_ scene: UIScene) { 30 | // Called when the scene has moved from an inactive state to an active state. 31 | // Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive. 32 | } 33 | 34 | func sceneWillResignActive(_ scene: UIScene) { 35 | // Called when the scene will move from an active state to an inactive state. 36 | // This may occur due to temporary interruptions (ex. an incoming phone call). 37 | } 38 | 39 | func sceneWillEnterForeground(_ scene: UIScene) { 40 | // Called as the scene transitions from the background to the foreground. 41 | // Use this method to undo the changes made on entering the background. 42 | } 43 | 44 | func sceneDidEnterBackground(_ scene: UIScene) { 45 | // Called as the scene transitions from the foreground to the background. 
46 | // Use this method to save data, release shared resources, and store enough scene-specific state information 47 | // to restore the scene back to its current state. 48 | } 49 | 50 | } 51 | 52 | -------------------------------------------------------------------------------- /AudioWaveformGraph/Utilities/AutoLayout.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AutoLayout.swift 3 | // AudioWaveformGraph 4 | // 5 | // Created by John Scalo on 12/11/20. 6 | // 7 | 8 | #if os(iOS) 9 | import UIKit 10 | typealias View = UIView 11 | #else // os(OSX) 12 | import AppKit 13 | typealias View = NSView 14 | #endif 15 | 16 | extension View { 17 | 18 | // Auto layout convenience: 19 | func autoResizeTranslationCheck() { 20 | if self.translatesAutoresizingMaskIntoConstraints { 21 | print("*** Warning: This view has translatesAutoresizingMaskIntoConstraints set yet is trying to do autolayout stuff.") 22 | } 23 | } 24 | @discardableResult func forAutoLayout() -> Self { 25 | self.translatesAutoresizingMaskIntoConstraints = false 26 | return self 27 | } 28 | @discardableResult func constrainWidth(_ w: CGFloat, activate: Bool = true) -> NSLayoutConstraint { 29 | self.autoResizeTranslationCheck() 30 | let c = NSLayoutConstraint(item: self, attribute: .width, relatedBy: .equal, toItem: nil, attribute: .notAnAttribute, multiplier: 1.0, constant: w) 31 | c.isActive = activate 32 | return c 33 | } 34 | @discardableResult func constrainHeight(_ h: CGFloat, activate: Bool = true) -> NSLayoutConstraint { 35 | self.autoResizeTranslationCheck() 36 | let c = NSLayoutConstraint(item: self, attribute: .height, relatedBy: .equal, toItem: nil, attribute: .notAnAttribute, multiplier: 1.0, constant: h) 37 | c.isActive = activate 38 | return c 39 | } 40 | func constrainSizeTo(view: View) { 41 | self.autoResizeTranslationCheck() 42 | self.heightAnchor.constraint(equalTo: view.heightAnchor, constant: 0).isActive = true 43 | 
self.widthAnchor.constraint(equalTo: view.widthAnchor, constant: 0).isActive = true 44 | } 45 | func constrainToSuperviewEdges(offset: CGFloat = 0, activate: Bool = true) { 46 | self.autoResizeTranslationCheck() 47 | self.constrainToEdgesOf(self.superview!, offset: offset, activate: activate) 48 | } 49 | func constrainToEdgesOf(_ view: UIView, offset:CGFloat = 0, activate: Bool = true) { 50 | self.autoResizeTranslationCheck() 51 | self.leftAnchor.constraint(equalTo: view.leftAnchor, constant: offset).isActive = activate 52 | self.rightAnchor.constraint(equalTo: view.rightAnchor, constant: offset * -1).isActive = activate 53 | self.bottomAnchor.constraint(equalTo: view.bottomAnchor, constant: offset * -1).isActive = activate 54 | self.topAnchor.constraint(equalTo: view.topAnchor, constant: offset).isActive = activate 55 | } 56 | @discardableResult func constrainToSuperviewLeading(offset: CGFloat = 0, activate: Bool = true, useSafeArea: Bool = false) -> NSLayoutConstraint { 57 | self.autoResizeTranslationCheck() 58 | let c: NSLayoutConstraint 59 | if useSafeArea { 60 | #if os(iOS) 61 | c = self.leadingAnchor.constraint(equalTo: self.superview!.safeAreaLayoutGuide.leadingAnchor, constant: offset) 62 | #else 63 | c = NSLayoutConstraint(); assertionFailure("safeAreaLayoutGuide not supported on macOS") 64 | #endif 65 | } else { 66 | c = self.leadingAnchor.constraint(equalTo: self.superview!.leadingAnchor, constant: offset) 67 | } 68 | c.isActive = activate 69 | return c 70 | } 71 | @discardableResult func constrainToSuperviewLeft(offset: CGFloat = 0, activate: Bool = true, useSafeArea: Bool = false) -> NSLayoutConstraint { 72 | self.autoResizeTranslationCheck() 73 | let c: NSLayoutConstraint 74 | if useSafeArea { 75 | #if os(iOS) 76 | c = self.leftAnchor.constraint(equalTo: self.superview!.safeAreaLayoutGuide.leftAnchor, constant: offset) 77 | #else 78 | c = NSLayoutConstraint(); assertionFailure("safeAreaLayoutGuide not supported on macOS") 79 | #endif 80 | } else { 81 
| c = self.leftAnchor.constraint(equalTo: self.superview!.leftAnchor, constant: offset) 82 | } 83 | c.isActive = activate 84 | return c 85 | } 86 | @discardableResult func constrainToSuperviewTrailing(offset: CGFloat = 0, activate: Bool = true, useSafeArea: Bool = false) -> NSLayoutConstraint { 87 | // NB: offset is negated to be more intuitive 88 | self.autoResizeTranslationCheck() 89 | let c: NSLayoutConstraint 90 | if useSafeArea { 91 | #if os(iOS) 92 | c = self.trailingAnchor.constraint(equalTo: self.superview!.safeAreaLayoutGuide.trailingAnchor, constant: -offset) 93 | #else 94 | c = NSLayoutConstraint(); assertionFailure("safeAreaLayoutGuide not supported on macOS") 95 | #endif 96 | } else { 97 | c = self.trailingAnchor.constraint(equalTo: self.superview!.trailingAnchor, constant: -offset) 98 | } 99 | c.isActive = activate 100 | return c 101 | } 102 | @discardableResult func constrainToSuperviewRight(offset: CGFloat = 0, activate: Bool = true, useSafeArea: Bool = false) -> NSLayoutConstraint { 103 | // NB: offset is negated to be more intuitive 104 | self.autoResizeTranslationCheck() 105 | let c: NSLayoutConstraint 106 | if useSafeArea { 107 | #if os(iOS) 108 | c = self.rightAnchor.constraint(equalTo: self.superview!.safeAreaLayoutGuide.rightAnchor, constant: -offset) 109 | #else 110 | c = NSLayoutConstraint(); assertionFailure("safeAreaLayoutGuide not supported on macOS") 111 | #endif 112 | } else { 113 | c = self.rightAnchor.constraint(equalTo: self.superview!.rightAnchor, constant: -offset) 114 | } 115 | c.isActive = activate 116 | return c 117 | } 118 | @discardableResult func constrainToSuperviewTop(offset: CGFloat = 0, activate: Bool = true, useSafeArea: Bool = false) -> NSLayoutConstraint { 119 | self.autoResizeTranslationCheck() 120 | let c: NSLayoutConstraint 121 | if useSafeArea { 122 | #if os(iOS) 123 | c = self.topAnchor.constraint(equalTo: self.superview!.safeAreaLayoutGuide.topAnchor, constant: offset) 124 | #else 125 | c = NSLayoutConstraint(); 
assertionFailure("safeAreaLayoutGuide not supported on macOS") 126 | #endif 127 | } else { 128 | c = self.topAnchor.constraint(equalTo: self.superview!.topAnchor, constant: offset) 129 | } 130 | c.isActive = activate 131 | return c 132 | } 133 | @discardableResult func constrainToSuperviewBottom(offset: CGFloat = 0, activate: Bool = true, useSafeArea: Bool = false) -> NSLayoutConstraint { 134 | // NB: offset is negated to be more intuitive 135 | self.autoResizeTranslationCheck() 136 | let c: NSLayoutConstraint 137 | if useSafeArea { 138 | #if os(iOS) 139 | c = self.bottomAnchor.constraint(equalTo: self.superview!.safeAreaLayoutGuide.bottomAnchor, constant: -offset) 140 | #else 141 | c = NSLayoutConstraint(); assertionFailure("safeAreaLayoutGuide not supported on macOS") 142 | #endif 143 | } else { 144 | c = self.bottomAnchor.constraint(equalTo: self.superview!.bottomAnchor, constant: -offset) 145 | } 146 | c.isActive = activate 147 | return c 148 | } 149 | @discardableResult func constrainToSuperviewXCenter(offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 150 | self.autoResizeTranslationCheck() 151 | let c = self.centerXAnchor.constraint(equalTo: self.superview!.centerXAnchor, constant: offset) 152 | c.isActive = activate 153 | return c 154 | } 155 | @discardableResult func constrainToSuperviewYCenter(offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 156 | self.autoResizeTranslationCheck() 157 | let c = self.centerYAnchor.constraint(equalTo: self.superview!.centerYAnchor, constant: offset) 158 | c.isActive = activate 159 | return c 160 | } 161 | @discardableResult func constrainToSuperviewYCenter(multiplier: CGFloat) -> NSLayoutConstraint { 162 | self.autoResizeTranslationCheck() 163 | let c = NSLayoutConstraint(item: self, attribute: .centerY, relatedBy: .equal, toItem: self.superview, attribute: .centerY, multiplier: multiplier, constant: 0) 164 | c.isActive = true 165 | return c 166 | } 167 | @discardableResult func 
constrainToXCenterOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 168 | self.autoResizeTranslationCheck() 169 | let c = self.centerXAnchor.constraint(equalTo: view.centerXAnchor, constant: offset) 170 | c.isActive = activate 171 | return c 172 | } 173 | @discardableResult func constrainToYCenterOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 174 | self.autoResizeTranslationCheck() 175 | let c = self.centerYAnchor.constraint(equalTo: view.centerYAnchor, constant: offset) 176 | c.isActive = activate 177 | return c 178 | } 179 | @discardableResult func constrainToTopOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 180 | self.autoResizeTranslationCheck() 181 | let c = view.topAnchor.constraint(equalTo: self.bottomAnchor, constant: offset) 182 | c.isActive = activate 183 | return c 184 | } 185 | @discardableResult func constrainTopToTopOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 186 | self.autoResizeTranslationCheck() 187 | let c = view.topAnchor.constraint(equalTo: self.topAnchor, constant: offset) 188 | c.isActive = activate 189 | return c 190 | } 191 | @discardableResult func constrainTopToBottomOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 192 | // NB: offset is negated to be more intuitive 193 | self.autoResizeTranslationCheck() 194 | let c = view.bottomAnchor.constraint(equalTo: self.topAnchor, constant: -offset) 195 | c.isActive = activate 196 | return c 197 | } 198 | @discardableResult func constrainBottomToBottomOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 199 | self.autoResizeTranslationCheck() 200 | let c = view.bottomAnchor.constraint(equalTo: self.bottomAnchor, constant: offset) 201 | c.isActive = activate 202 | return c 203 | } 204 | @discardableResult func constrainBottomToTopOf(_ view: View, offset: CGFloat = 0, activate: Bool = 
true) -> NSLayoutConstraint { 205 | self.autoResizeTranslationCheck() 206 | let c = view.topAnchor.constraint(equalTo: self.bottomAnchor, constant: offset) 207 | c.isActive = activate 208 | return c 209 | } 210 | @discardableResult func constrainToLeadingOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 211 | self.autoResizeTranslationCheck() 212 | let c = view.leadingAnchor.constraint(equalTo: self.trailingAnchor, constant: -offset) 213 | c.isActive = activate 214 | return c 215 | } 216 | @discardableResult func constrainLeadingToLeadingOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 217 | self.autoResizeTranslationCheck() 218 | let c = view.leadingAnchor.constraint(equalTo: self.leadingAnchor, constant: -offset) 219 | if activate { 220 | c.isActive = activate 221 | } 222 | return c 223 | } 224 | @discardableResult func constrainLeadingToTrailingOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 225 | self.autoResizeTranslationCheck() 226 | let c = view.trailingAnchor.constraint(equalTo: self.leadingAnchor, constant: -offset) 227 | c.isActive = activate 228 | return c 229 | } 230 | @discardableResult func constrainLeftToRightOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 231 | self.autoResizeTranslationCheck() 232 | let c = view.rightAnchor.constraint(equalTo: self.leftAnchor, constant: -offset) 233 | c.isActive = activate 234 | return c 235 | } 236 | @discardableResult func constrainTrailingToLeadingOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 237 | self.autoResizeTranslationCheck() 238 | let c = view.leadingAnchor.constraint(equalTo: self.trailingAnchor, constant: offset) 239 | c.isActive = activate 240 | return c 241 | } 242 | @discardableResult func constrainToTrailingOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 243 | 
self.autoResizeTranslationCheck() 244 | let c = view.trailingAnchor.constraint(equalTo: self.leadingAnchor, constant: -offset) 245 | c.isActive = activate 246 | return c 247 | } 248 | @discardableResult func constrainTrailingToTrailingOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 249 | self.autoResizeTranslationCheck() 250 | let c = view.trailingAnchor.constraint(equalTo: self.trailingAnchor, constant: -offset) 251 | c.isActive = activate 252 | return c 253 | } 254 | @discardableResult func constrainWidthEqualTo(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 255 | self.autoResizeTranslationCheck() 256 | let c = view.widthAnchor.constraint(equalTo: self.widthAnchor, constant: offset) 257 | c.isActive = activate 258 | return c 259 | } 260 | @discardableResult func constrainHeightEqualTo(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 261 | self.autoResizeTranslationCheck() 262 | let c = view.heightAnchor.constraint(equalTo: self.heightAnchor, constant: offset) 263 | c.isActive = activate 264 | return c 265 | } 266 | @discardableResult func alignBottomToBottomOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 267 | self.autoResizeTranslationCheck() 268 | let c = view.bottomAnchor.constraint(equalTo: self.bottomAnchor, constant: offset) 269 | c.isActive = activate 270 | return c 271 | } 272 | @discardableResult func alignBottomToTopOf(_ view: View, offset: CGFloat = 0, activate: Bool = true) -> NSLayoutConstraint { 273 | self.autoResizeTranslationCheck() 274 | let c = view.topAnchor.constraint(equalTo: self.bottomAnchor, constant: offset) 275 | c.isActive = activate 276 | return c 277 | } 278 | } 279 | 280 | extension NSLayoutConstraint { 281 | // Use with care. But there are legit scenarios where layout warnings are benign, notably when there's a bunch of incoming constraints that might conflict serially but not as a batch. 
282 | static func setWarningsEnabled(_ enabled: Bool) { 283 | UserDefaults.standard.setValue(enabled, forKey: "_UIConstraintBasedLayoutLogUnsatisfiable") 284 | } 285 | } 286 | -------------------------------------------------------------------------------- /AudioWaveformGraph/Utilities/Logger.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Logger.swift 3 | // AudioWaveformGraph 4 | 5 | // Created by John Scalo on 12/11/20. 6 | // 7 | 8 | import Foundation 9 | 10 | class Logger { 11 | class func log(_ str: String, file: String = #file, line: Int = #line, function: String = #function) { 12 | #if DEBUG 13 | let shortenedFile = file.components(separatedBy: "/").last ?? "" 14 | let s = "[\(shortenedFile):\(function):\(line)] \(str)" 15 | NSLog(s) 16 | #endif 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /AudioWaveformGraph/Utilities/MathUtilities.swift: -------------------------------------------------------------------------------- 1 | // 2 | // MathUtilities.swift 3 | // AudioWaveformGraph 4 | // 5 | // Created by John Scalo on 12/11/20. 6 | // 7 | 8 | import Foundation 9 | import Accelerate 10 | 11 | func fast_mean(_ a: [Float], startIdx: Int, endIdx: Int) -> Float { 12 | var mean: Float = 0 13 | // todo: dangling pointer warning is safe in this case since it's never accessed outside of scope, but should be fixed 14 | let ptr = UnsafePointer(a) 15 | vDSP_meanv(ptr + startIdx, 1, &mean, UInt(endIdx - startIdx)) 16 | 17 | return mean 18 | } 19 | 20 | extension ClosedRange { 21 | func clamp(_ value: Bound) -> Bound { 22 | return self.lowerBound > value ? self.lowerBound : 23 | self.upperBound < value ? self.upperBound : 24 | value 25 | } 26 | } 27 | 28 | extension Range { 29 | func clamp(_ value: Bound) -> Bound { 30 | return self.lowerBound > value ? self.lowerBound : 31 | self.upperBound < value ? 
self.upperBound : 32 | value 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /AudioWaveformGraph/Utilities/UIKitExtras.swift: -------------------------------------------------------------------------------- 1 | // 2 | // UIKitExtras.swift 3 | // AudioWaveformGraph 4 | // 5 | // Created by John Scalo on 12/17/20. 6 | // 7 | 8 | import UIKit 9 | 10 | extension UIScrollView { 11 | func recenterForScale(_ scale: CGFloat) { 12 | 13 | // Keep the scroll content centered while zooming or resizing. This is worked out by seeing that while scaling the graph, the viewable area (scrollView.bounds) remains fixed while the total width (scrollView.contentSize) and offset (scrollView.contentOffset) change. We can keep the center fixed by scaling the content offset with a fixed ratio, where the ratio is: 14 | // 15 | // r = offset / (contentWidth - boundsWidth) 16 | // 17 | // We then calculate the new totalWidth by multiplying by the new scale and solve for offset: 18 | // 19 | // newContentWidth = offset * scale 20 | // newOffset = r * (newContentWidth - boundsWidth) 21 | // 22 | 23 | if scale != 1.0 && contentSize.width != bounds.width { 24 | let oldOffset = contentOffset.x 25 | let ratio = oldOffset / (contentSize.width - bounds.width) 26 | let newContentW = contentSize.width * scale 27 | let newOffset = ratio * (newContentW - bounds.width) 28 | contentOffset = CGPoint(x: newOffset, y: contentOffset.y) 29 | } 30 | } 31 | } 32 | 33 | 34 | -------------------------------------------------------------------------------- /AudioWaveformGraph/ViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.swift 3 | // AudioWaveformGraph 4 | // 5 | // Created by John Scalo on 12/11/20. 6 | // 7 | 8 | import UIKit 9 | 10 | class ViewController: UIViewController { 11 | 12 | @IBOutlet weak var audioGraphView: AudioGraphView! 
13 | 14 | private var audioContext: TempiAudioContext! 15 | 16 | override func viewDidLoad() { 17 | super.viewDidLoad() 18 | 19 | loadData() 20 | } 21 | 22 | override var prefersStatusBarHidden: Bool { 23 | return true 24 | } 25 | 26 | private func loadData() { 27 | guard let url = Bundle.main.url(forResource: "audio", withExtension: "mp3") else { 28 | fatalError() 29 | } 30 | 31 | TempiAudioContext.load(fromAudioURL: url) { (finishedContext) in 32 | guard let finishedContext = finishedContext else { return } 33 | self.audioContext = finishedContext 34 | 35 | finishedContext.readSamples() { [weak self] (samples) in 36 | guard let self = self else { return } 37 | guard let samples = samples else { return } 38 | 39 | self.audioGraphView.setSamples(samples, sampleRate: finishedContext.sampleRate) 40 | self.audioGraphView.waveformColor = #colorLiteral(red: 0, green: 0.8695636392, blue: 0.5598542094, alpha: 1) 41 | self.audioGraphView.setNeedsLayout() 42 | } 43 | 44 | } 45 | } 46 | } 47 | 48 | -------------------------------------------------------------------------------- /AudioWaveformGraph/audio.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/madebywindmill/AudioWaveformGraph/2a2c3d616a103007fe86d4fc85d70d3493ae7c78/AudioWaveformGraph/audio.mp3 -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Made by Windmill 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject 
to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AudioWaveformGraph for iOS and Mac 2 | ## Description 3 | 4 | AudioWaveformGraph is the companion project to the [Drawing Big Graphs with Swift and UIKit](https://blog.madebywindmill.com/drawing-big-graphs-with-swift-and-uikit-167f48c3967a) tutorial. It demonstrates how to render huge graphs that are scrollable, zoomable, and resizable using UIKit and Swift. 
5 | 6 | ## Technologies 7 | 8 | - Swift 9 | - iOS 10 | - Catalyst 11 | - AVFoundation 12 | - CALayer 13 | 14 | 15 | ## Contact 16 | 17 | Contact John on Twitter - [@scalo](https://twitter.com/intent/user?screen_name=scalo) 18 | 19 | 20 | ## License 21 | 22 | MIT License 23 | 24 | Copyright (c) 2020 Made by Windmill 25 | 26 | Permission is hereby granted, free of charge, to any person obtaining a copy 27 | of this software and associated documentation files (the "Software"), to deal 28 | in the Software without restriction, including without limitation the rights 29 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 30 | copies of the Software, and to permit persons to whom the Software is 31 | furnished to do so, subject to the following conditions: 32 | 33 | The above copyright notice and this permission notice shall be included in all 34 | copies or substantial portions of the Software. 35 | 36 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 37 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 38 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 39 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 40 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 41 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 42 | SOFTWARE. 43 | --------------------------------------------------------------------------------