├── .github └── FUNDING.yml ├── .gitignore ├── .ios_toolchain.yml ├── .ruby-version ├── .swiftlint.yml ├── .swiftpm └── xcode │ └── xcshareddata │ └── xcschemes │ └── DSWaveformImage.xcscheme ├── Example ├── DSWaveformImageExample-VisionOS │ ├── Assets.xcassets │ │ ├── AppIcon.solidimagestack │ │ │ ├── Back.solidimagestacklayer │ │ │ │ ├── Content.imageset │ │ │ │ │ └── Contents.json │ │ │ │ └── Contents.json │ │ │ ├── Contents.json │ │ │ ├── Front.solidimagestacklayer │ │ │ │ ├── Content.imageset │ │ │ │ │ └── Contents.json │ │ │ │ └── Contents.json │ │ │ └── Middle.solidimagestacklayer │ │ │ │ ├── Content.imageset │ │ │ │ └── Contents.json │ │ │ │ └── Contents.json │ │ └── Contents.json │ ├── ContentView.swift │ ├── DSWaveformImageExample_VisionOSApp.swift │ ├── Info.plist │ └── Preview Content │ │ └── Preview Assets.xcassets │ │ └── Contents.json ├── DSWaveformImageExample-iOS │ ├── AppDelegate.swift │ ├── Assets.xcassets │ │ ├── AccentColor.colorset │ │ │ └── Contents.json │ │ ├── AppIcon.appiconset │ │ │ ├── Contents.json │ │ │ └── Icon.png │ │ ├── Contents.json │ │ └── background.imageset │ │ │ ├── Contents.json │ │ │ └── alexander-popov-R25Q-pAUeY8-unsplash.jpg │ ├── Base.lproj │ │ ├── LaunchScreen.storyboard │ │ └── Main.storyboard │ ├── DSWaveformImageExample-iOS-Bridging-Header.h │ ├── Info.plist │ ├── ProgressViewController.swift │ ├── RecordingViewController.swift │ ├── SCAudioManager.h │ ├── SCAudioManager.m │ ├── SceneDelegate.swift │ ├── SwiftUIExample │ │ ├── ProgressWaveformView.swift │ │ ├── RecordingIndicatorView.swift │ │ ├── StaticWaveformRenderer.swift │ │ ├── SwiftUIExample.swift │ │ └── SwiftUIExampleView.swift │ ├── ViewController.swift │ ├── example_sound.m4a │ └── example_sound_2.m4a ├── DSWaveformImageExample-macOS │ ├── Assets.xcassets │ │ ├── AccentColor.colorset │ │ │ └── Contents.json │ │ ├── AppIcon.appiconset │ │ │ └── Contents.json │ │ └── Contents.json │ ├── ContentView.swift │ ├── DSWaveformImageExample_macOS.entitlements │ ├── DSWaveformImageExample_macOSApp.swift │ └── Preview Content │ │ └── Preview Assets.xcassets │ │ └── Contents.json └── DSWaveformImageExample.xcodeproj │ ├── project.pbxproj │ └── project.xcworkspace │ ├── contents.xcworkspacedata │ └── xcshareddata │ └── IDEWorkspaceChecks.plist ├── LICENSE ├── Package.swift ├── Promotion ├── appstore.svg ├── progress-example.png ├── recorder-example.png ├── screenshot.png └── screenshot3.png ├── README.md └── Sources ├── DSWaveformImage ├── Renderers │ ├── CircularWaveformRenderer.swift │ ├── LinearWaveformRenderer.swift │ └── WaveformRenderer.swift ├── TempiFFT.swift ├── WaveformAnalyzer.swift ├── WaveformImageDrawer+iOS.swift ├── WaveformImageDrawer+macOS.swift ├── WaveformImageDrawer.swift └── WaveformImageTypes.swift └── DSWaveformImageViews ├── SwiftUI ├── DefaultShapeStyler.swift ├── VersionMigrations.swift ├── WaveformLiveCanvas.swift ├── WaveformShape.swift └── WaveformView.swift └── UIKit ├── WaveformImageView.swift └── WaveformLiveView.swift /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: [dmrschmidt] 4 | custom: ["https://www.buymeacoffee.com/dmrschmidt"] 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Xcode 2 | .DS_Store 3 | */build/* 4 | *.pbxuser 5 | !default.pbxuser 6 | *.mode1v3 7 | !default.mode1v3 8 | *.mode2v3 9 | 
!default.mode2v3 10 | *.perspectivev3 11 | !default.perspectivev3 12 | xcuserdata 13 | profile 14 | *.moved-aside 15 | DerivedData 16 | .idea/ 17 | *.hmap 18 | *.xccheckout 19 | 20 | build 21 | derived 22 | 23 | #CocoaPods 24 | Pods 25 | -------------------------------------------------------------------------------- /.ios_toolchain.yml: -------------------------------------------------------------------------------- 1 | --- 2 | project-file-path: "/Users/dschmidt/workspace/DSWaveformImage/DSWaveformImage.xcodeproj" 3 | default-scheme: DSWaveformImage 4 | default-sdk: iphoneos10.2 5 | default-32bit-test-device: "'iOS Simulator,OS=10.2,name=iPhone 5'" 6 | default-64bit-test-device: "'iOS Simulator,OS=10.2,name=iPhone 7'" 7 | app-targets: 8 | - DSWaveformImage 9 | - DSWaveformImageExample 10 | test-targets: 11 | - DSWaveformImageTests 12 | ui-test-targets: [] 13 | provisioning-path: "/Users/dschmidt/workspace/DSWaveformImage/provisioning" 14 | crashlytics-framework-path: 15 | -------------------------------------------------------------------------------- /.ruby-version: -------------------------------------------------------------------------------- 1 | 2.7.2 2 | -------------------------------------------------------------------------------- /.swiftlint.yml: -------------------------------------------------------------------------------- 1 | line_length: 130 2 | excluded: 3 | - DSWaveformImageExample 4 | -------------------------------------------------------------------------------- /.swiftpm/xcode/xcshareddata/xcschemes/DSWaveformImage.xcscheme: -------------------------------------------------------------------------------- (scheme XML not captured by the text extraction) -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-VisionOS/Assets.xcassets/AppIcon.solidimagestack/Back.solidimagestacklayer/Content.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "vision", 5 | "scale" : "2x" 6 | } 7 | ], 8 | "info" : { 9 | "author" : "xcode", 10 | "version" : 1 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-VisionOS/Assets.xcassets/AppIcon.solidimagestack/Back.solidimagestacklayer/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-VisionOS/Assets.xcassets/AppIcon.solidimagestack/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | }, 6 | "layers" : [ 7 | { 8 | "filename" : "Front.solidimagestacklayer" 9 | }, 10 | { 11 | "filename" : "Middle.solidimagestacklayer" 12 | }, 13 | { 14 | "filename" : "Back.solidimagestacklayer" 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-VisionOS/Assets.xcassets/AppIcon.solidimagestack/Front.solidimagestacklayer/Content.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "vision", 5 | "scale" : "2x" 6 | } 7 | ], 8 |
"info" : { 9 | "author" : "xcode", 10 | "version" : 1 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-VisionOS/Assets.xcassets/AppIcon.solidimagestack/Front.solidimagestacklayer/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-VisionOS/Assets.xcassets/AppIcon.solidimagestack/Middle.solidimagestacklayer/Content.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "vision", 5 | "scale" : "2x" 6 | } 7 | ], 8 | "info" : { 9 | "author" : "xcode", 10 | "version" : 1 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-VisionOS/Assets.xcassets/AppIcon.solidimagestack/Middle.solidimagestacklayer/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-VisionOS/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-VisionOS/ContentView.swift: -------------------------------------------------------------------------------- 1 | import SwiftUI 2 | import DSWaveformImage 3 | import DSWaveformImageViews 4 | 5 | struct ContentView: View { 6 | private static let colors = [UIColor.systemPink, UIColor.systemBlue, UIColor.systemGreen] 7 | private static var randomColor: UIColor { 8 | colors[Int.random(in: 0.. 2 | 3 | 4 | 5 | UIApplicationSceneManifest 6 | 7 | UIApplicationPreferredDefaultSceneSessionRole 8 | UIWindowSceneSessionRoleApplication 9 | UIApplicationSupportsMultipleScenes 10 | 11 | UISceneConfigurations 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-VisionOS/Preview Content/Preview Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.swift 3 | // DSWaveformImageExample-iOS 4 | // 5 | // Created by Dennis Schmidt on 27.09.22. 6 | // 7 | 8 | import UIKit 9 | 10 | @main 11 | class AppDelegate: UIResponder, UIApplicationDelegate { 12 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { 13 | // Override point for customization after application launch. 
14 | return true 15 | } 16 | 17 | // MARK: UISceneSession Lifecycle 18 | 19 | func application(_ application: UIApplication, configurationForConnecting connectingSceneSession: UISceneSession, options: UIScene.ConnectionOptions) -> UISceneConfiguration { 20 | // Called when a new scene session is being created. 21 | // Use this method to select a configuration to create the new scene with. 22 | return UISceneConfiguration(name: "Default Configuration", sessionRole: connectingSceneSession.role) 23 | } 24 | 25 | func application(_ application: UIApplication, didDiscardSceneSessions sceneSessions: Set) { 26 | // Called when the user discards a scene session. 27 | // If any sessions were discarded while the application was not running, this will be called shortly after application:didFinishLaunchingWithOptions. 28 | // Use this method to release any resources that were specific to the discarded scenes, as they will not return. 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/Assets.xcassets/AccentColor.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "platform" : "ios", 6 | "reference" : "systemPinkColor" 7 | }, 8 | "idiom" : "universal" 9 | } 10 | ], 11 | "info" : { 12 | "author" : "xcode", 13 | "version" : 1 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "Icon.png", 5 | "idiom" : "universal", 6 | "platform" : "ios", 7 | "size" : "1024x1024" 8 | } 9 | ], 10 | "info" : { 11 | "author" : "xcode", 12 | "version" : 1 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/Assets.xcassets/AppIcon.appiconset/Icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmrschmidt/DSWaveformImage/5f1ce68474df5a4ab055dfd0df5d2da810eaec7c/Example/DSWaveformImageExample-iOS/Assets.xcassets/AppIcon.appiconset/Icon.png -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/Assets.xcassets/background.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "alexander-popov-R25Q-pAUeY8-unsplash.jpg", 5 | "idiom" : "universal" 6 | } 7 | ], 8 | "info" : { 9 | "author" : "xcode", 10 | "version" : 1 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/Assets.xcassets/background.imageset/alexander-popov-R25Q-pAUeY8-unsplash.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/dmrschmidt/DSWaveformImage/5f1ce68474df5a4ab055dfd0df5d2da810eaec7c/Example/DSWaveformImageExample-iOS/Assets.xcassets/background.imageset/alexander-popov-R25Q-pAUeY8-unsplash.jpg -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- (storyboard XML not captured by the text extraction) -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- (storyboard XML not captured by the text extraction) -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/DSWaveformImageExample-iOS-Bridging-Header.h: -------------------------------------------------------------------------------- 1 | #import "SCAudioManager.h" 2 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | UIApplicationSceneManifest 6 | 7 | UIApplicationSupportsMultipleScenes 8 | 9 | UISceneConfigurations 10 | 11 | UIWindowSceneSessionRoleApplication 12 | 13 | 14 | UISceneConfigurationName 15 | Default Configuration 16 | UISceneDelegateClassName 17 | $(PRODUCT_MODULE_NAME).SceneDelegate 18 | UISceneStoryboardFile 19 | Main 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/ProgressViewController.swift:
-------------------------------------------------------------------------------- 1 | import Foundation 2 | import UIKit 3 | import SwiftUI 4 | import DSWaveformImage 5 | 6 | class ProgressViewController: UIViewController { 7 | @IBOutlet var waveformImageView: UIImageView! 8 | @IBOutlet var playbackWaveformImageView: UIImageView! 9 | 10 | private let waveformImageDrawer = WaveformImageDrawer() 11 | private let audioURL = Bundle.main.url(forResource: "example_sound", withExtension: "m4a")! 12 | 13 | override func viewDidAppear(_ animated: Bool) { 14 | super.viewDidAppear(animated) 15 | updateWaveformImages() 16 | } 17 | 18 | @IBAction func shuffleProgressUIKit() { 19 | // In a real app, progress would come from your player. 20 | // Since there are various ways to play audio, e.g. AVPlayer, 21 | // the purpose of this example is only to show how one 22 | // might visualize the progress, not how to calculate it. 23 | let progress = Double.random(in: 0...1) 24 | 25 | // Typically, this also does not need to be animated if your 26 | // progress updates come in at a high enough frequency 27 | // (every 0.1s for instance). 28 | updateProgressWaveform(progress) 29 | } 30 | 31 | @IBAction func openSwiftUIExample() { 32 | let hostingViewController = UIHostingController(rootView: ProgressExampleView()) 33 | present(hostingViewController, animated: true) 34 | } 35 | 36 | private func updateProgressWaveform(_ progress: Double) { 37 | let fullRect = playbackWaveformImageView.bounds 38 | let newWidth = Double(fullRect.size.width) * progress 39 | 40 | let maskLayer = CAShapeLayer() 41 | let maskRect = CGRect(x: 0.0, y: 0.0, width: newWidth, height: Double(fullRect.size.height)) 42 | 43 | let path = CGPath(rect: maskRect, transform: nil) 44 | maskLayer.path = path 45 | 46 | playbackWaveformImageView.layer.mask = maskLayer 47 | } 48 | 49 | private func updateWaveformImages() { 50 | Task { 51 | let image = try await waveformImageDrawer.waveformImage(fromAudioAt: audioURL, with: .init(size: playbackWaveformImageView.bounds.size, style: .filled(.darkGray))) 52 | 53 | DispatchQueue.main.async { 54 | self.waveformImageView.image = image 55 | self.playbackWaveformImageView.image = image.withTintColor(.red, renderingMode: .alwaysTemplate) 56 | self.shuffleProgressUIKit() 57 | } 58 | } 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/RecordingViewController.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import AVFoundation 3 | import UIKit 4 | import DSWaveformImage 5 | import DSWaveformImageViews 6 | 7 | class RecordingViewController: UIViewController { 8 | @IBOutlet weak var recordButton: UIButton! 9 | @IBOutlet weak var waveformView: WaveformLiveView! 10 | @IBOutlet weak var styleSelector: UISegmentedControl! 11 | 12 | private let audioManager: SCAudioManager! 13 | private let imageDrawer: WaveformImageDrawer!
14 | 15 | required init?(coder: NSCoder) { 16 | audioManager = SCAudioManager() 17 | imageDrawer = WaveformImageDrawer() 18 | 19 | super.init(coder: coder) 20 | 21 | audioManager.recordingDelegate = self 22 | } 23 | 24 | override func viewDidAppear(_ animated: Bool) { 25 | super.viewDidAppear(animated) 26 | waveformView.configuration = waveformView.configuration.with( 27 | style: styleForSelection(index: styleSelector.selectedSegmentIndex) 28 | ) 29 | audioManager.prepareAudioRecording() 30 | } 31 | 32 | @IBAction func didChangeStyle(_ sender: UISegmentedControl) { 33 | waveformView.configuration = waveformView.configuration.with( 34 | style: styleForSelection(index: sender.selectedSegmentIndex) 35 | ) 36 | } 37 | 38 | @IBAction func didChangeSilence(_ sender: UISwitch) { 39 | waveformView.shouldDrawSilencePadding = sender.isOn 40 | } 41 | 42 | @IBAction func didChangeDampingPercentage(_ sender: UISlider) { 43 | waveformView.configuration = waveformView.configuration.with( 44 | damping: waveformView.configuration.damping?.with(percentage: sender.value) 45 | ) 46 | } 47 | 48 | @IBAction func didChangeDampingSides(_ sender: UISegmentedControl) { 49 | waveformView.configuration = waveformView.configuration.with( 50 | damping: waveformView.configuration.damping?.with( 51 | sides: sideForSelection(index: sender.selectedSegmentIndex) 52 | ) 53 | ) 54 | } 55 | 56 | @IBAction func didTapRecording() { 57 | if audioManager.recording() { 58 | audioManager.stopRecording() 59 | recordButton.setTitle("Start Recording", for: .normal) 60 | } else { 61 | waveformView.reset() 62 | audioManager.startRecording() 63 | recordButton.setTitle("Stop Recording", for: .normal) 64 | } 65 | } 66 | 67 | private func styleForSelection(index: Int) -> Waveform.Style { 68 | switch index { 69 | case 0: return .filled(.red) 70 | case 1: return .gradient([.red, .yellow]) 71 | case 2: return .striped(.init(color: .red, width: 3, spacing: 3)) 72 | default: fatalError() 73 | } 74 | } 75 | 76 | private func sideForSelection(index: Int) -> Waveform.Damping.Sides { 77 | switch index { 78 | case 0: return .left 79 | case 1: return .right 80 | case 2: return .both 81 | default: fatalError() 82 | } 83 | } 84 | } 85 | 86 | extension RecordingViewController: RecordingDelegate { 87 | func audioManager(_ manager: SCAudioManager!, didAllowRecording success: Bool) { 88 | if !success { 89 | preconditionFailure("Recording must be allowed in Settings to work.") 90 | } 91 | } 92 | 93 | func audioManager(_ manager: SCAudioManager!, didFinishRecordingSuccessfully success: Bool) { 94 | print("did finish recording with success=\(success)") 95 | recordButton.setTitle("Start Recording", for: .normal) 96 | } 97 | 98 | func audioManager(_ manager: SCAudioManager!, didUpdateRecordProgress progress: CGFloat) { 99 | print("current power: \(manager.lastAveragePower()) dB") 100 | let linear = 1 - pow(10, manager.lastAveragePower() / 20) 101 | 102 | // Here we add the same sample 3 times to speed up the animation. 103 | // Usually you'd just add the sample once. 104 | waveformView.add(samples: [linear, linear, linear]) 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/SCAudioManager.h: -------------------------------------------------------------------------------- 1 | // 2 | // SCAudioRecorder.h 3 | // soundcard 4 | // 5 | // Created by Dennis Schmidt on 27.09.13. 6 | // Copyright (c) 2013 soundcard.io. All rights reserved. 
7 | // 8 | 9 | #import <AVFoundation/AVFoundation.h> 10 | #import <AudioToolbox/AudioToolbox.h> 11 | 12 | @class SCAudioManager; 13 | 14 | @protocol RecordingDelegate <NSObject> 15 | - (void)audioManager:(SCAudioManager *)manager didAllowRecording:(BOOL)flag; 16 | - (void)audioManager:(SCAudioManager *)manager didFinishRecordingSuccessfully:(BOOL)flag; 17 | - (void)audioManager:(SCAudioManager *)manager didUpdateRecordProgress:(CGFloat)progress; 18 | @end 19 | 20 | @protocol PlaybackDelegate <NSObject> 21 | - (void)audioManager:(SCAudioManager *)manager didFinishPlayingSuccessfully:(BOOL)flag; 22 | - (void)audioManager:(SCAudioManager *)manager didUpdatePlayProgress:(CGFloat)progress; 23 | @end 24 | 25 | @interface SCAudioManager : NSObject <AVAudioRecorderDelegate, AVAudioPlayerDelegate> 26 | @property(nonatomic, weak) id<RecordingDelegate> recordingDelegate; 27 | @property(nonatomic, weak) id<PlaybackDelegate> playbackDelegate; 28 | @property(nonatomic) NSTimeInterval currentRecordingTime; 29 | 30 | - (NSURL *)recordingsFolderURL; 31 | - (NSURL *)recordedAudioFileURL; 32 | - (NSURL *)downloadedAudioFileURL; 33 | 34 | - (void)prepareAudioRecording; 35 | 36 | - (BOOL)recording; 37 | - (void)startRecording; 38 | - (void)stopRecording; 39 | - (BOOL)hasCapturedSufficientAudioLength; 40 | - (void)setRecordingToBeSentAgainFromAudioAtURL:(NSURL *)audioURL; 41 | 42 | - (float)lastAveragePower; 43 | 44 | - (BOOL)playing; 45 | - (void)playDownloadedAudio; 46 | - (void)startPlayingRecordedAudio; 47 | - (void)playAudioFileFromURL:(NSURL *)audioURL; 48 | - (void)stopPlayingRecordedAudio; 49 | - (void)reset; 50 | @end 51 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/SCAudioManager.m: -------------------------------------------------------------------------------- 1 | // 2 | // SCAudioManager.m 3 | // soundcard 4 | // 5 | // Created by Dennis Schmidt on 27.09.13. 6 | // Copyright (c) 2013 soundcard.io. All rights reserved.
7 | // 8 | 9 | #import "SCAudioManager.h" 10 | 11 | @interface SCAudioManager () 12 | @property(nonatomic, strong) AVAudioRecorder *recorder; 13 | @property(nonatomic, strong) AVAudioPlayer *player; 14 | @property(nonatomic, strong) NSTimer *updateProgressIndicatorTimer; 15 | @property(nonatomic, strong) NSString *currentRecordedAudioFilename; 16 | @end 17 | 18 | @implementation SCAudioManager 19 | 20 | static const NSTimeInterval kMinRecordingTime = 0.3; 21 | static const NSTimeInterval kMaxRecordingTime = 90.0; 22 | static NSString const *kSCTemporaryRecordedAudioFilename = @"audio_temp.m4a"; 23 | static NSString const *kSCDownloadedAudioFilename = @"loaded_sound.m4a"; 24 | static NSString const *kSCRecordingsFolderName = @"recordings"; 25 | 26 | #pragma mark - 27 | #pragma mark Public Interface 28 | #pragma mark Helper methods 29 | 30 | - (NSURL *)recordingsFolderURL { 31 | NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject]; 32 | NSArray *pathComponents = [NSArray arrayWithObjects:documentsDirectory, kSCRecordingsFolderName, nil]; 33 | return [NSURL fileURLWithPathComponents:pathComponents]; 34 | } 35 | 36 | - (NSURL *)recordedAudioFileURL { 37 | NSArray *pathComponents = [NSArray arrayWithObjects:[[self recordingsFolderURL] path], self.currentRecordedAudioFilename, nil]; 38 | return [NSURL fileURLWithPathComponents:pathComponents]; 39 | } 40 | 41 | - (NSURL *)downloadedAudioFileURL { 42 | NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject]; 43 | NSArray *pathComponents = [NSArray arrayWithObjects:documentsDirectory, kSCDownloadedAudioFilename, nil]; 44 | return [NSURL fileURLWithPathComponents:pathComponents]; 45 | } 46 | 47 | #pragma mark Audio Recording methods 48 | 49 | - (BOOL)recording { 50 | return self.recorder.isRecording; 51 | } 52 | 53 | - (void)startRecording { 54 | if (!self.recorder.isRecording) { 55 | // Stop the audio player before recording 56 | if (self.player.playing) { 57 | [self.player stop]; 58 | [self.updateProgressIndicatorTimer invalidate]; 59 | } 60 | 61 | AVAudioSession *session = [AVAudioSession sharedInstance]; 62 | [session setActive:YES error:nil]; 63 | 64 | // Start recording 65 | self.currentRecordingTime = 0.0; 66 | [self.recorder record]; 67 | [self.updateProgressIndicatorTimer invalidate]; 68 | self.updateProgressIndicatorTimer = [NSTimer scheduledTimerWithTimeInterval:.01 target:self selector:@selector(recordingStatusDidUpdate) userInfo:nil repeats:YES]; 69 | } 70 | } 71 | 72 | - (float)lastAveragePower { 73 | return [self.recorder averagePowerForChannel:0]; 74 | } 75 | 76 | - (void)stopRecording { 77 | if (self.recorder.isRecording) { 78 | [self.recorder stop]; 79 | 80 | AVAudioSession *audioSession = [AVAudioSession sharedInstance]; 81 | [audioSession setActive:NO error:nil]; 82 | 83 | [self.updateProgressIndicatorTimer invalidate]; 84 | } 85 | } 86 | 87 | - (void)reset { 88 | [self.player stop]; 89 | [self stopRecording]; 90 | [self.recorder prepareToRecord]; 91 | self.currentRecordingTime = 0.0; 92 | } 93 | 94 | - (void)setRecordingToBeSentAgainFromAudioAtURL:(NSURL *)audioURL { 95 | self.currentRecordingTime = kMinRecordingTime + 1; // just something to say we captured enough 96 | [self copyTemporaryAudioFileToPersistentLocation: audioURL]; 97 | [self.recordingDelegate audioManager:self didFinishRecordingSuccessfully:YES]; 98 | } 99 | 100 | #pragma mark - 101 | #pragma mark Audio Recording / Playback 
Feedback methods 102 | 103 | - (void)recordingStatusDidUpdate { 104 | self.currentRecordingTime = self.recorder.currentTime; 105 | CGFloat progress = fmax(0, fmin(1, self.currentRecordingTime / kMaxRecordingTime)); 106 | 107 | [self.recorder updateMeters]; 108 | [self.recordingDelegate audioManager:self didUpdateRecordProgress:progress]; 109 | 110 | if(progress >= 1.0) { 111 | [self stopRecording]; 112 | } 113 | } 114 | 115 | - (void)playbackStatusDidUpdate { 116 | CGFloat currentPlayTime = (CGFloat) self.player.currentTime / (CGFloat) self.player.duration; 117 | CGFloat progress = fmax(0, fmin(1, currentPlayTime)); 118 | [self.playbackDelegate audioManager:self didUpdatePlayProgress:progress]; 119 | } 120 | 121 | - (BOOL)hasCapturedSufficientAudioLength { 122 | return self.currentRecordingTime > kMinRecordingTime; 123 | } 124 | 125 | #pragma mark - 126 | #pragma mark Audio Playback methods 127 | 128 | - (void)playAudioFileFromURL:(NSURL *)audioURL { 129 | [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil]; 130 | UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker; 131 | 132 | #pragma clang diagnostic push 133 | #pragma clang diagnostic ignored "-Wdeprecated-declarations" 134 | AudioSessionSetProperty(kAudioSessionProperty_OverrideAudioRoute, sizeof(audioRouteOverride), &audioRouteOverride); 135 | #pragma clang diagnostic pop 136 | 137 | if (!self.recorder.recording) { 138 | [self.updateProgressIndicatorTimer invalidate]; 139 | self.updateProgressIndicatorTimer = [NSTimer scheduledTimerWithTimeInterval:.01 target:self selector:@selector(playbackStatusDidUpdate) userInfo:nil repeats:YES]; 140 | 141 | self.player = [[AVAudioPlayer alloc] initWithContentsOfURL:audioURL error:nil]; 142 | [self.player setDelegate:self]; 143 | [self.player play]; 144 | } 145 | } 146 | 147 | - (BOOL)playing { 148 | return self.player.playing; 149 | } 150 | 151 | - (void)startPlayingRecordedAudio { 152 | [self playAudioFileFromURL:self.recordedAudioFileURL]; 153 | } 154 | 155 | - (void)stopPlayingRecordedAudio { 156 | if([self.player isPlaying]) { 157 | [self.player stop]; 158 | [self.updateProgressIndicatorTimer invalidate]; 159 | [self.playbackDelegate audioManager:self didFinishPlayingSuccessfully:NO]; 160 | } 161 | } 162 | 163 | - (void)playDownloadedAudio { 164 | [self playAudioFileFromURL:self.downloadedAudioFileURL]; 165 | } 166 | 167 | #pragma mark - 168 | #pragma mark AVAudioRecorderDelegate methods 169 | 170 | - (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)avrecorder successfully:(BOOL)flag { 171 | [self.updateProgressIndicatorTimer invalidate]; 172 | 173 | if([self hasCapturedSufficientAudioLength]) { 174 | [self copyTemporaryAudioFileToPersistentLocation:[self temporaryRecordedAudioFileURL]]; 175 | } 176 | 177 | [self.recordingDelegate audioManager:self didFinishRecordingSuccessfully:flag]; 178 | } 179 | 180 | #pragma mark - 181 | #pragma mark AVAudioPlayerDelegate methods 182 | 183 | - (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player successfully:(BOOL)flag { 184 | [self.updateProgressIndicatorTimer invalidate]; 185 | 186 | [self.playbackDelegate audioManager:self didFinishPlayingSuccessfully:flag]; 187 | } 188 | 189 | #pragma mark - 190 | #pragma mark Private methods 191 | 192 | - (NSURL *)temporaryRecordedAudioFileURL { 193 | NSString *homeDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject]; 194 | NSArray *pathComponents = [NSArray arrayWithObjects:homeDirectory, 
kSCTemporaryRecordedAudioFilename, nil]; 195 | return [NSURL fileURLWithPathComponents:pathComponents]; 196 | } 197 | 198 | - (void)prepareAudioRecording { 199 | // Set the temporary audio file 200 | NSURL *outputFileURL = [self temporaryRecordedAudioFileURL]; 201 | 202 | // Setup audio session 203 | AVAudioSession *session = [AVAudioSession sharedInstance]; 204 | [session setCategory:AVAudioSessionCategoryPlayAndRecord error:nil]; 205 | 206 | // Define the recorder setting 207 | NSMutableDictionary *recordSetting = [[NSMutableDictionary alloc] init]; 208 | 209 | [recordSetting setValue:[NSNumber numberWithInt:kAudioFormatMPEG4AAC] forKey:AVFormatIDKey]; 210 | [recordSetting setValue:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey]; 211 | [recordSetting setValue:[NSNumber numberWithInt:1] forKey:AVNumberOfChannelsKey]; 212 | 213 | // Initiate and prepare the recorder 214 | self.recorder = [[AVAudioRecorder alloc] initWithURL:outputFileURL settings:recordSetting error:NULL]; 215 | self.recorder.delegate = self; 216 | self.recorder.meteringEnabled = YES; 217 | 218 | [session requestRecordPermission:^(BOOL granted) { 219 | [self.recordingDelegate audioManager:self didAllowRecording:granted]; 220 | [self.recorder prepareToRecord]; 221 | }]; 222 | } 223 | 224 | - (void)copyTemporaryAudioFileToPersistentLocation:(NSURL *)audioURL { 225 | self.currentRecordedAudioFilename = [NSString stringWithFormat:@"%@.m4a", [[NSUUID UUID] UUIDString]]; 226 | NSData *recordedAudioData = [NSData dataWithContentsOfURL:audioURL]; 227 | 228 | [[NSFileManager defaultManager] createDirectoryAtPath:[[self recordingsFolderURL] path] withIntermediateDirectories:YES attributes:nil error:nil]; 229 | [recordedAudioData writeToURL:[self recordedAudioFileURL] atomically:YES]; 230 | NSLog(@"new audio file recorded to %@", [self recordedAudioFileURL]); 231 | } 232 | 233 | @end 234 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/SceneDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // SceneDelegate.swift 3 | // DSWaveformImageExample-iOS 4 | // 5 | // Created by Dennis Schmidt on 27.09.22. 6 | // 7 | 8 | import UIKit 9 | 10 | class SceneDelegate: UIResponder, UIWindowSceneDelegate { 11 | 12 | var window: UIWindow? 13 | 14 | 15 | func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) { 16 | // Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`. 17 | // If using a storyboard, the `window` property will automatically be initialized and attached to the scene. 18 | // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead). 19 | guard let _ = (scene as? UIWindowScene) else { return } 20 | } 21 | 22 | func sceneDidDisconnect(_ scene: UIScene) { 23 | // Called as the scene is being released by the system. 24 | // This occurs shortly after the scene enters the background, or when its session is discarded. 25 | // Release any resources associated with this scene that can be re-created the next time the scene connects. 26 | // The scene may re-connect later, as its session was not necessarily discarded (see `application:didDiscardSceneSessions` instead). 
27 | } 28 | 29 | func sceneDidBecomeActive(_ scene: UIScene) { 30 | // Called when the scene has moved from an inactive state to an active state. 31 | // Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive. 32 | } 33 | 34 | func sceneWillResignActive(_ scene: UIScene) { 35 | // Called when the scene will move from an active state to an inactive state. 36 | // This may occur due to temporary interruptions (ex. an incoming phone call). 37 | } 38 | 39 | func sceneWillEnterForeground(_ scene: UIScene) { 40 | // Called as the scene transitions from the background to the foreground. 41 | // Use this method to undo the changes made on entering the background. 42 | } 43 | 44 | func sceneDidEnterBackground(_ scene: UIScene) { 45 | // Called as the scene transitions from the foreground to the background. 46 | // Use this method to save data, release shared resources, and store enough scene-specific state information 47 | // to restore the scene back to its current state. 48 | } 49 | 50 | 51 | } 52 | 53 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/SwiftUIExample/ProgressWaveformView.swift: -------------------------------------------------------------------------------- 1 | import DSWaveformImage 2 | import DSWaveformImageViews 3 | import SwiftUI 4 | 5 | struct ProgressWaveformView: View { 6 | let audioURL: URL 7 | let progress: Double 8 | 9 | var body: some View { 10 | GeometryReader { geometry in 11 | WaveformView(audioURL: audioURL) { shape in 12 | shape.fill(.white) 13 | shape.fill(.red).mask(alignment: .leading) { 14 | Rectangle().frame(width: geometry.size.width * progress) 15 | } 16 | } 17 | } 18 | } 19 | } 20 | 21 | struct ProgressExampleView: View { 22 | private let audioURL = Bundle.main.url(forResource: "example_sound", withExtension: "m4a")! 
23 | @State private var progress: Double = .random(in: 0...1) 24 | 25 | var body: some View { 26 | VStack { 27 | ProgressWaveformView(audioURL: audioURL, progress: progress) 28 | 29 | Button(action: { withAnimation { progress = .random(in: 0...1) }}) { 30 | Label("Progress", systemImage: "dice.fill") 31 | }.buttonStyle(.borderedProminent) 32 | } 33 | .background(Color(.systemYellow).ignoresSafeArea()) 34 | } 35 | } 36 | 37 | struct ProgressExampleView_Previews: PreviewProvider { 38 | static var previews: some View { 39 | ProgressExampleView() 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/SwiftUIExample/RecordingIndicatorView.swift: -------------------------------------------------------------------------------- 1 | import SwiftUI 2 | import DSWaveformImage 3 | import DSWaveformImageViews 4 | 5 | struct RecordingIndicatorView: View { 6 | let samples: [Float] 7 | let duration: TimeInterval 8 | let shouldDrawSilence: Bool 9 | 10 | @Binding var isRecording: Bool 11 | 12 | @State var configuration: Waveform.Configuration = .init( 13 | style: .striped(.init(color: .systemGray, width: 3, spacing: 3)), 14 | damping: .init() 15 | ) 16 | 17 | static let timeFormatter: DateComponentsFormatter = { 18 | let formatter = DateComponentsFormatter() 19 | formatter.allowedUnits = [.minute, .second] 20 | formatter.zeroFormattingBehavior = .pad 21 | return formatter 22 | }() 23 | 24 | var body: some View { 25 | HStack { 26 | WaveformLiveCanvas(samples: samples, configuration: configuration, shouldDrawSilencePadding: shouldDrawSilence) 27 | .padding(.vertical, 2) 28 | 29 | Text(Self.timeFormatter.string(from: duration) ?? "00:00") 30 | .font(.subheadline) 31 | .monospacedDigit() 32 | .foregroundColor(Color(.systemGray)) 33 | 34 | Button(action: { isRecording.toggle() }) { 35 | Image(systemName: isRecording ? "stop.circle" : "record.circle") 36 | .resizable() 37 | .scaledToFit() 38 | } 39 | .padding(.vertical, 4) 40 | .padding(.trailing) 41 | .foregroundColor(Color(.systemRed)) 42 | } 43 | .background(Color(.systemGray6)) 44 | .cornerRadius(10) 45 | .frame(height: 32) 46 | } 47 | } 48 | 49 | #if DEBUG 50 | struct RecordingIndicatorView_Previews: PreviewProvider { 51 | static var previews: some View { 52 | RecordingIndicatorView(samples: [], duration: 120, shouldDrawSilence: true, isRecording: .constant(true)) 53 | } 54 | } 55 | #endif 56 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/SwiftUIExample/StaticWaveformRenderer.swift: -------------------------------------------------------------------------------- 1 | // 2 | // StaticWaveformRenderer.swift 3 | // DSWaveformImageExample-iOS 4 | // 5 | // Created by Dennis Schmidt on 16.01.24. 6 | // 7 | 8 | import Foundation 9 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/SwiftUIExample/SwiftUIExample.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import UIKit 3 | import SwiftUI 4 | 5 | // This wrapper only exists to connect InterfaceBuilder & SwiftUI. 
6 | class SwiftUIExampleViewController: UIHostingController { 7 | @MainActor @objc required dynamic init?(coder aDecoder: NSCoder) { 8 | super.init(coder: aDecoder, rootView: SwiftUIExampleView()) 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/SwiftUIExample/SwiftUIExampleView.swift: -------------------------------------------------------------------------------- 1 | import DSWaveformImage 2 | import DSWaveformImageViews 3 | import SwiftUI 4 | 5 | struct SwiftUIExampleView: View { 6 | private enum ActiveTab: Hashable { 7 | case recorder, shape, overview 8 | } 9 | 10 | private static let colors = [UIColor.systemPink, UIColor.systemBlue, UIColor.systemGreen] 11 | private static var randomColor: UIColor { colors.randomElement()! } 12 | 13 | private static var audioURLs: [URL?] = [ 14 | Bundle.main.url(forResource: "example_sound", withExtension: "m4a"), 15 | Bundle.main.url(forResource: "example_sound_2", withExtension: "m4a") 16 | ] 17 | private static func randomURL(_ current: URL?) -> URL? { audioURLs.filter { $0 != current }.randomElement()! } 18 | 19 | @StateObject private var audioRecorder: AudioRecorder = AudioRecorder() 20 | 21 | @State private var configuration: Waveform.Configuration = Waveform.Configuration( 22 | style: .striped(Waveform.Style.StripeConfig(color: Self.randomColor, width: 3, lineCap: .round)), 23 | verticalScalingFactor: 0.9 24 | ) 25 | 26 | @State private var liveConfiguration: Waveform.Configuration = Waveform.Configuration( 27 | style: .striped(.init(color: randomColor, width: 3, spacing: 3)) 28 | ) 29 | 30 | @State private var audioURL: URL? = audioURLs.first! 31 | @State private var samples: [Float] = [] 32 | @State private var silence: Bool = true 33 | @State private var selection: ActiveTab = .overview 34 | 35 | var body: some View { 36 | VStack { 37 | Text("SwiftUI examples") 38 | .font(.largeTitle.bold()) 39 | 40 | Picker("Hey", selection: $selection) { 41 | Text("Recorder").tag(ActiveTab.recorder) 42 | Text("Shape").tag(ActiveTab.shape) 43 | Text("Overview").tag(ActiveTab.overview) 44 | } 45 | .pickerStyle(.segmented) 46 | .padding(.horizontal) 47 | 48 | switch selection { 49 | case .recorder: recordingExample 50 | case .shape: shape 51 | case .overview: overview 52 | } 53 | } 54 | .padding(.vertical, 20) 55 | } 56 | 57 | @ViewBuilder 58 | private var recordingExample: some View { 59 | VStack { 60 | WaveformLiveCanvas( 61 | samples: audioRecorder.samples, 62 | configuration: liveConfiguration, 63 | renderer: CircularWaveformRenderer(kind: .circle), 64 | shouldDrawSilencePadding: silence 65 | ) 66 | 67 | Toggle("draw silence", isOn: $silence) 68 | .controlSize(.mini) 69 | .padding(.horizontal) 70 | 71 | RecordingIndicatorView( 72 | samples: audioRecorder.samples, 73 | duration: audioRecorder.recordingTime, 74 | shouldDrawSilence: silence, 75 | isRecording: $audioRecorder.isRecording 76 | ) 77 | .padding(.horizontal) 78 | } 79 | } 80 | 81 | @ViewBuilder 82 | private var shape: some View { 83 | VStack { 84 | Text("WaveformView").font(.monospaced(.title.bold())()) 85 | 86 | HStack { 87 | Button { 88 | configuration = configuration.with(style: .striped(Waveform.Style.StripeConfig(color: Self.randomColor, width: 3, lineCap: .round))) 89 | liveConfiguration = liveConfiguration.with(style: .striped(.init(color: Self.randomColor, width: 3, spacing: 3))) 90 | } label: { 91 | Label("color", systemImage: "dice") 92 | .frame(maxWidth: .infinity) 93 | } 94 | .font(.body.bold()) 95 | 
.padding(8) 96 | .background(Color(.systemGray6)) 97 | .cornerRadius(10) 98 | 99 | Button { 100 | audioURL = Self.randomURL(audioURL) 101 | print("will draw \(audioURL!)") 102 | } label: { 103 | Label("waveform", systemImage: "dice") 104 | .frame(maxWidth: .infinity) 105 | } 106 | .font(.body.bold()) 107 | .padding(8) 108 | .background(Color(.systemGray6)) 109 | .cornerRadius(10) 110 | } 111 | .padding(.horizontal) 112 | 113 | // the if let is left here intentionally to illustrate how to deal with optional URLs, 114 | // as this was asked in an older GitHub issue 115 | if let audioURL { 116 | WaveformView(audioURL: audioURL, configuration: configuration) 117 | 118 | WaveformView( 119 | audioURL: audioURL, 120 | configuration: configuration, 121 | renderer: CircularWaveformRenderer(kind: .ring(0.7)) 122 | ) { shape in 123 | // you may completely override the shape styling this way 124 | shape 125 | .stroke( 126 | LinearGradient(colors: [.red, Color(Self.randomColor)], startPoint: .zero, endPoint: .topTrailing), 127 | style: StrokeStyle(lineWidth: 3, lineCap: .round)) 128 | } 129 | 130 | Divider() 131 | Text("WaveformShape").font(.monospaced(.title.bold())()) 132 | 133 | /// **Note:** It's possible, but discouraged, to use WaveformShape directly. 134 | /// Since Shapes should not do any expensive computations, the analysis has to happen outside of the Shape, 135 | /// which makes the API a bit clumsy when used directly: the size must be known up front, 136 | /// even though the Shape of course intrinsically knows its size already. 137 | GeometryReader { geometry in 138 | WaveformShape(samples: samples) 139 | .fill(Color.orange) 140 | .task { 141 | do { 142 | let samplesNeeded = Int(geometry.size.width * configuration.scale) 143 | let samples = try await WaveformAnalyzer().samples(fromAudioAt: audioURL, count: samplesNeeded) 144 | await MainActor.run { self.samples = samples } 145 | } catch { 146 | assertionFailure(error.localizedDescription) 147 | } 148 | } 149 | } 150 | } 151 | } 152 | } 153 | 154 | @ViewBuilder 155 | private var overview: some View { 156 | if let audioURL { 157 | HStack { 158 | VStack { 159 | WaveformView(audioURL: audioURL, configuration: .init(style: .filled(.red))) 160 | WaveformView(audioURL: audioURL, configuration: .init(style: .outlined(.blue, 0.5))) 161 | WaveformView(audioURL: audioURL, configuration: .init(style: .gradient([.yellow, .orange]))) 162 | WaveformView(audioURL: audioURL, configuration: .init(style: .gradientOutlined([.yellow, .orange], 1))) 163 | WaveformView(audioURL: audioURL, configuration: .init(style: .striped(.init(color: .red, width: 2, spacing: 1)))) 164 | 165 | WaveformView(audioURL: audioURL, configuration: .init(style: .striped(.init(color: .black)))) { shape in 166 | shape // override the shape styling 167 | .stroke(LinearGradient(colors: [.blue, .pink], startPoint: .bottom, endPoint: .top), lineWidth: 3) 168 | } placeholder: { 169 | ProgressView() 170 | } 171 | } 172 | 173 | VStack { 174 | WaveformView(audioURL: audioURL, configuration: .init(style: .filled(.red)), renderer: CircularWaveformRenderer()) 175 | WaveformView(audioURL: audioURL, configuration: .init(style: .outlined(.blue, 0.5)), renderer: CircularWaveformRenderer()) 176 | WaveformView(audioURL: audioURL, configuration: .init(style: .gradient([.yellow, .orange])), renderer: CircularWaveformRenderer()) 177 | WaveformView(audioURL: audioURL, configuration: .init(style: .gradientOutlined([.yellow, .orange], 1)), renderer: CircularWaveformRenderer()) 178 | WaveformView(audioURL: audioURL,
configuration: .init(style: .striped(.init(color: .red, width: 2, spacing: 2))), renderer: CircularWaveformRenderer()) 179 | 180 | WaveformView(audioURL: audioURL, configuration: .init(style: .striped(.init(color: .black))), renderer: CircularWaveformRenderer()) { shape in 181 | shape // override the shape styling 182 | .stroke(LinearGradient(colors: [.blue, .pink], startPoint: .bottom, endPoint: .top), lineWidth: 3) 183 | } placeholder: { 184 | ProgressView() 185 | } 186 | } 187 | 188 | VStack { 189 | WaveformView(audioURL: audioURL, configuration: .init(style: .filled(.red)), renderer: CircularWaveformRenderer(kind: .ring(0.5))) 190 | WaveformView(audioURL: audioURL, configuration: .init(style: .outlined(.blue, 0.5)), renderer: CircularWaveformRenderer(kind: .ring(0.5))) 191 | WaveformView(audioURL: audioURL, configuration: .init(style: .gradient([.yellow, .orange])), renderer: CircularWaveformRenderer(kind: .ring(0.5))) 192 | WaveformView(audioURL: audioURL, configuration: .init(style: .gradientOutlined([.yellow, .orange], 1)), renderer: CircularWaveformRenderer(kind: .ring(0.5))) 193 | WaveformView(audioURL: audioURL, configuration: .init(style: .striped(.init(color: .red, width: 2, spacing: 2))), renderer: CircularWaveformRenderer(kind: .ring(0.5))) 194 | 195 | WaveformView(audioURL: audioURL, configuration: .init(style: .striped(.init(color: .black))), renderer: CircularWaveformRenderer(kind: .ring(0.5))) { shape in 196 | shape // override the shape styling 197 | .stroke(LinearGradient(colors: [.blue, .pink], startPoint: .bottom, endPoint: .top), lineWidth: 3) 198 | } placeholder: { 199 | ProgressView() 200 | } 201 | } 202 | } 203 | } 204 | } 205 | } 206 | 207 | struct SwiftUIExampleView_Previews: PreviewProvider { 208 | static var previews: some View { 209 | SwiftUIExampleView() 210 | } 211 | } 212 | 213 | private class AudioRecorder: NSObject, ObservableObject, RecordingDelegate { 214 | @Published var samples: [Float] = [] 215 | @Published var recordingTime: TimeInterval = 0 216 | @Published var isRecording: Bool = false { 217 | didSet { 218 | guard oldValue != isRecording else { return } 219 | isRecording ? startRecording() : stopRecording() 220 | } 221 | } 222 | 223 | private let audioManager: SCAudioManager 224 | 225 | override init() { 226 | audioManager = SCAudioManager() 227 | 228 | super.init() 229 | 230 | audioManager.prepareAudioRecording() 231 | audioManager.recordingDelegate = self 232 | } 233 | 234 | func startRecording() { 235 | samples = [] 236 | audioManager.startRecording() 237 | isRecording = true 238 | } 239 | 240 | func stopRecording() { 241 | audioManager.stopRecording() 242 | isRecording = false 243 | } 244 | 245 | // MARK: - RecordingDelegate 246 | 247 | func audioManager(_ manager: SCAudioManager!, didAllowRecording flag: Bool) {} 248 | 249 | func audioManager(_ manager: SCAudioManager!, didFinishRecordingSuccessfully flag: Bool) {} 250 | 251 | func audioManager(_ manager: SCAudioManager!, didUpdateRecordProgress progress: CGFloat) { 252 | let linear = 1 - pow(10, manager.lastAveragePower() / 20) 253 | 254 | // Here we add the same sample 3 times to speed up the animation. 255 | // Usually you'd just add the sample once. 
256 | recordingTime = audioManager.currentRecordingTime 257 | samples += [linear, linear, linear] 258 | } 259 | } 260 | 261 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/ViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.swift 3 | // DSWaveformImageExample 4 | // 5 | // Created by Dennis Schmidt on 06/02/2017. 6 | // Copyright © 2017 Dennis Schmidt. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import DSWaveformImage 11 | import DSWaveformImageViews 12 | 13 | class ViewController: UIViewController { 14 | @IBOutlet weak var topWaveformView: UIImageView! 15 | @IBOutlet weak var middleWaveformView: WaveformImageView! 16 | @IBOutlet weak var bottomWaveformView: UIImageView! 17 | 18 | private let waveformImageDrawer = WaveformImageDrawer() 19 | private let audioURL = Bundle.main.url(forResource: "example_sound", withExtension: "m4a")! 20 | 21 | override func viewDidAppear(_ animated: Bool) { 22 | super.viewDidAppear(animated) 23 | 24 | updateWaveformImages() 25 | 26 | Task { 27 | // get access to the raw, normalized amplitude samples 28 | let waveformAnalyzer = WaveformAnalyzer() 29 | let samples = try await waveformAnalyzer.samples(fromAudioAt: audioURL, count: 10) 30 | print("sampled down to 10, results are \(samples)") 31 | } 32 | } 33 | 34 | override func viewDidLayoutSubviews() { 35 | super.viewDidLayoutSubviews() 36 | 37 | // you might want to call updateWaveformImages() here 38 | // to adapt to view changes 39 | } 40 | 41 | private func updateWaveformImages() { 42 | Task { 43 | // always uses background thread rendering 44 | let image = try await waveformImageDrawer.waveformImage( 45 | fromAudioAt: audioURL, 46 | with: .init( 47 | size: topWaveformView.bounds.size, 48 | style: .gradient( 49 | [ 50 | UIColor(red: 255/255.0, green: 159/255.0, blue: 28/255.0, alpha: 1), 51 | UIColor(red: 255/255.0, green: 191/255.0, blue: 105/255.0, alpha: 1), 52 | UIColor.red 53 | ] 54 | ), 55 | damping: .init(percentage: 0.2, sides: .right, easing: { x in pow(x, 4) }), 56 | verticalScalingFactor: 2 57 | ), 58 | renderer: CircularWaveformRenderer() 59 | ) 60 | 61 | // need to jump back to main queue 62 | await MainActor.run { 63 | self.topWaveformView.image = image 64 | } 65 | } 66 | 67 | middleWaveformView.configuration = Waveform.Configuration( 68 | backgroundColor: .lightGray.withAlphaComponent(0.1), 69 | style: .striped(.init(color: UIColor(red: 51/255.0, green: 92/255.0, blue: 103/255.0, alpha: 1), width: 5, spacing: 5)), 70 | verticalScalingFactor: 0.5 71 | ) 72 | middleWaveformView.waveformAudioURL = audioURL 73 | 74 | Task { 75 | let image = try! 
await waveformImageDrawer.waveformImage(fromAudioAt: audioURL, with: bottomWaveformConfiguration, position: .top) 76 | 77 | await MainActor.run { 78 | // as an added bonus, use CALayer's compositingFilter for more elaborate image display 79 | self.bottomWaveformView.layer.compositingFilter = "overlayBlendMode" 80 | self.bottomWaveformView.image = image 81 | } 82 | } 83 | 84 | // Photo by Alexander Popov on Unsplash 85 | // https://unsplash.com/@5tep5?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText 86 | // https://unsplash.com/s/photos/techno?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText 87 | } 88 | 89 | private var bottomWaveformConfiguration: Waveform.Configuration { 90 | Waveform.Configuration( 91 | size: bottomWaveformView.bounds.size, 92 | style: .filled(.black) 93 | ) 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/example_sound.m4a: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmrschmidt/DSWaveformImage/5f1ce68474df5a4ab055dfd0df5d2da810eaec7c/Example/DSWaveformImageExample-iOS/example_sound.m4a -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-iOS/example_sound_2.m4a: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmrschmidt/DSWaveformImage/5f1ce68474df5a4ab055dfd0df5d2da810eaec7c/Example/DSWaveformImageExample-iOS/example_sound_2.m4a -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-macOS/Assets.xcassets/AccentColor.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "idiom" : "universal" 5 | } 6 | ], 7 | "info" : { 8 | "author" : "xcode", 9 | "version" : 1 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-macOS/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "mac", 5 | "scale" : "1x", 6 | "size" : "16x16" 7 | }, 8 | { 9 | "idiom" : "mac", 10 | "scale" : "2x", 11 | "size" : "16x16" 12 | }, 13 | { 14 | "idiom" : "mac", 15 | "scale" : "1x", 16 | "size" : "32x32" 17 | }, 18 | { 19 | "idiom" : "mac", 20 | "scale" : "2x", 21 | "size" : "32x32" 22 | }, 23 | { 24 | "idiom" : "mac", 25 | "scale" : "1x", 26 | "size" : "128x128" 27 | }, 28 | { 29 | "idiom" : "mac", 30 | "scale" : "2x", 31 | "size" : "128x128" 32 | }, 33 | { 34 | "idiom" : "mac", 35 | "scale" : "1x", 36 | "size" : "256x256" 37 | }, 38 | { 39 | "idiom" : "mac", 40 | "scale" : "2x", 41 | "size" : "256x256" 42 | }, 43 | { 44 | "idiom" : "mac", 45 | "scale" : "1x", 46 | "size" : "512x512" 47 | }, 48 | { 49 | "idiom" : "mac", 50 | "scale" : "2x", 51 | "size" : "512x512" 52 | } 53 | ], 54 | "info" : { 55 | "author" : "xcode", 56 | "version" : 1 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-macOS/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- 
/Example/DSWaveformImageExample-macOS/ContentView.swift: -------------------------------------------------------------------------------- 1 | import SwiftUI 2 | import DSWaveformImage 3 | import DSWaveformImageViews 4 | 5 | struct ContentView: View { 6 | private static let colors = [NSColor.systemPink, NSColor.systemBlue, NSColor.systemGreen] 7 | private static var randomColor: NSColor { 8 | colors[Int.random(in: 0..<colors.count)] 9 | } […] -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-macOS/DSWaveformImageExample_macOS.entitlements: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8"?> 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> 3 | <plist version="1.0"> 4 | <dict> 5 | <key>com.apple.security.app-sandbox</key> 6 | <true/> 7 | <key>com.apple.security.files.user-selected.read-only</key> 8 | <true/> 9 | </dict> 10 | </plist> 11 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-macOS/DSWaveformImageExample_macOSApp.swift: -------------------------------------------------------------------------------- 1 | // 2 | // DSWaveformImageExample_macOSApp.swift 3 | // DSWaveformImageExample-macOS 4 | // 5 | // Created by Dennis Schmidt on 27.09.22. 6 | // 7 | 8 | import SwiftUI 9 | 10 | @main 11 | struct DSWaveformImageExample_macOSApp: App { 12 | var body: some Scene { 13 | WindowGroup { 14 | ContentView() 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample-macOS/Preview Content/Preview Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8"?> 2 | <Workspace 3 | version = "1.0"> 4 | <FileRef 5 | location = "self:"> 6 | </FileRef> 7 | </Workspace> 8 | -------------------------------------------------------------------------------- /Example/DSWaveformImageExample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8"?> 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> 3 | <plist version="1.0"> 4 | <dict> 5 | <key>IDEDidComputeMac32BitWarning</key> 6 | <true/> 7 | </dict> 8 | </plist> 9 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2013 Dennis Schmidt 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 | -------------------------------------------------------------------------------- /Package.swift: -------------------------------------------------------------------------------- 1 | // swift-tools-version:5.7 2 | // The swift-tools-version declares the minimum version of Swift required to build this package. 3 | 4 | import PackageDescription 5 | 6 | let package = Package( 7 | name: "DSWaveformImage", 8 | platforms: [ 9 | .iOS(.v15), 10 | .macOS(.v12), 11 | ], 12 | products: [ 13 | // Products define the executables and libraries a package produces, and make them visible to other packages. 14 | .library( 15 | name: "DSWaveformImage", 16 | targets: ["DSWaveformImage"]), 17 | .library( 18 | name: "DSWaveformImageViews", 19 | targets: ["DSWaveformImageViews"]), 20 | ], 21 | dependencies: [ 22 | // Dependencies declare other packages that this package depends on. 23 | // .package(url: /* package url */, from: "1.0.0"), 24 | ], 25 | targets: [ 26 | .target(name: "DSWaveformImage"), 27 | .target( 28 | name: "DSWaveformImageViews", 29 | dependencies: ["DSWaveformImage"] 30 | ), 31 | ] 32 | ) 33 | -------------------------------------------------------------------------------- /Promotion/appstore.svg: -------------------------------------------------------------------------------- 1 | Download_on_the_App_Store_Badge_US-UK_RGB_blk_4SVG_092917 -------------------------------------------------------------------------------- /Promotion/progress-example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmrschmidt/DSWaveformImage/5f1ce68474df5a4ab055dfd0df5d2da810eaec7c/Promotion/progress-example.png -------------------------------------------------------------------------------- /Promotion/recorder-example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmrschmidt/DSWaveformImage/5f1ce68474df5a4ab055dfd0df5d2da810eaec7c/Promotion/recorder-example.png -------------------------------------------------------------------------------- /Promotion/screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmrschmidt/DSWaveformImage/5f1ce68474df5a4ab055dfd0df5d2da810eaec7c/Promotion/screenshot.png -------------------------------------------------------------------------------- /Promotion/screenshot3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmrschmidt/DSWaveformImage/5f1ce68474df5a4ab055dfd0df5d2da810eaec7c/Promotion/screenshot3.png -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | DSWaveformImage - iOS, macOS & visionOS realtime audio waveform rendering 2 | =============== 3 | [![Swift Package Manager compatible](https://img.shields.io/badge/spm-compatible-brightgreen.svg?style=flat)](https://swift.org/package-manager) 4 | 5 | DSWaveformImage offers native interfaces for drawing the envelope waveform of audio data 6 | in **iOS**, **iPadOS**, **macOS**, **visionOS** or via Catalyst.
To do so, you can use 7 | 8 | * [`WaveformImageView`](Sources/DSWaveformImageViews/UIKit/WaveformImageView.swift) (UIKit) / [`WaveformView`](Sources/DSWaveformImageViews/SwiftUI/WaveformView.swift) (SwiftUI) to render a static waveform from an audio file or 9 | * [`WaveformLiveView`](Sources/DSWaveformImageViews/UIKit/WaveformLiveView.swift) (UIKit) / [`WaveformLiveCanvas`](Sources/DSWaveformImageViews/SwiftUI/WaveformLiveCanvas.swift) (SwiftUI) to render a waveform of live audio data in realtime (e.g. from `AVAudioRecorder`) 10 | * `WaveformImageDrawer` to generate a waveform `UIImage` from an audio file 11 | 12 | Additionally, you can get a waveform's (normalized) `[Float]` samples directly as well by 13 | creating an instance of `WaveformAnalyzer`. 14 | 15 | Example UI (included in repository) 16 | ------------ 17 | 18 | For a practical, real-world example of SwiftUI live audio recording waveform rendering, see [RecordingIndicatorView](Example/DSWaveformImageExample-iOS/SwiftUIExample/SwiftUIExampleView.swift). 19 | 20 | 21 | Audio Recorder Example 22 | 23 | More related iOS Controls 24 | ------------ 25 | 26 | You may also find the following iOS controls written in Swift interesting: 27 | 28 | * [SwiftColorWheel](https://github.com/dmrschmidt/SwiftColorWheel) - a delightful color picker 29 | * [QRCode](https://github.com/dmrschmidt/QRCode) - a customizable QR code generator 30 | 31 | If you really like this library (aka Sponsoring) 32 | ------------ 33 | I'm doing all this for fun and joy and because I strongly believe in the power of open source. On the off-chance, though, that using my library has brought joy to you and you just feel like saying "thank you", I would smile like a 4-year old getting a huge ice cream cone, if you'd support me via one of the sponsoring buttons ☺️💕 34 | 35 | Alternatively, consider supporting me by downloading one of my side project iOS apps. If you're feeling in the mood of sending someone else a lovely gesture of appreciation, maybe check out my iOS app [💌 SoundCard](https://www.soundcard.io) to send them a real postcard with a personal audio message. Or download my ad-supported free to play game [🕹️ Snekris for iOS](https://apps.apple.com/us/app/snekris-play-like-its-1999/id6446217693). 36 | 37 |

38 | [Buy Me A Coffee](https://www.buymeacoffee.com/dmrschmidt) 39 | 40 | 41 | [Play Snekris](https://apps.apple.com/us/app/snekris-play-like-its-1999/id6446217693) 42 | 43 |

44 | 45 | 46 | Installation 47 | ------------ 48 | 49 | * use SPM: add `https://github.com/dmrschmidt/DSWaveformImage` and set "Up to Next Major" with "14.0.0" 50 | 51 | ```swift 52 | import DSWaveformImage // for core classes to generate `UIImage` / `NSImage` directly 53 | import DSWaveformImageViews // if you want to use the native UIKit / SwiftUI views 54 | ``` 55 | 56 | Usage 57 | ----- 58 | 59 | `DSWaveformImage` provides 3 kinds of tools to use: 60 | * native SwiftUI views - [SwiftUI example usage code](Example/DSWaveformImageExample-iOS/SwiftUIExample/SwiftUIExampleView.swift) 61 | * native UIKit views - [UIKit example usage code](Example/DSWaveformImageExample-iOS/ViewController.swift) 62 | * access to the raw renderers and processors 63 | 64 | The core renderers and processors as well as SwiftUI views natively support iOS & macOS, using `UIImage` & `NSImage` respectively. 65 | 66 | ### SwiftUI 67 | 68 | #### `WaveformView` - renders a one-off waveform from an audio file: 69 | 70 | ```swift 71 | @State var audioURL = Bundle.main.url(forResource: "example_sound", withExtension: "m4a")! 72 | WaveformView(audioURL: audioURL) 73 | ``` 74 | 75 | Default styling may be overridden if you have more complex requirements: 76 | 77 | ```swift 78 | @State var audioURL = Bundle.main.url(forResource: "example_sound", withExtension: "m4a")! 79 | WaveformView(audioURL: audioURL) { waveformShape in 80 | waveformShape 81 | .stroke(LinearGradient(colors: [.red, .green, .orange], startPoint: .zero, endPoint: .topTrailing), lineWidth: 3) 82 | } 83 | ``` 84 | 85 | Similar to [AsyncImage](https://developer.apple.com/documentation/swiftui/asyncimage/init(url:scale:content:placeholder:)), a placeholder can be 86 | set to show until the load and render operation completes successfully. Thanks to [@alfogrillo](https://github.com/alfogrillo)! 87 | 88 | ```swift 89 | WaveformView(audioURL: audioURL) { waveformShape in 90 | waveformShape 91 | .stroke(LinearGradient(colors: [.red, .green, .orange], startPoint: .zero, endPoint: .topTrailing), lineWidth: 3) 92 | } placeholder: { 93 | ProgressView() 94 | } 95 | ``` 96 | 97 | #### `WaveformLiveCanvas` - renders a live waveform from `(0...1)` normalized samples: 98 | 99 | ```swift 100 | @StateObject private var audioRecorder: AudioRecorder = AudioRecorder() // just an example 101 | WaveformLiveCanvas(samples: audioRecorder.samples) 102 | ``` 103 | 104 | ### UIKit 105 | 106 | #### `WaveformImageView` - renders a one-off waveform from an audio file: 107 | 108 | ```swift 109 | let audioURL = Bundle.main.url(forResource: "example_sound", withExtension: "m4a")! 110 | waveformImageView = WaveformImageView(frame: CGRect(x: 0, y: 0, width: 500, height: 300)) 111 | waveformImageView.waveformAudioURL = audioURL 112 | ``` 113 | 114 | #### `WaveformLiveView` - renders a live waveform from `(0...1)` normalized samples: 115 | 116 | Find a full example in the [sample project's RecordingViewController](Example/DSWaveformImageExample-iOS/RecordingViewController.swift).
117 | 118 | ```swift 119 | let waveformView = WaveformLiveView() 120 | 121 | // configure and start AVAudioRecorder 122 | let recorder = AVAudioRecorder() 123 | recorder.isMeteringEnabled = true // required to get current power levels 124 | 125 | // after all the other recording (omitted for focus) setup, periodically (every 20ms or so): 126 | recorder.updateMeters() // gets the current value 127 | let currentAmplitude = 1 - pow(10, recorder.averagePower(forChannel: 0) / 20) 128 | waveformView.add(sample: currentAmplitude) 129 | ``` 130 | 131 | ### Raw API 132 | 133 | #### Configuration 134 | 135 | *Note:* Calculations are always performed and returned on a background thread, so make sure to return to the main thread before doing any UI work. 136 | 137 | Check `Waveform.Configuration` in [WaveformImageTypes](./Sources/DSWaveformImage/WaveformImageTypes.swift) for various configuration options. 138 | 139 | #### `WaveformImageDrawer` - creates a `UIImage` waveform from an audio file: 140 | 141 | ```swift 142 | let waveformImageDrawer = WaveformImageDrawer() 143 | let audioURL = Bundle.main.url(forResource: "example_sound", withExtension: "m4a")! 144 | let image = try await waveformImageDrawer.waveformImage( 145 | fromAudioAt: audioURL, 146 | with: .init(size: topWaveformView.bounds.size, style: .filled(UIColor.black)), 147 | renderer: LinearWaveformRenderer() 148 | ) 149 | 150 | // need to jump back to main queue 151 | DispatchQueue.main.async { 152 | self.topWaveformView.image = image 153 | } 154 | ``` 155 | 156 | #### `WaveformAnalyzer` - calculates an audio file's waveform sample: 157 | 158 | ```swift 159 | let audioURL = Bundle.main.url(forResource: "example_sound", withExtension: "m4a")! 160 | waveformAnalyzer = WaveformAnalyzer() 161 | let samples = try await waveformAnalyzer.samples(fromAudioAt: audioURL, count: 200) 162 | print("samples: \(samples)") 163 | ``` 164 | 165 | ### Playback Progress Indication 166 | 167 | If you're playing back audio files and would like to indicate the playback progress to your users, you can [find inspiration in the example app](https://github.com/dmrschmidt/DSWaveformImage/blob/main/Example/DSWaveformImageExample-iOS/ProgressViewController.swift). UIKit and [SwiftUI](https://github.com/dmrschmidt/DSWaveformImage/blob/main/Example/DSWaveformImageExample-iOS/SwiftUIExample/ProgressWaveformView.swift) examples are provided. 168 | 169 | Both approaches will result in something like the image below. 170 | 171 |
172 | playback progress waveform 173 |
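One way to build this yourself is a minimal, hedged SwiftUI sketch of the underlying idea (the `PlaybackWaveform` wrapper and its `progress` input are illustrative names, not part of the library): draw the waveform twice and mask the highlighted copy to the played fraction.

```swift
import SwiftUI
import DSWaveformImageViews

// Illustrative sketch only: overlays two renderings of the same file and
// reveals the highlighted one up to the current playback fraction (0...1).
struct PlaybackWaveform: View {
    let audioURL: URL
    let progress: Double // assumed to be driven by your player; 0 = start, 1 = end

    var body: some View {
        GeometryReader { geometry in
            ZStack(alignment: .leading) {
                WaveformView(audioURL: audioURL) { shape in
                    shape.fill(Color.gray) // not yet played
                }
                WaveformView(audioURL: audioURL) { shape in
                    shape.fill(Color.blue) // played portion
                }
                .mask(alignment: .leading) {
                    Rectangle().frame(width: geometry.size.width * progress)
                }
            }
        }
    }
}
```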
174 | 175 | 176 | There is currently no plan to integrate this as a first-class citizen in the library itself, as every app will have different design requirements, and `WaveformImageDrawer` as well as `WaveformAnalyzer` are as simple to use as the views themselves, as you can see in the examples. 177 | 178 | ### Loading remote audio files from URL 179 | 180 | For one example way to display waveforms for audio files on remote URLs see https://github.com/dmrschmidt/DSWaveformImage/issues/22. 181 | 182 | What it looks like 183 | ------------------ 184 | 185 | Waveforms can be rendered in 2 different ways and 5 different styles each. 186 | 187 | By default [`LinearWaveformRenderer`](https://github.com/dmrschmidt/DSWaveformImage/blob/main/Sources/DSWaveformImage/Renderers/LinearWaveformRenderer.swift) is used, which draws a linear 2D amplitude envelope. 188 | 189 | [`CircularWaveformRenderer`](https://github.com/dmrschmidt/DSWaveformImage/blob/main/Sources/DSWaveformImage/Renderers/CircularWaveformRenderer.swift) is available as an alternative, which can be passed in to the `WaveformView` or `WaveformLiveView` respectively. It draws a circular 190 | 2D amplitude envelope. 191 | 192 | You can implement your own renderer by implementing [`WaveformRenderer`](https://github.com/dmrschmidt/DSWaveformImage/blob/main/Sources/DSWaveformImage/Renderers/WaveformRenderer.swift). 193 | 194 | The following styles can be applied to either renderer: 195 | - **filled**: Use solid color for the waveform. 196 | - **outlined**: Draws the envelope as an outline with the provided thickness. 197 | - **gradient**: Use gradient based on color for the waveform. 198 | - **gradientOutlined**: Use gradient based on color for the waveform. Draws the envelope as an outline with the provided thickness. 199 | - **striped**: Use striped filling based on color for the waveform. 200 | A small sketch showing how these styles combine with either renderer follows the screenshot below. 201 |
202 | Screenshot 203 |
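To make the combinations concrete, here is a small, hedged sketch using the APIs from the Usage section (sizes, colors and stripe values are arbitrary placeholders): the same `Waveform.Configuration` can be handed to either renderer.

```swift
import DSWaveformImage

// assumes an async throwing context, as in the earlier examples
let audioURL = Bundle.main.url(forResource: "example_sound", withExtension: "m4a")!
let configuration = Waveform.Configuration(
    size: CGSize(width: 320, height: 96),
    style: .striped(.init(color: .black, width: 3, spacing: 3))
)

let drawer = WaveformImageDrawer()

// linear 2D amplitude envelope
let linearImage = try await drawer.waveformImage(fromAudioAt: audioURL, with: configuration, renderer: LinearWaveformRenderer())

// same samples, drawn as a circular envelope
let circularImage = try await drawer.waveformImage(fromAudioAt: audioURL, with: configuration, renderer: CircularWaveformRenderer())
```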
204 | 205 | 206 | ### Live waveform rendering 207 | https://user-images.githubusercontent.com/69365/127739821-061a4345-0adc-4cc1-bfd6-f7cfbe1268c9.mov 208 | 209 | 210 | Migration 211 | --------- 212 | ### In 14.0.0 213 | * Minimum iOS Deployment target is 15.0, macOS is 12.0 to remove internal usage of deprecated APIs 214 | * `WaveformAnalyzer` and `WaveformImageDrawer` now return `Result<[Float] | DSImage, Error>` when used with completionHandler for better error handling 215 | * `WaveformAnalyzer` is now stateless and requires the URL in `.samples(fromAudioAt:count:qos:)` instead of its constructor 216 | * SwiftUI's `WaveformView` has a new constructor that provides optional access to the underlying `WaveformShape`, which is now used for rendering, see [#78](https://github.com/dmrschmidt/DSWaveformImage/issues/78) 217 | 218 | ### In 13.0.0 219 | * Any mentions of `dampening` & similar were corrected to `damping` etc in [11460b8b](https://github.com/dmrschmidt/DSWaveformImage/commit/11460b8b8203f163868ba774d1533116d2fe68a1). Most notably in `Waveform.Configuration`. See [#64](https://github.com/dmrschmidt/DSWaveformImage/issues/64). 220 | * styles `.outlined` & `.gradientOutlined` were added to `Waveform.Style`, see https://github.com/dmrschmidt/DSWaveformImage#what-it-looks-like 221 | * `Waveform.Position` was removed. If you were using it to place the view somewhere, move this responsibility up to its parent for positioning, like with any other view as well. 222 | 223 | ### In 12.0.0 224 | * The rendering pipeline was split out from the analysis. You can now create your own renderers by conforming to [`WaveformRenderer`](https://github.com/dmrschmidt/DSWaveformImage/blob/main/Sources/DSWaveformImage/Renderers/WaveformRenderer.swift). 225 | * A new [`CircularWaveformRenderer`](https://github.com/dmrschmidt/DSWaveformImage/blob/main/Sources/DSWaveformImage/Renderers/CircularWaveformRenderer.swift) has been added. 226 | * `position` was removed from `Waveform.Configuration`, see [0447737](https://github.com/dmrschmidt/DSWaveformImage/commit/044773782092becec0424527f6feef061988db7a). 227 | * new `Waveform.Style` options have been added and need to be accounted for in `switch` statements etc. 228 | 229 | ### In 11.0.0 230 | the library was split into two: `DSWaveformImage` and `DSWaveformImageViews`. If you've used any of the native views before, just add the additional `import DSWaveformImageViews`. 231 | The SwiftUI views have changed from taking a Binding to the respective plain values instead. 232 | 233 | ### In 9.0.0 234 | a few public APIs have been slightly changed to be more concise. All types have also been grouped under the `Waveform` enum-namespace. Meaning `WaveformConfiguration` for instance has become `Waveform.Configuration` and so on. 235 | 236 | ### In 7.0.0 237 | colors have moved into associated values on the respective `style` enum. 238 | 239 | `Waveform` and the `UIImage` category have been removed in 6.0.0 to simplify the API. 240 | See `Usage` for current usage. 241 | 242 | ## See it live in action 243 | 244 | [SoundCard - postcards with sound](https://www.soundcard.io) lets you send real, physical postcards with audio messages. Right from your iOS device. 245 | 246 | DSWaveformImage is used to draw the waveforms of the audio messages that get printed on the postcards sent by [SoundCard - postcards with audio](https://www.soundcard.io).
247 | 248 | Download on the App Store 249 | 250 | 251 | Screenshot 252 | -------------------------------------------------------------------------------- /Sources/DSWaveformImage/Renderers/CircularWaveformRenderer.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import CoreGraphics 3 | 4 | /** 5 | Draws a circular 2D amplitude envelope of the samples provided. 6 | 7 | Draws either a filled circle, or a hollow ring, depending on the provided `Kind`. Defaults to drawing a `.circle`. 8 | `Kind.ring` is currently experimental. 9 | Can be customized further via the configuration `Waveform.Style`. 10 | */ 11 | 12 | public struct CircularWaveformRenderer: WaveformRenderer { 13 | public enum Kind: Sendable { 14 | /// Draws waveform as a circular amplitude envelope. 15 | case circle 16 | 17 | /// **Experimental!** (Will) draw waveform as a ring-shaped amplitude envelope. 18 | /// Associated value will define the percentage of desired "hollowness" inside, or in other words the ring's thickness / diameter in relation to the overall diameter. 19 | case ring(CGFloat) 20 | } 21 | 22 | private let kind: Kind 23 | 24 | public init(kind: Kind = .circle) { 25 | self.kind = kind 26 | } 27 | 28 | public func path(samples: [Float], with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position = .middle) -> CGPath { 29 | switch kind { 30 | case .circle: return circlePath(samples: samples, with: configuration, lastOffset: lastOffset, position: position) 31 | case .ring: return ringPath(samples: samples, with: configuration, lastOffset: lastOffset, position: position) 32 | } 33 | } 34 | 35 | public func render(samples: [Float], on context: CGContext, with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position = .middle) { 36 | let path = path(samples: samples, with: configuration, lastOffset: lastOffset, position: position) 37 | context.addPath(path) 38 | 39 | style(context: context, with: configuration) 40 | } 41 | 42 | func style(context: CGContext, with configuration: Waveform.Configuration) { 43 | if case let .gradient(colors) = configuration.style { 44 | context.clip() 45 | let colors = NSArray(array: colors.map { (color: DSColor) -> CGColor in color.cgColor }) as CFArray 46 | let colorSpace = CGColorSpaceCreateDeviceRGB() 47 | let gradient = CGGradient(colorsSpace: colorSpace, colors: colors, locations: nil)!
48 | context.drawLinearGradient(gradient, 49 | start: CGPoint(x: 0, y: 0), 50 | end: CGPoint(x: 0, y: configuration.size.height), 51 | options: .drawsAfterEndLocation) 52 | } else { 53 | defaultStyle(context: context, with: configuration) 54 | } 55 | } 56 | 57 | private func circlePath(samples: [Float], with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position) -> CGPath { 58 | let graphRect = CGRect(origin: .zero, size: configuration.size) 59 | let maxRadius = CGFloat(min(graphRect.maxX, graphRect.maxY) / 2.0) * configuration.verticalScalingFactor 60 | let center = CGPoint( 61 | x: graphRect.maxX * position.offset(), 62 | y: graphRect.maxY * position.offset() 63 | ) 64 | let path = CGMutablePath() 65 | 66 | path.move(to: center) 67 | 68 | for (index, sample) in samples.enumerated() { 69 | let angle = CGFloat.pi * 2 * (CGFloat(index) / CGFloat(samples.count)) 70 | let x = index + lastOffset 71 | 72 | if case .striped = configuration.style, x % Int(configuration.scale) != 0 || x % stripeBucket(configuration) != 0 { 73 | // skip sub-pixels - any x value not scale aligned 74 | // skip any point that is not a multiple of our bucket width (width + spacing) 75 | path.addLine(to: center) 76 | continue 77 | } 78 | 79 | let invertedDbSample = 1 - CGFloat(sample) // sample is in dB, linearly normalized to [0, 1] (1 -> -50 dB) 80 | let pointOnCircle = CGPoint( 81 | x: center.x + maxRadius * invertedDbSample * cos(angle), 82 | y: center.y + maxRadius * invertedDbSample * sin(angle) 83 | ) 84 | 85 | path.addLine(to: pointOnCircle) 86 | } 87 | 88 | path.closeSubpath() 89 | return path 90 | } 91 | 92 | private func ringPath(samples: [Float], with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position) -> CGPath { 93 | guard case let .ring(config) = kind else { 94 | fatalError("called with wrong kind") 95 | } 96 | 97 | let graphRect = CGRect(origin: .zero, size: configuration.size) 98 | let maxRadius = CGFloat(min(graphRect.maxX, graphRect.maxY) / 2.0) * configuration.verticalScalingFactor 99 | let innerRadius: CGFloat = maxRadius * config 100 | let center = CGPoint( 101 | x: graphRect.maxX * position.offset(), 102 | y: graphRect.maxY * position.offset() 103 | ) 104 | let path = CGMutablePath() 105 | 106 | path.move(to: CGPoint( 107 | x: center.x + innerRadius * cos(0), 108 | y: center.y + innerRadius * sin(0) 109 | )) 110 | 111 | for (index, sample) in samples.enumerated() { 112 | let x = index + lastOffset 113 | let angle = CGFloat.pi * 2 * (CGFloat(index) / CGFloat(samples.count)) 114 | 115 | if case .striped = configuration.style, x % Int(configuration.scale) != 0 || x % stripeBucket(configuration) != 0 { 116 | // skip sub-pixels - any x value not scale aligned 117 | // skip any point that is not a multiple of our bucket width (width + spacing) 118 | path.move(to: CGPoint( 119 | x: center.x + innerRadius * cos(angle), 120 | y: center.y + innerRadius * sin(angle) 121 | )) 122 | continue 123 | } 124 | 125 | let invertedDbSample = 1 - CGFloat(sample) // sample is in dB, linearly normalized to [0, 1] (1 -> -50 dB) 126 | let pointOnCircle = CGPoint( 127 | x: center.x + innerRadius * cos(angle) + (maxRadius - innerRadius) * invertedDbSample * cos(angle), 128 | y: center.y + innerRadius * sin(angle) + (maxRadius - innerRadius) * invertedDbSample * sin(angle) 129 | ) 130 | 131 | path.addLine(to: pointOnCircle) 132 | } 133 | 134 | path.closeSubpath() 135 | return path 136 | } 137 | 138 | private func stripeBucket(_ configuration: 
Waveform.Configuration) -> Int { 139 | if case let .striped(stripeConfig) = configuration.style { 140 | return Int(stripeConfig.width + stripeConfig.spacing) * Int(configuration.scale) 141 | } else { 142 | return 0 143 | } 144 | } 145 | } 146 | -------------------------------------------------------------------------------- /Sources/DSWaveformImage/Renderers/LinearWaveformRenderer.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import CoreGraphics 3 | 4 | /** 5 | Draws a linear 2D amplitude envelope of the samples provided. 6 | 7 | Default `WaveformRenderer` used. Can be customized further via the configuration `Waveform.Style`. 8 | */ 9 | public struct LinearWaveformRenderer: WaveformRenderer { 10 | public init() {} 11 | 12 | public func path(samples: [Float], with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position = .middle) -> CGPath { 13 | let graphRect = CGRect(origin: CGPoint.zero, size: configuration.size) 14 | let positionAdjustedGraphCenter = position.offset() * graphRect.size.height 15 | var path = CGMutablePath() 16 | 17 | path.move(to: CGPoint(x: 0, y: positionAdjustedGraphCenter)) 18 | 19 | if case .striped = configuration.style { 20 | path = draw(samples: samples, path: path, with: configuration, lastOffset: lastOffset, sides: .both, position: position) 21 | } else { 22 | path = draw(samples: samples, path: path, with: configuration, lastOffset: lastOffset, sides: .up, position: position) 23 | path = draw(samples: samples.reversed(), path: path, with: configuration, lastOffset: lastOffset, sides: .down, position: position) 24 | } 25 | 26 | path.closeSubpath() 27 | return path 28 | } 29 | 30 | public func render(samples: [Float], on context: CGContext, with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position = .middle) { 31 | context.addPath(path(samples: samples, with: configuration, lastOffset: lastOffset, position: position)) 32 | defaultStyle(context: context, with: configuration) 33 | } 34 | 35 | private func stripeBucket(_ configuration: Waveform.Configuration) -> Int { 36 | if case let .striped(stripeConfig) = configuration.style { 37 | return Int(stripeConfig.width + stripeConfig.spacing) * Int(configuration.scale) 38 | } else { 39 | return 0 40 | } 41 | } 42 | 43 | enum Sides { 44 | case up, down, both 45 | } 46 | 47 | private func draw(samples: [Float], path: CGMutablePath, with configuration: Waveform.Configuration, lastOffset: Int, sides: Sides, position: Waveform.Position = .middle) -> CGMutablePath { 48 | let graphRect = CGRect(origin: CGPoint.zero, size: configuration.size) 49 | let positionAdjustedGraphCenter = position.offset() * graphRect.size.height 50 | let drawMappingFactor = graphRect.size.height * configuration.verticalScalingFactor 51 | let minimumGraphAmplitude: CGFloat = 1 / configuration.scale // we want to see at least a 1px line for silence 52 | var lastXPos: CGFloat = 0 53 | 54 | for (index, sample) in samples.enumerated() { 55 | let adjustedIndex: Int 56 | switch sides { 57 | case .up, .both: adjustedIndex = index 58 | case .down: adjustedIndex = samples.count - index 59 | } 60 | 61 | var x = adjustedIndex + lastOffset 62 | if case .striped = configuration.style, x % Int(configuration.scale) != 0 || x % stripeBucket(configuration) != 0 { 63 | // skip sub-pixels - any x value not scale aligned 64 | // skip any point that is not a multiple of our bucket width (width + spacing) 65 | continue 66 | } else if case let 
.striped(config) = configuration.style { 67 | // ensure 1st stripe is drawn completely inside bounds and does not clip half way on the left side 68 | x += Int(config.width / 2 * configuration.scale) 69 | } 70 | 71 | let samplesNeeded = Int(configuration.size.width * configuration.scale) 72 | let xOffset = CGFloat(samplesNeeded - samples.count) / configuration.scale // When there's extra space, draw waveform on the right 73 | let xPos = (CGFloat(x - lastOffset) / configuration.scale) + xOffset 74 | let invertedDbSample = 1 - CGFloat(sample) // sample is in dB, linearly normalized to [0, 1] (1 -> -50 dB) 75 | let drawingAmplitude = max(minimumGraphAmplitude, invertedDbSample * drawMappingFactor) 76 | let drawingAmplitudeUp = positionAdjustedGraphCenter - drawingAmplitude 77 | let drawingAmplitudeDown = positionAdjustedGraphCenter + drawingAmplitude 78 | lastXPos = xPos 79 | 80 | switch sides { 81 | case .up: 82 | path.addLine(to: CGPoint(x: xPos, y: drawingAmplitudeUp)) 83 | 84 | case .down: 85 | path.addLine(to: CGPoint(x: xPos, y: drawingAmplitudeDown)) 86 | 87 | case .both: 88 | path.move(to: CGPoint(x: xPos, y: drawingAmplitudeUp)) 89 | path.addLine(to: CGPoint(x: xPos, y: drawingAmplitudeDown)) 90 | } 91 | } 92 | 93 | if case .striped = configuration.style { 94 | path.move(to: CGPoint(x: lastXPos, y: positionAdjustedGraphCenter)) 95 | } 96 | 97 | return path 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /Sources/DSWaveformImage/Renderers/WaveformRenderer.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import CoreGraphics 3 | 4 | public extension WaveformRenderer { 5 | /** 6 | Default styling. Exposed publicly simply because it is re-used internally in the included renderers. May be useful 7 | if other renderers would like to stick to the default behavior. 8 | */ 9 | func defaultStyle(context: CGContext, with configuration: Waveform.Configuration) { 10 | // draw pixel-perfect by default 11 | context.setLineWidth(1.0 / configuration.scale) 12 | 13 | switch configuration.style { 14 | case let .filled(color): 15 | context.setFillColor(color.cgColor) 16 | context.fillPath() 17 | 18 | case let .outlined(color, lineWidth): 19 | context.setStrokeColor(color.cgColor) 20 | context.setLineWidth(lineWidth) 21 | context.setLineCap(.round) 22 | context.strokePath() 23 | 24 | case let .striped(config): 25 | context.setLineWidth(config.width) 26 | context.setLineCap(config.lineCap) 27 | context.setStrokeColor(config.color.cgColor) 28 | context.strokePath() 29 | 30 | case let .gradient(colors): 31 | context.clip() 32 | let colors = NSArray(array: colors.map { (color: DSColor) -> CGColor in color.cgColor }) as CFArray 33 | let colorSpace = CGColorSpaceCreateDeviceRGB() 34 | let gradient = CGGradient(colorsSpace: colorSpace, colors: colors, locations: nil)!
35 | context.drawLinearGradient(gradient, 36 | start: CGPoint(x: 0, y: 0), 37 | end: CGPoint(x: 0, y: configuration.size.height), 38 | options: .drawsAfterEndLocation) 39 | 40 | case let .gradientOutlined(colors, lineWidth): 41 | context.setLineWidth(lineWidth) 42 | context.replacePathWithStrokedPath() 43 | context.setLineCap(.round) 44 | context.setLineJoin(.round) 45 | context.clip() 46 | let colors = NSArray(array: colors.map { (color: DSColor) -> CGColor in color.cgColor }) as CFArray 47 | let colorSpace = CGColorSpaceCreateDeviceRGB() 48 | let gradient = CGGradient(colorsSpace: colorSpace, colors: colors, locations: nil)! 49 | context.drawLinearGradient(gradient, 50 | start: CGPoint(x: 0, y: 0), 51 | end: CGPoint(x: 0, y: configuration.size.height), 52 | options: .drawsAfterEndLocation) 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /Sources/DSWaveformImage/TempiFFT.swift: -------------------------------------------------------------------------------- 1 | // 2 | // TempiFFT.swift 3 | // TempiBeatDetection 4 | // 5 | // Created by John Scalo on 1/12/16. 6 | // Copyright © 2016 John Scalo. See accompanying License.txt for terms. 7 | 8 | /* A functional FFT built atop Apple's Accelerate framework for optimum performance on any device. In addition to simply performing the FFT and providing access to the resulting data, TempiFFT provides the ability to map the FFT spectrum data into logical bands, either linear or logarithmic, for further analysis. 9 | 10 | E.g. 11 | 12 | let fft = TempiFFT(withSize: frameSize, sampleRate: 44100) 13 | 14 | // Setting a window type reduces errors 15 | fft.windowType = TempiFFTWindowType.hanning 16 | 17 | // Perform the FFT 18 | fft.fftForward(samples) 19 | 20 | // Map FFT data to logical bands. This gives 4 bands per octave across 7 octaves = 28 bands. 21 | fft.calculateLogarithmicBands(minFrequency: 100, maxFrequency: 11025, bandsPerOctave: 4) 22 | 23 | // Process some data 24 | for i in 0..<fft.numberOfBands { … } */ […] 227 | private func magIndexForFreq(_ freq: Float) -> Int { 228 | return Int(Float(self.magnitudes.count) * freq / self.nyquistFrequency) 229 | } 230 | 231 | // On arrays of 1024 elements, this is ~35x faster than an iterational algorithm. Thanks Accelerate.framework! 232 | @inline(__always) private func fastAverage(_ array:[Float], _ startIdx: Int, _ stopIdx: Int) -> Float { 233 | var mean: Float = 0 234 | array.withUnsafeBufferPointer { arrayBP in 235 | vDSP_meanv(arrayBP.baseAddress! + startIdx, 1, &mean, UInt(stopIdx - startIdx)) 236 | } 237 | 238 | return mean 239 | } 240 | 241 | @inline(__always) private func averageFrequencyInRange(_ startIndex: Int, _ endIndex: Int) -> Float { 242 | return (self.bandwidth * Float(startIndex) + self.bandwidth * Float(endIndex)) / 2 243 | } 244 | 245 | /// Get the magnitude for the specified frequency band. 246 | /// - Parameter inBand: The frequency band you want a magnitude for. 247 | func magnitudeAtBand(_ inBand: Int) -> Float { 248 | assert(hasPerformedFFT, "*** Perform the FFT first.") 249 | assert(bandMagnitudes != nil, "*** Call calculateLinearBands() or calculateLogarithmicBands() first") 250 | 251 | return bandMagnitudes[inBand] 252 | } 253 | 254 | /// Get the magnitude of the requested frequency in the spectrum. 255 | /// - Parameter inFrequency: The requested frequency. Must be less than the Nyquist frequency (```sampleRate/2```). 256 | /// - Returns: A magnitude.
257 | func magnitudeAtFrequency(_ inFrequency: Float) -> Float { 258 | assert(hasPerformedFFT, "*** Perform the FFT first.") 259 | let index = Int(floorf(inFrequency / self.bandwidth)) 260 | return self.magnitudes[index] 261 | } 262 | 263 | /// Get the middle frequency of the Nth band. 264 | /// - Parameter inBand: An index where 0 <= inBand < size / 2. 265 | /// - Returns: The middle frequency of the provided band. 266 | func frequencyAtBand(_ inBand: Int) -> Float { 267 | assert(hasPerformedFFT, "*** Perform the FFT first.") 268 | assert(bandMagnitudes != nil, "*** Call calculateLinearBands() or calculateLogarithmicBands() first") 269 | return self.bandFrequencies[inBand] 270 | } 271 | 272 | /// A convenience function that converts a linear magnitude (like those stored in ```magnitudes```) to db (which is log 10). 273 | class func toDB(_ inMagnitude: Float) -> Float { 274 | // ceil to 128db in order to avoid log10'ing 0 275 | let magnitude = max(inMagnitude, 0.000000000001) 276 | return 10 * log10f(magnitude) 277 | } 278 | } 279 | -------------------------------------------------------------------------------- /Sources/DSWaveformImage/WaveformAnalyzer.swift: -------------------------------------------------------------------------------- 1 | // 2 | // see 3 | // * http://www.davidstarke.com/2015/04/waveforms.html 4 | // * http://stackoverflow.com/questions/28626914 5 | // for very good explanations of the asset reading and processing path 6 | // 7 | // FFT done using: https://github.com/jscalo/tempi-fft 8 | // 9 | 10 | import Foundation 11 | import Accelerate 12 | import AVFoundation 13 | 14 | struct WaveformAnalysis { 15 | let amplitudes: [Float] 16 | let fft: [TempiFFT]? 17 | } 18 | 19 | /// Calculates the waveform of the audio asset at a given URL. 20 | public struct WaveformAnalyzer: Sendable { 21 | public enum AnalyzeError: Error { case generic, userError, emptyTracks, readerError(AVAssetReader.Status) } 22 | 23 | /// Everything below this noise floor cutoff will be clipped and interpreted as silence. Default is `-50.0`. 24 | public var noiseFloorDecibelCutoff: Float = -50.0 25 | 26 | public init() {} 27 | 28 | /// Calculates the amplitude envelope of the audio asset at the given URL, downsampled to the required `count` amount of samples. 29 | /// - Parameter fromAudioAt: local filesystem URL of the audio file to process. 30 | /// - Parameter count: amount of samples to be calculated. Downsamples. 31 | /// - Parameter qos: QoS of the DispatchQueue the calculations are performed (and returned) on. 32 | public func samples(fromAudioAt audioAssetURL: URL, count: Int, qos: DispatchQoS.QoSClass = .userInitiated) async throws -> [Float] { 33 | try await Task(priority: taskPriority(qos: qos)) { 34 | let audioAsset = AVURLAsset(url: audioAssetURL, options: [AVURLAssetPreferPreciseDurationAndTimingKey: true]) 35 | let assetReader = try AVAssetReader(asset: audioAsset) 36 | 37 | guard let assetTrack = try await audioAsset.loadTracks(withMediaType: .audio).first else { 38 | throw AnalyzeError.emptyTracks 39 | } 40 | 41 | return try await waveformSamples(track: assetTrack, reader: assetReader, count: count, fftBands: nil).amplitudes 42 | }.value 43 | } 44 | 45 | /// Calculates the amplitude envelope of the audio asset at the given URL, downsampled to the required `count` amount of samples. 46 | /// - Parameter fromAudioAt: local filesystem URL of the audio file to process. 47 | /// - Parameter count: amount of samples to be calculated. Downsamples.
48 | /// - Parameter qos: QoS of the DispatchQueue the calculations are performed (and returned) on. 49 | /// - Parameter completionHandler: called from a background thread. Returns the sampled result `[Float]` or `Error`. 50 | /// 51 | /// Calls the completionHandler on a background thread. 52 | @available(*, deprecated, renamed: "samples(fromAudioAt:count:qos:)") 53 | public func samples(fromAudioAt audioAssetURL: URL, count: Int, qos: DispatchQoS.QoSClass = .userInitiated, completionHandler: @escaping (Result<[Float], Error>) -> ()) { 54 | Task { 55 | do { 56 | let samples = try await samples(fromAudioAt: audioAssetURL, count: count, qos: qos) 57 | completionHandler(.success(samples)) 58 | } catch { 59 | completionHandler(.failure(error)) 60 | } 61 | } 62 | } 63 | } 64 | 65 | // MARK: - Private 66 | 67 | fileprivate extension WaveformAnalyzer { 68 | func waveformSamples( 69 | track audioAssetTrack: AVAssetTrack, 70 | reader assetReader: AVAssetReader, 71 | count requiredNumberOfSamples: Int, 72 | fftBands: Int? 73 | ) async throws -> WaveformAnalysis { 74 | guard requiredNumberOfSamples > 0 else { 75 | throw AnalyzeError.userError 76 | } 77 | 78 | let trackOutput = AVAssetReaderTrackOutput(track: audioAssetTrack, outputSettings: outputSettings()) 79 | assetReader.add(trackOutput) 80 | 81 | let totalSamples = try await totalSamples(of: audioAssetTrack) 82 | let analysis = extract(totalSamples, downsampledTo: requiredNumberOfSamples, from: assetReader, fftBands: fftBands) 83 | 84 | switch assetReader.status { 85 | case .completed: 86 | return analysis 87 | default: 88 | print("ERROR: reading waveform audio data has failed \(assetReader.status)") 89 | throw AnalyzeError.readerError(assetReader.status) 90 | } 91 | } 92 | 93 | func extract( 94 | _ totalSamples: Int, 95 | downsampledTo targetSampleCount: Int, 96 | from assetReader: AVAssetReader, 97 | fftBands: Int? 98 | ) -> WaveformAnalysis { 99 | var outputSamples = [Float]() 100 | var outputFFT = fftBands == nil ? nil : [TempiFFT]() 101 | var sampleBuffer = Data() 102 | var sampleBufferFFT = Data() 103 | 104 | // read upfront to avoid frequent re-calculation (and memory bloat from C-bridging) 105 | let samplesPerPixel = max(1, totalSamples / targetSampleCount) 106 | let samplesPerFFT = 4096 // ~100ms at 44.1kHz, rounded to closest pow(2) for FFT 107 | 108 | assetReader.startReading() 109 | while assetReader.status == .reading { 110 | let trackOutput = assetReader.outputs.first! 111 | 112 | guard let nextSampleBuffer = trackOutput.copyNextSampleBuffer(), 113 | let blockBuffer = CMSampleBufferGetDataBuffer(nextSampleBuffer) else { 114 | break 115 | } 116 | 117 | var readBufferLength = 0 118 | var readBufferPointer: UnsafeMutablePointer<Int8>? = nil 119 | CMBlockBufferGetDataPointer(blockBuffer, atOffset: 0, lengthAtOffsetOut: &readBufferLength, totalLengthOut: nil, dataPointerOut: &readBufferPointer) 120 | sampleBuffer.append(UnsafeBufferPointer(start: readBufferPointer, count: readBufferLength)) 121 | if fftBands != nil { 122 | // don't append data to this buffer unless we're going to use it.
123 | sampleBufferFFT.append(UnsafeBufferPointer(start: readBufferPointer, count: readBufferLength)) 124 | } 125 | CMSampleBufferInvalidate(nextSampleBuffer) 126 | 127 | let processedSamples = process(sampleBuffer, from: assetReader, downsampleTo: samplesPerPixel) 128 | outputSamples += processedSamples 129 | 130 | if processedSamples.count > 0 { 131 | // vDSP_desamp uses strides of samplesPerPixel; remove only the processed ones 132 | sampleBuffer.removeFirst(processedSamples.count * samplesPerPixel * MemoryLayout<Int16>.size) 133 | 134 | // this takes care of a memory leak where Memory continues to increase even though it should clear after calling .removeFirst(…) above. 135 | sampleBuffer = Data(sampleBuffer) 136 | } 137 | 138 | if let fftBands = fftBands, sampleBufferFFT.count / MemoryLayout<Int16>.size >= samplesPerFFT { 139 | let processedFFTs = process(sampleBufferFFT, samplesPerFFT: samplesPerFFT, fftBands: fftBands) 140 | sampleBufferFFT.removeFirst(processedFFTs.count * samplesPerFFT * MemoryLayout<Int16>.size) 141 | outputFFT? += processedFFTs 142 | } 143 | } 144 | 145 | // if we don't have enough pixels yet, 146 | // process leftover samples with padding (to reach multiple of samplesPerPixel for vDSP_desamp) 147 | if outputSamples.count < targetSampleCount { 148 | let missingSampleCount = (targetSampleCount - outputSamples.count) * samplesPerPixel 149 | let backfillPaddingSampleCount = missingSampleCount - (sampleBuffer.count / MemoryLayout<Int16>.size) 150 | let backfillPaddingSampleCount16 = backfillPaddingSampleCount * MemoryLayout<Int16>.size 151 | let backfillPaddingSamples = [UInt8](repeating: 0, count: backfillPaddingSampleCount16) 152 | sampleBuffer.append(backfillPaddingSamples, count: backfillPaddingSampleCount16) 153 | let processedSamples = process(sampleBuffer, from: assetReader, downsampleTo: samplesPerPixel) 154 | outputSamples += processedSamples 155 | } 156 | 157 | let targetSamples = Array(outputSamples[0..<targetSampleCount]) 158 | return WaveformAnalysis(amplitudes: normalize(targetSamples), fft: outputFFT) 159 | } 160 | 161 | private func process(_ sampleBuffer: Data, from assetReader: AVAssetReader, downsampleTo samplesPerPixel: Int) -> [Float] { 162 | var downSampledData = [Float]() 163 | let sampleLength = sampleBuffer.count / MemoryLayout<Int16>.size 164 | 165 | // guard for crash in very long audio files 166 | guard sampleLength / samplesPerPixel > 0 else { return downSampledData } 167 | 168 | sampleBuffer.withUnsafeBytes { (samplesRawPointer: UnsafeRawBufferPointer) in 169 | let unsafeSamplesBufferPointer = samplesRawPointer.bindMemory(to: Int16.self) 170 | let unsafeSamplesPointer = unsafeSamplesBufferPointer.baseAddress!
171 | var loudestClipValue: Float = 0.0 172 | var quietestClipValue = noiseFloorDecibelCutoff 173 | var zeroDbEquivalent: Float = Float(Int16.max) // maximum amplitude storable in Int16 = 0 Db (loudest) 174 | let samplesToProcess = vDSP_Length(sampleLength) 175 | 176 | var processingBuffer = [Float](repeating: 0.0, count: Int(samplesToProcess)) 177 | vDSP_vflt16(unsafeSamplesPointer, 1, &processingBuffer, 1, samplesToProcess) // convert 16bit int to float 178 | vDSP_vabs(processingBuffer, 1, &processingBuffer, 1, samplesToProcess) // absolute amplitude value 179 | vDSP_vdbcon(processingBuffer, 1, &zeroDbEquivalent, &processingBuffer, 1, samplesToProcess, 1) // convert to DB 180 | vDSP_vclip(processingBuffer, 1, &quietestClipValue, &loudestClipValue, &processingBuffer, 1, samplesToProcess) 181 | 182 | let filter = [Float](repeating: 1.0 / Float(samplesPerPixel), count: samplesPerPixel) 183 | let downSampledLength = sampleLength / samplesPerPixel 184 | downSampledData = [Float](repeating: 0.0, count: downSampledLength) 185 | 186 | vDSP_desamp(processingBuffer, 187 | vDSP_Stride(samplesPerPixel), 188 | filter, 189 | &downSampledData, 190 | vDSP_Length(downSampledLength), 191 | vDSP_Length(samplesPerPixel)) 192 | } 193 | 194 | return downSampledData 195 | } 196 | 197 | private func process(_ sampleBuffer: Data, samplesPerFFT: Int, fftBands: Int) -> [TempiFFT] { 198 | var ffts = [TempiFFT]() 199 | let sampleLength = sampleBuffer.count / MemoryLayout<Int16>.size 200 | sampleBuffer.withUnsafeBytes { (samplesRawPointer: UnsafeRawBufferPointer) in 201 | let unsafeSamplesBufferPointer = samplesRawPointer.bindMemory(to: Int16.self) 202 | let unsafeSamplesPointer = unsafeSamplesBufferPointer.baseAddress! 203 | let samplesToProcess = vDSP_Length(sampleLength) 204 | 205 | var processingBuffer = [Float](repeating: 0.0, count: Int(samplesToProcess)) 206 | vDSP_vflt16(unsafeSamplesPointer, 1, &processingBuffer, 1, samplesToProcess) // convert 16bit int to float 207 | 208 | repeat { 209 | let fftBuffer = processingBuffer[0..<samplesPerFFT] […] 217 | } while processingBuffer.count >= samplesPerFFT 218 | } 219 | return ffts 220 | } 221 | 222 | func normalize(_ samples: [Float]) -> [Float] { 223 | samples.map { $0 / noiseFloorDecibelCutoff } 224 | } 225 | 226 | private func totalSamples(of audioAssetTrack: AVAssetTrack) async throws -> Int { 227 | var totalSamples = 0 228 | let (descriptions, timeRange) = try await audioAssetTrack.load(.formatDescriptions, .timeRange) 229 | 230 | descriptions.forEach { formatDescription in 231 | guard let basicDescription = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription) else { return } 232 | let channelCount = Int(basicDescription.pointee.mChannelsPerFrame) 233 | let sampleRate = basicDescription.pointee.mSampleRate 234 | totalSamples = Int(sampleRate * timeRange.duration.seconds) * channelCount 235 | } 236 | return totalSamples 237 | } 238 | } 239 | 240 | // MARK: - Configuration 241 | 242 | private extension WaveformAnalyzer { 243 | func outputSettings() -> [String: Any] { 244 | return [ 245 | AVFormatIDKey: kAudioFormatLinearPCM, 246 | AVLinearPCMBitDepthKey: 16, 247 | AVLinearPCMIsBigEndianKey: false, 248 | AVLinearPCMIsFloatKey: false, 249 | AVLinearPCMIsNonInterleaved: false 250 | ] 251 | } 252 | 253 | func taskPriority(qos: DispatchQoS.QoSClass) -> TaskPriority { 254 | switch qos { 255 | case .background: return .background 256 | case .utility: return .utility 257 | case .default: return .medium 258 | case .userInitiated: return .userInitiated 259 | case .userInteractive: return .high 260 | case .unspecified:
return .medium 261 | @unknown default: return .medium 262 | } 263 | } 264 | } 265 | -------------------------------------------------------------------------------- /Sources/DSWaveformImage/WaveformImageDrawer+iOS.swift: -------------------------------------------------------------------------------- 1 | #if os(iOS) || swift(>=5.9) && os(visionOS) 2 | import Foundation 3 | import AVFoundation 4 | import UIKit 5 | import CoreGraphics 6 | 7 | public extension WaveformImageDrawer { 8 | /// Renders a DSImage of the provided waveform samples. 9 | /// 10 | /// Samples need to be normalized within interval `(0...1)`. 11 | func waveformImage(from samples: [Float], with configuration: Waveform.Configuration, renderer: WaveformRenderer, position: Waveform.Position = .middle) -> DSImage? { 12 | guard samples.count > 0, samples.count == Int(configuration.size.width * configuration.scale) else { 13 | print("ERROR: samples: \(samples.count) != \(configuration.size.width) * \(configuration.scale)") 14 | return nil 15 | } 16 | 17 | let format = UIGraphicsImageRendererFormat() 18 | format.scale = configuration.scale 19 | let imageRenderer = UIGraphicsImageRenderer(size: configuration.size, format: format) 20 | let dampedSamples = configuration.shouldDamp ? damp(samples, with: configuration) : samples 21 | 22 | return imageRenderer.image { renderContext in 23 | draw(on: renderContext.cgContext, from: dampedSamples, with: configuration, renderer: renderer, position: position) 24 | } 25 | } 26 | } 27 | #endif 28 | -------------------------------------------------------------------------------- /Sources/DSWaveformImage/WaveformImageDrawer+macOS.swift: -------------------------------------------------------------------------------- 1 | #if os(macOS) 2 | import Foundation 3 | import AVFoundation 4 | import AppKit 5 | import CoreGraphics 6 | 7 | public extension WaveformImageDrawer { 8 | /// Renders a DSImage of the provided waveform samples. 9 | /// 10 | /// Samples need to be normalized within interval `(0...1)`. 11 | func waveformImage(from samples: [Float], with configuration: Waveform.Configuration, renderer: WaveformRenderer, position: Waveform.Position = .middle) -> DSImage? { 12 | guard samples.count > 0, samples.count == Int(configuration.size.width * configuration.scale) else { 13 | print("ERROR: samples: \(samples.count) != \(configuration.size.width) * \(configuration.scale)") 14 | return nil 15 | } 16 | 17 | let dampedSamples = configuration.shouldDamp ? damp(samples, with: configuration) : samples 18 | return NSImage(size: configuration.size, flipped: false) { rect in 19 | guard let context = NSGraphicsContext.current?.cgContext else { 20 | fatalError("Missing context") 21 | } 22 | self.draw(on: context, from: dampedSamples, with: configuration, renderer: renderer, position: position) 23 | return true 24 | } 25 | } 26 | } 27 | #endif 28 | -------------------------------------------------------------------------------- /Sources/DSWaveformImage/WaveformImageDrawer.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import AVFoundation 3 | import CoreGraphics 4 | 5 | /// Renders a DSImage of the waveform data calculated by the analyzer. 6 | public class WaveformImageDrawer: ObservableObject { 7 | public enum GenerationError: Error { case generic } 8 | 9 | public init() {} 10 | 11 | /// only internal; determines whether to draw silence lines in live mode. 
12 | public var shouldDrawSilencePadding: Bool = false 13 | 14 | /// Makes sure we always look at the same samples while animating 15 | private var lastOffset: Int = 0 16 | 17 | /// Keep track of how many samples we are adding each draw cycle 18 | private var lastSampleCount: Int = 0 19 | 20 | /// Async analyzes the provided audio and renders a DSImage of the waveform data calculated by the analyzer. 21 | /// - Parameter fromAudioAt: local filesystem URL of the audio file to process. 22 | /// - Parameter with: `Waveform.Configuration` to be used. 23 | /// - Parameter renderer: optional `WaveformRenderer` to adapt how the waveform shall be rendered. 24 | /// - Parameter qos: QoS of the DispatchQueue the calculations are performed (and returned) on. 25 | /// 26 | /// Returns the image on a background thread. 27 | public func waveformImage(fromAudioAt audioAssetURL: URL, 28 | with configuration: Waveform.Configuration, 29 | renderer: WaveformRenderer = LinearWaveformRenderer(), 30 | position: Waveform.Position = .middle, 31 | qos: DispatchQoS.QoSClass = .userInitiated) async throws -> DSImage { 32 | try await render(fromAudioAt: audioAssetURL, with: configuration, renderer: renderer, qos: qos, position: position) 33 | } 34 | 35 | /// Async analyzes the provided audio and renders a DSImage of the waveform data calculated by the analyzer. 36 | /// - Parameter fromAudioAt: local filesystem URL of the audio file to process. 37 | /// - Parameter with: `Waveform.Configuration` to be used. 38 | /// - Parameter renderer: optional `WaveformRenderer` to adapt how the waveform shall be rendered. 39 | /// - Parameter qos: QoS of the DispatchQueue the calculations are performed (and returned) on. 40 | /// - Parameter completionHandler: called from a background thread. Returns the sampled result `DSImage` or `Error`. 41 | /// 42 | /// Calls the completionHandler on a background thread. 43 | @available(*, deprecated, renamed: "waveformImage(fromAudioAt:with:renderer:qos:)") 44 | public func waveformImage(fromAudioAt audioAssetURL: URL, 45 | with configuration: Waveform.Configuration, 46 | renderer: WaveformRenderer = LinearWaveformRenderer(), 47 | qos: DispatchQoS.QoSClass = .userInitiated, 48 | position: Waveform.Position = .middle, 49 | completionHandler: @escaping (Result<DSImage, Error>) -> ()) { 50 | Task { 51 | do { 52 | let image = try await render(fromAudioAt: audioAssetURL, with: configuration, renderer: renderer, qos: qos, position: position) 53 | completionHandler(.success(image)) 54 | } catch { 55 | completionHandler(.failure(error)) 56 | } 57 | } 58 | } 59 | } 60 | 61 | extension WaveformImageDrawer { 62 | /// Renders the waveform from the provided samples into the provided `CGContext`. 63 | /// 64 | /// Samples need to be normalized within interval `(0...1)`. 65 | /// Ensure context size & scale match with the configuration's size & scale. 66 | public func draw(waveform samples: [Float], on context: CGContext, with configuration: Waveform.Configuration, renderer: WaveformRenderer, position: Waveform.Position = .middle) { 67 | guard samples.count > 0 || shouldDrawSilencePadding else { 68 | return 69 | } 70 | 71 | let samplesNeeded = Int(configuration.size.width * configuration.scale) 72 | 73 | let newSampleCount: Int = lastSampleCount > samples.count 74 | ?
samples.count // this implies that we have reset drawing and are starting over 75 | : samples.count - lastSampleCount 76 | 77 | lastSampleCount = samples.count 78 | 79 | // Reset the cumulative lastOffset when new drawing begins 80 | if samples.count == newSampleCount { 81 | lastOffset = 0 82 | } 83 | 84 | if case .striped = configuration.style { 85 | if shouldDrawSilencePadding { 86 | lastOffset = (lastOffset + newSampleCount) % stripeBucket(configuration) 87 | } else if samples.count >= samplesNeeded { 88 | lastOffset = (lastOffset + min(newSampleCount, samples.count - samplesNeeded)) % stripeBucket(configuration) 89 | } 90 | } 91 | 92 | // move the window, so that it's always at the end (appears to move from right to left) 93 | let startSample = max(0, samples.count - samplesNeeded) 94 | let clippedSamples = Array(samples[startSample..<samples.count]) […] 111 | func damp(_ samples: [Float], with configuration: Waveform.Configuration) -> [Float] { 112 | guard let damping = configuration.damping, damping.percentage > 0 else { 113 | return samples 114 | } 115 | 116 | let count = Float(samples.count) 117 | return samples.enumerated().map { x, value -> Float in 118 | 1 - ((1 - value) * dampFactor(x: Float(x), count: count, with: damping)) 119 | } 120 | } 121 | } 122 | 123 | // MARK: Image generation 124 | 125 | private extension WaveformImageDrawer { 126 | func render( 127 | fromAudioAt audioAssetURL: URL, 128 | with configuration: Waveform.Configuration, 129 | renderer: WaveformRenderer, 130 | qos: DispatchQoS.QoSClass, 131 | position: Waveform.Position 132 | ) async throws -> DSImage { 133 | let sampleCount = Int(configuration.size.width * configuration.scale) 134 | let waveformAnalyzer = WaveformAnalyzer() 135 | let samples = try await waveformAnalyzer.samples(fromAudioAt: audioAssetURL, count: sampleCount, qos: qos) 136 | let dampedSamples = configuration.shouldDamp ?
137 | 
138 | if let image = waveformImage(from: dampedSamples, with: configuration, renderer: renderer, position: position) {
139 | return image
140 | } else {
141 | throw GenerationError.generic
142 | }
143 | }
144 | 
145 | private func drawBackground(on context: CGContext, with configuration: Waveform.Configuration) {
146 | context.setFillColor(configuration.backgroundColor.cgColor)
147 | context.fill(CGRect(origin: CGPoint.zero, size: configuration.size))
148 | }
149 | }
150 | 
151 | // MARK: - Helpers
152 | 
153 | private extension WaveformImageDrawer {
154 | private func stripeCount(_ configuration: Waveform.Configuration) -> Int {
155 | if case .striped = configuration.style {
156 | return Int(configuration.size.width * configuration.scale) / stripeBucket(configuration)
157 | } else {
158 | return 0
159 | }
160 | }
161 | 
162 | private func stripeBucket(_ configuration: Waveform.Configuration) -> Int {
163 | if case let .striped(stripeConfig) = configuration.style {
164 | return Int(stripeConfig.width + stripeConfig.spacing) * Int(configuration.scale)
165 | } else {
166 | return 0
167 | }
168 | }
169 | 
170 | private func dampFactor(x: Float, count: Float, with damping: Waveform.Damping) -> Float {
171 | if (damping.sides == .left || damping.sides == .both) && x < count * damping.percentage {
172 | // increasing linear damping within the left 8th (default)
173 | // basically (x : 1/8) with x in (0..<1/8)
174 | return damping.easing(x / (count * damping.percentage))
175 | } else if (damping.sides == .right || damping.sides == .both) && x > ((1 / damping.percentage) - 1) * (count * damping.percentage) {
176 | // decaying linear damping within the right 8th
177 | // basically also (x : 1/8), but since x in (7/8>...1) x is "inverted" as x = x - 7/8
178 | return damping.easing(1 - (x - (((1 / damping.percentage) - 1) * (count * damping.percentage))) / (count * damping.percentage))
179 | }
180 | return 1
181 | }
182 | }
183 | 
--------------------------------------------------------------------------------
/Sources/DSWaveformImage/WaveformImageTypes.swift:
--------------------------------------------------------------------------------
1 | import AVFoundation
2 | 
3 | #if os(macOS)
4 | import AppKit
5 | 
6 | public typealias DSColor = NSColor
7 | public typealias DSImage = NSImage
8 | public enum DSScreen {
9 | public static var scale: CGFloat { NSScreen.main?.backingScaleFactor ?? 1 }
10 | }
11 | #else
12 | import UIKit
13 | 
14 | public typealias DSColor = UIColor
15 | public typealias DSImage = UIImage
16 | public enum DSScreen {
17 | public static var scale: CGFloat {
18 | #if swift(>=5.9) && os(visionOS)
19 | return (UIApplication.shared.connectedScenes.first(where: {$0 is UIWindowScene}) as? UIWindowScene)?.traitCollection.displayScale ?? 1
20 | #else
21 | return UIScreen.main.scale
22 | #endif
23 | }
24 | }
25 | #endif
26 | 
27 | /**
28 |  Renders the waveform samples on the provided `CGContext`.
29 | 
30 |  Default implementations are `LinearWaveformRenderer` and `CircularWaveformRenderer`.
31 |  Check out those if you'd like to implement your own custom renderer.
32 |  */
33 | public protocol WaveformRenderer: Sendable {
34 | 
35 | /**
36 |  Calculates a CGPath from the waveform samples.
37 | 
38 |  - Parameters:
39 |  - samples: `[Float]` of the amplitude envelope to be drawn, normalized to interval `(0...1)`. `0` is maximum (typically `0dB`).
40 |  `1` is the noise floor, typically `-50dB`, as defined in `WaveformAnalyzer.noiseFloorDecibelCutoff`.
41 |  - lastOffset: You can typically leave this `0`. **Required for live rendering**, where it is needed to keep track of the last drawing cycle. Setting it avoids 'flickering' as samples are being added
42 |  continuously and the waveform moves across the view.
43 |  */
44 | func path(samples: [Float], with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position) -> CGPath
45 | 
46 | /**
47 |  Renders the waveform samples on the provided `CGContext`.
48 | 
49 |  - Parameters:
50 |  - samples: `[Float]` of the amplitude envelope to be drawn, normalized to interval `(0...1)`. `0` is maximum (typically `0dB`).
51 |  `1` is the noise floor, typically `-50dB`, as defined in `WaveformAnalyzer.noiseFloorDecibelCutoff`.
52 |  - with configuration: The desired configuration to be used for drawing.
53 |  - lastOffset: You can typically leave this `0`. **Required for live rendering**, where it is needed to keep track of the last drawing cycle. Setting it avoids 'flickering' as samples are being added
54 |  continuously and the waveform moves across the view.
55 |  */
56 | func render(samples: [Float], on context: CGContext, with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position)
57 | }
58 | 
59 | public extension WaveformRenderer {
60 | func path(samples: [Float], with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position = .middle) -> CGPath {
61 | path(samples: samples, with: configuration, lastOffset: lastOffset, position: position)
62 | }
63 | 
64 | func render(samples: [Float], on context: CGContext, with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position = .middle) {
65 | render(samples: samples, on: context, with: configuration, lastOffset: lastOffset, position: position)
66 | }
67 | }
68 | 
69 | public enum Waveform {
70 | /** Position of the drawn waveform. */
71 | public enum Position: Equatable {
72 | /// **top**: Draws the waveform at the top of the image, such that only the bottom 50% are visible.
73 | case top
74 | 
75 | /// **middle**: Draws the waveform in the middle of the image, such that the entire waveform is visible.
76 | case middle
77 | 
78 | /// **bottom**: Draws the waveform at the bottom of the image, such that only the top 50% are visible.
79 | case bottom
80 | 
81 | /// **custom**: Draws the waveform at the specified point of the image. Clamped within range `(0...1)`, where `0`
82 | /// is equal to `.top`, `0.5` is equal to `.middle` and `1` is equal to `.bottom`.
83 | case custom(CGFloat)
84 | 
85 | func offset() -> CGFloat {
86 | switch self {
87 | case .top: return 0.0
88 | case .middle: return 0.5
89 | case .bottom: return 1.0
90 | case let .custom(offset): return min(1, max(0, offset))
91 | }
92 | }
93 | }
94 | 
95 | /**
96 |  Style of the waveform which is used during drawing:
97 |  - **filled**: Use solid color for the waveform.
98 |  - **outlined**: Draws the envelope as an outline with the provided thickness.
99 |  - **gradient**: Use gradient based on color for the waveform.
100 |  - **gradientOutlined**: Use gradient based on color for the waveform. Draws the envelope as an outline with the provided thickness.
101 |  - **striped**: Use striped filling based on color for the waveform.
102 |  */
103 | public enum Style: Equatable, Sendable {
104 | public struct StripeConfig: Equatable, Sendable {
105 | /// Color of the waveform stripes. Default is clear.
106 | public let color: DSColor
107 | 
108 | /// Width of stripes drawn. Default is `1`
109 | public let width: CGFloat
110 | 
111 | /// Space between stripes. Default is `5`
112 | public let spacing: CGFloat
113 | 
114 | /// Line cap style. Default is `.round`.
115 | public let lineCap: CGLineCap
116 | 
117 | public init(color: DSColor, width: CGFloat = 1, spacing: CGFloat = 5, lineCap: CGLineCap = .round) {
118 | self.color = color
119 | self.width = width
120 | self.spacing = spacing
121 | self.lineCap = lineCap
122 | }
123 | }
124 | 
125 | case filled(DSColor)
126 | case outlined(DSColor, CGFloat)
127 | case gradient([DSColor])
128 | case gradientOutlined([DSColor], CGFloat)
129 | case striped(StripeConfig)
130 | }
131 | 
132 | /**
133 |  Defines the damping attributes of the waveform.
134 |  */
135 | public struct Damping: Equatable, Sendable {
136 | public enum Sides: Equatable, Sendable {
137 | case left
138 | case right
139 | case both
140 | }
141 | 
142 | /// Determines the percentage of the resulting graph to be damped.
143 | ///
144 | /// Must be within `(0..<0.5)` to leave an undamped area.
145 | /// Default is `0.125`
146 | public let percentage: Float
147 | 
148 | /// Determines which sides of the graph to damp.
149 | /// Default is `.both`
150 | public let sides: Sides
151 | 
152 | /// Easing function to be used. Default is `pow(x, 2)`.
153 | public let easing: @Sendable (Float) -> Float
154 | 
155 | public init(percentage: Float = 0.125, sides: Sides = .both, easing: @escaping @Sendable (Float) -> Float = { x in pow(x, 2) }) {
156 | guard (0..<0.5).contains(percentage) else {
157 | preconditionFailure("dampingPercentage must be within (0..<0.5)")
158 | }
159 | 
160 | self.percentage = percentage
161 | self.sides = sides
162 | self.easing = easing
163 | }
164 | 
165 | /// Build a new `Waveform.Damping` with only the given parameters replaced.
166 | public func with(percentage: Float? = nil, sides: Sides? = nil, easing: (@Sendable (Float) -> Float)? = nil) -> Damping {
167 | .init(percentage: percentage ?? self.percentage, sides: sides ?? self.sides, easing: easing ?? self.easing)
168 | }
169 | 
170 | public static func == (lhs: Waveform.Damping, rhs: Waveform.Damping) -> Bool {
171 | // poor-man's way to make two closures Equatable w/o too much hassle
172 | let randomEqualitySample = Float.random(in: (0..<Float.greatestFiniteMagnitude))
173 | return lhs.percentage == rhs.percentage && lhs.sides == rhs.sides && lhs.easing(randomEqualitySample) == rhs.easing(randomEqualitySample)
174 | }
175 | }
176 | 
177 | /** Allows customization of the waveform output image. */
178 | public struct Configuration: Equatable, Sendable {
179 | /// Desired output size of the waveform image. Works together with `scale`. Default is `.zero`.
180 | public let size: CGSize
181 | 
182 | /// Background color of the waveform. Default is `.clear`.
183 | public let backgroundColor: DSColor
184 | 
185 | /// Waveform drawing style. Default is `.gradient`.
186 | public let style: Style
187 | 
188 | /// *Optional* damping applied to the waveform. Default is `nil`, i.e. no damping.
189 | public let damping: Damping?
190 | 
191 | /// Scale (@2x, @3x, etc.) to be applied to the image. Default is `DSScreen.scale`.
192 | public let scale: CGFloat
193 | 
194 | /**
195 |  Vertical scaling factor. Default is `0.95`, leaving a small vertical padding.
196 | 
197 |  The `verticalScalingFactor` describes the maximum vertical amplitude in relation to the view's height:
198 | 
199 |  `0 < verticalScalingFactor < 1`: the waveform is drawn smaller than the full height, leaving a padding.
200 | 
201 |  `1`: the waveform fills the view's full height.
202 |  `> 1`: louder waveform samples will extend out of the view boundaries and clip.
203 |  */
204 | public let verticalScalingFactor: CGFloat
205 | 
206 | /// Waveform antialiasing. If enabled, may reduce overall opacity. Default is `false`.
207 | public let shouldAntialias: Bool
208 | 
209 | public var shouldDamp: Bool {
210 | damping != nil
211 | }
212 | 
213 | public init(size: CGSize = .zero,
214 | backgroundColor: DSColor = DSColor.clear,
215 | style: Style = .gradient([DSColor.black, DSColor.gray]),
216 | damping: Damping? = nil,
217 | scale: CGFloat = DSScreen.scale,
218 | verticalScalingFactor: CGFloat = 0.95,
219 | shouldAntialias: Bool = false) {
220 | guard verticalScalingFactor > 0 else {
221 | preconditionFailure("verticalScalingFactor must be greater than 0")
222 | }
223 | 
224 | self.backgroundColor = backgroundColor
225 | self.style = style
226 | self.damping = damping
227 | self.size = size
228 | self.scale = scale
229 | self.verticalScalingFactor = verticalScalingFactor
230 | self.shouldAntialias = shouldAntialias
231 | }
232 | 
233 | /// Build a new `Waveform.Configuration` with only the given parameters replaced.
234 | public func with(size: CGSize? = nil,
235 | backgroundColor: DSColor? = nil,
236 | style: Style? = nil,
237 | damping: Damping? = nil,
238 | scale: CGFloat? = nil,
239 | verticalScalingFactor: CGFloat? = nil,
240 | shouldAntialias: Bool? = nil
241 | ) -> Configuration {
242 | Configuration(
243 | size: size ?? self.size,
244 | backgroundColor: backgroundColor ?? self.backgroundColor,
245 | style: style ?? self.style,
246 | damping: damping ?? self.damping,
247 | scale: scale ?? self.scale,
248 | verticalScalingFactor: verticalScalingFactor ?? self.verticalScalingFactor,
249 | shouldAntialias: shouldAntialias ?? self.shouldAntialias
250 | )
251 | }
252 | }
253 | }
254 | 
--------------------------------------------------------------------------------
/Sources/DSWaveformImageViews/SwiftUI/DefaultShapeStyler.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import DSWaveformImage
3 | import SwiftUI
4 | 
5 | struct DefaultShapeStyler {
6 | @ViewBuilder
7 | func style(shape: WaveformShape, with configuration: Waveform.Configuration) -> some View {
8 | switch configuration.style {
9 | case let .filled(color):
10 | shape.fill(Color(color))
11 | 
12 | case let .outlined(color, lineWidth):
13 | shape.stroke(
14 | Color(color),
15 | style: StrokeStyle(
16 | lineWidth: lineWidth,
17 | lineCap: .round
18 | )
19 | )
20 | 
21 | case let .gradient(colors):
22 | shape
23 | .fill(LinearGradient(colors: colors.map(Color.init), startPoint: .bottom, endPoint: .top))
24 | 
25 | case let .gradientOutlined(colors, lineWidth):
26 | shape.stroke(
27 | LinearGradient(colors: colors.map(Color.init), startPoint: .bottom, endPoint: .top),
28 | style: StrokeStyle(
29 | lineWidth: lineWidth,
30 | lineCap: .round
31 | )
32 | )
33 | 
34 | case let .striped(config):
35 | shape.stroke(
36 | Color(config.color),
37 | style: StrokeStyle(
38 | lineWidth: config.width,
39 | lineCap: config.lineCap
40 | )
41 | )
42 | }
43 | }
44 | }
45 | 
--------------------------------------------------------------------------------
/Sources/DSWaveformImageViews/SwiftUI/VersionMigrations.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | 
3 | // workaround for crashes in iOS 15 when using #available in ViewBuilders
4 | // see https://developer.apple.com/forums/thread/650818
5 | // not sure if this is still relevant, but keeping it due to its obscurity when it occurs
6 | // and because I cannot verify that it does not happen anymore due to lack of devices
7 | public struct LazyContent<Content: View>: View {
8 | let content: () -> Content
9 | 
10 | public init(@ViewBuilder content: @escaping () -> Content) {
11 | self.content = content
12 | }
13 | 
14 | public var body: some View {
15 | content()
16 | }
17 | }
18 | 
19 | // This is here to support visionOS / iOS 17 and remove the deprecation warning relating to the usage of
20 | // @available(visionOS, deprecated: 1.0, message: "Use `onChange` with a two or zero parameter action closure instead.")
21 | // @inlinable public func onChange<V>(of value: V, perform action: @escaping (_ newValue: V) -> Void) -> some View where V : Equatable
22 | public struct OnChange<V: Equatable>: ViewModifier {
23 | private var value: V
24 | private var action: (_ newValue: V) -> Void
25 | 
26 | public init(of value: V, action: @escaping (_ newValue: V) -> Void) {
27 | self.value = value
28 | self.action = action
29 | }
30 | 
31 | public func body(content: Content) -> some View {
32 | #if swift(>=5.9)
33 | if #available(iOS 17, macOS 14.0, visionOS 1.0, *) {
34 | LazyContent {
35 | content
36 | .onChange(of: value) { _, newValue in
37 | action(newValue)
38 | }
39 | } 40 | } else { 41 | content 42 | .onChange(of: value) { newValue in 43 | action(newValue) 44 | } 45 | } 46 | #else 47 | content 48 | .onChange(of: value) { newValue in 49 | action(newValue) 50 | } 51 | #endif 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /Sources/DSWaveformImageViews/SwiftUI/WaveformLiveCanvas.swift: -------------------------------------------------------------------------------- 1 | import SwiftUI 2 | import DSWaveformImage 3 | 4 | @available(iOS 15.0, macOS 12.0, *) 5 | public struct WaveformLiveCanvas: View { 6 | public static let defaultConfiguration = Waveform.Configuration(damping: .init(percentage: 0.125, sides: .both)) 7 | 8 | public let samples: [Float] 9 | public let configuration: Waveform.Configuration 10 | public let renderer: WaveformRenderer 11 | public let shouldDrawSilencePadding: Bool 12 | 13 | @StateObject private var waveformDrawer: WaveformImageDrawer 14 | 15 | public init( 16 | samples: [Float], 17 | configuration: Waveform.Configuration = defaultConfiguration, 18 | renderer: WaveformRenderer = LinearWaveformRenderer(), 19 | shouldDrawSilencePadding: Bool = false 20 | ) { 21 | let drawer = WaveformImageDrawer() 22 | self.samples = samples 23 | self.configuration = configuration 24 | self.renderer = renderer 25 | self.shouldDrawSilencePadding = shouldDrawSilencePadding 26 | 27 | drawer.shouldDrawSilencePadding = shouldDrawSilencePadding 28 | _waveformDrawer = StateObject(wrappedValue: drawer) 29 | } 30 | 31 | public var body: some View { 32 | Canvas(rendersAsynchronously: true) { context, size in 33 | context.withCGContext { cgContext in 34 | waveformDrawer.draw(waveform: samples, on: cgContext, with: configuration.with(size: size), renderer: renderer) 35 | } 36 | } 37 | .onAppear { 38 | waveformDrawer.shouldDrawSilencePadding = shouldDrawSilencePadding 39 | } 40 | .modifier(OnChange(of: shouldDrawSilencePadding, action: { newValue in 41 | waveformDrawer.shouldDrawSilencePadding = newValue 42 | })) 43 | } 44 | } 45 | 46 | #if DEBUG 47 | @available(iOS 15.0, macOS 12.0, *) 48 | struct WaveformLiveCanvas_Previews: PreviewProvider { 49 | struct TestView: View { 50 | @State var show: Bool = false 51 | 52 | var body: some View { 53 | VStack { 54 | if show { 55 | WaveformLiveCanvas( 56 | samples: [], 57 | configuration: liveConfiguration, 58 | renderer: LinearWaveformRenderer(), 59 | shouldDrawSilencePadding: show 60 | ) 61 | } 62 | }.onAppear() { 63 | show = true 64 | } 65 | } 66 | } 67 | 68 | static var liveConfiguration: Waveform.Configuration = Waveform.Configuration( 69 | style: .striped(.init(color: .systemPink, width: 3, spacing: 3)) 70 | ) 71 | 72 | static var previews: some View { 73 | TestView() 74 | } 75 | } 76 | #endif 77 | -------------------------------------------------------------------------------- /Sources/DSWaveformImageViews/SwiftUI/WaveformShape.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import SwiftUI 3 | import DSWaveformImage 4 | 5 | /// A waveform SwiftUI `Shape` object for generating a shape path from component(s) of the waveform. 6 | /// **Note:** The Shape does *not* style itself. Use `WaveformView` for that purpose and only use the Shape directly if needed. 
7 | @available(iOS 15.0, macOS 12.0, *)
8 | public struct WaveformShape: Shape {
9 | private let samples: [Float]
10 | private let configuration: Waveform.Configuration
11 | private let renderer: WaveformRenderer
12 | 
13 | public init(
14 | samples: [Float],
15 | configuration: Waveform.Configuration = Waveform.Configuration(),
16 | renderer: WaveformRenderer = LinearWaveformRenderer()
17 | ) {
18 | self.samples = samples
19 | self.configuration = configuration
20 | self.renderer = renderer
21 | }
22 | 
23 | public func path(in rect: CGRect) -> Path {
24 | let size = CGSize(width: rect.maxX, height: rect.maxY)
25 | let dampedSamples = configuration.shouldDamp ? damp(samples, with: configuration) : samples
26 | let path = renderer.path(samples: dampedSamples, with: configuration.with(size: size), lastOffset: 0)
27 | 
28 | return Path(path)
29 | }
30 | 
31 | /// Whether the shape has no underlying samples to display.
32 | var isEmpty: Bool {
33 | samples.isEmpty
34 | }
35 | }
36 | 
37 | private extension WaveformShape {
38 | private func damp(_ samples: [Float], with configuration: Waveform.Configuration) -> [Float] {
39 | guard let damping = configuration.damping, damping.percentage > 0 else {
40 | return samples
41 | }
42 | 
43 | let count = Float(samples.count)
44 | return samples.enumerated().map { x, value -> Float in
45 | 1 - ((1 - value) * dampFactor(x: Float(x), count: count, with: damping))
46 | }
47 | }
48 | 
49 | private func dampFactor(x: Float, count: Float, with damping: Waveform.Damping) -> Float {
50 | if (damping.sides == .left || damping.sides == .both) && x < count * damping.percentage {
51 | // increasing linear damping within the left 8th (default)
52 | // basically (x : 1/8) with x in (0..<1/8)
53 | return damping.easing(x / (count * damping.percentage))
54 | } else if (damping.sides == .right || damping.sides == .both) && x > ((1 / damping.percentage) - 1) * (count * damping.percentage) {
55 | // decaying linear damping within the right 8th
56 | // basically also (x : 1/8), but since x in (7/8>...1) x is "inverted" as x = x - 7/8
57 | return damping.easing(1 - (x - (((1 / damping.percentage) - 1) * (count * damping.percentage))) / (count * damping.percentage))
58 | }
59 | return 1
60 | }
61 | }
62 | 
63 | 
--------------------------------------------------------------------------------
/Sources/DSWaveformImageViews/SwiftUI/WaveformView.swift:
--------------------------------------------------------------------------------
1 | import DSWaveformImage
2 | import SwiftUI
3 | 
4 | @available(iOS 15.0, macOS 12.0, *)
5 | /// Renders and displays a waveform for the audio at `audioURL`.
6 | public struct WaveformView<Content: View>: View {
7 | private let audioURL: URL
8 | private let configuration: Waveform.Configuration
9 | private let renderer: WaveformRenderer
10 | private let priority: TaskPriority
11 | private let content: (WaveformShape) -> Content
12 | 
13 | @State private var samples: [Float] = []
14 | @State private var rescaleTimer: Timer?
15 | @State private var currentSize: CGSize = .zero
16 | 
17 | /**
18 |  Creates a new WaveformView which displays a waveform for the audio at `audioURL`.
19 | 
20 |  - Parameters:
21 |  - audioURL: The `URL` of the audio asset to be rendered.
22 |  - configuration: The `Waveform.Configuration` to be used for rendering.
23 |  - renderer: The `WaveformRenderer` implementation to be used. Defaults to `LinearWaveformRenderer`. Also comes with `CircularWaveformRenderer`.
24 |  - priority: The `TaskPriority` used during analyzing. Defaults to `.userInitiated`.
25 |  - content: ViewBuilder with the WaveformShape to be customized.
26 |  */
27 | public init(
28 | audioURL: URL,
29 | configuration: Waveform.Configuration = Waveform.Configuration(damping: .init(percentage: 0.125, sides: .both)),
30 | renderer: WaveformRenderer = LinearWaveformRenderer(),
31 | priority: TaskPriority = .userInitiated,
32 | @ViewBuilder content: @escaping (WaveformShape) -> Content
33 | ) {
34 | self.audioURL = audioURL
35 | self.configuration = configuration
36 | self.renderer = renderer
37 | self.priority = priority
38 | self.content = content
39 | }
40 | 
41 | public var body: some View {
42 | GeometryReader { geometry in
43 | content(WaveformShape(samples: samples, configuration: configuration, renderer: renderer))
44 | .onAppear {
45 | guard samples.isEmpty else { return }
46 | update(size: geometry.size, url: audioURL, configuration: configuration)
47 | }
48 | .modifier(OnChange(of: geometry.size, action: { newValue in update(size: newValue, url: audioURL, configuration: configuration, delayed: true) }))
49 | .modifier(OnChange(of: audioURL, action: { newValue in update(size: geometry.size, url: audioURL, configuration: configuration) }))
50 | .modifier(OnChange(of: configuration, action: { newValue in update(size: geometry.size, url: audioURL, configuration: newValue) }))
51 | }
52 | }
53 | 
54 | private func update(size: CGSize, url: URL, configuration: Waveform.Configuration, delayed: Bool = false) {
55 | rescaleTimer?.invalidate()
56 | 
57 | let updateTask: @Sendable (Timer?) -> Void = { _ in
58 | Task(priority: priority) {
59 | do {
60 | let samplesNeeded = Int(size.width * configuration.scale)
61 | let samples = try await WaveformAnalyzer().samples(fromAudioAt: url, count: samplesNeeded)
62 | 
63 | await MainActor.run {
64 | self.currentSize = size
65 | self.samples = samples
66 | }
67 | } catch {
68 | assertionFailure(error.localizedDescription)
69 | }
70 | }
71 | }
72 | 
73 | if delayed {
74 | rescaleTimer = Timer.scheduledTimer(withTimeInterval: 0.05, repeats: false, block: updateTask)
75 | RunLoop.main.add(rescaleTimer!, forMode: .common)
76 | } else {
77 | updateTask(nil)
78 | }
79 | }
80 | }
81 | 
82 | public extension WaveformView {
83 | /**
84 |  Creates a new WaveformView which displays a waveform for the audio at `audioURL`.
85 | 
86 |  - Parameters:
87 |  - audioURL: The `URL` of the audio asset to be rendered.
88 |  - configuration: The `Waveform.Configuration` to be used for rendering.
89 |  - renderer: The `WaveformRenderer` implementation to be used. Defaults to `LinearWaveformRenderer`. Also comes with `CircularWaveformRenderer`.
90 |  - priority: The `TaskPriority` used during analyzing. Defaults to `.userInitiated`.
91 |  */
92 | init(
93 | audioURL: URL,
94 | configuration: Waveform.Configuration = Waveform.Configuration(damping: .init(percentage: 0.125, sides: .both)),
95 | renderer: WaveformRenderer = LinearWaveformRenderer(),
96 | priority: TaskPriority = .userInitiated
97 | ) where Content == AnyView {
98 | self.init(audioURL: audioURL, configuration: configuration, renderer: renderer, priority: priority) { shape in
99 | AnyView(DefaultShapeStyler().style(shape: shape, with: configuration))
100 | }
101 | }
102 | 
103 | /**
104 |  Creates a new WaveformView which displays a waveform for the audio at `audioURL`.
105 | 
106 |  - Parameters:
107 |  - audioURL: The `URL` of the audio asset to be rendered.
108 |  - configuration: The `Waveform.Configuration` to be used for rendering.
109 |  - renderer: The `WaveformRenderer` implementation to be used. Defaults to `LinearWaveformRenderer`. Also comes with `CircularWaveformRenderer`.
110 |  - priority: The `TaskPriority` used during analyzing. Defaults to `.userInitiated`.
111 |  - placeholder: ViewBuilder for a placeholder view during the loading phase.
112 |  */
113 | init<Placeholder: View>(
114 | audioURL: URL,
115 | configuration: Waveform.Configuration = Waveform.Configuration(damping: .init(percentage: 0.125, sides: .both)),
116 | renderer: WaveformRenderer = LinearWaveformRenderer(),
117 | priority: TaskPriority = .userInitiated,
118 | @ViewBuilder placeholder: @escaping () -> Placeholder
119 | ) where Content == _ConditionalContent<Placeholder, AnyView> {
120 | self.init(audioURL: audioURL, configuration: configuration, renderer: renderer, priority: priority) { shape in
121 | if shape.isEmpty {
122 | placeholder()
123 | } else {
124 | AnyView(DefaultShapeStyler().style(shape: shape, with: configuration))
125 | }
126 | }
127 | }
128 | 
129 | /**
130 |  Creates a new WaveformView which displays a waveform for the audio at `audioURL`.
131 | 
132 |  - Parameters:
133 |  - audioURL: The `URL` of the audio asset to be rendered.
134 |  - configuration: The `Waveform.Configuration` to be used for rendering.
135 |  - renderer: The `WaveformRenderer` implementation to be used. Defaults to `LinearWaveformRenderer`. Also comes with `CircularWaveformRenderer`.
136 |  - priority: The `TaskPriority` used during analyzing. Defaults to `.userInitiated`.
137 |  - content: ViewBuilder with the WaveformShape to be customized.
138 |  - placeholder: ViewBuilder for a placeholder view during the loading phase.
139 |  */
140 | init<ModifiedContent: View, Placeholder: View>(
141 | audioURL: URL,
142 | configuration: Waveform.Configuration = Waveform.Configuration(damping: .init(percentage: 0.125, sides: .both)),
143 | renderer: WaveformRenderer = LinearWaveformRenderer(),
144 | priority: TaskPriority = .userInitiated,
145 | @ViewBuilder content: @escaping (WaveformShape) -> ModifiedContent,
146 | @ViewBuilder placeholder: @escaping () -> Placeholder
147 | ) where Content == _ConditionalContent<Placeholder, ModifiedContent> {
148 | self.init(audioURL: audioURL, configuration: configuration, renderer: renderer, priority: priority) { shape in
149 | if shape.isEmpty {
150 | placeholder()
151 | } else {
152 | content(shape)
153 | }
154 | }
155 | }
156 | }
157 | 
--------------------------------------------------------------------------------
/Sources/DSWaveformImageViews/UIKit/WaveformImageView.swift:
--------------------------------------------------------------------------------
1 | #if os(iOS) || swift(>=5.9) && os(visionOS)
2 | import DSWaveformImage
3 | import Foundation
4 | import AVFoundation
5 | import UIKit
6 | 
7 | public class WaveformImageView: UIImageView {
8 | private let waveformImageDrawer: WaveformImageDrawer
9 | 
10 | public var configuration: Waveform.Configuration {
11 | didSet { updateWaveform() }
12 | }
13 | 
14 | public var waveformAudioURL: URL? {
15 | didSet { updateWaveform() }
16 | }
17 | 
18 | override public init(frame: CGRect) {
19 | configuration = Waveform.Configuration(size: frame.size)
20 | waveformImageDrawer = WaveformImageDrawer()
21 | super.init(frame: frame)
22 | }
23 | 
24 | required public init?(coder aDecoder: NSCoder) {
25 | configuration = Waveform.Configuration()
26 | waveformImageDrawer = WaveformImageDrawer()
27 | super.init(coder: aDecoder)
28 | }
29 | 
30 | override public func layoutSubviews() {
31 | super.layoutSubviews()
32 | updateWaveform()
33 | }
34 | 
35 | /// Clears the audio data, emptying the waveform view.
36 | public func reset() {
37 | waveformAudioURL = nil
38 | image = nil
39 | }
40 | }
41 | 
42 | private extension WaveformImageView {
43 | func updateWaveform() {
44 | guard let audioURL = waveformAudioURL else { return }
45 | 
46 | Task {
47 | do {
48 | let image = try await waveformImageDrawer.waveformImage(
49 | fromAudioAt: audioURL,
50 | with: configuration.with(size: bounds.size),
51 | qos: .userInteractive
52 | )
53 | 
54 | await MainActor.run {
55 | self.image = image
56 | }
57 | } catch {
58 | print("Error occurred during waveform image creation:")
59 | print(error)
60 | }
61 | }
62 | }
63 | }
64 | #endif
65 | 
--------------------------------------------------------------------------------
/Sources/DSWaveformImageViews/UIKit/WaveformLiveView.swift:
--------------------------------------------------------------------------------
1 | #if os(iOS) || swift(>=5.9) && os(visionOS)
2 | import DSWaveformImage
3 | import Foundation
4 | import UIKit
5 | 
6 | /// Renders a live waveform every time its `(0...1)`-normalized samples are changed.
7 | public class WaveformLiveView: UIView {
8 | 
9 | /// Default configuration with damping enabled.
10 | public static let defaultConfiguration = Waveform.Configuration(damping: .init(percentage: 0.125, sides: .both))
11 | 
12 | /// If set to `true`, a zero line, indicating silence, is being drawn while the received
13 | /// samples are not filling up the entire view's width yet.
14 | public var shouldDrawSilencePadding: Bool = false {
15 | didSet {
16 | sampleLayer.shouldDrawSilencePadding = shouldDrawSilencePadding
17 | }
18 | }
19 | 
20 | public var configuration: Waveform.Configuration {
21 | didSet {
22 | sampleLayer.configuration = configuration
23 | }
24 | }
25 | 
26 | /// Returns the currently used samples.
27 | public var samples: [Float] {
28 | sampleLayer.samples
29 | }
30 | 
31 | private var sampleLayer: WaveformLiveLayer! {
32 | return layer as? WaveformLiveLayer
33 | }
34 | 
35 | override public class var layerClass: AnyClass {
36 | return WaveformLiveLayer.self
37 | }
38 | 
39 | public var renderer: WaveformRenderer {
40 | didSet {
41 | sampleLayer.renderer = renderer
42 | }
43 | }
44 | 
45 | public init(configuration: Waveform.Configuration = defaultConfiguration, renderer: WaveformRenderer = LinearWaveformRenderer()) {
46 | self.configuration = configuration
47 | self.renderer = renderer
48 | super.init(frame: .zero)
49 | self.contentMode = .redraw
50 | 
51 | defer { // will call didSet to propagate to sampleLayer
52 | self.configuration = configuration
53 | self.renderer = renderer
54 | }
55 | }
56 | 
57 | public override init(frame: CGRect) {
58 | self.configuration = Self.defaultConfiguration
59 | self.renderer = LinearWaveformRenderer()
60 | super.init(frame: frame)
61 | contentMode = .redraw
62 | 
63 | defer { // will call didSet to propagate to sampleLayer
64 | self.configuration = Self.defaultConfiguration
65 | self.renderer = LinearWaveformRenderer()
66 | }
67 | }
68 | 
69 | required init?(coder: NSCoder) {
70 | self.configuration = Self.defaultConfiguration
71 | self.renderer = LinearWaveformRenderer()
72 | super.init(coder: coder)
73 | contentMode = .redraw
74 | 
75 | defer { // will call didSet to propagate to sampleLayer
76 | self.configuration = Self.defaultConfiguration
77 | self.renderer = LinearWaveformRenderer()
78 | }
79 | }
80 | 
81 | /// The sample to be added. Re-draws the waveform with the pre-existing samples and the new one.
82 | /// Value must be within `(0...1)` to make sense (0 being lowest and 1 being maximum amplitude).
83 | public func add(sample: Float) {
84 | sampleLayer.add([sample])
85 | }
86 | 
87 | /// The samples to be added. Re-draws the waveform with the pre-existing samples and the new ones.
88 | /// Values must be within `(0...1)` to make sense (0 being lowest and 1 being maximum amplitude).
89 | public func add(samples: [Float]) {
90 | sampleLayer.add(samples)
91 | }
92 | 
93 | /// Clears the samples, emptying the waveform view.
94 | public func reset() {
95 | sampleLayer.reset()
96 | }
97 | }
98 | 
99 | class WaveformLiveLayer: CALayer {
100 | @NSManaged var samples: [Float]
101 | 
102 | var configuration = WaveformLiveView.defaultConfiguration {
103 | didSet { contentsScale = configuration.scale }
104 | }
105 | 
106 | var shouldDrawSilencePadding: Bool = false {
107 | didSet {
108 | waveformDrawer.shouldDrawSilencePadding = shouldDrawSilencePadding
109 | setNeedsDisplay()
110 | }
111 | }
112 | 
113 | var renderer: WaveformRenderer = LinearWaveformRenderer() {
114 | didSet { setNeedsDisplay() }
115 | }
116 | 
117 | private let waveformDrawer = WaveformImageDrawer()
118 | 
119 | override class func needsDisplay(forKey key: String) -> Bool {
120 | if key == #keyPath(samples) {
121 | return true
122 | }
123 | return super.needsDisplay(forKey: key)
124 | }
125 | 
126 | override func draw(in context: CGContext) {
127 | super.draw(in: context)
128 | 
129 | UIGraphicsPushContext(context)
130 | waveformDrawer.draw(waveform: samples, on: context, with: configuration.with(size: bounds.size), renderer: renderer)
131 | UIGraphicsPopContext()
132 | }
133 | 
134 | func add(_ newSamples: [Float]) {
135 | samples += newSamples
136 | }
137 | 
138 | func reset() {
139 | samples = []
140 | }
141 | }
142 | #endif
143 | 
--------------------------------------------------------------------------------
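For orientation, here is a minimal usage sketch that is not part of the repository: it ties together one-off image generation via `WaveformImageDrawer`'s async API and a styled SwiftUI `WaveformView`. The function and view names and the configuration values are illustrative assumptions; it assumes the views ship in a `DSWaveformImageViews` module as suggested by the Sources layout, while the library calls themselves (`waveformImage(fromAudioAt:with:)`, `Waveform.Configuration`, `WaveformView`) are taken from the sources above.

import DSWaveformImage
import DSWaveformImageViews
import SwiftUI

// Renders a static waveform image for the given audio file.
// `renderer`, `position` and `qos` fall back to their defaults
// (LinearWaveformRenderer(), .middle, .userInitiated).
func renderWaveformImage(from audioURL: URL) async throws -> DSImage {
    let drawer = WaveformImageDrawer()
    let configuration = Waveform.Configuration(
        size: CGSize(width: 320, height: 64),
        style: .striped(.init(color: .systemPink, width: 3, spacing: 3)),
        damping: .init(percentage: 0.125, sides: .both)
    )
    return try await drawer.waveformImage(fromAudioAt: audioURL, with: configuration)
}

// Displays the same audio in SwiftUI, styling the WaveformShape manually.
// Omitting the content closure would fall back to DefaultShapeStyler instead.
struct WaveformRow: View {
    let audioURL: URL

    var body: some View {
        WaveformView(audioURL: audioURL) { shape in
            shape.fill(Color.blue) // the content closure receives the WaveformShape
        }
    }
}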
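A similar hedged sketch for live rendering with the UIKit `WaveformLiveView`: the view controller and the audio callback are assumptions standing in for whatever recorder or audio tap feeds the samples, while `shouldDrawSilencePadding`, `add(sample:)` and `configuration.with(style:)` come from the sources above.

import DSWaveformImage
import DSWaveformImageViews
import UIKit

final class LiveRecordingViewController: UIViewController {
    private let liveWaveformView = WaveformLiveView()

    override func viewDidLoad() {
        super.viewDidLoad()

        liveWaveformView.frame = view.bounds
        liveWaveformView.configuration = liveWaveformView.configuration.with(
            style: .striped(.init(color: .systemPink, width: 3, spacing: 3))
        )
        // keep drawing the zero line until samples fill the view's full width
        liveWaveformView.shouldDrawSilencePadding = true
        view.addSubview(liveWaveformView)
    }

    // Feed this from your recorder / audio tap with (0...1)-normalized amplitudes,
    // 0 being lowest and 1 being maximum amplitude, per the WaveformLiveView docs above.
    func didReceive(normalizedSample: Float) {
        liveWaveformView.add(sample: normalizedSample)
    }
}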