2 |
3 |
4 |
5 | com.apple.security.app-sandbox
6 |
7 | com.apple.security.files.user-selected.read-only
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/Example/DSWaveformImageExample-macOS/DSWaveformImageExample_macOSApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DSWaveformImageExample_macOSApp.swift
3 | // DSWaveformImageExample-macOS
4 | //
5 | // Created by Dennis Schmidt on 27.09.22.
6 | //
7 |
8 | import SwiftUI
9 |
/// Application entry point for the macOS example app.
@main
struct DSWaveformImageExample_macOSApp: App {
    /// A single window scene hosting the example's root `ContentView`.
    var body: some Scene {
        WindowGroup(content: ContentView.init)
    }
}
18 |
--------------------------------------------------------------------------------
/Example/DSWaveformImageExample-macOS/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Example/DSWaveformImageExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/Example/DSWaveformImageExample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEDidComputeMac32BitWarning
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2013 Dennis Schmidt
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of
6 | this software and associated documentation files (the "Software"), to deal in
7 | the Software without restriction, including without limitation the rights to
8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
9 | the Software, and to permit persons to whom the Software is furnished to do so,
10 | subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 |
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version:5.7
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 |
4 | import PackageDescription
5 |
// Package manifest for DSWaveformImage: a core (UI-free) waveform analysis &
// rendering library plus an optional views library built on top of it.
let package = Package(
    name: "DSWaveformImage",
    // Minimum deployment targets (iOS 15 / macOS 12); presumably raised to drop
    // internal usage of deprecated APIs — see the project README's 14.0.0 notes.
    platforms: [
        .iOS(.v15),
        .macOS(.v12),
    ],
    products: [
        // Products define the executables and libraries a package produces, and make them visible to other packages.
        // Core library: analyzers, renderers and image drawers; no UI framework dependency.
        .library(
            name: "DSWaveformImage",
            targets: ["DSWaveformImage"]),
        // View layer: UIKit / SwiftUI views; depends on the core target below.
        .library(
            name: "DSWaveformImageViews",
            targets: ["DSWaveformImageViews"]),
    ],
    dependencies: [
        // Dependencies declare other packages that this package depends on.
        // .package(url: /* package url */, from: "1.0.0"),
    ],
    targets: [
        .target(name: "DSWaveformImage"),
        .target(
            name: "DSWaveformImageViews",
            dependencies: ["DSWaveformImage"]
        ),
    ]
)
33 |
--------------------------------------------------------------------------------
/Promotion/appstore.svg:
--------------------------------------------------------------------------------
1 |
2 | Download_on_the_App_Store_Badge_US-UK_RGB_blk_4SVG_092917
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
--------------------------------------------------------------------------------
/Promotion/progress-example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dmrschmidt/DSWaveformImage/5f1ce68474df5a4ab055dfd0df5d2da810eaec7c/Promotion/progress-example.png
--------------------------------------------------------------------------------
/Promotion/recorder-example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dmrschmidt/DSWaveformImage/5f1ce68474df5a4ab055dfd0df5d2da810eaec7c/Promotion/recorder-example.png
--------------------------------------------------------------------------------
/Promotion/screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dmrschmidt/DSWaveformImage/5f1ce68474df5a4ab055dfd0df5d2da810eaec7c/Promotion/screenshot.png
--------------------------------------------------------------------------------
/Promotion/screenshot3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dmrschmidt/DSWaveformImage/5f1ce68474df5a4ab055dfd0df5d2da810eaec7c/Promotion/screenshot3.png
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | DSWaveformImage - iOS, macOS & visionOS realtime audio waveform rendering
2 | ===============
3 | [](https://swift.org/package-manager)
4 |
5 | DSWaveformImage offers native interfaces for drawing the envelope waveform of audio data
6 | in **iOS**, **iPadOS**, **macOS**, **visionOS** or via Catalyst. To do so, you can use
7 |
8 | * [`WaveformImageView`](Sources/DSWaveformImageViews/UIKit/WaveformImageView.swift) (UIKit) / [`WaveformView`](Sources/DSWaveformImageViews/SwiftUI/WaveformView.swift) (SwiftUI) to render a static waveform from an audio file or
9 | * [`WaveformLiveView`](Sources/DSWaveformImageViews/UIKit/WaveformLiveView.swift) (UIKit) / [`WaveformLiveCanvas`](Sources/DSWaveformImageViews/SwiftUI/WaveformLiveCanvas.swift) (SwiftUI) to realtime render a waveform of live audio data (e.g. from `AVAudioRecorder`)
10 | * `WaveformImageDrawer` to generate a waveform `UIImage` from an audio file
11 |
12 | Additionally, you can get a waveform's (normalized) `[Float]` samples directly as well by
13 | creating an instance of `WaveformAnalyzer`.
14 |
15 | Example UI (included in repository)
16 | ------------
17 |
18 | For a practical real-world example usage of a SwiftUI live audio recording waveform rendering, see [RecordingIndicatorView](Example/DSWaveformImageExample-iOS/SwiftUIExample/SwiftUIExampleView.swift).
19 |
20 |
21 |
22 |
23 | More related iOS Controls
24 | ------------
25 |
26 | You may also find the following iOS controls written in Swift interesting:
27 |
28 | * [SwiftColorWheel](https://github.com/dmrschmidt/SwiftColorWheel) - a delightful color picker
29 | * [QRCode](https://github.com/dmrschmidt/QRCode) - a customizable QR code generator
30 |
31 | If you really like this library (aka Sponsoring)
32 | ------------
33 | I'm doing all this for fun and joy and because I strongly believe in the power of open source. On the off-chance though, that using my library has brought joy to you and you just feel like saying "thank you", I would smile like a 4-year old getting a huge ice cream cone, if you'd support me via one of the sponsoring buttons ☺️💕
34 |
35 | Alternatively, consider supporting me by downloading one of my side project iOS apps. If you're feeling in the mood of sending someone else a lovely gesture of appreciation, maybe check out my iOS app [💌 SoundCard](https://www.soundcard.io) to send them a real postcard with a personal audio message. Or download my ad-supported free to play game [🕹️ Snekris for iOS](https://apps.apple.com/us/app/snekris-play-like-its-1999/id6446217693).
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 | Installation
47 | ------------
48 |
49 | * use SPM: add `https://github.com/dmrschmidt/DSWaveformImage` and set "Up to Next Major" with "14.0.0"
50 |
51 | ```swift
52 | import DSWaveformImage // for core classes to generate `UIImage` / `NSImage` directly
53 | import DSWaveformImageViews // if you want to use the native UIKit / SwiftUI views
54 | ```
55 |
56 | Usage
57 | -----
58 |
59 | `DSWaveformImage` provides 3 kinds of tools to use
60 | * native SwiftUI views - [SwiftUI example usage code](Example/DSWaveformImageExample-iOS/SwiftUIExample/SwiftUIExampleView.swift)
61 | * native UIKit views - [UIKit example usage code](Example/DSWaveformImageExample-iOS/ViewController.swift)
62 | * access to the raw renderers and processors
63 |
64 | The core renderers and processors as well as SwiftUI views natively support iOS & macOS, using `UIImage` & `NSImage` respectively.
65 |
66 | ### SwiftUI
67 |
68 | #### `WaveformView` - renders a one-off waveform from an audio file:
69 |
70 | ```swift
71 | @State var audioURL = Bundle.main.url(forResource: "example_sound", withExtension: "m4a")!
72 | WaveformView(audioURL: audioURL)
73 | ```
74 |
75 | Default styling may be overridden if you have more complex requirements:
76 |
77 | ```swift
78 | @State var audioURL = Bundle.main.url(forResource: "example_sound", withExtension: "m4a")!
79 | WaveformView(audioURL: audioURL) { waveformShape in
80 | waveformShape
81 | .stroke(LinearGradient(colors: [.red, .green, .orange], startPoint: .zero, endPoint: .topTrailing), lineWidth: 3)
82 | }
83 | ```
84 |
85 | Similar to [AsyncImage](https://developer.apple.com/documentation/swiftui/asyncimage/init(url:scale:content:placeholder:)), a placeholder can be
86 | set to show until the load and render operation completes successfully. Thanks to [@alfogrillo](https://github.com/alfogrillo)!
87 |
88 | ```swift
89 | WaveformView(audioURL: audioURL) { waveformShape in
90 | waveformShape
91 | .stroke(LinearGradient(colors: [.red, .green, .orange], startPoint: .zero, endPoint: .topTrailing), lineWidth: 3)
92 | } placeholder: {
93 | ProgressView()
94 | }
95 | ```
96 |
97 | #### `WaveformLiveCanvas` - renders a live waveform from `(0...1)` normalized samples:
98 |
99 | ```swift
100 | @StateObject private var audioRecorder: AudioRecorder = AudioRecorder() // just an example
101 | WaveformLiveCanvas(samples: audioRecorder.samples)
102 | ```
103 |
104 | ### UIKit
105 |
106 | #### `WaveformImageView` - renders a one-off waveform from an audio file:
107 |
108 | ```swift
109 | let audioURL = Bundle.main.url(forResource: "example_sound", withExtension: "m4a")!
110 | waveformImageView = WaveformImageView(frame: CGRect(x: 0, y: 0, width: 500, height: 300))
111 | waveformImageView.waveformAudioURL = audioURL
112 | ```
113 |
114 | #### `WaveformLiveView` - renders a live waveform from `(0...1)` normalized samples:
115 |
116 | Find a full example in the [sample project's RecordingViewController](Example/DSWaveformImageExample-iOS/RecordingViewController.swift).
117 |
118 | ```swift
119 | let waveformView = WaveformLiveView()
120 |
121 | // configure and start AVAudioRecorder
122 | let recorder = AVAudioRecorder()
123 | recorder.isMeteringEnabled = true // required to get current power levels
124 |
125 | // after all the other recording (omitted for focus) setup, periodically (every 20ms or so):
126 | recorder.updateMeters() // gets the current value
127 | let currentAmplitude = 1 - pow(10, recorder.averagePower(forChannel: 0) / 20)
128 | waveformView.add(sample: currentAmplitude)
129 | ```
130 |
131 | ### Raw API
132 |
133 | #### Configuration
134 |
135 | *Note:* Calculations are always performed and returned on a background thread, so make sure to return to the main thread before doing any UI work.
136 |
137 | Check `Waveform.Configuration` in [WaveformImageTypes](./Sources/DSWaveformImage/WaveformImageTypes.swift) for various configuration options.
138 |
139 | #### `WaveformImageDrawer` - creates a `UIImage` waveform from an audio file:
140 |
141 | ```swift
142 | let waveformImageDrawer = WaveformImageDrawer()
143 | let audioURL = Bundle.main.url(forResource: "example_sound", withExtension: "m4a")!
144 | let image = try await waveformImageDrawer.waveformImage(
145 | fromAudioAt: audioURL,
146 | with: .init(size: topWaveformView.bounds.size, style: .filled(UIColor.black)),
147 | renderer: LinearWaveformRenderer()
148 | )
149 |
150 | // need to jump back to main queue
151 | DispatchQueue.main.async {
152 | self.topWaveformView.image = image
153 | }
154 | ```
155 |
156 | #### `WaveformAnalyzer` - calculates an audio file's waveform sample:
157 |
158 | ```swift
159 | let audioURL = Bundle.main.url(forResource: "example_sound", withExtension: "m4a")!
160 | waveformAnalyzer = WaveformAnalyzer()
161 | let samples = try await waveformAnalyzer.samples(fromAudioAt: audioURL, count: 200)
162 | print("samples: \(samples)")
163 | ```
164 |
165 | ### Playback Progress Indication
166 |
167 | If you're playing back audio files and would like to indicate the playback progress to your users, you can [find inspiration in the example app](https://github.com/dmrschmidt/DSWaveformImage/blob/main/Example/DSWaveformImageExample-iOS/ProgressViewController.swift). UIKit and [SwiftUI](https://github.com/dmrschmidt/DSWaveformImage/blob/main/Example/DSWaveformImageExample-iOS/SwiftUIExample/ProgressWaveformView.swift) examples are provided.
168 |
169 | Both approaches will result in something like the image below.
170 |
171 |
172 |
173 |
174 |
175 |
176 | There is currently no plan to integrate this as a 1st class citizen to the library itself, as every app will have different design requirements, and `WaveformImageDrawer` as well as `WaveformAnalyzer` are as simple to use as the views themselves as you can see in the examples.
177 |
178 | ### Loading remote audio files from URL
179 |
180 | For one example way to display waveforms for audio files on remote URLs see https://github.com/dmrschmidt/DSWaveformImage/issues/22.
181 |
182 | What it looks like
183 | ------------------
184 |
185 | Waveforms can be rendered in 2 different ways and 5 different styles each.
186 |
187 | By default [`LinearWaveformRenderer`](https://github.com/dmrschmidt/DSWaveformImage/blob/main/Sources/DSWaveformImage/Renderers/LinearWaveformRenderer.swift) is used, which draws a linear 2D amplitude envelope.
188 |
189 | [`CircularWaveformRenderer`](https://github.com/dmrschmidt/DSWaveformImage/blob/main/Sources/DSWaveformImage/Renderers/CircularWaveformRenderer.swift) is available as an alternative, which can be passed in to the `WaveformView` or `WaveformLiveView` respectively. It draws a circular
190 | 2D amplitude envelope.
191 |
192 | You can implement your own renderer by implementing [`WaveformRenderer`](https://github.com/dmrschmidt/DSWaveformImage/blob/main/Sources/DSWaveformImage/Renderers/WaveformRenderer.swift).
193 |
194 | The following styles can be applied to either renderer:
195 | - **filled**: Use solid color for the waveform.
196 | - **outlined**: Draws the envelope as an outline with the provided thickness.
197 | - **gradient**: Use gradient based on color for the waveform.
198 | - **gradientOutlined**: Use gradient based on color for the waveform. Draws the envelope as an outline with the provided thickness.
199 | - **striped**: Use striped filling based on color for the waveform.
200 |
201 |
202 |
203 |
204 |
205 |
206 | ### Live waveform rendering
207 | https://user-images.githubusercontent.com/69365/127739821-061a4345-0adc-4cc1-bfd6-f7cfbe1268c9.mov
208 |
209 |
210 | Migration
211 | ---------
212 | ### In 14.0.0
213 | * Minimum iOS Deployment target is 15.0, macOS is 12.0 to remove internal usage of deprecated APIs
214 | * `WaveformAnalyzer` and `WaveformImageDrawer` now return `Result<[Float] | DSImage, Error>` when used with completionHandler for better error handling
215 | * `WaveformAnalyzer` is now stateless and requires the URL in `.samples(fromAudioAt:count:qos:)` instead of its constructor
216 | * SwiftUI's `WaveformView` has a new constructor that provides optional access to the underlying `WaveformShape`, which is now used for rendering, see [#78](https://github.com/dmrschmidt/DSWaveformImage/issues/78)
217 |
218 | ### In 13.0.0
219 | * Any mentions of `dampening` & similar were corrected to `damping` etc in [11460b8b](https://github.com/dmrschmidt/DSWaveformImage/commit/11460b8b8203f163868ba774d1533116d2fe68a1). Most notably in `Waveform.Configuration`. See [#64](https://github.com/dmrschmidt/DSWaveformImage/issues/64).
220 | * styles `.outlined` & `.gradientOutlined` were added to `Waveform.Style`, see https://github.com/dmrschmidt/DSWaveformImage#what-it-looks-like
221 | * `Waveform.Position` was removed. If you were using it to place the view somewhere, move this responsibility up to its parent for positioning, like with any other view as well.
222 |
223 | ### In 12.0.0
224 | * The rendering pipeline was split out from the analysis. You can now create your own renderers by conforming to [`WaveformRenderer`](https://github.com/dmrschmidt/DSWaveformImage/blob/main/Sources/DSWaveformImage/Renderers/WaveformRenderer.swift).
225 | * A new [`CircularWaveformRenderer`](https://github.com/dmrschmidt/DSWaveformImage/blob/main/Sources/DSWaveformImage/Renderers/CircularWaveformRenderer.swift) has been added.
226 | * `position` was removed from `Waveform.Configuration`, see [0447737](https://github.com/dmrschmidt/DSWaveformImage/commit/044773782092becec0424527f6feef061988db7a).
227 | * new `Waveform.Style` option have been added and need to be accounted for in `switch` statements etc.
228 |
229 | ### In 11.0.0
230 | the library was split into two: `DSWaveformImage` and `DSWaveformImageViews`. If you've used any of the native views before, just add the additional `import DSWaveformImageViews`.
231 | The SwiftUI views have changed from taking a Binding to the respective plain values instead.
232 |
233 | ### In 9.0.0
234 | a few public API's have been slightly changed to be more concise. All types have also been grouped under the `Waveform` enum-namespace. Meaning `WaveformConfiguration` for instance has become `Waveform.Configuration` and so on.
235 |
236 | ### In 7.0.0
237 | colors have moved into associated values on the respective `style` enum.
238 |
239 | `Waveform` and the `UIImage` category have been removed in 6.0.0 to simplify the API.
240 | See `Usage` for current usage.
241 |
242 | ## See it live in action
243 |
244 | [SoundCard - postcards with sound](https://www.soundcard.io) lets you send real, physical postcards with audio messages. Right from your iOS device.
245 |
246 | DSWaveformImage is used to draw the waveforms of the audio messages that get printed on the postcards sent by [SoundCard - postcards with audio](https://www.soundcard.io).
247 |
248 |
249 |
250 |
257 |
258 |
259 |
260 |
261 |
262 |
263 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImage/Renderers/CircularWaveformRenderer.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import CoreGraphics
3 |
4 | /**
5 | Draws a circular 2D amplitude envelope of the samples provided.
6 |
7 | Draws either a filled circle, or a hollow ring, depending on the provided `Kind`. Defaults to drawing a `.circle`.
8 | `Kind.ring` is currently experimental.
9 | Can be customized further via the configuration `Waveform.Style`.
10 | */
11 |
public struct CircularWaveformRenderer: WaveformRenderer {
    public enum Kind: Sendable {
        /// Draws waveform as a circular amplitude envelope.
        case circle

        /// **Experimental!** (Will) draw waveform as a ring-shaped amplitude envelope.
        /// Associated value will define the percentage of desired "hollowness" inside, or in other words the ring's thickness / diameter in relation to the overall diameter.
        case ring(CGFloat)
    }

    private let kind: Kind

    /// Creates a renderer for the given `Kind`. Defaults to a filled `.circle`.
    public init(kind: Kind = .circle) {
        self.kind = kind
    }

    /// Builds the amplitude-envelope path for `samples`, dispatching on the configured `Kind`.
    public func path(samples: [Float], with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position = .middle) -> CGPath {
        switch kind {
        case .circle: return circlePath(samples: samples, with: configuration, lastOffset: lastOffset, position: position)
        case .ring: return ringPath(samples: samples, with: configuration, lastOffset: lastOffset, position: position)
        }
    }

    /// Renders `samples` into `context`, then applies the configured style.
    public func render(samples: [Float], on context: CGContext, with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position = .middle) {
        // Bug fix: `position` was previously not forwarded to `path(...)`, so the path
        // was always built with the default `.middle` and the caller-supplied position
        // had no effect on rendering (LinearWaveformRenderer forwards it correctly).
        let path = path(samples: samples, with: configuration, lastOffset: lastOffset, position: position)
        context.addPath(path)

        style(context: context, with: configuration)
    }

    func style(context: CGContext, with configuration: Waveform.Configuration) {
        if case let .gradient(colors) = configuration.style {
            // Clip to the waveform path and fill it with a vertical linear gradient.
            context.clip()
            let colors = NSArray(array: colors.map { (color: DSColor) -> CGColor in color.cgColor }) as CFArray
            let colorSpace = CGColorSpaceCreateDeviceRGB()
            let gradient = CGGradient(colorsSpace: colorSpace, colors: colors, locations: nil)!
            context.drawLinearGradient(gradient,
                                       start: CGPoint(x: 0, y: 0),
                                       end: CGPoint(x: 0, y: configuration.size.height),
                                       options: .drawsAfterEndLocation)
        } else {
            defaultStyle(context: context, with: configuration)
        }
    }

    /// Path for `.circle`: radial spokes from the center, one per sample, closed into a filled shape.
    private func circlePath(samples: [Float], with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position) -> CGPath {
        let graphRect = CGRect(origin: .zero, size: configuration.size)
        let maxRadius = CGFloat(min(graphRect.maxX, graphRect.maxY) / 2.0) * configuration.verticalScalingFactor
        let center = CGPoint(
            x: graphRect.maxX * position.offset(),
            y: graphRect.maxY * position.offset()
        )
        let path = CGMutablePath()

        path.move(to: center)

        for (index, sample) in samples.enumerated() {
            let angle = CGFloat.pi * 2 * (CGFloat(index) / CGFloat(samples.count))
            let x = index + lastOffset

            if case .striped = configuration.style, x % Int(configuration.scale) != 0 || x % stripeBucket(configuration) != 0 {
                // skip sub-pixels - any x value not scale aligned
                // skip any point that is not a multiple of our bucket width (width + spacing)
                path.addLine(to: center)
                continue
            }

            let invertedDbSample = 1 - CGFloat(sample) // sample is in dB, linearly normalized to [0, 1] (1 -> -50 dB)
            let pointOnCircle = CGPoint(
                x: center.x + maxRadius * invertedDbSample * cos(angle),
                y: center.y + maxRadius * invertedDbSample * sin(angle)
            )

            path.addLine(to: pointOnCircle)
        }

        path.closeSubpath()
        return path
    }

    /// Path for `.ring`: spokes start at the inner radius (hollow center) and extend
    /// outward proportionally to each sample's amplitude.
    private func ringPath(samples: [Float], with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position) -> CGPath {
        guard case let .ring(config) = kind else {
            fatalError("called with wrong kind")
        }

        let graphRect = CGRect(origin: .zero, size: configuration.size)
        let maxRadius = CGFloat(min(graphRect.maxX, graphRect.maxY) / 2.0) * configuration.verticalScalingFactor
        let innerRadius: CGFloat = maxRadius * config
        let center = CGPoint(
            x: graphRect.maxX * position.offset(),
            y: graphRect.maxY * position.offset()
        )
        let path = CGMutablePath()

        path.move(to: CGPoint(
            x: center.x + innerRadius * cos(0),
            y: center.y + innerRadius * sin(0)
        ))

        for (index, sample) in samples.enumerated() {
            let x = index + lastOffset
            let angle = CGFloat.pi * 2 * (CGFloat(index) / CGFloat(samples.count))

            if case .striped = configuration.style, x % Int(configuration.scale) != 0 || x % stripeBucket(configuration) != 0 {
                // skip sub-pixels - any x value not scale aligned
                // skip any point that is not a multiple of our bucket width (width + spacing)
                path.move(to: CGPoint(
                    x: center.x + innerRadius * cos(angle),
                    y: center.y + innerRadius * sin(angle)
                ))
                continue
            }

            let invertedDbSample = 1 - CGFloat(sample) // sample is in dB, linearly normalized to [0, 1] (1 -> -50 dB)
            let pointOnCircle = CGPoint(
                x: center.x + innerRadius * cos(angle) + (maxRadius - innerRadius) * invertedDbSample * cos(angle),
                y: center.y + innerRadius * sin(angle) + (maxRadius - innerRadius) * invertedDbSample * sin(angle)
            )

            path.addLine(to: pointOnCircle)
        }

        path.closeSubpath()
        return path
    }

    /// Pixel width of one stripe "bucket" (stripe width + spacing), scale-adjusted.
    /// Only meaningful (and only called) for the `.striped` style; returns 0 otherwise.
    private func stripeBucket(_ configuration: Waveform.Configuration) -> Int {
        if case let .striped(stripeConfig) = configuration.style {
            return Int(stripeConfig.width + stripeConfig.spacing) * Int(configuration.scale)
        } else {
            return 0
        }
    }
}
146 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImage/Renderers/LinearWaveformRenderer.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import CoreGraphics
3 |
4 | /**
5 | Draws a linear 2D amplitude envelope of the samples provided.
6 |
7 | Default `WaveformRenderer` used. Can be customized further via the configuration `Waveform.Style`.
8 | */
public struct LinearWaveformRenderer: WaveformRenderer {
    public init() {}

    /// Builds a closed path describing the waveform's amplitude envelope.
    ///
    /// For `.striped` styles a single pass draws both sides as vertical strokes;
    /// for all other styles the upper half is drawn left-to-right, then the lower
    /// half right-to-left (on the reversed samples) so the subpath closes cleanly.
    public func path(samples: [Float], with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position = .middle) -> CGPath {
        let graphRect = CGRect(origin: CGPoint.zero, size: configuration.size)
        // Vertical center line of the waveform, shifted per the requested position.
        let positionAdjustedGraphCenter = position.offset() * graphRect.size.height
        var path = CGMutablePath()

        path.move(to: CGPoint(x: 0, y: positionAdjustedGraphCenter))

        if case .striped = configuration.style {
            path = draw(samples: samples, path: path, with: configuration, lastOffset: lastOffset, sides: .both, position: position)
        } else {
            path = draw(samples: samples, path: path, with: configuration, lastOffset: lastOffset, sides: .up, position: position)
            path = draw(samples: samples.reversed(), path: path, with: configuration, lastOffset: lastOffset, sides: .down, position: position)
        }

        path.closeSubpath()
        return path
    }

    /// Renders `samples` into `context` and applies the shared default styling.
    public func render(samples: [Float], on context: CGContext, with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position = .middle) {
        context.addPath(path(samples: samples, with: configuration, lastOffset: lastOffset, position: position))
        defaultStyle(context: context, with: configuration)
    }

    /// Pixel width of one stripe "bucket" (stripe width + spacing), scale-adjusted.
    /// Only meaningful for the `.striped` style; returns 0 otherwise (callers guard on style first).
    private func stripeBucket(_ configuration: Waveform.Configuration) -> Int {
        if case let .striped(stripeConfig) = configuration.style {
            return Int(stripeConfig.width + stripeConfig.spacing) * Int(configuration.scale)
        } else {
            return 0
        }
    }

    /// Which side(s) of the center line a single `draw` pass renders.
    enum Sides {
        case up, down, both
    }

    /// Appends one pass of envelope points to `path` and returns it.
    /// - Note: `samples` are expected already reversed by the caller for the `.down` pass,
    ///   which is why `.down` mirrors the index via `samples.count - index`.
    private func draw(samples: [Float], path: CGMutablePath, with configuration: Waveform.Configuration, lastOffset: Int, sides: Sides, position: Waveform.Position = .middle) -> CGMutablePath {
        let graphRect = CGRect(origin: CGPoint.zero, size: configuration.size)
        let positionAdjustedGraphCenter = position.offset() * graphRect.size.height
        let drawMappingFactor = graphRect.size.height * configuration.verticalScalingFactor
        let minimumGraphAmplitude: CGFloat = 1 / configuration.scale // we want to see at least a 1px line for silence

        // Last x position actually drawn; used to re-anchor the path for striped styles.
        var lastXPos: CGFloat = 0

        for (index, sample) in samples.enumerated() {
            let adjustedIndex: Int
            switch sides {
            case .up, .both: adjustedIndex = index
            case .down: adjustedIndex = samples.count - index
            }

            var x = adjustedIndex + lastOffset
            if case .striped = configuration.style, x % Int(configuration.scale) != 0 || x % stripeBucket(configuration) != 0 {
                // skip sub-pixels - any x value not scale aligned
                // skip any point that is not a multiple of our bucket width (width + spacing)
                continue
            } else if case let .striped(config) = configuration.style {
                // ensure 1st stripe is drawn completely inside bounds and does not clip half way on the left side
                x += Int(config.width / 2 * configuration.scale)
            }

            let samplesNeeded = Int(configuration.size.width * configuration.scale)
            let xOffset = CGFloat(samplesNeeded - samples.count) / configuration.scale // When there's extra space, draw waveform on the right
            let xPos = (CGFloat(x - lastOffset) / configuration.scale) + xOffset
            let invertedDbSample = 1 - CGFloat(sample) // sample is in dB, linearly normalized to [0, 1] (1 -> -50 dB)
            let drawingAmplitude = max(minimumGraphAmplitude, invertedDbSample * drawMappingFactor)
            let drawingAmplitudeUp = positionAdjustedGraphCenter - drawingAmplitude
            let drawingAmplitudeDown = positionAdjustedGraphCenter + drawingAmplitude
            lastXPos = xPos

            switch sides {
            case .up:
                path.addLine(to: CGPoint(x: xPos, y: drawingAmplitudeUp))

            case .down:
                path.addLine(to: CGPoint(x: xPos, y: drawingAmplitudeDown))

            case .both:
                // Striped rendering: each sample is an isolated vertical stroke.
                path.move(to: CGPoint(x: xPos, y: drawingAmplitudeUp))
                path.addLine(to: CGPoint(x: xPos, y: drawingAmplitudeDown))
            }
        }

        if case .striped = configuration.style {
            // Return the pen to the center line so closeSubpath() doesn't connect stripes.
            path.move(to: CGPoint(x: lastXPos, y: positionAdjustedGraphCenter))
        }

        return path
    }
}
100 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImage/Renderers/WaveformRenderer.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import CoreGraphics
3 |
public extension WaveformRenderer {
    /**
     Default styling. Exposed publicly simply because it is re-used internally in the included renderers. May be useful
     if other renderers would like to stick to the default behavior.

     Consumes the context's current path: fills it, strokes it, or clips to it and
     draws a gradient, depending on `configuration.style`.
     */
    func defaultStyle(context: CGContext, with configuration: Waveform.Configuration) {
        // draw pixel-perfect by default
        context.setLineWidth(1.0 / configuration.scale)

        switch configuration.style {
        case let .filled(color):
            context.setFillColor(color.cgColor)
            context.fillPath()

        case let .outlined(color, lineWidth):
            context.setStrokeColor(color.cgColor)
            context.setLineWidth(lineWidth)
            context.setLineCap(.round)
            context.strokePath()

        case let .striped(config):
            context.setLineWidth(config.width)
            context.setLineCap(config.lineCap)
            context.setStrokeColor(config.color.cgColor)
            context.strokePath()

        case let .gradient(colors):
            // Fill the current path with a vertical gradient.
            context.clip()
            drawLinearGradient(colors, on: context, with: configuration)

        case let .gradientOutlined(colors, lineWidth):
            // Stroke the envelope outline, then fill the stroked region with a vertical gradient.
            context.setLineWidth(lineWidth)
            context.replacePathWithStrokedPath()
            context.setLineCap(.round)
            context.setLineJoin(.round)
            context.clip()
            drawLinearGradient(colors, on: context, with: configuration)
        }
    }

    /// Shared gradient fill used by `.gradient` and `.gradientOutlined` (previously duplicated inline).
    /// Expects the context to already be clipped to the desired region; draws a top-to-bottom
    /// linear gradient across the configured height.
    private func drawLinearGradient(_ colors: [DSColor], on context: CGContext, with configuration: Waveform.Configuration) {
        let cgColors = NSArray(array: colors.map { (color: DSColor) -> CGColor in color.cgColor }) as CFArray
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        let gradient = CGGradient(colorsSpace: colorSpace, colors: cgColors, locations: nil)!
        context.drawLinearGradient(gradient,
                                   start: CGPoint(x: 0, y: 0),
                                   end: CGPoint(x: 0, y: configuration.size.height),
                                   options: .drawsAfterEndLocation)
    }
}
56 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImage/TempiFFT.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TempiFFT.swift
3 | // TempiBeatDetection
4 | //
5 | // Created by John Scalo on 1/12/16.
6 | // Copyright © 2016 John Scalo. See accompanying License.txt for terms.
7 |
8 | /* A functional FFT built atop Apple's Accelerate framework for optimum performance on any device. In addition to simply performing the FFT and providing access to the resulting data, TempiFFT provides the ability to map the FFT spectrum data into logical bands, either linear or logarithmic, for further analysis.
9 |
10 | E.g.
11 |
12 | let fft = TempiFFT(withSize: frameSize, sampleRate: 44100)
13 |
14 | // Setting a window type reduces errors
15 | fft.windowType = TempiFFTWindowType.hanning
16 |
17 | // Perform the FFT
18 | fft.fftForward(samples)
19 |
20 | // Map FFT data to logical bands. This gives 4 bands per octave across 7 octaves = 28 bands.
21 | fft.calculateLogarithmicBands(minFrequency: 100, maxFrequency: 11025, bandsPerOctave: 4)
22 |
23 | // Process some data
24 | for i in 0.. Int {
228 | return Int(Float(self.magnitudes.count) * freq / self.nyquistFrequency)
229 | }
230 |
231 | // On arrays of 1024 elements, this is ~35x faster than an iterational algorithm. Thanks Accelerate.framework!
    // On arrays of 1024 elements, this is ~35x faster than an iterational algorithm. Thanks Accelerate.framework!
    // Computes the mean of array[startIdx..<stopIdx] via vDSP.
    // NOTE(review): assumes 0 <= startIdx < stopIdx <= array.count — a negative span traps in the
    // UInt conversion and an out-of-range index reads past the buffer; confirm at call sites.
    @inline(__always) private func fastAverage(_ array:[Float], _ startIdx: Int, _ stopIdx: Int) -> Float {
        var mean: Float = 0
        array.withUnsafeBufferPointer { arrayBP in
            // vDSP_meanv(input + offset, stride, &result, elementCount)
            vDSP_meanv(arrayBP.baseAddress! + startIdx, 1, &mean, UInt(stopIdx - startIdx))
        }

        return mean
    }
240 |
241 | @inline(__always) private func averageFrequencyInRange(_ startIndex: Int, _ endIndex: Int) -> Float {
242 | return (self.bandwidth * Float(startIndex) + self.bandwidth * Float(endIndex)) / 2
243 | }
244 |
    /// Get the magnitude for the specified frequency band.
    /// - Parameter inBand: The frequency band you want a magnitude for. Must be a valid index into
    ///   `bandMagnitudes`; out-of-range indices trap on the subscript.
    /// - Returns: The (linear) magnitude stored for that band.
    func magnitudeAtBand(_ inBand: Int) -> Float {
        // Debug-only guards; release builds rely on the array subscript trapping.
        assert(hasPerformedFFT, "*** Perform the FFT first.")
        assert(bandMagnitudes != nil, "*** Call calculateLinearBands() or calculateLogarithmicBands() first")

        return bandMagnitudes[inBand]
    }
253 |
    /// Get the magnitude of the requested frequency in the spectrum.
    /// - Parameter inFrequency: The requested frequency. Must be less than the Nyquist frequency (```sampleRate/2```).
    /// - Returns: A magnitude.
    ///
    /// NOTE(review): the computed index is not range-checked — a frequency at or above Nyquist
    /// produces an out-of-bounds subscript and traps; confirm callers respect the precondition.
    func magnitudeAtFrequency(_ inFrequency: Float) -> Float {
        assert(hasPerformedFFT, "*** Perform the FFT first.")
        // floor to the containing bin: bin index = frequency / bandwidth
        let index = Int(floorf(inFrequency / self.bandwidth ))
        return self.magnitudes[index]
    }
262 |
    /// Get the middle frequency of the Nth band.
    /// - Parameter inBand: An index where 0 <= inBand < size / 2.
    /// - Returns: The middle frequency of the provided band.
    func frequencyAtBand(_ inBand: Int) -> Float {
        // Debug-only sanity checks; bandFrequencies is populated by the band-calculation methods.
        assert(hasPerformedFFT, "*** Perform the FFT first.")
        assert(bandMagnitudes != nil, "*** Call calculateLinearBands() or calculateLogarithmicBands() first")
        return self.bandFrequencies[inBand]
    }
271 |
272 | /// A convenience function that converts a linear magnitude (like those stored in ```magnitudes```) to db (which is log 10).
273 | class func toDB(_ inMagnitude: Float) -> Float {
274 | // ceil to 128db in order to avoid log10'ing 0
275 | let magnitude = max(inMagnitude, 0.000000000001)
276 | return 10 * log10f(magnitude)
277 | }
278 | }
279 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImage/WaveformAnalyzer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // see
3 | // * http://www.davidstarke.com/2015/04/waveforms.html
4 | // * http://stackoverflow.com/questions/28626914
5 | // for very good explanations of the asset reading and processing path
6 | //
7 | // FFT done using: https://github.com/jscalo/tempi-fft
8 | //
9 |
10 | import Foundation
11 | import Accelerate
12 | import AVFoundation
13 |
/// Result bundle of a single analysis pass.
struct WaveformAnalysis {
    /// Downsampled amplitude envelope of the analyzed audio.
    let amplitudes: [Float]
    /// FFT slices of the audio; `nil` unless frequency bands were requested.
    let fft: [TempiFFT]?
}
18 |
/// Calculates the waveform of the initialized asset URL.
public struct WaveformAnalyzer: Sendable {
    /// Errors surfaced during analysis.
    /// - `generic`: unspecified failure.
    /// - `userError`: invalid caller input (e.g. a non-positive sample count).
    /// - `emptyTracks`: the asset contains no audio tracks.
    /// - `readerError`: the underlying `AVAssetReader` finished in a non-completed state.
    public enum AnalyzeError: Error { case generic, userError, emptyTracks, readerError(AVAssetReader.Status) }

    /// Everything below this noise floor cutoff will be clipped and interpreted as silence. Default is `-50.0`.
    public var noiseFloorDecibelCutoff: Float = -50.0

    public init() {}

    /// Calculates the amplitude envelope of the initialized audio asset URL, downsampled to the required `count` amount of samples.
    /// - Parameter fromAudioAt: local filesystem URL of the audio file to process.
    /// - Parameter count: amount of samples to be calculated. Downsamples.
    /// - Parameter qos: QoS of the DispatchQueue the calculations are performed (and returned) on.
    /// - Throws: `AnalyzeError.emptyTracks` when the asset has no audio track, plus any error thrown while reading.
    public func samples(fromAudioAt audioAssetURL: URL, count: Int, qos: DispatchQoS.QoSClass = .userInitiated) async throws -> [Float] {
        // Wrap in an unstructured Task so the CPU-heavy reader loop runs at a priority
        // mapped from the requested QoS rather than inheriting the caller's.
        try await Task(priority: taskPriority(qos: qos)) {
            let audioAsset = AVURLAsset(url: audioAssetURL, options: [AVURLAssetPreferPreciseDurationAndTimingKey: true])
            let assetReader = try AVAssetReader(asset: audioAsset)

            guard let assetTrack = try await audioAsset.loadTracks(withMediaType: .audio).first else {
                throw AnalyzeError.emptyTracks
            }

            return try await waveformSamples(track: assetTrack, reader: assetReader, count: count, fftBands: nil).amplitudes
        }.value
    }

    /// Calculates the amplitude envelope of the initialized audio asset URL, downsampled to the required `count` amount of samples.
    /// - Parameter fromAudioAt: local filesystem URL of the audio file to process.
    /// - Parameter count: amount of samples to be calculated. Downsamples.
    /// - Parameter qos: QoS of the DispatchQueue the calculations are performed (and returned) on.
    /// - Parameter completionHandler: called from a background thread. Returns the sampled result `[Float]` or `Error`.
    ///
    /// Calls the completionHandler on a background thread.
    @available(*, deprecated, renamed: "samples(fromAudioAt:count:qos:)")
    public func samples(fromAudioAt audioAssetURL: URL, count: Int, qos: DispatchQoS.QoSClass = .userInitiated, completionHandler: @escaping (Result<[Float], Error>) -> ()) {
        // Bridges the async API to the legacy completion-handler form.
        Task {
            do {
                let samples = try await samples(fromAudioAt: audioAssetURL, count: count, qos: qos)
                completionHandler(.success(samples))
            } catch {
                completionHandler(.failure(error))
            }
        }
    }
}
64 |
65 | // MARK: - Private
66 |
67 | fileprivate extension WaveformAnalyzer {
68 | func waveformSamples(
69 | track audioAssetTrack: AVAssetTrack,
70 | reader assetReader: AVAssetReader,
71 | count requiredNumberOfSamples: Int,
72 | fftBands: Int?
73 | ) async throws -> WaveformAnalysis {
74 | guard requiredNumberOfSamples > 0 else {
75 | throw AnalyzeError.userError
76 | }
77 |
78 | let trackOutput = AVAssetReaderTrackOutput(track: audioAssetTrack, outputSettings: outputSettings())
79 | assetReader.add(trackOutput)
80 |
81 | let totalSamples = try await totalSamples(of: audioAssetTrack)
82 | let analysis = extract(totalSamples, downsampledTo: requiredNumberOfSamples, from: assetReader, fftBands: fftBands)
83 |
84 | switch assetReader.status {
85 | case .completed:
86 | return analysis
87 | default:
88 | print("ERROR: reading waveform audio data has failed \(assetReader.status)")
89 | throw AnalyzeError.readerError(assetReader.status)
90 | }
91 | }
92 |
93 | func extract(
94 | _ totalSamples: Int,
95 | downsampledTo targetSampleCount: Int,
96 | from assetReader: AVAssetReader,
97 | fftBands: Int?
98 | ) -> WaveformAnalysis {
99 | var outputSamples = [Float]()
100 | var outputFFT = fftBands == nil ? nil : [TempiFFT]()
101 | var sampleBuffer = Data()
102 | var sampleBufferFFT = Data()
103 |
104 | // read upfront to avoid frequent re-calculation (and memory bloat from C-bridging)
105 | let samplesPerPixel = max(1, totalSamples / targetSampleCount)
106 | let samplesPerFFT = 4096 // ~100ms at 44.1kHz, rounded to closest pow(2) for FFT
107 |
108 | assetReader.startReading()
109 | while assetReader.status == .reading {
110 | let trackOutput = assetReader.outputs.first!
111 |
112 | guard let nextSampleBuffer = trackOutput.copyNextSampleBuffer(),
113 | let blockBuffer = CMSampleBufferGetDataBuffer(nextSampleBuffer) else {
114 | break
115 | }
116 |
117 | var readBufferLength = 0
118 | var readBufferPointer: UnsafeMutablePointer? = nil
119 | CMBlockBufferGetDataPointer(blockBuffer, atOffset: 0, lengthAtOffsetOut: &readBufferLength, totalLengthOut: nil, dataPointerOut: &readBufferPointer)
120 | sampleBuffer.append(UnsafeBufferPointer(start: readBufferPointer, count: readBufferLength))
121 | if fftBands != nil {
122 | // don't append data to this buffer unless we're going to use it.
123 | sampleBufferFFT.append(UnsafeBufferPointer(start: readBufferPointer, count: readBufferLength))
124 | }
125 | CMSampleBufferInvalidate(nextSampleBuffer)
126 |
127 | let processedSamples = process(sampleBuffer, from: assetReader, downsampleTo: samplesPerPixel)
128 | outputSamples += processedSamples
129 |
130 | if processedSamples.count > 0 {
131 | // vDSP_desamp uses strides of samplesPerPixel; remove only the processed ones
132 | sampleBuffer.removeFirst(processedSamples.count * samplesPerPixel * MemoryLayout.size)
133 |
134 | // this takes care of a memory leak where Memory continues to increase even though it should clear after calling .removeFirst(…) above.
135 | sampleBuffer = Data(sampleBuffer)
136 | }
137 |
138 | if let fftBands = fftBands, sampleBufferFFT.count / MemoryLayout.size >= samplesPerFFT {
139 | let processedFFTs = process(sampleBufferFFT, samplesPerFFT: samplesPerFFT, fftBands: fftBands)
140 | sampleBufferFFT.removeFirst(processedFFTs.count * samplesPerFFT * MemoryLayout.size)
141 | outputFFT? += processedFFTs
142 | }
143 | }
144 |
145 | // if we don't have enough pixels yet,
146 | // process leftover samples with padding (to reach multiple of samplesPerPixel for vDSP_desamp)
147 | if outputSamples.count < targetSampleCount {
148 | let missingSampleCount = (targetSampleCount - outputSamples.count) * samplesPerPixel
149 | let backfillPaddingSampleCount = missingSampleCount - (sampleBuffer.count / MemoryLayout.size)
150 | let backfillPaddingSampleCount16 = backfillPaddingSampleCount * MemoryLayout.size
151 | let backfillPaddingSamples = [UInt8](repeating: 0, count: backfillPaddingSampleCount16)
152 | sampleBuffer.append(backfillPaddingSamples, count: backfillPaddingSampleCount16)
153 | let processedSamples = process(sampleBuffer, from: assetReader, downsampleTo: samplesPerPixel)
154 | outputSamples += processedSamples
155 | }
156 |
157 | let targetSamples = Array(outputSamples[0.. [Float] {
162 | var downSampledData = [Float]()
163 | let sampleLength = sampleBuffer.count / MemoryLayout.size
164 |
165 | // guard for crash in very long audio files
166 | guard sampleLength / samplesPerPixel > 0 else { return downSampledData }
167 |
168 | sampleBuffer.withUnsafeBytes { (samplesRawPointer: UnsafeRawBufferPointer) in
169 | let unsafeSamplesBufferPointer = samplesRawPointer.bindMemory(to: Int16.self)
170 | let unsafeSamplesPointer = unsafeSamplesBufferPointer.baseAddress!
171 | var loudestClipValue: Float = 0.0
172 | var quietestClipValue = noiseFloorDecibelCutoff
173 | var zeroDbEquivalent: Float = Float(Int16.max) // maximum amplitude storable in Int16 = 0 Db (loudest)
174 | let samplesToProcess = vDSP_Length(sampleLength)
175 |
176 | var processingBuffer = [Float](repeating: 0.0, count: Int(samplesToProcess))
177 | vDSP_vflt16(unsafeSamplesPointer, 1, &processingBuffer, 1, samplesToProcess) // convert 16bit int to float (
178 | vDSP_vabs(processingBuffer, 1, &processingBuffer, 1, samplesToProcess) // absolute amplitude value
179 | vDSP_vdbcon(processingBuffer, 1, &zeroDbEquivalent, &processingBuffer, 1, samplesToProcess, 1) // convert to DB
180 | vDSP_vclip(processingBuffer, 1, &quietestClipValue, &loudestClipValue, &processingBuffer, 1, samplesToProcess)
181 |
182 | let filter = [Float](repeating: 1.0 / Float(samplesPerPixel), count: samplesPerPixel)
183 | let downSampledLength = sampleLength / samplesPerPixel
184 | downSampledData = [Float](repeating: 0.0, count: downSampledLength)
185 |
186 | vDSP_desamp(processingBuffer,
187 | vDSP_Stride(samplesPerPixel),
188 | filter,
189 | &downSampledData,
190 | vDSP_Length(downSampledLength),
191 | vDSP_Length(samplesPerPixel))
192 | }
193 |
194 | return downSampledData
195 | }
196 |
197 | private func process(_ sampleBuffer: Data, samplesPerFFT: Int, fftBands: Int) -> [TempiFFT] {
198 | var ffts = [TempiFFT]()
199 | let sampleLength = sampleBuffer.count / MemoryLayout.size
200 | sampleBuffer.withUnsafeBytes { (samplesRawPointer: UnsafeRawBufferPointer) in
201 | let unsafeSamplesBufferPointer = samplesRawPointer.bindMemory(to: Int16.self)
202 | let unsafeSamplesPointer = unsafeSamplesBufferPointer.baseAddress!
203 | let samplesToProcess = vDSP_Length(sampleLength)
204 |
205 | var processingBuffer = [Float](repeating: 0.0, count: Int(samplesToProcess))
206 | vDSP_vflt16(unsafeSamplesPointer, 1, &processingBuffer, 1, samplesToProcess) // convert 16bit int to float
207 |
208 | repeat {
209 | let fftBuffer = processingBuffer[0..= samplesPerFFT
218 | }
219 | return ffts
220 | }
221 |
222 | func normalize(_ samples: [Float]) -> [Float] {
223 | samples.map { $0 / noiseFloorDecibelCutoff }
224 | }
225 |
226 | private func totalSamples(of audioAssetTrack: AVAssetTrack) async throws -> Int {
227 | var totalSamples = 0
228 | let (descriptions, timeRange) = try await audioAssetTrack.load(.formatDescriptions, .timeRange)
229 |
230 | descriptions.forEach { formatDescription in
231 | guard let basicDescription = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription) else { return }
232 | let channelCount = Int(basicDescription.pointee.mChannelsPerFrame)
233 | let sampleRate = basicDescription.pointee.mSampleRate
234 | totalSamples = Int(sampleRate * timeRange.duration.seconds) * channelCount
235 | }
236 | return totalSamples
237 | }
238 | }
239 |
240 | // MARK: - Configuration
241 |
private extension WaveformAnalyzer {
    /// Output settings for the `AVAssetReaderTrackOutput`: interleaved 16-bit little-endian integer linear PCM.
    func outputSettings() -> [String: Any] {
        [
            AVFormatIDKey: kAudioFormatLinearPCM,
            AVLinearPCMBitDepthKey: 16,
            AVLinearPCMIsBigEndianKey: false,
            AVLinearPCMIsFloatKey: false,
            AVLinearPCMIsNonInterleaved: false
        ]
    }

    /// Maps a GCD QoS class onto the closest Swift concurrency `TaskPriority`.
    func taskPriority(qos: DispatchQoS.QoSClass) -> TaskPriority {
        switch qos {
        case .background: return .background
        case .utility: return .utility
        case .default: return .medium
        case .userInitiated: return .userInitiated
        case .userInteractive: return .high
        case .unspecified: return .medium
        @unknown default: return .medium
        }
    }
}
265 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImage/WaveformImageDrawer+iOS.swift:
--------------------------------------------------------------------------------
1 | #if os(iOS) || swift(>=5.9) && os(visionOS)
2 | import Foundation
3 | import AVFoundation
4 | import UIKit
5 | import CoreGraphics
6 |
public extension WaveformImageDrawer {
    /// Renders a DSImage of the provided waveform samples.
    ///
    /// Samples need to be normalized within interval `(0...1)`.
    /// Returns `nil` (and logs) when the sample count does not match `size.width * scale`.
    func waveformImage(from samples: [Float], with configuration: Waveform.Configuration, renderer: WaveformRenderer, position: Waveform.Position = .middle) -> DSImage? {
        let requiredSampleCount = Int(configuration.size.width * configuration.scale)
        guard !samples.isEmpty, samples.count == requiredSampleCount else {
            print("ERROR: samples: \(samples.count) != \(configuration.size.width) * \(configuration.scale)")
            return nil
        }

        let format = UIGraphicsImageRendererFormat()
        format.scale = configuration.scale
        let dampedSamples = configuration.shouldDamp ? damp(samples, with: configuration) : samples

        return UIGraphicsImageRenderer(size: configuration.size, format: format).image { renderContext in
            draw(on: renderContext.cgContext, from: dampedSamples, with: configuration, renderer: renderer, position: position)
        }
    }
}
27 | #endif
28 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImage/WaveformImageDrawer+macOS.swift:
--------------------------------------------------------------------------------
1 | #if os(macOS)
2 | import Foundation
3 | import AVFoundation
4 | import AppKit
5 | import CoreGraphics
6 |
public extension WaveformImageDrawer {
    /// Renders a DSImage of the provided waveform samples.
    ///
    /// Samples need to be normalized within interval `(0...1)`.
    /// Returns `nil` (and logs) when the sample count does not match `size.width * scale`.
    func waveformImage(from samples: [Float], with configuration: Waveform.Configuration, renderer: WaveformRenderer, position: Waveform.Position = .middle) -> DSImage? {
        let requiredSampleCount = Int(configuration.size.width * configuration.scale)
        guard !samples.isEmpty, samples.count == requiredSampleCount else {
            print("ERROR: samples: \(samples.count) != \(configuration.size.width) * \(configuration.scale)")
            return nil
        }

        let dampedSamples = configuration.shouldDamp ? damp(samples, with: configuration) : samples

        // NSImage's drawing-handler initializer draws into the current graphics context.
        return NSImage(size: configuration.size, flipped: false) { rect in
            guard let context = NSGraphicsContext.current?.cgContext else {
                fatalError("Missing context")
            }
            self.draw(on: context, from: dampedSamples, with: configuration, renderer: renderer, position: position)
            return true
        }
    }
}
27 | #endif
28 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImage/WaveformImageDrawer.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import AVFoundation
3 | import CoreGraphics
4 |
/// Renders a DSImage of the waveform data calculated by the analyzer.
public class WaveformImageDrawer: ObservableObject {
    public enum GenerationError: Error { case generic }

    public init() {}

    /// only internal; determines whether to draw silence lines in live mode.
    public var shouldDrawSilencePadding: Bool = false

    /// Makes sure we always look at the same samples while animating
    private var lastOffset: Int = 0

    /// Keep track of how many samples we are adding each draw cycle
    private var lastSampleCount: Int = 0

    /// Async analyzes the provided audio and renders a DSImage of the waveform data calculated by the analyzer.
    /// - Parameter fromAudioAt: local filesystem URL of the audio file to process.
    /// - Parameter with: `Waveform.Configuration` to be used.
    /// - Parameter renderer: optional `WaveformRenderer` to adapt how the waveform shall be rendered.
    /// - Parameter position: vertical position of the waveform inside the image. Default is `.middle`.
    /// - Parameter qos: QoS of the DispatchQueue the calculations are performed (and returned) on.
    ///
    /// Returns the image on a background thread.
    public func waveformImage(fromAudioAt audioAssetURL: URL,
                              with configuration: Waveform.Configuration,
                              renderer: WaveformRenderer = LinearWaveformRenderer(),
                              position: Waveform.Position = .middle,
                              qos: DispatchQoS.QoSClass = .userInitiated) async throws -> DSImage {
        try await render(fromAudioAt: audioAssetURL, with: configuration, renderer: renderer, qos: qos, position: position)
    }

    /// Async analyzes the provided audio and renders a DSImage of the waveform data calculated by the analyzer.
    /// - Parameter fromAudioAt: local filesystem URL of the audio file to process.
    /// - Parameter with: `Waveform.Configuration` to be used.
    /// - Parameter renderer: optional `WaveformRenderer` to adapt how the waveform shall be rendered.
    /// - Parameter qos: QoS of the DispatchQueue the calculations are performed (and returned) on.
    /// - Parameter completionHandler: called from a background thread. Returns the sampled result `DSImage` or `Error`.
    ///
    /// Calls the completionHandler on a background thread.
    @available(*, deprecated, renamed: "waveformImage(fromAudioAt:with:renderer:qos:)")
    public func waveformImage(fromAudioAt audioAssetURL: URL,
                              with configuration: Waveform.Configuration,
                              renderer: WaveformRenderer = LinearWaveformRenderer(),
                              qos: DispatchQoS.QoSClass = .userInitiated,
                              position: Waveform.Position = .middle,
                              // restored stripped generic arguments: the body passes a DSImage on success
                              completionHandler: @escaping (Result<DSImage, Error>) -> ()) {
        Task {
            do {
                let image = try await render(fromAudioAt: audioAssetURL, with: configuration, renderer: renderer, qos: qos, position: position)
                completionHandler(.success(image))
            } catch {
                completionHandler(.failure(error))
            }
        }
    }
}
60 |
61 | extension WaveformImageDrawer {
62 | /// Renders the waveform from the provided samples into the provided `CGContext`.
63 | ///
64 | /// Samples need to be normalized within interval `(0...1)`.
65 | /// Ensure context size & scale match with the configuration's size & scale.
66 | public func draw(waveform samples: [Float], on context: CGContext, with configuration: Waveform.Configuration, renderer: WaveformRenderer, position: Waveform.Position = .middle) {
67 | guard samples.count > 0 || shouldDrawSilencePadding else {
68 | return
69 | }
70 |
71 | let samplesNeeded = Int(configuration.size.width * configuration.scale)
72 |
73 | let newSampleCount: Int = lastSampleCount > samples.count
74 | ? samples.count // this implies that we have reset drawing an are starting over
75 | : samples.count - lastSampleCount
76 |
77 | lastSampleCount = samples.count
78 |
79 | // Reset the cumulative lastOffset when new drawing begins
80 | if samples.count == newSampleCount {
81 | lastOffset = 0
82 | }
83 |
84 | if case .striped = configuration.style {
85 | if shouldDrawSilencePadding {
86 | lastOffset = (lastOffset + newSampleCount) % stripeBucket(configuration)
87 | } else if samples.count >= samplesNeeded {
88 | lastOffset = (lastOffset + min(newSampleCount, samples.count - samplesNeeded)) % stripeBucket(configuration)
89 | }
90 | }
91 |
92 | // move the window, so that its always at the end (appears to move from right to left)
93 | let startSample = max(0, samples.count - samplesNeeded)
94 | let clippedSamples = Array(samples[startSample.. [Float] {
112 | guard let damping = configuration.damping, damping.percentage > 0 else {
113 | return samples
114 | }
115 |
116 | let count = Float(samples.count)
117 | return samples.enumerated().map { x, value -> Float in
118 | 1 - ((1 - value) * dampFactor(x: Float(x), count: count, with: damping))
119 | }
120 | }
121 | }
122 |
123 | // MARK: Image generation
124 |
private extension WaveformImageDrawer {
    /// Analyzes the audio at `audioAssetURL` and renders the resulting samples into a waveform image.
    /// Throws `GenerationError.generic` when image rendering fails.
    func render(
        fromAudioAt audioAssetURL: URL,
        with configuration: Waveform.Configuration,
        renderer: WaveformRenderer,
        qos: DispatchQoS.QoSClass,
        position: Waveform.Position
    ) async throws -> DSImage {
        // one sample per rendered pixel
        let sampleCount = Int(configuration.size.width * configuration.scale)
        let samples = try await WaveformAnalyzer().samples(fromAudioAt: audioAssetURL, count: sampleCount, qos: qos)
        let dampedSamples = configuration.shouldDamp ? self.damp(samples, with: configuration) : samples

        guard let image = waveformImage(from: dampedSamples, with: configuration, renderer: renderer, position: position) else {
            throw GenerationError.generic
        }
        return image
    }

    /// Fills the entire drawing area with the configured background color.
    private func drawBackground(on context: CGContext, with configuration: Waveform.Configuration) {
        context.setFillColor(configuration.backgroundColor.cgColor)
        context.fill(CGRect(origin: .zero, size: configuration.size))
    }
}
150 |
151 | // MARK: - Helpers
152 |
private extension WaveformImageDrawer {
    /// Number of whole stripes that fit across the rendered width; `0` for non-striped styles.
    private func stripeCount(_ configuration: Waveform.Configuration) -> Int {
        guard case .striped = configuration.style else { return 0 }
        return Int(configuration.size.width * configuration.scale) / stripeBucket(configuration)
    }

    /// Pixel width of one stripe "bucket" (stripe + spacing); `0` for non-striped styles.
    private func stripeBucket(_ configuration: Waveform.Configuration) -> Int {
        guard case let .striped(stripeConfig) = configuration.style else { return 0 }
        return Int(stripeConfig.width + stripeConfig.spacing) * Int(configuration.scale)
    }

    /// Damping multiplier for sample index `x` out of `count`, applied near the configured edge(s).
    private func dampFactor(x: Float, count: Float, with damping: Waveform.Damping) -> Float {
        // number of samples covered by each damped edge region
        let dampedRegion = count * damping.percentage

        if (damping.sides == .left || damping.sides == .both) && x < dampedRegion {
            // increasing damping within the left region, basically (x : 1/8) with x in (0..<1/8) by default
            return damping.easing(x / dampedRegion)
        } else if (damping.sides == .right || damping.sides == .both) && x > ((1 / damping.percentage) - 1) * dampedRegion {
            // decaying damping within the right region; x is shifted so the region maps back onto (0...1)
            return damping.easing(1 - (x - (((1 / damping.percentage) - 1) * dampedRegion)) / dampedRegion)
        }
        return 1
    }
}
183 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImage/WaveformImageTypes.swift:
--------------------------------------------------------------------------------
1 | import AVFoundation
2 |
#if os(macOS)
import AppKit

/// Platform-neutral color type (NSColor on macOS, UIColor elsewhere).
public typealias DSColor = NSColor
/// Platform-neutral image type (NSImage on macOS, UIImage elsewhere).
public typealias DSImage = NSImage
/// Platform-neutral access to the main screen's pixel scale factor (falls back to `1`).
public enum DSScreen {
    public static var scale: CGFloat { NSScreen.main?.backingScaleFactor ?? 1 }
}
#else
import UIKit

/// Platform-neutral color type (NSColor on macOS, UIColor elsewhere).
public typealias DSColor = UIColor
/// Platform-neutral image type (NSImage on macOS, UIImage elsewhere).
public typealias DSImage = UIImage
/// Platform-neutral access to the main screen's pixel scale factor (falls back to `1`).
public enum DSScreen {
    public static var scale: CGFloat {
        #if swift(>=5.9) && os(visionOS)
        // on visionOS the scale is derived from the first connected window scene instead of UIScreen
        return (UIApplication.shared.connectedScenes.first(where: {$0 is UIWindowScene}) as? UIWindowScene)?.traitCollection.displayScale ?? 1
        #else
        return UIScreen.main.scale
        #endif
    }
}
#endif
26 |
/**
 Renders the waveform samples on the provided `CGContext`.

 Default implementations are `LinearWaveformRenderer` and `CircularWaveformRenderer`.
 Check out those if you'd like to implement your own custom renderer.
 */
public protocol WaveformRenderer: Sendable {

    /**
     Calculates a CGPath from the waveform samples.

     - Parameters:
        - samples: `[Float]` of the amplitude envelope to be drawn, normalized to interval `(0...1)`. `0` is maximum (typically `0dB`).
          `1` is the noise floor, typically `-50dB`, as defined in `WaveformAnalyzer.noiseFloorDecibelCutoff`.
        - lastOffset: You can typically leave this `0`. **Required for live rendering**, where it is needed to keep track of the last drawing cycle. Setting it avoids 'flickering' as samples are being added
          continuously and the waveform moves across the view.
     */
    func path(samples: [Float], with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position) -> CGPath

    /**
     Renders the waveform samples on the provided `CGContext`.

     - Parameters:
        - samples: `[Float]` of the amplitude envelope to be drawn, normalized to interval `(0...1)`. `0` is maximum (typically `0dB`).
          `1` is the noise floor, typically `-50dB`, as defined in `WaveformAnalyzer.noiseFloorDecibelCutoff`.
        - with configuration: The desired configuration to be used for drawing.
        - lastOffset: You can typically leave this `0`. **Required for live rendering**, where it is needed to keep track of the last drawing cycle. Setting it avoids 'flickering' as samples are being added
          continuously and the waveform moves across the view.
     */
    func render(samples: [Float], on context: CGContext, with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position)
}
58 |
public extension WaveformRenderer {
    /// Convenience overload supplying the default `position` of `.middle`.
    /// NOTE(review): forwards to the protocol requirement of the same signature; a conforming type
    /// that does not implement the requirement would make this default implementation call itself —
    /// confirm all conformers provide their own implementation.
    func path(samples: [Float], with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position = .middle) -> CGPath {
        path(samples: samples, with: configuration, lastOffset: lastOffset, position: position)
    }

    /// Convenience overload supplying the default `position` of `.middle`.
    func render(samples: [Float], on context: CGContext, with configuration: Waveform.Configuration, lastOffset: Int, position: Waveform.Position = .middle) {
        render(samples: samples, on: context, with: configuration, lastOffset: lastOffset, position: position)
    }
}
68 |
69 | public enum Waveform {
70 | /** Position of the drawn waveform. */
71 | public enum Position: Equatable {
72 | /// **top**: Draws the waveform at the top of the image, such that only the bottom 50% are visible.
73 | case top
74 |
75 | /// **middle**: Draws the waveform in the middle the image, such that the entire waveform is visible.
76 | case middle
77 |
78 | /// **bottom**: Draws the waveform at the bottom of the image, such that only the top 50% are visible.
79 | case bottom
80 |
81 | /// **custom**: Draws the waveform at the specified point of the image. Clamped within range `(0...1)`. Where `0`
82 | /// is equal to `.top`, `0.5` is equal to `.middle` and `1` is equal to `.bottom`.
83 | case custom(CGFloat)
84 |
85 | func offset() -> CGFloat {
86 | switch self {
87 | case .top: return 0.0
88 | case .middle: return 0.5
89 | case .bottom: return 1.0
90 | case let .custom(offset): return min(1, max(0, offset))
91 | }
92 | }
93 | }
94 |
    /**
     Style of the waveform which is used during drawing:
     - **filled**: Use solid color for the waveform.
     - **outlined**: Draws the envelope as an outline with the provided thickness.
     - **gradient**: Use gradient based on color for the waveform.
     - **gradientOutlined**: Use gradient based on color for the waveform. Draws the envelope as an outline with the provided thickness.
     - **striped**: Use striped filling based on color for the waveform.
     */
    public enum Style: Equatable, Sendable {
        /// Configuration of the stripes used by the `.striped` style.
        public struct StripeConfig: Equatable, Sendable {
            /// Color of the waveform stripes. Default is clear.
            public let color: DSColor

            /// Width of stripes drawn. Default is `1`
            public let width: CGFloat

            /// Space between stripes. Default is `5`
            public let spacing: CGFloat

            /// Line cap style. Default is `.round`.
            public let lineCap: CGLineCap

            public init(color: DSColor, width: CGFloat = 1, spacing: CGFloat = 5, lineCap: CGLineCap = .round) {
                self.color = color
                self.width = width
                self.spacing = spacing
                self.lineCap = lineCap
            }
        }

        /// Solid single-color fill.
        case filled(DSColor)
        /// Stroked envelope outline with the given line width.
        case outlined(DSColor, CGFloat)
        /// Vertical gradient fill built from the given colors.
        case gradient([DSColor])
        /// Gradient-filled envelope outline with the given line width.
        case gradientOutlined([DSColor], CGFloat)
        /// Stripes drawn according to the given `StripeConfig`.
        case striped(StripeConfig)
    }
131 |
132 | /**
133 | Defines the damping attributes of the waveform.
134 | */
135 | public struct Damping: Equatable, Sendable {
/// Which side(s) of the waveform graph damping is applied to.
public enum Sides: Equatable, Sendable {
    /// Damp only the beginning (left edge) of the graph.
    case left
    /// Damp only the end (right edge) of the graph.
    case right
    /// Damp both edges of the graph.
    case both
}
141 |
/// Determines the percentage of the resulting graph to be damped.
///
/// Accepted range is `(0...0.5)` (enforced by `init`); keep it below `0.5`
/// to leave an undamped area in the middle.
/// Default is `0.125`
public let percentage: Float

/// Determines which sides of the graph to damp.
/// Default is `.both`
public let sides: Sides

/// Easing function to be used. Default is `pow(x, 2)`.
public let easing: @Sendable (Float) -> Float
154 |
/// Creates a new `Damping` configuration.
/// - Parameters:
///   - percentage: Fraction of the graph to be damped on the configured sides.
///     Accepted range is `(0...0.5)`. Default is `0.125`.
///   - sides: Which sides of the graph to damp. Default is `.both`.
///   - easing: Easing function applied along the damping ramp. Default is `pow(x, 2)`.
/// - Note: An out-of-range `percentage` is a programmer error and traps.
public init(percentage: Float = 0.125, sides: Sides = .both, easing: @escaping @Sendable (Float) -> Float = { x in pow(x, 2) }) {
    guard (0...0.5).contains(percentage) else {
        // Fix: message previously claimed `(0..<0.5)` although the guard above accepts 0.5.
        preconditionFailure("dampingPercentage must be within (0...0.5)")
    }

    self.percentage = percentage
    self.sides = sides
    self.easing = easing
}
164 |
/// Returns a copy of this `Waveform.Damping`, replacing only the parameters that were provided.
public func with(percentage: Float? = nil, sides: Sides? = nil, easing: (@Sendable (Float) -> Float)? = nil) -> Damping {
    Damping(
        percentage: percentage ?? self.percentage,
        sides: sides ?? self.sides,
        easing: easing ?? self.easing
    )
}
169 |
170 | public static func == (lhs: Waveform.Damping, rhs: Waveform.Damping) -> Bool {
171 | // poor-man's way to make two closures Equatable w/o too much hassle
172 | let randomEqualitySample = Float.random(in: (0.. 1`: louder waveform samples will extend out of the view boundaries and clip.
203 | */
/// Vertical scaling factor applied when drawing. Must be greater than `0` (enforced in `init`).
/// Default is `0.95`. Per the (partially garbled) original documentation, values `> 1` let louder
/// samples extend out of the view boundaries and clip.
public let verticalScalingFactor: CGFloat

/// Waveform antialiasing. If enabled, may reduce overall opacity. Default is `false`.
public let shouldAntialias: Bool

/// `true` when a `damping` configuration is present, i.e. damping should be applied.
public var shouldDamp: Bool {
    damping != nil
}
212 |
/// Creates a new drawing configuration.
/// - Note: `verticalScalingFactor` must be greater than `0`; violating this is a programmer error.
public init(size: CGSize = .zero,
            backgroundColor: DSColor = DSColor.clear,
            style: Style = .gradient([DSColor.black, DSColor.gray]),
            damping: Damping? = nil,
            scale: CGFloat = DSScreen.scale,
            verticalScalingFactor: CGFloat = 0.95,
            shouldAntialias: Bool = false) {
    guard verticalScalingFactor > 0 else {
        preconditionFailure("verticalScalingFactor must be greater 0")
    }

    // Assign in declaration order for readability.
    self.size = size
    self.backgroundColor = backgroundColor
    self.style = style
    self.damping = damping
    self.scale = scale
    self.verticalScalingFactor = verticalScalingFactor
    self.shouldAntialias = shouldAntialias
}
232 |
/// Build a new `Waveform.Configuration`, falling back to the current value
/// for every parameter that was not provided.
public func with(size: CGSize? = nil,
                 backgroundColor: DSColor? = nil,
                 style: Style? = nil,
                 damping: Damping? = nil,
                 scale: CGFloat? = nil,
                 verticalScalingFactor: CGFloat? = nil,
                 shouldAntialias: Bool? = nil
) -> Configuration {
    Configuration(size: size ?? self.size,
                  backgroundColor: backgroundColor ?? self.backgroundColor,
                  style: style ?? self.style,
                  damping: damping ?? self.damping,
                  scale: scale ?? self.scale,
                  verticalScalingFactor: verticalScalingFactor ?? self.verticalScalingFactor,
                  shouldAntialias: shouldAntialias ?? self.shouldAntialias)
}
252 | }
253 | }
254 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImageViews/SwiftUI/DefaultShapeStyler.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import DSWaveformImage
3 | import SwiftUI
4 |
/// Applies the `Waveform.Configuration.style` to a `WaveformShape` as the default SwiftUI styling.
struct DefaultShapeStyler {
    @ViewBuilder
    func style(shape: WaveformShape, with configuration: Waveform.Configuration) -> some View {
        switch configuration.style {
        case let .filled(fillColor):
            shape.fill(Color(fillColor))

        case let .outlined(strokeColor, thickness):
            shape.stroke(
                Color(strokeColor),
                style: StrokeStyle(lineWidth: thickness, lineCap: .round)
            )

        case let .gradient(gradientColors):
            shape.fill(linearGradient(from: gradientColors))

        case let .gradientOutlined(gradientColors, thickness):
            shape.stroke(
                linearGradient(from: gradientColors),
                style: StrokeStyle(lineWidth: thickness, lineCap: .round)
            )

        case let .striped(stripeConfig):
            shape.stroke(
                Color(stripeConfig.color),
                style: StrokeStyle(lineWidth: stripeConfig.width, lineCap: stripeConfig.lineCap)
            )
        }
    }

    /// Bottom-to-top linear gradient built from the configured colors.
    private func linearGradient(from colors: [DSColor]) -> LinearGradient {
        LinearGradient(colors: colors.map(Color.init), startPoint: .bottom, endPoint: .top)
    }
}
45 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImageViews/SwiftUI/VersionMigrations.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | // workaround for crashes in iOS 15 when using #available in ViewBuilders
4 | // see https://developer.apple.com/forums/thread/650818
5 | // not sure if this is still relevant, but keeping it due to its obscurity when it occurs
6 | // and because I cannot verify that it does not happen anymore due to lack of devices
/// Defers construction of its content until `body` is evaluated.
/// (Workaround for iOS 15 crashes when using `#available` inside ViewBuilders.)
public struct LazyContent<Content: View>: View {
    // Restored generic parameter clause `<Content: View>` — it was stripped from the dump,
    // but is required since `content` produces a `Content`.
    let content: () -> Content

    public init(@ViewBuilder content: @escaping () -> Content) {
        self.content = content
    }

    public var body: some View {
        content()
    }
}
18 |
19 | // This is here to support visionOS / iOS 17 and remove the deprecation warning relating about the usage of
20 | // @available(visionOS, deprecated: 1.0, message: "Use `onChange` with a two or zero parameter action closure instead.")
21 | // @inlinable public func onChange(of value: V, perform action: @escaping (_ newValue: V) -> Void) -> some View where V : Equatable
/// Version-bridging `onChange` modifier: uses the two-parameter API on iOS 17 / macOS 14 / visionOS 1
/// and falls back to the deprecated single-parameter API on older OS versions.
public struct OnChange<V: Equatable>: ViewModifier {
    // Restored generic parameter clause `<V: Equatable>` — it was stripped from the dump,
    // but is required since `value: V` must be Equatable for `onChange`.
    private var value: V
    private var action: (_ newValue: V) -> Void

    public init(of value: V, action: @escaping (_ newValue: V) -> Void) {
        self.value = value
        self.action = action
    }

    public func body(content: Content) -> some View {
        #if swift(>=5.9)
        if #available(iOS 17, macOS 14.0, visionOS 1.0, *) {
            // Wrapped in LazyContent to avoid the iOS 15 ViewBuilder #available crash (see above).
            LazyContent {
                content
                    .onChange(of: value) { _, newValue in
                        action(newValue)
                    }
            }
        } else {
            content
                .onChange(of: value) { newValue in
                    action(newValue)
                }
        }
        #else
        content
            .onChange(of: value) { newValue in
                action(newValue)
            }
        #endif
    }
}
54 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImageViews/SwiftUI/WaveformLiveCanvas.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import DSWaveformImage
3 |
@available(iOS 15.0, macOS 12.0, *)
/// Draws a live waveform of the given `(0...1)`-normalized samples into a SwiftUI `Canvas`.
public struct WaveformLiveCanvas: View {
    /// Default configuration with damping applied to both sides.
    public static let defaultConfiguration = Waveform.Configuration(damping: .init(percentage: 0.125, sides: .both))

    public let samples: [Float]
    public let configuration: Waveform.Configuration
    public let renderer: WaveformRenderer
    public let shouldDrawSilencePadding: Bool

    // StateObject keeps one drawer instance alive across view updates.
    @StateObject private var waveformDrawer: WaveformImageDrawer

    public init(
        samples: [Float],
        configuration: Waveform.Configuration = defaultConfiguration,
        renderer: WaveformRenderer = LinearWaveformRenderer(),
        shouldDrawSilencePadding: Bool = false
    ) {
        self.samples = samples
        self.configuration = configuration
        self.renderer = renderer
        self.shouldDrawSilencePadding = shouldDrawSilencePadding

        // Configure the drawer before handing it to the StateObject wrapper.
        let drawer = WaveformImageDrawer()
        drawer.shouldDrawSilencePadding = shouldDrawSilencePadding
        _waveformDrawer = StateObject(wrappedValue: drawer)
    }

    public var body: some View {
        Canvas(rendersAsynchronously: true) { graphicsContext, canvasSize in
            graphicsContext.withCGContext { cgContext in
                waveformDrawer.draw(waveform: samples, on: cgContext, with: configuration.with(size: canvasSize), renderer: renderer)
            }
        }
        .onAppear {
            // Re-apply on appear: the drawer outlives re-initializations of this value-type view.
            waveformDrawer.shouldDrawSilencePadding = shouldDrawSilencePadding
        }
        .modifier(OnChange(of: shouldDrawSilencePadding, action: { newValue in
            waveformDrawer.shouldDrawSilencePadding = newValue
        }))
    }
}
45 |
46 | #if DEBUG
@available(iOS 15.0, macOS 12.0, *)
struct WaveformLiveCanvas_Previews: PreviewProvider {
    /// Toggles the canvas on appear to exercise the conditional rendering path.
    struct TestView: View {
        @State var show: Bool = false

        var body: some View {
            VStack {
                if show {
                    WaveformLiveCanvas(
                        samples: [],
                        configuration: liveConfiguration,
                        renderer: LinearWaveformRenderer(),
                        shouldDrawSilencePadding: show
                    )
                }
            }
            .onAppear { show = true }
        }
    }

    static var liveConfiguration: Waveform.Configuration = Waveform.Configuration(
        style: .striped(.init(color: .systemPink, width: 3, spacing: 3))
    )

    static var previews: some View {
        TestView()
    }
}
76 | #endif
77 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImageViews/SwiftUI/WaveformShape.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftUI
3 | import DSWaveformImage
4 |
/// A waveform SwiftUI `Shape` object for generating a shape path from component(s) of the waveform.
/// **Note:** The Shape does *not* style itself. Use `WaveformView` for that purpose and only use the Shape directly if needed.
@available(iOS 15.0, macOS 12.0, *)
public struct WaveformShape: Shape {
    private let samples: [Float]
    private let configuration: Waveform.Configuration
    private let renderer: WaveformRenderer

    public init(
        samples: [Float],
        configuration: Waveform.Configuration = Waveform.Configuration(),
        renderer: WaveformRenderer = LinearWaveformRenderer()
    ) {
        self.samples = samples
        self.configuration = configuration
        self.renderer = renderer
    }

    public func path(in rect: CGRect) -> Path {
        // NOTE(review): uses maxX/maxY rather than rect.size — equivalent only for
        // zero-origin rects, which is the usual case for SwiftUI shapes.
        let targetSize = CGSize(width: rect.maxX, height: rect.maxY)
        let renderSamples = configuration.shouldDamp ? damp(samples, with: configuration) : samples
        let cgPath = renderer.path(samples: renderSamples, with: configuration.with(size: targetSize), lastOffset: 0)

        return Path(cgPath)
    }

    /// Whether the shape has no underlying samples to display.
    var isEmpty: Bool {
        samples.isEmpty
    }
}
36 |
private extension WaveformShape {
    /// Applies the configured damping easing to the sample amplitudes, if damping is enabled.
    private func damp(_ samples: [Float], with configuration: Waveform.Configuration) -> [Float] {
        guard let damping = configuration.damping, damping.percentage > 0 else {
            return samples
        }

        let sampleCount = Float(samples.count)
        return samples.enumerated().map { position, amplitude -> Float in
            1 - ((1 - amplitude) * dampFactor(x: Float(position), count: sampleCount, with: damping))
        }
    }

    /// Multiplier in `(0...1)` applied to the sample at position `x`; `1` means undamped.
    private func dampFactor(x: Float, count: Float, with damping: Waveform.Damping) -> Float {
        // Width of the damping ramp in samples (e.g. 1/8th of the graph by default).
        let rampWidth = count * damping.percentage
        if (damping.sides == .left || damping.sides == .both) && x < rampWidth {
            // Rising ramp over the leftmost `rampWidth` samples; (x : ramp) in (0..<1).
            return damping.easing(x / rampWidth)
        } else if (damping.sides == .right || damping.sides == .both) && x > ((1 / damping.percentage) - 1) * rampWidth {
            // Falling ramp over the rightmost `rampWidth` samples; x is shifted back into the ramp.
            return damping.easing(1 - (x - (((1 / damping.percentage) - 1) * rampWidth)) / rampWidth)
        }
        return 1
    }
}
62 |
63 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImageViews/SwiftUI/WaveformView.swift:
--------------------------------------------------------------------------------
1 | import DSWaveformImage
2 | import SwiftUI
3 |
@available(iOS 15.0, macOS 12.0, *)
/// Renders and displays a waveform for the audio at `audioURL`.
public struct WaveformView<Content: View>: View {
    // Restored generic parameter clause `<Content: View>` — it was stripped from the dump,
    // but is required since `content` produces a `Content`.
    private let audioURL: URL
    private let configuration: Waveform.Configuration
    private let renderer: WaveformRenderer
    private let priority: TaskPriority
    private let content: (WaveformShape) -> Content

    @State private var samples: [Float] = []
    @State private var rescaleTimer: Timer?
    @State private var currentSize: CGSize = .zero

    /**
     Creates a new WaveformView which displays a waveform for the audio at `audioURL`.

     - Parameters:
        - audioURL: The `URL` of the audio asset to be rendered.
        - configuration: The `Waveform.Configuration` to be used for rendering.
        - renderer: The `WaveformRenderer` implementation to be used. Defaults to `LinearWaveformRenderer`. Also comes with `CircularWaveformRenderer`.
        - priority: The `TaskPriority` used during analyzing. Defaults to `.userInitiated`.
        - content: ViewBuilder with the WaveformShape to be customized.
     */
    public init(
        audioURL: URL,
        configuration: Waveform.Configuration = Waveform.Configuration(damping: .init(percentage: 0.125, sides: .both)),
        renderer: WaveformRenderer = LinearWaveformRenderer(),
        priority: TaskPriority = .userInitiated,
        @ViewBuilder content: @escaping (WaveformShape) -> Content
    ) {
        self.audioURL = audioURL
        self.configuration = configuration
        self.renderer = renderer
        self.priority = priority
        self.content = content
    }

    public var body: some View {
        GeometryReader { geometry in
            content(WaveformShape(samples: samples, configuration: configuration, renderer: renderer))
                .onAppear {
                    guard samples.isEmpty else { return }
                    update(size: geometry.size, url: audioURL, configuration: configuration)
                }
                .modifier(OnChange(of: geometry.size, action: { newValue in update(size: newValue, url: audioURL, configuration: configuration, delayed: true) }))
                .modifier(OnChange(of: audioURL, action: { newValue in update(size: geometry.size, url: newValue, configuration: configuration) }))
                .modifier(OnChange(of: configuration, action: { newValue in update(size: geometry.size, url: audioURL, configuration: newValue) }))
        }
    }

    /// Re-analyzes the audio and updates `samples`; `delayed` debounces rapid resize events (50ms).
    private func update(size: CGSize, url: URL, configuration: Waveform.Configuration, delayed: Bool = false) {
        rescaleTimer?.invalidate()

        let updateTask: @Sendable (Timer?) -> Void = { _ in
            // Fix: honor the injected `priority` instead of hard-coding `.userInitiated`.
            Task(priority: priority) {
                do {
                    // One sample per rendered pixel at the configured scale.
                    let samplesNeeded = Int(size.width * configuration.scale)
                    let samples = try await WaveformAnalyzer().samples(fromAudioAt: url, count: samplesNeeded)

                    await MainActor.run {
                        self.currentSize = size
                        self.samples = samples
                    }
                } catch {
                    assertionFailure(error.localizedDescription)
                }
            }
        }

        if delayed {
            rescaleTimer = Timer.scheduledTimer(withTimeInterval: 0.05, repeats: false, block: updateTask)
            RunLoop.main.add(rescaleTimer!, forMode: .common)
        } else {
            updateTask(nil)
        }
    }
}
81 |
public extension WaveformView {
    /**
     Creates a new WaveformView which displays a waveform for the audio at `audioURL`.

     - Parameters:
        - audioURL: The `URL` of the audio asset to be rendered.
        - configuration: The `Waveform.Configuration` to be used for rendering.
        - renderer: The `WaveformRenderer` implementation to be used. Defaults to `LinearWaveformRenderer`. Also comes with `CircularWaveformRenderer`.
        - priority: The `TaskPriority` used during analyzing. Defaults to `.userInitiated`.
     */
    init(
        audioURL: URL,
        configuration: Waveform.Configuration = Waveform.Configuration(damping: .init(percentage: 0.125, sides: .both)),
        renderer: WaveformRenderer = LinearWaveformRenderer(),
        priority: TaskPriority = .userInitiated
    ) where Content == AnyView {
        self.init(audioURL: audioURL, configuration: configuration, renderer: renderer, priority: priority) { shape in
            AnyView(DefaultShapeStyler().style(shape: shape, with: configuration))
        }
    }

    /**
     Creates a new WaveformView which displays a waveform for the audio at `audioURL`.

     - Parameters:
        - audioURL: The `URL` of the audio asset to be rendered.
        - configuration: The `Waveform.Configuration` to be used for rendering.
        - renderer: The `WaveformRenderer` implementation to be used. Defaults to `LinearWaveformRenderer`. Also comes with `CircularWaveformRenderer`.
        - priority: The `TaskPriority` used during analyzing. Defaults to `.userInitiated`.
        - placeholder: ViewBuilder for a placeholder view during the loading phase.
     */
    // Restored generic parameter clause and `_ConditionalContent` arguments — they were stripped
    // from the dump. ViewBuilder's buildEither makes the `if` branch the first type argument.
    init<Placeholder: View>(
        audioURL: URL,
        configuration: Waveform.Configuration = Waveform.Configuration(damping: .init(percentage: 0.125, sides: .both)),
        renderer: WaveformRenderer = LinearWaveformRenderer(),
        priority: TaskPriority = .userInitiated,
        @ViewBuilder placeholder: @escaping () -> Placeholder
    ) where Content == _ConditionalContent<Placeholder, AnyView> {
        self.init(audioURL: audioURL, configuration: configuration, renderer: renderer, priority: priority) { shape in
            if shape.isEmpty {
                placeholder()
            } else {
                AnyView(DefaultShapeStyler().style(shape: shape, with: configuration))
            }
        }
    }

    /**
     Creates a new WaveformView which displays a waveform for the audio at `audioURL`.

     - Parameters:
        - audioURL: The `URL` of the audio asset to be rendered.
        - configuration: The `Waveform.Configuration` to be used for rendering.
        - renderer: The `WaveformRenderer` implementation to be used. Defaults to `LinearWaveformRenderer`. Also comes with `CircularWaveformRenderer`.
        - priority: The `TaskPriority` used during analyzing. Defaults to `.userInitiated`.
        - content: ViewBuilder with the WaveformShape to be customized.
        - placeholder: ViewBuilder for a placeholder view during the loading phase.
     */
    // Restored generic parameter clause and `_ConditionalContent` arguments — they were stripped
    // from the dump.
    init<ModifiedContent: View, Placeholder: View>(
        audioURL: URL,
        configuration: Waveform.Configuration = Waveform.Configuration(damping: .init(percentage: 0.125, sides: .both)),
        renderer: WaveformRenderer = LinearWaveformRenderer(),
        priority: TaskPriority = .userInitiated,
        @ViewBuilder content: @escaping (WaveformShape) -> ModifiedContent,
        @ViewBuilder placeholder: @escaping () -> Placeholder
    ) where Content == _ConditionalContent<Placeholder, ModifiedContent> {
        self.init(audioURL: audioURL, configuration: configuration, renderer: renderer, priority: priority) { shape in
            if shape.isEmpty {
                placeholder()
            } else {
                content(shape)
            }
        }
    }
}
157 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImageViews/UIKit/WaveformImageView.swift:
--------------------------------------------------------------------------------
1 | #if os(iOS) || swift(>=5.9) && os(visionOS)
2 | import DSWaveformImage
3 | import Foundation
4 | import AVFoundation
5 | import UIKit
6 |
/// A `UIImageView` subclass that renders and displays the waveform of the audio at `waveformAudioURL`.
public class WaveformImageView: UIImageView {
    // Drawer used to render the waveform into an image.
    private let waveformImageDrawer: WaveformImageDrawer

    /// Rendering configuration; setting it triggers a re-render.
    public var configuration: Waveform.Configuration {
        didSet { updateWaveform() }
    }

    /// URL of the audio asset to display; setting it triggers a re-render.
    public var waveformAudioURL: URL? {
        didSet { updateWaveform() }
    }

    override public init(frame: CGRect) {
        configuration = Waveform.Configuration(size: frame.size)
        waveformImageDrawer = WaveformImageDrawer()
        super.init(frame: frame)
    }

    required public init?(coder aDecoder: NSCoder) {
        configuration = Waveform.Configuration()
        waveformImageDrawer = WaveformImageDrawer()
        super.init(coder: aDecoder)
    }

    // Re-render whenever layout may have changed the view's size.
    override public func layoutSubviews() {
        super.layoutSubviews()
        updateWaveform()
    }

    /// Clears the audio data, emptying the waveform view.
    public func reset() {
        waveformAudioURL = nil
        image = nil
    }
}
41 |
private extension WaveformImageView {
    /// Renders a new waveform image for the current audio URL and bounds, if a URL is set.
    func updateWaveform() {
        guard let audioURL = waveformAudioURL else { return }

        Task {
            do {
                let waveformImage = try await waveformImageDrawer.waveformImage(
                    fromAudioAt: audioURL,
                    with: configuration.with(size: bounds.size),
                    qos: .userInteractive
                )

                // Assign on the main actor since UIImageView is a UIKit object.
                await MainActor.run {
                    image = waveformImage
                }
            } catch {
                print("Error occurred during waveform image creation:")
                print(error)
            }
        }
    }
}
64 | #endif
65 |
--------------------------------------------------------------------------------
/Sources/DSWaveformImageViews/UIKit/WaveformLiveView.swift:
--------------------------------------------------------------------------------
1 | #if os(iOS) || swift(>=5.9) && os(visionOS)
2 | import DSWaveformImage
3 | import Foundation
4 | import UIKit
5 |
/// Renders a live waveform every time its `(0...1)`-normalized samples are changed.
public class WaveformLiveView: UIView {

    /// Default configuration with damping enabled.
    public static let defaultConfiguration = Waveform.Configuration(damping: .init(percentage: 0.125, sides: .both))

    /// If set to `true`, a zero line, indicating silence, is being drawn while the received
    /// samples are not filling up the entire view's width yet.
    public var shouldDrawSilencePadding: Bool = false {
        didSet {
            sampleLayer.shouldDrawSilencePadding = shouldDrawSilencePadding
        }
    }

    /// Drawing configuration; changes are forwarded to the backing layer.
    public var configuration: Waveform.Configuration {
        didSet {
            sampleLayer.configuration = configuration
        }
    }

    /// Returns the currently used samples.
    public var samples: [Float] {
        sampleLayer.samples
    }

    // The view's backing layer; guaranteed to be a WaveformLiveLayer via `layerClass` below.
    private var sampleLayer: WaveformLiveLayer! {
        return layer as? WaveformLiveLayer
    }

    override public class var layerClass: AnyClass {
        return WaveformLiveLayer.self
    }

    /// Renderer used to draw the waveform; changes are forwarded to the backing layer.
    public var renderer: WaveformRenderer {
        didSet {
            sampleLayer.renderer = renderer
        }
    }

    public init(configuration: Waveform.Configuration = defaultConfiguration, renderer: WaveformRenderer = LinearWaveformRenderer()) {
        self.configuration = configuration
        self.renderer = renderer
        super.init(frame: .zero)
        self.contentMode = .redraw

        // Re-assigning after super.init is deliberate: property observers (didSet) do not fire
        // during initialization, so the defer triggers them to push the values into sampleLayer.
        defer { // will call didSet to propagate to sampleLayer
            self.configuration = configuration
            self.renderer = renderer
        }
    }

    public override init(frame: CGRect) {
        self.configuration = Self.defaultConfiguration
        self.renderer = LinearWaveformRenderer()
        super.init(frame: frame)
        contentMode = .redraw

        defer { // will call didSet to propagate to sampleLayer
            self.configuration = Self.defaultConfiguration
            self.renderer = LinearWaveformRenderer()
        }
    }

    required init?(coder: NSCoder) {
        self.configuration = Self.defaultConfiguration
        self.renderer = LinearWaveformRenderer()
        super.init(coder: coder)
        contentMode = .redraw

        defer { // will call didSet to propagate to sampleLayer
            self.configuration = Self.defaultConfiguration
            self.renderer = LinearWaveformRenderer()
        }
    }

    /// The sample to be added. Re-draws the waveform with the pre-existing samples and the new one.
    /// Value must be within `(0...1)` to make sense (0 being lowest and 1 being maximum amplitude).
    public func add(sample: Float) {
        sampleLayer.add([sample])
    }

    /// The samples to be added. Re-draws the waveform with the pre-existing samples and the new ones.
    /// Values must be within `(0...1)` to make sense (0 being lowest and 1 being maximum amplitude).
    public func add(samples: [Float]) {
        sampleLayer.add(samples)
    }

    /// Clears the samples, emptying the waveform view.
    public func reset() {
        sampleLayer.reset()
    }
}
98 |
/// Backing layer for `WaveformLiveView`; redraws whenever `samples` changes.
class WaveformLiveLayer: CALayer {
    // @NSManaged lets CALayer's KVC machinery store the value and observe changes,
    // which `needsDisplay(forKey:)` below uses to trigger redraws on mutation.
    @NSManaged var samples: [Float]

    /// Drawing configuration; keeps the layer's contentsScale in sync on change.
    var configuration = WaveformLiveView.defaultConfiguration {
        didSet { contentsScale = configuration.scale }
    }

    /// Mirrors `WaveformLiveView.shouldDrawSilencePadding`; forwarded to the drawer.
    var shouldDrawSilencePadding: Bool = false {
        didSet {
            waveformDrawer.shouldDrawSilencePadding = shouldDrawSilencePadding
            setNeedsDisplay()
        }
    }

    /// Renderer used for drawing; triggers a redraw on change.
    var renderer: WaveformRenderer = LinearWaveformRenderer() {
        didSet { setNeedsDisplay() }
    }

    private let waveformDrawer = WaveformImageDrawer()

    // Redraw whenever the KVC-managed `samples` property changes.
    override class func needsDisplay(forKey key: String) -> Bool {
        if key == #keyPath(samples) {
            return true
        }
        return super.needsDisplay(forKey: key)
    }

    override func draw(in context: CGContext) {
        super.draw(in: context)

        // Push the context so UIKit-based drawing inside the drawer targets this layer.
        UIGraphicsPushContext(context)
        waveformDrawer.draw(waveform: samples, on: context, with: configuration.with(size: bounds.size), renderer: renderer)
        UIGraphicsPopContext()
    }

    /// Appends new samples; the managed setter triggers a redraw (see needsDisplay(forKey:)).
    func add(_ newSamples: [Float]) {
        samples += newSamples
    }

    /// Removes all samples, clearing the waveform.
    func reset() {
        samples = []
    }
}
142 | #endif
143 |
--------------------------------------------------------------------------------