├── .gitignore
├── Images
│   ├── Header.webp
│   └── MainWindow@2x.png
├── LICENSE
├── README.md
├── pre-commit
├── rm2000.xcodeproj
│   ├── project.pbxproj
│   └── xcshareddata
│       └── xcschemes
│           └── RM2000 Tape Recorder.xcscheme
├── rm2000
│   ├── AppKitWindowManagerDelegate.swift
│   ├── AppState.swift
│   ├── Assets.xcassets
│   │   ├── AccentColor.colorset
│   │   │   └── Contents.json
│   │   ├── AppIcon.appiconset
│   │   │   ├── 128x128.png
│   │   │   ├── 128x128@2x.png
│   │   │   ├── 16x16.png
│   │   │   ├── 16x16@2x.png
│   │   │   ├── 256x256.png
│   │   │   ├── 256x256@2x.png
│   │   │   ├── 32x32.png
│   │   │   ├── 32x32@2x.png
│   │   │   ├── 512x512.png
│   │   │   ├── 512x512@2x.png
│   │   │   └── Contents.json
│   │   ├── BodyBackgroundTemp.imageset
│   │   │   ├── Background_1x.png
│   │   │   ├── Background_2x.png
│   │   │   └── Contents.json
│   │   ├── Contents.json
│   │   ├── FolderButton.imageset
│   │   │   ├── Contents.json
│   │   │   ├── Folder_1x.png
│   │   │   └── Folder_2x.png
│   │   ├── LCDOuterGlow.imageset
│   │   │   ├── Contents.json
│   │   │   ├── LCD Outer Glow_1x 1.png
│   │   │   └── LCD Outer Glow_1x.png
│   │   ├── LCDScreenFrameInactive.imageset
│   │   │   ├── Contents.json
│   │   │   ├── LCDScreenFrameInactive_1x.png
│   │   │   └── LCDScreenFrameInactive_2x.png
│   │   ├── LCDScreenFrameRecording.imageset
│   │   │   ├── Contents.json
│   │   │   ├── LCDScreenFrameRecording_1x.png
│   │   │   └── LCDScreenFrameRecording_2x.png
│   │   ├── LCDTextColor.colorset
│   │   │   └── Contents.json
│   │   ├── MicGrilleDark.imageset
│   │   │   ├── @1xMicrophone Dark.png
│   │   │   ├── @2xMicrophone Dark.png
│   │   │   └── Contents.json
│   │   ├── MicGrilleTemp.imageset
│   │   │   ├── Contents.json
│   │   │   ├── Microphone1x.png
│   │   │   └── Microphone2x.png
│   │   ├── RecordButtonActiveTemp.imageset
│   │   │   ├── Contents.json
│   │   │   ├── RecordButtonActive_1x.png
│   │   │   └── RecordButtonActive_2x.png
│   │   ├── RecordButtonGlow.imageset
│   │   │   ├── Contents.json
│   │   │   ├── Record Button Glow_1x.png
│   │   │   └── Record Button Glow_2x.png
│   │   ├── RecordButtonIndent.imageset
│   │   │   ├── Contents.json
│   │   │   ├── RecordButtonIndent_1x.png
│   │   │   └── RecordButtonIndent_2x.png
│   │   ├── RecordButtonTemp.imageset
│   │   │   ├── Contents.json
│   │   │   ├── RecordButton_1x.png
│   │   │   └── RecordButton_2x.png
│   │   ├── RecordingTapeBlackFlipped.symbolset
│   │   │   ├── Contents.json
│   │   │   └── RecordingTapeBlackFlippedLg.symbols.svg
│   │   ├── SettingsButton.imageset
│   │   │   ├── Contents.json
│   │   │   ├── Settings_1x.png
│   │   │   └── Settings_2x.png
│   │   ├── SourceButton.imageset
│   │   │   ├── Contents.json
│   │   │   ├── Source_1x.png
│   │   │   └── Source_2x.png
│   │   ├── unlinked.symbolset
│   │   │   ├── Contents.json
│   │   │   └── untagged.svg
│   │   └── untagged.symbolset
│   │       ├── Contents.json
│   │       └── tagslash.svg
│   ├── Credits.rtfd
│   │   ├── TXT.rtf
│   │   └── spec_smalles1tblhe.png
│   ├── Fonts
│   │   ├── TASAExplorer-SemiBold.otf
│   │   └── Tachyo.otf
│   ├── Info.plist
│   ├── Logging.swift
│   ├── Models
│   │   ├── AudioFormat.swift
│   │   ├── RecordingStateEnum.swift
│   │   ├── Sample.swift
│   │   ├── SampleEditConfiguration.swift
│   │   ├── SampleEditOperation.swift
│   │   ├── SampleLibraryViewModel.swift
│   │   ├── SampleMetadata.swift
│   │   ├── SampleStorage.swift
│   │   └── TemporaryActiveRecording.swift
│   ├── Preview Content
│   │   └── Preview Assets.xcassets
│   │       └── Contents.json
│   ├── RM2000 Tape Recorder.swift
│   ├── Shared.swift
│   ├── Tape Mechanism
│   │   ├── AudioManager.swift
│   │   ├── Backends
│   │   │   ├── CoreAudioTaps
│   │   │   │   └── CoreAudio.swift
│   │   │   ├── Protocols.swift
│   │   │   └── ScreenCaptureKit
│   │   │       └── SCStreamManager.swift
│   │   ├── Encoder
│   │   │   └── Encoder.swift
│   │   ├── Extensions.swift
│   │   ├── TapeMechanism.swift
│   │   └── digest.txt
│   ├── TapeRecorderState.swift
│   ├── Views
│   │   ├── HUD Window
│   │   │   └── HUDWindowView.swift
│   │   ├── Main Window
│   │   │   ├── Buttons.swift
│   │   │   ├── ContentView.swift
│   │   │   ├── Glyphs
│   │   │   │   ├── DonutSpinner.swift
│   │   │   │   ├── ErrorGlyph.swift
│   │   │   │   ├── RecordingGlyph.swift
│   │   │   │   ├── SourceGlyph.swift
│   │   │   │   └── VUMeterView.swift
│   │   │   ├── LCDScreenView.swift
│   │   │   ├── Shaders.metal
│   │   │   └── TitleBar.swift
│   │   ├── Menu Bar
│   │   │   └── MenuBar.swift
│   │   ├── Onboarding
│   │   │   └── OnboardingView.swift
│   │   ├── Sample Editing
│   │   │   ├── EditSampleView.swift
│   │   │   ├── PreviewFilenameView.swift
│   │   │   ├── TokenField
│   │   │   │   └── TokenFieldView.swift
│   │   │   └── TrimmingPlayerView.swift
│   │   ├── Sample Library
│   │   │   ├── Audio Player
│   │   │   │   └── SampleLibraryAutoPlayer.swift
│   │   │   ├── DetailView.swift
│   │   │   ├── InspectorView.swift
│   │   │   ├── SampleLibraryView.swift
│   │   │   ├── SidebarView.swift
│   │   │   ├── TagComponent.swift
│   │   │   ├── Toolbar Buttons
│   │   │   │   └── ToolbarButtons.swift
│   │   │   └── WaveformView.swift
│   │   ├── Settings
│   │   │   ├── GeneralTabView.swift
│   │   │   ├── RecordingTabView.swift
│   │   │   └── SettingsView.swift
│   │   └── UserNotifications.swift
│   └── rm2000.entitlements
├── rm2000Tests
│   └── rm2000Tests.swift
└── rm2000UITests
    ├── rm2000UITests.swift
    └── rm2000UITestsLaunchTests.swift
/.gitignore:
--------------------------------------------------------------------------------
1 | # Mac OS X
2 | *.DS_Store
3 |
4 | # Xcode
5 | *.pbxuser
6 | *.mode1v3
7 | *.mode2v3
8 | *.perspectivev3
9 | *.xcuserstate
10 | project.xcworkspace/
11 | xcuserdata/
12 |
13 | # Generated files
14 | *.[oa]
15 | *.pyc
16 | *.6
17 | 6.out
18 |
19 | #Python modules
20 | MANIFEST
21 | dist/
22 | build/
23 |
24 | # Backup files
25 | *~.nib
26 |
27 | # Team stuff
28 | Submariner/DEVELOPMENT_TEAM.xcconfig
29 |
--------------------------------------------------------------------------------
/Images/Header.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/Images/Header.webp
--------------------------------------------------------------------------------
/Images/MainWindow@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/Images/MainWindow@2x.png
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | “Commons Clause” License Condition v1.0
2 |
3 | The Software is provided to you by the Licensor under the License, as defined below, subject to the following condition.
4 |
5 | Without limiting other conditions in the License, the grant of rights under the License will not include, and the License does not grant to you, the right to Sell the Software.
6 |
7 | For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you under the License to provide to third parties, for a fee or other consideration (including without limitation fees for hosting or consulting/ support services related to the Software), a product or service whose value derives, entirely or substantially, from the functionality of the Software. Any license notice or attribution required by the License must also include this Commons Clause License Condition notice.
8 |
9 | Software: RM2000 Tape Recorder
10 |
11 | License: MIT
12 |
13 | Licensor: Marcelo Mendez
14 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
 1 | effortless audio recording and organizing
 2 |
 3 | rm2000.app
 4 |
 5 | RM2000 Tape Recorder is a lightweight audio sampling tool for macOS. Quickly record audio samples from any application, organize them with tags, and browse them in your DAW, the Finder, or RM2000 itself.
 6 |
 7 | > [!NOTE]
 8 | >
 9 | > RM2000 is currently under heavy development - however, a TestFlight version to test for bugs and crashes is publicly [available here](https://rm2000.app)
10 |
11 | ## power of persuasion
12 |
13 | RM2000 features a beautiful skeuomorphic interface that mimics the look and feel of a bygone era of Mac apps.
14 |
15 | my goal is to make the app stand out from the flat designs that Apple has pushed and advocated for since the release of macOS Yosemite.
16 |
17 | originally codenamed replica.app, this app was heavily inspired by OPN's work.
18 |
19 | # building
20 |
21 | RM2000 Portable requires Xcode and macOS 13 or newer.
22 |
23 | It is recommended to have a Development Signing Certificate active in Xcode so that the Screen Recording permission dialog doesn't show up after every single build. A **Development Signing Certificate** is not the same as an **Apple Developer ID** and is completely free to create.
24 |
25 | 1. open the settings for `RM2000.xcodeproj`
26 | 2. go to the `Signing and Capabilities` tab
27 | 3. select the `rm2000` target
28 | 4. create a new development team and set the Signing Certificate to "Development"
29 |
--------------------------------------------------------------------------------
/pre-commit:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | MATCHES=$(git grep -n -E "(DevelopmentTeam|DEVELOPMENT_TEAM) =" .)
4 | COUNT=$(echo -n "$MATCHES\c" | grep -cvE '(Shared.xcconfig|README.md|= "")')
5 |
6 | if [ $COUNT -ne 0 ]; then
7 | ERRORS=$(echo -n "$MATCHES\c" | grep -vE '= ""')
8 | echo $COUNT
9 | echo "Remove Development Team specifications from project files:"
10 | echo "$ERRORS";
11 | exit 1;
12 | fi
13 |
--------------------------------------------------------------------------------
/rm2000.xcodeproj/xcshareddata/xcschemes/RM2000 Tape Recorder.xcscheme:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/rm2000/AppKitWindowManagerDelegate.swift:
--------------------------------------------------------------------------------
1 | import AppKit
2 | import SwiftUI
3 |
4 | class WindowController: NSWindowController {
5 | override func windowDidLoad() {
6 | super.windowDidLoad()
7 | }
8 | }
9 |
10 | class AppKitWindowManagerDelegate: NSObject, NSApplicationDelegate, NSWindowDelegate {
11 |
12 | @Published var willTerminate = false
13 | var mainWindowController: WindowController?
14 | let recordingState = TapeRecorderState.shared
15 | private var onboardingWindowController: NSWindowController?
16 | private var hudHostingView: NSHostingView<AnyView>?
17 |
18 | private var hudWindow: NSWindow?
19 |
20 | func applicationDidFinishLaunching(_ notification: Notification) {
21 | registerCustomFonts()
22 | if AppState.shared.hasCompletedOnboarding {
23 | showMainWindow()
24 | } else {
25 | showOnboardingWindow()
26 | }
27 | }
28 |
29 | func showMainWindow() {
30 |
31 | // if window is already created, just show it, dont make another window
32 | if let windowController = mainWindowController,
33 | let window = windowController.window {
34 | // If window is visible, just bring it to front
35 | if window.isVisible {
36 | window.makeKeyAndOrderFront(nil)
37 | return
38 | }
39 | // If window exists but isn't visible, it might be minimized - show it
40 | window.makeKeyAndOrderFront(nil)
41 | return
42 | }
43 |
44 | // else, create the window
45 | let window = SkeuromorphicWindow(
46 | contentRect: NSRect(x: 100, y: 100, width: 600, height: 400),
47 | styleMask: [.titled, .closable, .miniaturizable],
48 | backing: .buffered,
49 | defer: false
50 | )
51 |
52 | let contentView = ContentView()
53 | .environmentObject(self.recordingState)
54 | .openSettingsAccess()
55 |
56 | window.center()
57 | window.contentView = NSHostingView(rootView: contentView)
58 | window.delegate = self // track window closure
59 |
60 | window.isReleasedWhenClosed = false
61 |
62 | mainWindowController = WindowController(window: window)
63 | mainWindowController?.showWindow(nil)
64 | }
65 |
66 | func showHUDWindow() {
67 | closeHUDWindow()
68 |
69 | // wait a bit for window destruction
70 | DispatchQueue.main.async { [weak self] in
71 | guard let self = self else { return }
72 |
73 | let window = FloatingWindow(
74 | contentRect: NSRect(x: 0, y: 0, width: 400, height: 250),
75 | backing: .buffered,
76 | defer: false
77 | )
78 |
79 | window.isReleasedWhenClosed = false // Keep window alive
80 |
81 | let contentView = FloatingGradientView()
82 | .environmentObject(self.recordingState)
83 |
84 | let hostingView = NSHostingView(rootView: AnyView(contentView))
85 | self.hudHostingView = hostingView
86 |
87 | if let windowContentView = window.contentView {
88 | hostingView.autoresizingMask = [.width, .height]
89 | hostingView.frame = windowContentView.bounds
90 | windowContentView.addSubview(hostingView)
91 | }
92 |
93 | if let screenSize = NSScreen.main?.visibleFrame.size {
94 | window.setFrameOrigin(NSPoint(x: screenSize.width - 415, y: screenSize.height / 15))
95 | }
96 |
97 | window.makeKeyAndOrderFront(nil)
98 | self.hudWindow = window
99 | }
100 | }
101 |
102 | func closeHUDWindow() {
103 | guard let windowToClose = hudWindow else { return }
104 | hudHostingView?.removeFromSuperview()
105 | windowToClose.orderOut(nil)
106 | // clear references
107 | hudHostingView = nil
108 | hudWindow = nil
109 |
110 | // idk how to clean this up properly :V
111 | DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
112 | NSApp.windows.forEach { window in
113 | if window === windowToClose {
114 | window.close()
115 | }
116 | }
117 | }
118 | }
119 |
120 | @MainActor private func showOnboardingWindow() {
121 | let hostingController = NSHostingController(
122 | rootView: OnboardingView(viewModel: OnboardingViewModel())
123 | .environmentObject(AppState.shared)
124 | )
125 |
126 | let window = NSWindow(
127 | contentRect: NSRect(x: 0, y: 0, width: 600, height: 600),
128 | styleMask: [.titled, .closable],
129 | backing: .buffered,
130 | defer: false
131 | )
132 | window.contentViewController = hostingController
133 | onboardingWindowController = NSWindowController(window: window)
134 | onboardingWindowController?.showWindow(nil)
135 | window.center()
136 | }
137 |
138 | func applicationShouldTerminate(_ sender: NSApplication) -> NSApplication.TerminateReply {
139 | self.willTerminate = true
140 | self.promptQuitConfirmation()
141 | return .terminateLater
142 | }
143 |
144 | /// dont close (user canceled)
145 | func `continue`() {
146 | NSApplication.shared.reply(toApplicationShouldTerminate: false)
147 | }
148 | /// close
149 | func close() {
150 | NSApplication.shared.reply(toApplicationShouldTerminate: true)
151 | }
152 |
153 | func promptQuitConfirmation() {
154 | let alert = NSAlert()
155 | alert.messageText = "Really Quit?"
156 | alert.informativeText = "You will not be able to start Quick Recordings (⌘ + ⌥ + G) when the application is not running."
157 | alert.alertStyle = .critical
158 | alert.addButton(withTitle: "Yes, Quit")
159 | alert.addButton(withTitle: "No, Cancel")
160 |
161 | DispatchQueue.main.async {
162 | let response = alert.runModal()
163 | if response == .alertFirstButtonReturn {
164 | // "Quit" pressed
165 | self.close()
166 | } else {
167 | // "Cancel" pressed
168 | self.continue()
169 | }
170 | }
171 | }
172 |
173 | /*
174 | A function like this should never exist.
175 | However, even after I followed all of the tutorials,
176 | Xcode simply wouldn't bundle my otf fonts.
177 | */
178 | private func registerCustomFonts() {
179 | let fonts = Bundle.main.urls(forResourcesWithExtension: "otf", subdirectory: nil)
180 | fonts?.forEach { url in
181 | CTFontManagerRegisterFontsForURL(url as CFURL, .process, nil)
182 | }
183 | }
184 | }
185 |
186 | extension AppKitWindowManagerDelegate {
187 | @objc func windowWillClose(_ notification: Notification) {
188 | if let window = notification.object as? NSWindow,
189 | window === mainWindowController?.window {
190 | mainWindowController = nil
191 | }
192 | }
193 | }
194 |
--------------------------------------------------------------------------------
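For context, a delegate like this is normally attached to the SwiftUI entry point with @NSApplicationDelegateAdaptor. The actual wiring lives in "RM2000 Tape Recorder.swift", which is not included in this section, so the following is only a minimal sketch of the usual pattern; the struct name and the Settings scene are placeholders, not taken from the repo.

import SwiftUI

// Minimal sketch: hand the AppKit delegate to SwiftUI so that
// applicationDidFinishLaunching / applicationShouldTerminate above get called.
@main
struct RM2000AppSketch: App {
	@NSApplicationDelegateAdaptor(AppKitWindowManagerDelegate.self) var appDelegate

	var body: some Scene {
		// Placeholder scene; the real app defines its own scenes.
		Settings { EmptyView() }
	}
}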
/rm2000/AppState.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftUI
3 | import OSLog
4 | import KeyboardShortcuts
5 |
6 | @MainActor final class AppState: ObservableObject {
7 | static let shared = AppState()
8 | private var appDelegate = AppKitWindowManagerDelegate()
9 |
10 | @AppStorage("completedOnboarding") var hasCompletedOnboarding: Bool = false {
11 | didSet {
12 | if !hasCompletedOnboarding {
13 | openOnboardingWindow()
14 | }
15 | }
16 | }
17 |
18 | @AppStorage("sample_directory") var sampleDirectoryPath: String = ""
19 | @AppStorage("sample_directory_bookmark") private var sampleDirectoryBookmark: Data?
20 | @Published var sampleDirectory: URL? {
21 | didSet {
22 |
23 | oldValue?.stopAccessingSecurityScopedResource()
24 |
25 | if let directory = sampleDirectory {
26 | // setup security scoped bookmarks
27 |
28 | guard directory.startAccessingSecurityScopedResource() else {
29 | return
30 | }
31 | sampleDirectoryPath = sampleDirectory?.path ?? ""
32 | saveBookmarkData(for: directory)
33 | } else {
34 | sampleDirectoryPath = ""
35 | sampleDirectoryBookmark = nil
36 | }
37 | }
38 | }
39 |
40 | private var openWindowAction: OpenWindowAction?
41 |
42 | init() {
43 | KeyboardShortcuts.onKeyUp(for: .recordGlobalShortcut) { [self] in
44 | Task {
45 | await startQuickSampleRecordAndShowHUD()
46 | }
47 | }
48 |
49 | if let bookmarkData = sampleDirectoryBookmark {
50 | restoreBookmarkAccess(with: bookmarkData)
51 | }
52 | Logger.appState.info("\(String(describing: self.sampleDirectory)) as the user directory")
53 | }
54 |
55 | func setOpenWindowAction(_ action: OpenWindowAction) {
56 | self.openWindowAction = action
57 | if !hasCompletedOnboarding {
58 | openOnboardingWindow()
59 | }
60 | }
61 |
62 | func openOnboardingWindow() {
63 | openWindowAction?(id: "onboarding")
64 | }
65 |
66 | func closeHUDWindow() {
67 | appDelegate.closeHUDWindow()
68 | }
69 |
70 | private func startQuickSampleRecordAndShowHUD() async {
71 | if (TapeRecorderState.shared.status == .idle) {
72 | TapeRecorderState.shared.startRecording()
73 | appDelegate.showHUDWindow()
74 | } else {
75 | TapeRecorderState.shared.stopRecording()
76 | appDelegate.closeHUDWindow()
77 | await displayTestingGlobalNotication()
78 | }
79 | }
80 |
81 | // security scoped bookmarks for app sandbox
82 | private func saveBookmarkData(for userDir: URL) {
83 | do {
84 | let bookmarkData = try userDir.bookmarkData(options: .withSecurityScope, includingResourceValuesForKeys: nil, relativeTo: nil)
85 | sampleDirectoryBookmark = bookmarkData
86 | } catch {
87 | Logger().error("Failed to save bookmark data for \(userDir): \(error)")
88 | }
89 | }
90 |
91 | private func restoreBookmarkAccess(with bookmarks: Data) {
92 | do {
93 | var isStale = false
94 | let resolvedURL = try URL(resolvingBookmarkData: bookmarks, options: .withSecurityScope, relativeTo: nil, bookmarkDataIsStale: &isStale)
95 | if isStale {
96 | Logger.appState.info("Recreating bookmark (is stale)")
97 | }
98 |
99 | guard resolvedURL.startAccessingSecurityScopedResource() else {
100 | Logger.appState.error("AppState - failed to start access security scoped for directory \(resolvedURL)")
101 | return
102 | }
103 | Logger.appState.info("Set bookmarked access as \(String(describing: self.sampleDirectory)) : \(resolvedURL)")
104 | sampleDirectory = resolvedURL
105 | } catch {
106 | Logger.appState.error("Failed to restore bookmark access: \(error.localizedDescription)")
107 | }
108 | }
109 | }
110 |
--------------------------------------------------------------------------------
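The sampleDirectory didSet above is what starts security-scoped access and persists the bookmark, so a caller only needs to assign a URL obtained with user consent. A minimal sketch of such a call site, assuming a hypothetical folder picker that is not part of the repo:

import AppKit

// Hypothetical helper: let the user pick a sample folder and hand it to AppState.
// Assigning sampleDirectory triggers startAccessingSecurityScopedResource()
// and saveBookmarkData(for:) in the didSet observer above.
@MainActor
func promptForSampleDirectory() {
	let panel = NSOpenPanel()
	panel.canChooseDirectories = true
	panel.canChooseFiles = false
	panel.allowsMultipleSelection = false
	if panel.runModal() == .OK, let url = panel.url {
		AppState.shared.sampleDirectory = url
	}
}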
/rm2000/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/AppIcon.appiconset/128x128.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/AppIcon.appiconset/128x128.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/AppIcon.appiconset/128x128@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/AppIcon.appiconset/128x128@2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/AppIcon.appiconset/16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/AppIcon.appiconset/16x16.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/AppIcon.appiconset/16x16@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/AppIcon.appiconset/16x16@2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/AppIcon.appiconset/256x256.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/AppIcon.appiconset/256x256.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/AppIcon.appiconset/256x256@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/AppIcon.appiconset/256x256@2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/AppIcon.appiconset/32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/AppIcon.appiconset/32x32.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/AppIcon.appiconset/32x32@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/AppIcon.appiconset/32x32@2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/AppIcon.appiconset/512x512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/AppIcon.appiconset/512x512.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/AppIcon.appiconset/512x512@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/AppIcon.appiconset/512x512@2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "16x16.png",
5 | "idiom" : "mac",
6 | "scale" : "1x",
7 | "size" : "16x16"
8 | },
9 | {
10 | "filename" : "16x16@2x.png",
11 | "idiom" : "mac",
12 | "scale" : "2x",
13 | "size" : "16x16"
14 | },
15 | {
16 | "filename" : "32x32.png",
17 | "idiom" : "mac",
18 | "scale" : "1x",
19 | "size" : "32x32"
20 | },
21 | {
22 | "filename" : "32x32@2x.png",
23 | "idiom" : "mac",
24 | "scale" : "2x",
25 | "size" : "32x32"
26 | },
27 | {
28 | "filename" : "128x128.png",
29 | "idiom" : "mac",
30 | "scale" : "1x",
31 | "size" : "128x128"
32 | },
33 | {
34 | "filename" : "128x128@2x.png",
35 | "idiom" : "mac",
36 | "scale" : "2x",
37 | "size" : "128x128"
38 | },
39 | {
40 | "filename" : "256x256.png",
41 | "idiom" : "mac",
42 | "scale" : "1x",
43 | "size" : "256x256"
44 | },
45 | {
46 | "filename" : "256x256@2x.png",
47 | "idiom" : "mac",
48 | "scale" : "2x",
49 | "size" : "256x256"
50 | },
51 | {
52 | "filename" : "512x512.png",
53 | "idiom" : "mac",
54 | "scale" : "1x",
55 | "size" : "512x512"
56 | },
57 | {
58 | "filename" : "512x512@2x.png",
59 | "idiom" : "mac",
60 | "scale" : "2x",
61 | "size" : "512x512"
62 | }
63 | ],
64 | "info" : {
65 | "author" : "xcode",
66 | "version" : 1
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/BodyBackgroundTemp.imageset/Background_1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/BodyBackgroundTemp.imageset/Background_1x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/BodyBackgroundTemp.imageset/Background_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/BodyBackgroundTemp.imageset/Background_2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/BodyBackgroundTemp.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "Background_1x.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "Background_2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/FolderButton.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "Folder_1x.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "Folder_2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/FolderButton.imageset/Folder_1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/FolderButton.imageset/Folder_1x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/FolderButton.imageset/Folder_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/FolderButton.imageset/Folder_2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/LCDOuterGlow.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "LCD Outer Glow_1x.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "LCD Outer Glow_1x 1.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/LCDOuterGlow.imageset/LCD Outer Glow_1x 1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/LCDOuterGlow.imageset/LCD Outer Glow_1x 1.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/LCDOuterGlow.imageset/LCD Outer Glow_1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/LCDOuterGlow.imageset/LCD Outer Glow_1x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/LCDScreenFrameInactive.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "LCDScreenFrameInactive_1x.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "LCDScreenFrameInactive_2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/LCDScreenFrameInactive.imageset/LCDScreenFrameInactive_1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/LCDScreenFrameInactive.imageset/LCDScreenFrameInactive_1x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/LCDScreenFrameInactive.imageset/LCDScreenFrameInactive_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/LCDScreenFrameInactive.imageset/LCDScreenFrameInactive_2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/LCDScreenFrameRecording.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "LCDScreenFrameRecording_1x.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "LCDScreenFrameRecording_2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/LCDScreenFrameRecording.imageset/LCDScreenFrameRecording_1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/LCDScreenFrameRecording.imageset/LCDScreenFrameRecording_1x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/LCDScreenFrameRecording.imageset/LCDScreenFrameRecording_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/LCDScreenFrameRecording.imageset/LCDScreenFrameRecording_2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/LCDTextColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "color" : {
5 | "color-space" : "extended-srgb",
6 | "components" : {
7 | "alpha" : "1.000",
8 | "blue" : "-0.001",
9 | "green" : "0.013",
10 | "red" : "0.135"
11 | }
12 | },
13 | "idiom" : "universal"
14 | },
15 | {
16 | "appearances" : [
17 | {
18 | "appearance" : "luminosity",
19 | "value" : "dark"
20 | }
21 | ],
22 | "color" : {
23 | "color-space" : "extended-srgb",
24 | "components" : {
25 | "alpha" : "1.000",
26 | "blue" : "-0.001",
27 | "green" : "0.013",
28 | "red" : "0.135"
29 | }
30 | },
31 | "idiom" : "universal"
32 | }
33 | ],
34 | "info" : {
35 | "author" : "xcode",
36 | "version" : 1
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/MicGrilleDark.imageset/@1xMicrophone Dark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/MicGrilleDark.imageset/@1xMicrophone Dark.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/MicGrilleDark.imageset/@2xMicrophone Dark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/MicGrilleDark.imageset/@2xMicrophone Dark.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/MicGrilleDark.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "@1xMicrophone Dark.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "@2xMicrophone Dark.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/MicGrilleTemp.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "Microphone1x.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "Microphone2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/MicGrilleTemp.imageset/Microphone1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/MicGrilleTemp.imageset/Microphone1x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/MicGrilleTemp.imageset/Microphone2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/MicGrilleTemp.imageset/Microphone2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/RecordButtonActiveTemp.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "RecordButtonActive_1x.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "RecordButtonActive_2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/RecordButtonActiveTemp.imageset/RecordButtonActive_1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/RecordButtonActiveTemp.imageset/RecordButtonActive_1x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/RecordButtonActiveTemp.imageset/RecordButtonActive_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/RecordButtonActiveTemp.imageset/RecordButtonActive_2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/RecordButtonGlow.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "Record Button Glow_1x.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "Record Button Glow_2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/RecordButtonGlow.imageset/Record Button Glow_1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/RecordButtonGlow.imageset/Record Button Glow_1x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/RecordButtonGlow.imageset/Record Button Glow_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/RecordButtonGlow.imageset/Record Button Glow_2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/RecordButtonIndent.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "RecordButtonIndent_1x.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "RecordButtonIndent_2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/RecordButtonIndent.imageset/RecordButtonIndent_1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/RecordButtonIndent.imageset/RecordButtonIndent_1x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/RecordButtonIndent.imageset/RecordButtonIndent_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/RecordButtonIndent.imageset/RecordButtonIndent_2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/RecordButtonTemp.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "RecordButton_1x.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "RecordButton_2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | },
22 | "properties" : {
23 | "template-rendering-intent" : "original"
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/RecordButtonTemp.imageset/RecordButton_1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/RecordButtonTemp.imageset/RecordButton_1x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/RecordButtonTemp.imageset/RecordButton_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/RecordButtonTemp.imageset/RecordButton_2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/RecordingTapeBlackFlipped.symbolset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | },
6 | "symbols" : [
7 | {
8 | "filename" : "RecordingTapeBlackFlippedLg.symbols.svg",
9 | "idiom" : "universal"
10 | }
11 | ]
12 | }
13 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/RecordingTapeBlackFlipped.symbolset/RecordingTapeBlackFlippedLg.symbols.svg:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/SettingsButton.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "Settings_1x.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "Settings_2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/SettingsButton.imageset/Settings_1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/SettingsButton.imageset/Settings_1x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/SettingsButton.imageset/Settings_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/SettingsButton.imageset/Settings_2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/SourceButton.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "Source_1x.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "Source_2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/SourceButton.imageset/Source_1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/SourceButton.imageset/Source_1x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/SourceButton.imageset/Source_2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Assets.xcassets/SourceButton.imageset/Source_2x.png
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/unlinked.symbolset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | },
6 | "symbols" : [
7 | {
8 | "filename" : "untagged.svg",
9 | "idiom" : "universal"
10 | }
11 | ]
12 | }
13 |
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/unlinked.symbolset/untagged.svg:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/rm2000/Assets.xcassets/untagged.symbolset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | },
6 | "symbols" : [
7 | {
8 | "filename" : "tagslash.svg",
9 | "idiom" : "universal"
10 | }
11 | ]
12 | }
13 |
--------------------------------------------------------------------------------
/rm2000/Credits.rtfd/TXT.rtf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Credits.rtfd/TXT.rtf
--------------------------------------------------------------------------------
/rm2000/Credits.rtfd/spec_smalles1tblhe.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Credits.rtfd/spec_smalles1tblhe.png
--------------------------------------------------------------------------------
/rm2000/Fonts/TASAExplorer-SemiBold.otf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Fonts/TASAExplorer-SemiBold.otf
--------------------------------------------------------------------------------
/rm2000/Fonts/Tachyo.otf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/marceloexc/RM2000TapeRecorder/df910cc157da29a6dbd8a8ae0681def3b7e9314a/rm2000/Fonts/Tachyo.otf
--------------------------------------------------------------------------------
/rm2000/Info.plist:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="UTF-8"?>
 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
 3 | <plist version="1.0">
 4 | <dict>
 5 | 	<key>UIAppFonts</key>
 6 | 	<array>
 7 | 		<string>Tachyo.otf</string>
 8 | 		<string>TASAExplorer-SemiBold.otf</string>
 9 | 	</array>
10 | </dict>
11 | </plist>
12 |
--------------------------------------------------------------------------------
/rm2000/Logging.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import OSLog
3 |
4 | extension Logger {
5 | private static var subsystem = Bundle.main.bundleIdentifier!
6 |
7 | // logger object for taperecorder
8 | static let streamProcess = Logger(subsystem: subsystem, category: "taperecorder")
9 |
10 | static let sharedStreamState = Logger(subsystem: subsystem, category: "sharedstreamstate")
11 |
12 | static let viewModels = Logger(subsystem: subsystem, category: "viewmodels")
13 |
14 | static let appState = Logger(subsystem: subsystem, category: "appState")
15 | }
16 |
--------------------------------------------------------------------------------
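These category loggers are used throughout the app; a one-line usage sketch (the function below is illustrative, not from the repo):

import Foundation
import OSLog

// Illustrative call site: logs under the "taperecorder" category defined above.
func logRecordingStarted(to fileURL: URL) {
	Logger.streamProcess.info("Started recording to \(fileURL.path, privacy: .public)")
}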
/rm2000/Models/AudioFormat.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioFormat.swift
3 | // rm2000
4 | //
5 | // Created by Marcelo Mendez on 4/23/25.
6 | //
7 |
8 |
9 | enum AudioFormat: String, CaseIterable {
10 | case aac, mp3, flac, wav
11 |
12 | var asString: String {
13 | switch self {
14 | case .aac: return "aac"
15 | case .mp3: return "mp3"
16 | case .flac: return "flac"
17 | case .wav: return "wav"
18 | }
19 | }
20 |
21 | static func isSupported(extension ext: String) -> Bool {
22 | allCases.contains { $0.rawValue.lowercased() == ext.lowercased() }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
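Because isSupported(extension:) lowercases both sides of the comparison, the check is case-insensitive. A quick sketch of the expected behavior:

// Expected behavior of the helper above (results shown in comments).
let wavOK = AudioFormat.isSupported(extension: "WAV")  // true - matches .wav case-insensitively
let oggOK = AudioFormat.isSupported(extension: "ogg")  // false - not a declared case
let label = AudioFormat.aac.asString                   // "aac"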
/rm2000/Models/RecordingStateEnum.swift:
--------------------------------------------------------------------------------
1 |
2 | enum RecordingState {
3 | case idle, recording, busy, error
4 | }
5 |
--------------------------------------------------------------------------------
/rm2000/Models/Sample.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | struct Sample: Identifiable, Hashable {
4 | var id: UUID
5 | let fileURL: URL
6 | var filename: String?
7 | private var _metadata: SampleMetadata
8 |
9 | var metadata: SampleMetadata {
10 | get { return _metadata }
11 | set { _metadata = newValue }
12 | }
13 |
14 | var title: String {
15 | get { return metadata.title }
16 | set { metadata.title = newValue }
17 | }
18 |
19 | var tags: Set<String> {
20 | get { return metadata.tags }
21 | set { metadata.tags = newValue }
22 | }
23 |
24 | // Initialize from an existing recording
25 | init(from newRecording: TemporaryActiveRecording) {
26 | self.id = newRecording.id
27 | self.fileURL = newRecording.fileURL
28 | self.filename = fileURL.lastPathComponent
29 | self._metadata = SampleMetadata()
30 | }
31 |
32 | init?(fileURL: URL) {
33 |
34 | guard AudioFormat.isSupported(extension: fileURL.pathExtension) else {
35 | return nil
36 | }
37 |
38 | self.id = UUID()
39 | self.fileURL = fileURL
40 | self.filename = fileURL.lastPathComponent
41 | self._metadata = SampleMetadata(fileURL: fileURL)
42 | }
43 |
44 | init(fileURL: URL, metadata: SampleMetadata) {
45 | self.fileURL = fileURL
46 | self._metadata = metadata
47 | self.id = UUID()
48 | }
49 |
50 | private static func passesRegex(_ pathName: String) -> Bool {
51 | (try? regString.wholeMatch(in: pathName)) != nil
52 | }
53 |
54 | func hash(into hasher: inout Hasher) {
55 | hasher.combine(id)
56 | }
57 |
58 | static func == (lhs: Sample, rhs: Sample) -> Bool {
59 | return lhs.id == rhs.id
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
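The failable init?(fileURL:) only accepts extensions that AudioFormat recognizes and derives title and tags through SampleMetadata. A small sketch with hypothetical paths (no disk access is involved):

import Foundation

// Hypothetical paths, used only to show the failable initializer's behavior.
let sample = Sample(fileURL: URL(fileURLWithPath: "/Samples/kick--drums_808.wav"))
// sample?.title == "kick", sample?.tags == ["drums", "808"]
let rejected = Sample(fileURL: URL(fileURLWithPath: "/Samples/notes.txt"))
// rejected == nil, because "txt" is not a supported AudioFormat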
/rm2000/Models/SampleEditConfiguration.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import CoreMedia
3 |
4 | struct SampleEditConfiguration {
5 | var deleteAfterComplete: Bool = false // this is true for TemporaryActiveRecording recordings
6 |
7 | var desiredAudioFormat: AudioFormat = .wav
8 |
9 | var forwardEndTime: CMTime? = nil
10 |
11 | var reverseEndTime: CMTime? = nil
12 |
13 | var directoryDestination: SampleDirectory? = nil
14 |
15 | init() { }
16 |
17 | }
18 |
--------------------------------------------------------------------------------
/rm2000/Models/SampleEditOperation.swift:
--------------------------------------------------------------------------------
1 | enum SampleEditOperation {
2 | case metadataOnly
3 | case reEncode
4 | case createFromNewRecording
5 | }
6 |
--------------------------------------------------------------------------------
/rm2000/Models/SampleLibraryViewModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import CoreTransferable
3 | import Combine
4 |
5 | @MainActor
6 | class SampleLibraryViewModel: ObservableObject {
7 | @Published var listOfAllSamples: [Sample] = []
8 | @Published var indexedTags: [String] = []
9 | @Published var finishedProcessing: Bool = false
10 | @Published var sidebarSelection: SidebarSelection?
11 | @Published var detailSelection: SampleListItemModel.ID?
12 | @Published var showInspector: Bool = false
13 | @Published var slAudioPlayer = SLAudioPlayer()
14 |
15 | private var sampleStorage: SampleStorage
16 | private var cancellables = Set<AnyCancellable>()
17 |
18 | var selectedSample: Sample? {
19 | return matchToSample(id: detailSelection)
20 | }
21 |
22 | init(sampleStorage: SampleStorage = SampleStorage.shared) {
23 | self.sampleStorage = sampleStorage
24 |
25 | sampleStorage.UserDirectory.$samplesInStorage
26 | .receive(on: DispatchQueue.main)
27 | .sink { [weak self] newFiles in
28 | self?.listOfAllSamples = newFiles
29 | self?.finishedProcessing = true
30 | }
31 | .store(in: &cancellables)
32 |
33 | sampleStorage.UserDirectory.$indexedTags
34 | .receive(on: DispatchQueue.main)
35 | .sink { [weak self] newTags in
36 | self?.indexedTags = Array(newTags).sorted()
37 | }
38 | .store(in: &cancellables)
39 |
40 | // Watch for changes in selection and update audio player
41 | $detailSelection
42 | .receive(on: DispatchQueue.main)
43 | .sink { [weak self] newSelection in
44 | guard let self = self else { return }
45 | if let sample = self.matchToSample(id: newSelection) {
46 | self.slAudioPlayer.loadAudio(from: sample.fileURL)
47 | if (self.slAudioPlayer.isAutoplay) {
48 | self.slAudioPlayer.play()
49 | }
50 | }
51 | }
52 | .store(in: &cancellables)
53 |
54 | // update music player slider as song plays
55 | slAudioPlayer.objectWillChange
56 | .receive(on: DispatchQueue.main)
57 | .sink { [weak self] _ in
58 | self?.objectWillChange.send()
59 | }
60 | .store(in: &cancellables)
61 |
62 | }
63 |
64 | private func matchToSample(id: UUID?) -> Sample? {
65 | // match uuid from detailSelection to its according sample object
66 | guard let id = id else { return nil }
67 | return listOfAllSamples.first { $0.id == id }
68 | }
69 | }
70 |
71 | struct SampleListItemModel: Identifiable, Hashable {
72 | var id: UUID
73 | var text: String
74 | var file: FileRepresentable
75 |
76 | init(file: FileRepresentable) {
77 | if let sample = file as? Sample {
78 | self.id = sample.id
79 | self.text = sample.title
80 | } else {
81 | self.id = UUID()
82 | self.text = file.fileURL.lastPathComponent
83 | }
84 | self.file = file
85 | }
86 |
87 |
88 | func hash(into hasher: inout Hasher) {
89 | hasher.combine(id)
90 | }
91 |
92 | static func == (lhs: SampleListItemModel, rhs: SampleListItemModel) -> Bool {
93 | return lhs.id == rhs.id
94 | }
95 | }
96 |
97 | // necessary extension for draggable objects in sample library window
98 | extension SampleListItemModel: Transferable {
99 | static var transferRepresentation: some TransferRepresentation {
100 | FileRepresentation(exportedContentType: .audio) { fileRepresentable in
101 | // when dragging from app to finder
102 | SentTransferredFile(fileRepresentable.file.fileURL)
103 | }
104 | // without this, Finder won't recognize our dropped item
105 | ProxyRepresentation { fileRepresentable in fileRepresentable.file.fileURL}
106 | }
107 | }
108 |
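A minimal usage sketch (not part of the repository sources) of how the Transferable conformance above could be attached to a SwiftUI list row; the `SampleRow` view is an assumption for illustration:

import SwiftUI

struct SampleRow: View {
    let item: SampleListItemModel

    var body: some View {
        // .draggable hands the Transferable item to the drag session,
        // so dropping onto Finder exports the underlying audio file.
        Text(item.text)
            .draggable(item)
    }
}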
--------------------------------------------------------------------------------
/rm2000/Models/SampleMetadata.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | let regString = /(.+)--(.+)\.(.+)/
4 |
5 | struct SampleMetadata {
6 | var title: String = ""
7 | var tags: Set<String> = []
8 | var description: String? = ""
9 | var fileFormat: AudioFormat = .wav
10 | var group: URL?
11 |
12 | init() {
13 |
14 | }
15 |
16 | init(fileURL: URL) {
17 | if let match = try? regString.firstMatch(in: fileURL.lastPathComponent) {
18 | // if passes regex, assume it is tagged
19 | self.title = String(match.1)
20 | self.tags = Set(String(match.2).components(separatedBy: "_"))
21 | } else {
22 | // else, just use the filename as the title
23 | self.title = fileURL.deletingPathExtension().lastPathComponent
24 | }
25 | }
26 |
27 | var tagsAsString: String {
28 | get { tags.sorted().joined(separator: ",") }
29 | set {
30 | tags = Set(newValue
31 | .components(separatedBy: ",")
32 | .map { $0.trimmingCharacters(in: .whitespacesAndNewlines) }
33 | .filter { !$0.isEmpty })
34 | }
35 | }
36 |
37 | func finalFilename(fileExtension: String) -> String {
38 | // Construct the filename in the format "title--tag1_tag2_tag3.aac"
39 | if tags.isEmpty {
40 | // tags are empty, so omit them
41 | return "\(title).\(fileExtension)"
42 | } else {
43 | let formattedTags = tags.joined(separator: "_")
44 | return "\(title)--\(formattedTags).\(fileExtension)"
45 | }
46 | }
47 | }
48 |
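A small illustration (not repository code; the title and tags are invented) of the `title--tag1_tag2.ext` convention that `finalFilename(fileExtension:)` emits and `init(fileURL:)` parses back:

var metadata = SampleMetadata()
metadata.title = "vinyl kick"
metadata.tags = ["drums", "lofi"]

// e.g. "vinyl kick--drums_lofi.wav" (tag order follows Set iteration order)
let name = metadata.finalFilename(fileExtension: "wav")

// Parsing the same name back recovers the title and the tag set.
let parsed = SampleMetadata(fileURL: URL(fileURLWithPath: "/tmp/\(name)"))
// parsed.title == "vinyl kick", parsed.tags == ["drums", "lofi"]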
--------------------------------------------------------------------------------
/rm2000/Models/SampleStorage.swift:
--------------------------------------------------------------------------------
1 | import Combine
2 | import UniformTypeIdentifiers
3 | import Foundation
4 | import OSLog
5 | import SwiftUICore
6 | import SwiftDirectoryWatcher
7 |
8 | @MainActor
9 | final class SampleStorage: ObservableObject {
10 |
11 | let appState = AppState.shared
12 | static let shared = SampleStorage()
13 |
14 | @Published var UserDirectory: SampleDirectory
15 | @Published var ArchiveDirectory: SampleDirectory
16 |
17 | init() {
18 | self.UserDirectory = SampleDirectory(
19 | directory: appState.sampleDirectory ?? WorkingDirectory.applicationSupportPath())
20 | self.ArchiveDirectory = SampleDirectory(
21 | directory: WorkingDirectory.applicationSupportPath())
22 | }
23 | }
24 |
25 | class SampleDirectory: ObservableObject, DirectoryWatcherDelegate {
26 |
27 |
28 | @Published var samplesInStorage: [Sample] = []
29 | // todo - refactor indexedTags to automatically be called
30 | // when [files] changes in size
31 | @Published var indexedTags: Set<String> = []
32 | var directory: URL
33 | private var processedFilePaths: Set<String> = []
34 |
35 | private var watcher: DirectoryWatcher?
36 |
37 | let fileManager = FileManager.default
38 |
39 |
40 | init(directory: URL) {
41 | self.directory = directory
42 | startInitialFileScan()
43 | setupDirectoryWatching()
44 | }
45 |
46 | private func startInitialFileScan() {
47 | do {
48 | let directoryContents = try FileManager.default.contentsOfDirectory(
49 | at: self.directory, includingPropertiesForKeys: nil)
50 |
51 | for fileURL in directoryContents {
52 | // Only add files we haven't processed yet
53 | let filePath = fileURL.path
54 | if !processedFilePaths.contains(filePath) {
55 | if let SampleFile = Sample(fileURL: fileURL) {
56 | samplesInStorage.append(SampleFile)
57 | indexedTags.formUnion(SampleFile.tags)
58 | processedFilePaths.insert(filePath)
59 | }
60 | }
61 | }
62 | Logger.appState.info("Added \(directoryContents.count) files to \(self.directory.description)")
63 |
64 | } catch {
65 | Logger().error("Error during initial listing of directory contents: \(error.localizedDescription)")
66 | }
67 | }
68 |
69 | // having a lot of fun with arg labels today :)
70 | func applySampleEdits(to sample: FileRepresentable, for metadata: SampleMetadata, with configuration: SampleEditConfiguration) {
71 |
72 | var needsEncoding: Bool = false
73 |
74 | if (sample is TemporaryActiveRecording) {
75 | needsEncoding = true
76 | }
77 |
78 | Task {
79 | do {
80 | let encoder = Encoder(fileURL: sample.fileURL)
81 | let audioFormat = TapeRecorderState.shared.sampleRecordAudioFormat
82 | let filename = sample.id.uuidString + "." + audioFormat.asString
83 | let tempFilePath = WorkingDirectory.applicationSupportPath().appendingPathComponent(filename)
84 |
85 | let encodingConfig = EncodingConfig(outputFormat: TapeRecorderState.shared.sampleRecordAudioFormat, outputURL: tempFilePath, forwardStartTime: configuration.forwardEndTime, backwardsEndTime: configuration.reverseEndTime)
86 |
87 | try await encoder.encode(with: encodingConfig)
88 |
89 | let finalFilename = metadata.finalFilename(fileExtension: audioFormat.asString)
90 |
91 | try fileManager.moveItem(
92 | at: tempFilePath,
93 | to: self.directory.appendingPathComponent(finalFilename)
94 | )
95 |
96 | indexedTags.formUnion(metadata.tags)
97 | }
98 | }
99 | }
100 |
101 | private func setupDirectoryWatching() {
102 | let watcher = DirectoryWatcher(url: directory)
103 | watcher.delegate = self
104 | watcher.start()
105 | self.watcher = watcher
106 | Logger().info("DirectoryWatcher initialized at \(self.directory.path)")
107 | }
108 |
109 | func directoryWatcher(_ watcher: DirectoryWatcher, changed: DirectoryChangeSet) {
110 | DispatchQueue.main.async {
111 | for url in changed.newFiles {
112 | Logger().debug("New file added in sample directory....: \(url)")
113 | let path = url.path
114 | if !self.processedFilePaths.contains(path),
115 | let sample = Sample(fileURL: url) {
116 | self.samplesInStorage.append(sample)
117 | self.indexedTags.formUnion(sample.tags)
118 | self.processedFilePaths.insert(path)
119 | Logger().debug("\(url.lastPathComponent) fits sample criteria!")
120 | }
121 | }
122 |
123 | for url in changed.deletedFiles {
124 | let path = url.path
125 | if self.processedFilePaths.contains(path) {
126 | self.samplesInStorage.removeAll { $0.fileURL.path == path }
127 | self.processedFilePaths.remove(path)
128 | Logger().debug("File deleted: \(url.lastPathComponent)")
129 | }
130 | }
131 | }
132 | }
133 | }
134 |
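As a rough sketch (the path and the Combine sink are illustrative, not repository code), a caller can point a `SampleDirectory` at any folder and react to its published sample list:

import Combine

let directory = SampleDirectory(directory: URL(fileURLWithPath: "/tmp/samples", isDirectory: true))
var cancellables = Set<AnyCancellable>()

// samplesInStorage is seeded by the initial scan and kept fresh by DirectoryWatcher.
directory.$samplesInStorage
    .sink { samples in
        print("now tracking \(samples.count) samples")
    }
    .store(in: &cancellables)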
--------------------------------------------------------------------------------
/rm2000/Models/TemporaryActiveRecording.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import OSLog
3 |
4 | struct TemporaryActiveRecording {
5 | var id: UUID
6 | var fileURL: URL
7 |
8 | // TODO - hardcoded file extension string
9 | init() {
10 |
11 | // ensure directory exists
12 | // TODO - terrible - maybe belongs in SampleStorage instead?
13 | // (why are we still using workingdirectory? that thing needs to die...
14 | if !(WorkingDirectory.applicationSupportPath().isDirectory) {
15 |
16 | let directory = WorkingDirectory.applicationSupportPath()
17 |
18 | try? FileManager.default.createDirectory(at: directory, withIntermediateDirectories: true)
19 | Logger().info("Had to make a directory for the application support path at: \(directory)")
20 | }
21 | self.id = UUID()
22 | self.fileURL = WorkingDirectory.applicationSupportPath()
23 | .appendingPathComponent("\(id.uuidString).caf")
24 | }
25 |
26 | init(fileURL: URL) {
27 | self.fileURL = fileURL
28 | self.id = UUID()
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/rm2000/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/rm2000/RM2000 Tape Recorder.swift:
--------------------------------------------------------------------------------
1 | import SettingsAccess
2 | import SwiftUI
3 |
4 | @main
5 | struct RM2000TapeRecorderApp: App {
6 | @StateObject var appState = AppState.shared
7 | @StateObject var sampleStorage = SampleStorage.shared
8 | @StateObject private var recordingState = TapeRecorderState.shared
9 | @NSApplicationDelegateAdaptor(AppKitWindowManagerDelegate.self) var appDelegate
10 |
11 | var body: some Scene {
12 | MenuBarExtra {
13 | MenuBarView()
14 | .environmentObject(appDelegate.recordingState)
15 | .environmentObject(sampleStorage)
16 | } label: {
17 | Image("RecordingTapeBlackFlipped")
18 | }
19 | .menuBarExtraStyle(.window)
20 |
21 | Window("Recordings", id: "recordings-window") {
22 | SampleLibraryView()
23 | .environmentObject(sampleStorage)
24 | }
25 | WindowGroup("Welcome", id: "onboarding") {
26 | OnboardingView(viewModel: OnboardingViewModel())
27 | .environmentObject(appState)
28 | }
29 | .windowResizability(.contentSize)
30 | .windowStyle(.hiddenTitleBar)
31 |
32 | Settings {
33 | SettingsView()
34 | .environmentObject(appState)
35 | .environmentObject(recordingState)
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/rm2000/Shared.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftUICore
3 | import Combine
4 | import AVKit
5 | import KeyboardShortcuts
6 |
7 | struct WorkingDirectory {
8 | static let appIdentifier = "com.marceloexc.rm2000"
9 |
10 | static func applicationSupportPath() -> URL {
11 | let documentURL = FileManager.default.urls(
12 | for: .applicationSupportDirectory, in: .userDomainMask
13 | ).first!
14 |
15 | let path = documentURL.appendingPathComponent(appIdentifier)
16 |
17 | return path
18 | }
19 | }
20 |
21 | extension KeyboardShortcuts.Name {
22 | static let recordGlobalShortcut = Self("recordGlobalShortcut", default: .init(.g, modifiers: [.command, .option]))
23 | }
24 |
25 | extension URL {
26 | var isDirectory: Bool {
27 | (try? resourceValues(forKeys: [.isDirectoryKey]))?.isDirectory == true
28 | }
29 |
30 | // https://stackoverflow.com/a/56044623
31 | var fileSize: Int? {
32 | let value = try? resourceValues(forKeys: [.fileSizeKey])
33 | return value?.fileSize
34 | }
35 | }
36 |
37 | func timeString(_ time: TimeInterval) -> String {
38 | let minutes = Int(time) / 60
39 | let seconds = Int(time) % 60
40 | return String(format: "%02d:%02d", minutes, seconds)
41 | }
42 |
43 | // https://stackoverflow.com/a/56894458
44 | extension Color {
45 | init(hex: UInt, alpha: Double = 1) {
46 | self.init(
47 | .sRGB,
48 | red: Double((hex >> 16) & 0xff) / 255,
49 | green: Double((hex >> 08) & 0xff) / 255,
50 | blue: Double((hex >> 00) & 0xff) / 255,
51 | opacity: alpha
52 | )
53 | }
54 | }
55 |
56 | protocol FileRepresentable {
57 | var fileURL: URL { get }
58 | var id: UUID { get }
59 | }
60 |
61 | extension TemporaryActiveRecording: FileRepresentable { }
62 | extension Sample: FileRepresentable { }
63 |
64 | // i borrowed a lot of this from https://github.com/sindresorhus/Gifski/blob/main/Gifski/Utilities.swift
65 | extension NSView {
66 | /**
67 | Get a subview matching a condition.
68 | */
69 | func firstSubview(deep: Bool = false, where matches: (NSView) -> Bool) -> NSView? {
70 | for subview in subviews {
71 | if matches(subview) {
72 | return subview
73 | }
74 |
75 | if deep, let match = subview.firstSubview(deep: deep, where: matches) {
76 | return match
77 | }
78 | }
79 |
80 | return nil
81 | }
82 | }
83 |
84 | extension NSObjectProtocol where Self: NSObject {
85 | func updates<Value>(
86 | for keyPath: KeyPath<Self, Value>,
87 | options: NSKeyValueObservingOptions = [.initial, .new]
88 | ) -> AsyncStream<Value> {
89 | publisher(for: keyPath, options: options).toAsyncStream
90 | }
91 | }
92 |
93 | extension Publisher where Failure == Never {
94 | var toAsyncStream: AsyncStream<Output> {
95 | AsyncStream(Output.self) { continuation in
96 | let cancellable = sink { completion in
97 | switch completion {
98 | case .finished:
99 | continuation.finish()
100 | }
101 | } receiveValue: { output in
102 | continuation.yield(output)
103 | }
104 |
105 | continuation.onTermination = { [cancellable] _ in
106 | cancellable.cancel()
107 | }
108 | }
109 | }
110 | }
111 |
112 | extension NSObject {
113 | // Note: It's intentionally a getter to get the dynamic self.
114 | /**
115 | Returns the class name without module name.
116 | */
117 | static var simpleClassName: String { String(describing: self) }
118 |
119 | /**
120 | Returns the class name of the instance without module name.
121 | */
122 | var simpleClassName: String { Self.simpleClassName }
123 | }
124 |
125 | extension NSLayoutConstraint {
126 | /**
127 | Returns copy of the constraint with changed properties provided as arguments.
128 | */
129 | func changing(
130 | firstItem: Any? = nil,
131 | firstAttribute: Attribute? = nil,
132 | relation: Relation? = nil,
133 | secondItem: NSView? = nil,
134 | secondAttribute: Attribute? = nil,
135 | multiplier: Double? = nil,
136 | constant: Double? = nil
137 | ) -> Self {
138 | .init(
139 | item: firstItem ?? self.firstItem as Any,
140 | attribute: firstAttribute ?? self.firstAttribute,
141 | relatedBy: relation ?? self.relation,
142 | toItem: secondItem ?? self.secondItem,
143 | attribute: secondAttribute ?? self.secondAttribute,
144 | // The compiler fails to auto-convert to CGFloat here.
145 | multiplier: multiplier.flatMap(CGFloat.init) ?? self.multiplier,
146 | constant: constant.flatMap(CGFloat.init) ?? self.constant
147 | )
148 | }
149 | }
150 |
151 | // https://stackoverflow.com/questions/38343186/write-extend-file-attributes-swift-example/38343753#38343753
152 | extension URL {
153 |
154 | /// Get extended attribute.
155 | func extendedAttribute(forName name: String) throws -> Data {
156 |
157 | let data = try self.withUnsafeFileSystemRepresentation { fileSystemPath -> Data in
158 |
159 | // Determine attribute size:
160 | let length = getxattr(fileSystemPath, name, nil, 0, 0, 0)
161 | guard length >= 0 else { throw URL.posixError(errno) }
162 |
163 | // Create buffer with required size:
164 | var data = Data(count: length)
165 |
166 | // Retrieve attribute:
167 | let result = data.withUnsafeMutableBytes { [count = data.count] in
168 | getxattr(fileSystemPath, name, $0.baseAddress, count, 0, 0)
169 | }
170 | guard result >= 0 else { throw URL.posixError(errno) }
171 | return data
172 | }
173 | return data
174 | }
175 |
176 | /// Set extended attribute.
177 | func setExtendedAttribute(data: Data, forName name: String) throws {
178 |
179 | try self.withUnsafeFileSystemRepresentation { fileSystemPath in
180 | let result = data.withUnsafeBytes {
181 | setxattr(fileSystemPath, name, $0.baseAddress, data.count, 0, 0)
182 | }
183 | guard result >= 0 else { throw URL.posixError(errno) }
184 | }
185 | }
186 |
187 | /// Remove extended attribute.
188 | func removeExtendedAttribute(forName name: String) throws {
189 |
190 | try self.withUnsafeFileSystemRepresentation { fileSystemPath in
191 | let result = removexattr(fileSystemPath, name, 0)
192 | guard result >= 0 else { throw URL.posixError(errno) }
193 | }
194 | }
195 |
196 | /// Get list of all extended attributes.
197 | func listExtendedAttributes() throws -> [String] {
198 |
199 | let list = try self.withUnsafeFileSystemRepresentation { fileSystemPath -> [String] in
200 | let length = listxattr(fileSystemPath, nil, 0, 0)
201 | guard length >= 0 else { throw URL.posixError(errno) }
202 |
203 | // Create buffer with required size:
204 | var namebuf = Array<CChar>(repeating: 0, count: length)
205 |
206 | // Retrieve attribute list:
207 | let result = listxattr(fileSystemPath, &namebuf, namebuf.count, 0)
208 | guard result >= 0 else { throw URL.posixError(errno) }
209 |
210 | // Extract attribute names:
211 | let list = namebuf.split(separator: 0).compactMap {
212 | $0.withUnsafeBufferPointer {
213 | $0.withMemoryRebound(to: UInt8.self) {
214 | String(bytes: $0, encoding: .utf8)
215 | }
216 | }
217 | }
218 | return list
219 | }
220 | return list
221 | }
222 |
223 | /// Helper function to create an NSError from a Unix errno.
224 | private static func posixError(_ err: Int32) -> NSError {
225 | return NSError(domain: NSPOSIXErrorDomain, code: Int(err),
226 | userInfo: [NSLocalizedDescriptionKey: String(cString: strerror(err))])
227 | }
228 | }
229 |
230 | extension CMTime {
231 | var displayString: String {
232 | guard CMTIME_IS_NUMERIC(self) && isValid && !self.seconds.isNaN else {
233 | return "--:--"
234 | }
235 | let totalSeconds = Int(seconds)
236 | let minutes = totalSeconds / 60
237 | let seconds = totalSeconds % 60
238 | return String(format: "%02d:%02d", minutes, seconds)
239 | }
240 | }
241 |
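A minimal sketch (not in the sources) of consuming the KVO-to-AsyncStream helper above; `Progress` and its `fractionCompleted` key path are stand-ins for any KVO-compliant NSObject property:

let progress = Progress(totalUnitCount: 4)

Task {
    // updates(for:) bridges the KVO publisher into an AsyncStream<Double>.
    for await fraction in progress.updates(for: \.fractionCompleted) {
        print("fraction completed:", fraction)
    }
}

progress.completedUnitCount = 2   // fractionCompleted becomes 0.5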
--------------------------------------------------------------------------------
/rm2000/Tape Mechanism/AudioManager.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioManager.swift
3 | // rm2000
4 | //
5 | // Created by Marcelo Mendez on 9/23/24.
6 | //
7 |
8 | import Foundation
9 | import AVFAudio
10 | import OSLog
11 |
12 | class AudioManager {
13 |
14 | var pcmBufferHandler: ((AVAudioPCMBuffer) -> Void)?
15 | private let writeQueue = DispatchQueue(label: "audio.writer.queue")
16 | private var audioFile: AVAudioFile?
17 | private let encodingParams: [String: Any] = [
18 | AVFormatIDKey: kAudioFormatLinearPCM,
19 | AVSampleRateKey: 48000.0,
20 | AVNumberOfChannelsKey: 2,
21 | AVLinearPCMBitDepthKey: 16,
22 | AVLinearPCMIsFloatKey: true,
23 | AVLinearPCMIsBigEndianKey: false,
24 | AVLinearPCMIsNonInterleaved: true
25 | ]
26 |
27 | func setupAudioWriter(fileURL: URL) throws {
28 | audioFile = try AVAudioFile(forWriting: fileURL, settings: encodingParams, commonFormat: .pcmFormatFloat32, interleaved: false)
29 | }
30 |
31 | func writeSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
32 | writeQueue.async {
33 | guard sampleBuffer.isValid else {
34 | Logger.audioManager.warning("Invalid sample buffer or conversion failed")
35 | return
36 | }
37 |
38 | try? sampleBuffer.withAudioBufferList { audioBufferList, blockBuffer in
39 | guard let description = sampleBuffer.formatDescription?.audioStreamBasicDescription,
40 | let format = AVAudioFormat(standardFormatWithSampleRate: description.mSampleRate, channels: description.mChannelsPerFrame),
41 | let samples = AVAudioPCMBuffer(pcmFormat: format, bufferListNoCopy: audioBufferList.unsafePointer)
42 | else { return }
43 | self.pcmBufferHandler?(samples)
44 | do {
45 | try self.audioFile?.write(from: samples)
46 | // post the audiolevel into the wild for observing
47 | let currentAudioLevel = self.getAudioLevel(from: samples)
48 |
49 | DispatchQueue.main.async {
50 | NotificationCenter.default.post(name: .audioLevelUpdated, object: nil, userInfo: ["level": currentAudioLevel])
51 | }
52 | } catch {
53 | Logger.audioManager.error("Couldn't write samples: \(error.localizedDescription)")
54 | }
55 | }
56 | }
57 | }
58 |
59 | func getAudioLevel(from samples: AVAudioPCMBuffer) -> Float {
60 |
61 | // calculate root mean square
62 | // https://stackoverflow.com/a/43789556
63 | let channelCount = Int(samples.format.channelCount)
64 | let arraySize = samples.frameLength
65 | let bufferPointer = samples.floatChannelData!
66 |
67 | var sumOfSquares: Float = 0.0
68 | var sampleCount: Int = 0
69 |
70 | // process all channels
71 | for channel in 0..<channelCount {
72 | let channelData = bufferPointer[channel]
73 | 
74 | // sum the squares of every sample in this channel
75 | for frame in 0..<Int(arraySize) {
76 | let sample = channelData[frame]
77 | sumOfSquares += sample * sample
78 | sampleCount += 1
79 | }
80 | }
81 | 
82 | // avoid division by zero on empty buffers
83 | guard sampleCount > 0 else { return 0.0 }
84 |
85 | // calculate RMS
86 | let rms = sqrt(sumOfSquares / Float(sampleCount))
87 |
88 | return pow(rms, 0.3)
89 | }
90 |
91 | func stopAudioWriter() {
92 | writeQueue.sync { [weak self] in
93 | if #available(macOS 15.0, *) {
94 | // close func barely added to macos15? wtf?
95 | try? self?.audioFile?.close()
96 | }
97 | self?.audioFile = nil
98 | }
99 | }
100 |
101 | deinit {
102 | // just to be sure
103 | self.audioFile = nil
104 | }
105 | }
106 |
107 | extension Notification.Name {
108 | static let audioLevelUpdated = Notification.Name("audioLevelUpdated")
109 | }
110 |
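A small sketch (illustrative only) of consuming the `.audioLevelUpdated` notification posted above, mirroring what a meter view might do with Combine:

import Combine

var cancellables = Set<AnyCancellable>()

NotificationCenter.default.publisher(for: .audioLevelUpdated)
    .compactMap { $0.userInfo?["level"] as? Float }
    .sink { level in
        // level is already perceptually scaled (rms^0.3), roughly in the 0...1 range
        print("meter level:", level)
    }
    .store(in: &cancellables)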
--------------------------------------------------------------------------------
/rm2000/Tape Mechanism/Backends/CoreAudioTaps/CoreAudio.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import AudioToolbox
3 |
4 | @MainActor
5 | class AudioProcessController: ObservableObject {
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/rm2000/Tape Mechanism/Backends/Protocols.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Protocols.swift
3 | // rm2000
4 | //
5 | // Created by Marcelo Mendez on 9/23/24.
6 | //
7 |
8 | import Foundation
9 | import CoreMedia
10 | import ScreenCaptureKit
11 |
12 | protocol StreamManagerDelegate: AnyObject {
13 | func streamManager(_ manager: SCStreamManager, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of type: SCStreamOutputType)
14 | func streamManager(_ manager: SCStreamManager, didStopWithError error: Error)
15 | }
16 |
--------------------------------------------------------------------------------
/rm2000/Tape Mechanism/Backends/ScreenCaptureKit/SCStreamManager.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StreamManager.swift
3 | // rm2000
4 | //
5 | // Created by Marcelo Mendez on 9/23/24.
6 | //
7 |
8 | import Foundation
9 | import ScreenCaptureKit
10 |
11 | class SCStreamManager: NSObject, SCStreamDelegate, @unchecked Sendable {
12 |
13 | weak var delegate: StreamManagerDelegate?
14 | private var stream: SCStream?
15 |
16 | func setupAudioStream() async throws {
17 | let streamConfiguration = SCStreamConfiguration()
18 | streamConfiguration.sampleRate = 48000
19 | streamConfiguration.channelCount = 2
20 | streamConfiguration.capturesAudio = true
21 | streamConfiguration.minimumFrameInterval = CMTime(seconds: 1.0 / 2.0, preferredTimescale: 600)
22 |
23 | let availableContent = try await SCShareableContent.current
24 | guard let display = availableContent.displays.first(where: { $0.displayID == CGMainDisplayID() }) else {
25 | throw NSError(domain: "RecordingError", code: 1, userInfo: [NSLocalizedDescriptionKey: "Can't find display with ID \(CGMainDisplayID()) in sharable content"])
26 | }
27 |
28 | let filter = SCContentFilter(display: display, excludingApplications: [], exceptingWindows: [])
29 | stream = SCStream(filter: filter, configuration: streamConfiguration, delegate: self)
30 | }
31 |
32 | func startCapture() throws {
33 | guard let stream = stream else {
34 | throw NSError(domain: "RecordingError", code: 2, userInfo: [NSLocalizedDescriptionKey: "Stream not prepared"])
35 | }
36 | let audioProcessingQueue = DispatchQueue(label: "AudioProcessingQueue")
37 | try stream.addStreamOutput(self, type: .audio, sampleHandlerQueue: audioProcessingQueue)
38 | stream.startCapture()
39 | }
40 |
41 | func stopCapture() {
42 | stream?.stopCapture()
43 | try? stream?.removeStreamOutput(self, type: .audio)
44 | stream = nil
45 | }
46 |
47 | // make SCStreamDelegate happy
48 |
49 | func stream(_ stream: SCStream, didStopWithError error: Error) {
50 | Task { @MainActor in
51 | delegate?.streamManager(self, didStopWithError: error)
52 | }
53 | }
54 | }
55 |
56 |
57 | extension SCStreamManager: SCStreamOutput {
58 | func stream(_ stream: SCStream, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of type: SCStreamOutputType) {
59 | delegate?.streamManager(self, didOutputSampleBuffer: sampleBuffer, of: type)
60 | }
61 | }
62 |
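A rough wiring sketch (the `Recorder` type is an assumption, not repository code) showing the intended lifecycle: configure the stream, attach a delegate, then forward each audio buffer to `AudioManager`:

import CoreMedia
import ScreenCaptureKit

final class Recorder: StreamManagerDelegate {
    private let streamManager = SCStreamManager()
    private let audioManager = AudioManager()

    func start(writingTo url: URL) async throws {
        try audioManager.setupAudioWriter(fileURL: url)
        streamManager.delegate = self
        try await streamManager.setupAudioStream()
        try streamManager.startCapture()
    }

    func stop() {
        streamManager.stopCapture()
        audioManager.stopAudioWriter()
    }

    func streamManager(_ manager: SCStreamManager, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of type: SCStreamOutputType) {
        guard type == .audio else { return }
        audioManager.writeSampleBuffer(sampleBuffer)
    }

    func streamManager(_ manager: SCStreamManager, didStopWithError error: Error) {
        stop()
    }
}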
--------------------------------------------------------------------------------
/rm2000/Tape Mechanism/Encoder/Encoder.swift:
--------------------------------------------------------------------------------
1 | import AVFoundation
2 | import CoreMedia
3 | import Foundation
4 | import OSLog
5 | import SFBAudioEngine
6 | import CSFBAudioEngine
7 |
8 | struct RMAudioConverter {
9 | static func convert(input: URL, output: URL, format: AudioFormat) async {
10 | do {
11 | try AudioConverter.convert(input, to: output)
12 | Logger().info("Conversion complete")
13 | } catch {
14 | Logger().error("Conversion failed: \(error.localizedDescription)")
15 | }
16 | }
17 | }
18 |
19 | struct EncodingConfig {
20 | let outputFormat: AudioFormat
21 | let outputURL: URL?
22 | let forwardsEndTime: CMTime?
23 | let reverseEndTime: CMTime?
24 |
25 | init(
26 | outputFormat: AudioFormat,
27 | outputURL: URL? = nil,
28 | forwardStartTime: CMTime?,
29 | backwardsEndTime: CMTime?
30 | ) {
31 | self.outputFormat = outputFormat
32 | self.outputURL = outputURL
33 | self.forwardsEndTime = forwardStartTime
34 | self.reverseEndTime = backwardsEndTime
35 | }
36 | }
37 |
38 | enum EncodingInputType {
39 | case fileURL
40 | case pcmBuffer
41 | case existingSample
42 | }
43 |
44 | class Encoder {
45 | private(set) var isProcessing = false
46 | private(set) var sourceType: EncodingInputType
47 |
48 | private var sourceBuffer: AVAudioPCMBuffer?
49 | private var sourceURL: URL?
50 |
51 | private var needsTrimming: Bool = false
52 |
53 | init(fileURL: URL?) {
54 | self.sourceURL = fileURL
55 | self.sourceType = .fileURL
56 | }
57 |
58 | init(pcmBuffer: AVAudioPCMBuffer?) {
59 | self.sourceBuffer = pcmBuffer
60 | self.sourceType = .pcmBuffer
61 | }
62 |
63 | func encode(with config: EncodingConfig) async throws {
64 |
65 | // let glyphs update
66 | await MainActor.run {
67 | TapeRecorderState.shared.status = .busy
68 | }
69 |
70 | if config.forwardsEndTime != nil || config.reverseEndTime != nil {
71 | needsTrimming = true
72 | }
73 |
74 | isProcessing = true
75 |
76 | /*
77 | TODO - this will need refactoring once we allow users to save their
78 | quick recordings and convert them to normal samples
79 | */
80 | switch sourceType {
81 | case .pcmBuffer:
82 | if needsTrimming {
83 | // awesome, we have the pcmBuffer already, just extract it, render as .caf, and then formatconvert
84 | }
85 | case .fileURL:
86 |
87 | if needsTrimming {
88 | Logger().debug("Sample needs trimming")
89 |
90 | guard let decoder = try? AudioDecoder(url: self.sourceURL!) else {
91 | Logger().error("Failed to init decoder for \(self.sourceURL!)")
92 | return
93 | }
94 | try decoder.open()
95 | let processingFormat = decoder.processingFormat
96 | print("Processing format: \(processingFormat), processing format length: \(decoder.length)")
97 | let frameCount = AVAudioFrameCount(decoder.length)
98 | guard let buffer = AVAudioPCMBuffer(pcmFormat: processingFormat, frameCapacity: frameCount) else {
99 | Logger().error("Failed to get buffers from the decoder for \(self.sourceURL!)")
100 | return
101 | }
102 |
103 | try decoder.decode(into: buffer, length: frameCount)
104 |
105 | guard let trimmedBuffer = trimPCMBuffer(
106 | buffer: buffer,
107 | forwardsEndTime: config.forwardsEndTime!,
108 | reverseEndTime: config.reverseEndTime!
109 | ) else {
110 | Logger().error("Failed to trim buffer")
111 | return
112 | }
113 |
114 | let baseName = sourceURL?.deletingPathExtension().lastPathComponent
115 | let newFileName = "\(baseName!)_t.caf"
116 | let trimmedSourceURL = sourceURL?.deletingLastPathComponent().appendingPathComponent(newFileName)
117 |
118 | try writeToAACWithAVAudioFile(buffer: trimmedBuffer, to: trimmedSourceURL!)
119 | await RMAudioConverter.convert(
120 | input: trimmedSourceURL!, output: config.outputURL!,
121 | format: config.outputFormat)
122 |
123 | } else {
124 | Logger().debug("Sending encode configuration as \(String(describing: config))")
125 | await RMAudioConverter.convert(
126 | input: self.sourceURL!, output: config.outputURL!,
127 | format: config.outputFormat)
128 | }
129 |
130 | case .existingSample:
131 | fatalError("Not implemented yet")
132 | }
133 |
134 | await MainActor.run {
135 | TapeRecorderState.shared.status = .idle
136 | }
137 | }
138 |
139 | private func trimPCMBuffer(buffer: AVAudioPCMBuffer, forwardsEndTime: CMTime, reverseEndTime: CMTime) -> AVAudioPCMBuffer? {
140 |
141 | let sampleRate = buffer.format.sampleRate
142 | let startTimeSeconds = reverseEndTime.seconds
143 | let endTimeSeconds = forwardsEndTime.seconds
144 |
145 | // Validate range
146 | let bufferDuration = Double(buffer.frameLength) / sampleRate
147 | guard startTimeSeconds >= 0 && endTimeSeconds <= bufferDuration && startTimeSeconds < endTimeSeconds else {
148 | Logger().error("Invalid trim range: \(startTimeSeconds) to \(endTimeSeconds) seconds (buffer duration: \(bufferDuration))")
149 | return nil
150 | }
151 |
152 | let startFrame = AVAudioFramePosition(startTimeSeconds * sampleRate)
153 | let endFrame = AVAudioFramePosition(endTimeSeconds * sampleRate)
154 | let frameCount = AVAudioFrameCount(endFrame - startFrame)
155 |
156 | guard let trimmedBuffer = AVAudioPCMBuffer(pcmFormat: buffer.format, frameCapacity: frameCount) else {
157 | Logger().error("Failed to create trimmed buffer")
158 | return nil
159 | }
160 |
161 | // copy audio data - handle interleaved vs non-interleaved
162 | if buffer.format.isInterleaved {
163 | // Interleaved
164 | let sourcePtr = buffer.floatChannelData![0]
165 | let destPtr = trimmedBuffer.floatChannelData![0]
166 | let channelCount = Int(buffer.format.channelCount)
167 |
168 | for frame in 0.. AVAudioPCMBuffer? {
230 |
231 | do {
232 | let audioFile = try AVAudioFile(forReading: fileURL)
233 |
234 | let format = audioFile.processingFormat
235 | let frameCount = AVAudioFrameCount(audioFile.length)
236 |
237 | guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCount) else {
238 | Logger().error("Failed to create AVAudioPCMBuffer for \(audioFile)")
239 | return nil
240 | }
241 |
242 | try audioFile.read(into: buffer)
243 |
244 | return buffer
245 | } catch {
246 | Logger().error("Error: \(error.localizedDescription)")
247 | return nil
248 | }
249 | }
250 |
251 | private func getExtractedBufferPortion(pcmBuffer: AVAudioPCMBuffer) -> AVAudioPCMBuffer {
252 | return pcmBuffer
253 | }
254 |
255 | private func saveTemporaryAudioFile(pcmBuffer: AVAudioPCMBuffer) -> URL? {
256 |
257 | let temporaryFilename = UUID().uuidString + ".caf"
258 |
259 | do {
260 | let outputURL = WorkingDirectory.applicationSupportPath().appendingPathComponent(temporaryFilename)
261 |
262 | let outputFile = try AVAudioFile(forWriting: outputURL, settings: pcmBuffer.format.settings)
263 | try outputFile.write(from: pcmBuffer)
264 |
265 | Logger().info("Successfully wrote temporary audiofile as \(temporaryFilename)")
266 |
267 | return outputURL
268 | } catch {
269 | Logger().error("Error: \(error.localizedDescription)")
270 | return nil
271 | }
272 | }
273 |
274 | private func saveToFile() {
275 |
276 | }
277 | }
278 |
279 |
280 | ================================================
281 | File: /StreamManager.swift
282 | ================================================
283 | //
284 | // StreamManager.swift
285 | // rm2000
286 | //
287 | // Created by Marcelo Mendez on 9/23/24.
288 | //
289 |
290 | import Foundation
291 | import ScreenCaptureKit
292 |
293 | class StreamManager: NSObject, SCStreamDelegate {
294 |
295 | weak var delegate: StreamManagerDelegate?
296 | private var stream: SCStream?
297 |
298 | func setupAudioStream() async throws {
299 | let streamConfiguration = SCStreamConfiguration()
300 | streamConfiguration.width = 2
301 | streamConfiguration.height = 2
302 | streamConfiguration.minimumFrameInterval = CMTime(value: 1, timescale: CMTimeScale.max)
303 | streamConfiguration.showsCursor = true
304 | streamConfiguration.sampleRate = 48000
305 | streamConfiguration.channelCount = 2
306 | streamConfiguration.capturesAudio = true
307 | streamConfiguration.minimumFrameInterval = CMTime(seconds: 1.0 / 2.0, preferredTimescale: 600)
308 |
309 | let availableContent = try await SCShareableContent.current
310 | guard let display = availableContent.displays.first(where: { $0.displayID == CGMainDisplayID() }) else {
311 | throw NSError(domain: "RecordingError", code: 1, userInfo: [NSLocalizedDescriptionKey: "Can't find display with ID \(CGMainDisplayID()) in sharable content"])
312 | }
313 |
314 | let filter = SCContentFilter(display: display, excludingApplications: [], exceptingWindows: [])
315 | stream = SCStream(filter: filter, configuration: streamConfiguration, delegate: self)
316 | }
317 |
318 | func startCapture() throws {
319 | guard let stream = stream else {
320 | throw NSError(domain: "RecordingError", code: 2, userInfo: [NSLocalizedDescriptionKey: "Stream not prepared"])
321 | }
322 |
323 | try stream.addStreamOutput(self, type: .audio, sampleHandlerQueue: .global())
324 | stream.startCapture()
325 | }
326 |
327 | func stopCapture() {
328 | stream?.stopCapture()
329 | stream = nil
330 | }
331 |
332 | // make SCStreamDelegate happy
333 |
334 | func stream(_ stream: SCStream, didStopWithError error: Error) {
335 | delegate?.streamManager(self, didStopWithError: error)
336 | }
337 | }
338 |
339 |
340 | extension StreamManager: SCStreamOutput {
341 | func stream(_ stream: SCStream, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of type: SCStreamOutputType) {
342 | delegate?.streamManager(self, didOutputSampleBuffer: sampleBuffer, of: type)
343 | }
344 | }
345 |
346 |
347 | ================================================
348 | File: /AudioManager.swift
349 | ================================================
350 | //
351 | // AudioManager.swift
352 | // rm2000
353 | //
354 | // Created by Marcelo Mendez on 9/23/24.
355 | //
356 |
357 | import Foundation
358 | import AVFAudio
359 | import OSLog
360 |
361 | class AudioManager {
362 |
363 | func setupAudioWriter(fileURL: URL) throws {
364 | audioFile = try AVAudioFile(forWriting: fileURL, settings: encodingParams, commonFormat: .pcmFormatFloat32, interleaved: false)
365 | }
366 |
367 | func writeSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
368 | guard sampleBuffer.isValid, let samples = sampleBuffer.asPCMBuffer else {
369 | Logger.audioManager.warning("Invalid sample buffer or conversion failed")
370 | return
371 | }
372 |
373 | do {
374 | try audioFile?.write(from: samples)
375 | } catch {
376 | Logger.audioManager.error("Couldn't write samples: \(error.localizedDescription)")
377 | }
378 | }
379 |
380 | func stopAudioWriter() {
381 | audioFile = nil
382 | }
383 |
384 | private var audioFile: AVAudioFile?
385 |
386 | private let encodingParams: [String: Any] = [
387 | AVFormatIDKey: kAudioFormatMPEG4AAC,
388 | AVSampleRateKey: 48000,
389 | AVNumberOfChannelsKey: 2,
390 | AVEncoderBitRateKey: 128000
391 | ]
392 | }
393 |
394 |
395 | ================================================
396 | File: /Extensions.swift
397 | ================================================
398 | //
399 | // Extensions.swift
400 | // rm2000
401 | //
402 | // Created by Marcelo Mendez on 9/23/24.
403 | //
404 |
405 | import OSLog
406 | import Foundation
407 | import CoreMedia
408 | import AVFAudio
409 |
410 | extension CMSampleBuffer {
411 | var asPCMBuffer: AVAudioPCMBuffer? {
412 | try? self.withAudioBufferList { audioBufferList, _ -> AVAudioPCMBuffer? in
413 | guard let absd = self.formatDescription?.audioStreamBasicDescription else {
414 | Logger.audioManager.error("Failed setting description for basic audio stream")
415 | return nil
416 | }
417 | guard let format = AVAudioFormat(standardFormatWithSampleRate: absd.mSampleRate, channels: absd.mChannelsPerFrame) else {
418 | Logger.audioManager.error("Failed formatting the audio file with the set sample size of \(absd.mSampleRate)")
419 | return nil
420 | }
421 | return AVAudioPCMBuffer(pcmFormat: format, bufferListNoCopy: audioBufferList.unsafePointer)
422 | }
423 | }
424 | }
425 |
426 | extension Logger {
427 | static let tapeRecorder = Logger(subsystem: Bundle.main.bundleIdentifier!, category: "TapeRecorder")
428 | static let streamManager = Logger(subsystem: Bundle.main.bundleIdentifier!, category: "StreamManager")
429 | static let audioManager = Logger(subsystem: Bundle.main.bundleIdentifier!, category: "AudioManager")
430 | }
431 |
432 |
433 | ================================================
434 | File: /Protocols.swift
435 | ================================================
436 | //
437 | // Protocols.swift
438 | // rm2000
439 | //
440 | // Created by Marcelo Mendez on 9/23/24.
441 | //
442 |
443 | import Foundation
444 | import CoreMedia
445 | import ScreenCaptureKit
446 |
447 | protocol StreamManagerDelegate: AnyObject {
448 | func streamManager(_ manager: StreamManager, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of type: SCStreamOutputType)
449 | func streamManager(_ manager: StreamManager, didStopWithError error: Error)
450 | }
451 |
452 |
453 |
--------------------------------------------------------------------------------
/rm2000/TapeRecorderState.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import OSLog
3 |
4 | class TapeRecorderState: ObservableObject, TapeRecorderDelegate {
5 | static let shared = TapeRecorderState()
6 | @Published var status: RecordingState = .idle
7 | @Published var currentSampleFilename: String?
8 | @Published var showRenameDialogInMainWindow: Bool = false
9 | @Published var currentActiveRecording: TemporaryActiveRecording?
10 | @Published var elapsedTimeRecording: TimeInterval = 0
11 | @AppStorage("sample_record_audio_format") var sampleRecordAudioFormat: AudioFormat = .mp3
12 | private var timer: Timer?
13 | let recorder = TapeRecorder()
14 |
15 | init() {
16 | recorder.delegate = self
17 | }
18 |
19 | @MainActor
20 | func startRecording() {
21 | Task {
22 | await MainActor.run {
23 | self.status = .recording
24 | }
25 | startTimer()
26 | let newRecording = TemporaryActiveRecording()
27 | currentSampleFilename = newRecording.fileURL.lastPathComponent
28 | self.currentActiveRecording = newRecording
29 | NSApp.dockTile.badgeLabel = "REC"
30 |
31 | await recorder.startRecording(to: newRecording.fileURL)
32 | }
33 | }
34 |
35 | func stopRecording() {
36 | recorder.stopRecording()
37 | timer?.invalidate()
38 | timer = nil
39 | showRenameDialogInMainWindow = true
40 | NSApp.dockTile.badgeLabel = nil
41 | Task {
42 | do {
43 | await AppState.shared.closeHUDWindow() // ensure hud window is closed
44 | // TODO - this is very hacky
45 | }
46 | }
47 | Logger.sharedStreamState.info("showing edit sample sheet")
48 | }
49 |
50 | private func startTimer() {
51 | self.elapsedTimeRecording = 0
52 | timer?.invalidate()
53 | timer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { _ in
54 | self.elapsedTimeRecording += 1
55 | }
56 | }
57 |
58 | func tapeRecorderDidStartRecording(_ recorder: TapeRecorder) {
59 | // This might not be necessary if we set isRecording to true in startRecording
60 | }
61 |
62 | func tapeRecorderDidStopRecording(_ recorder: TapeRecorder) {
63 | Task { @MainActor in
64 | self.status = .idle
65 | }
66 | }
67 |
68 | func tapeRecorder(_ recorder: TapeRecorder, didEncounterError error: Error) {
69 | Task { @MainActor in
70 | self.status = .idle
71 | Logger.sharedStreamState.error("Recording error: \(error.localizedDescription)")
72 | // You might want to update UI or show an alert here
73 | }
74 | }
75 | }
76 |
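A sketch (assumed, not shown in this digest) of how the `recordGlobalShortcut` defined in Shared.swift could toggle this state object; the `registerRecordShortcut` helper is hypothetical:

import KeyboardShortcuts

func registerRecordShortcut(for state: TapeRecorderState) {
    KeyboardShortcuts.onKeyUp(for: .recordGlobalShortcut) {
        Task { @MainActor in
            // toggle between recording and idle on each key press
            if state.status == .recording {
                state.stopRecording()
            } else {
                state.startRecording()
            }
        }
    }
}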
--------------------------------------------------------------------------------
/rm2000/Views/HUD Window/HUDWindowView.swift:
--------------------------------------------------------------------------------
1 |
2 | import SwiftUI
3 | import AppKit
4 | import FluidGradient
5 |
6 | class FloatingWindow: NSWindow {
7 | init(contentRect: NSRect, backing: NSWindow.BackingStoreType = .buffered, defer flag: Bool = false) {
8 | super.init(contentRect: contentRect,
9 | styleMask: [.borderless, .fullSizeContentView],
10 | backing: backing,
11 | defer: flag)
12 |
13 | // Window configuration
14 | self.isOpaque = false
15 | self.backgroundColor = .clear
16 | self.level = .floating
17 | self.collectionBehavior = [.canJoinAllSpaces, .fullScreenAuxiliary]
18 | self.hasShadow = true
19 |
20 | // Create the visual effect view for blurred edges
21 | let visualEffectView = NSVisualEffectView(frame: contentRect)
22 | visualEffectView.blendingMode = .behindWindow
23 | visualEffectView.material = .fullScreenUI
24 | visualEffectView.state = .active
25 | visualEffectView.wantsLayer = true
26 | // visualEffectView.layer?.opacity = 0.8
27 | visualEffectView.layer?.masksToBounds = true
28 |
29 | visualEffectView.maskImage = maskImage(cornerRadius: 20.0)
30 |
31 | // This is the key part - create a mask that makes the center transparent
32 | let maskLayer = CALayer()
33 | maskLayer.frame = visualEffectView.bounds
34 | maskLayer.backgroundColor = NSColor.black.cgColor
35 |
36 | // Create a hole in the center
37 | let centerRect = NSRect(
38 | x: contentRect.width * 0.1,
39 | y: contentRect.height * 0.1,
40 | width: contentRect.width * 1,
41 | height: contentRect.height * 1
42 | )
43 |
44 | let path = CGMutablePath()
45 | path.addRect(visualEffectView.bounds)
46 | path.addRoundedRect(
47 | in: centerRect,
48 | cornerWidth: 10,
49 | cornerHeight: 10
50 | )
51 |
52 | let maskShapeLayer = CAShapeLayer()
53 | maskShapeLayer.path = path
54 | maskShapeLayer.fillRule = .evenOdd
55 |
56 | maskLayer.mask = maskShapeLayer
57 | visualEffectView.layer?.mask = maskLayer
58 |
59 | self.contentView = visualEffectView
60 | }
61 |
62 | override var canBecomeKey: Bool {
63 | return true
64 | }
65 |
66 | // https://eon.codes/blog/2016/01/23/Chromeless-window/
67 | private func maskImage(cornerRadius: CGFloat) -> NSImage {
68 | let edgeLength = 2.0 * cornerRadius + 1.0
69 | let maskImage = NSImage(size: NSSize(width: edgeLength, height: edgeLength), flipped: false) { rect in
70 | let bezierPath = NSBezierPath(roundedRect: rect, xRadius: cornerRadius, yRadius: cornerRadius)
71 | NSColor.black.set()
72 | bezierPath.fill()
73 | return true
74 | }
75 | maskImage.capInsets = NSEdgeInsets(top: cornerRadius, left: cornerRadius, bottom: cornerRadius, right: cornerRadius)
76 | maskImage.resizingMode = .stretch
77 | return maskImage
78 | }
79 | }
80 |
81 | struct FloatingGradientView: View {
82 | @EnvironmentObject private var recordingState: TapeRecorderState
83 | @State private var opacity: Double = 0.0
84 | @State private var isAnimating = true
85 | @State private var showHintText = false
86 |
87 | var body: some View {
88 | ZStack {
89 |
90 | FluidGradient(blobs: [Color(hex: 0xCA7337), Color(hex: 0xd9895d)],
91 | highlights: [ .gray],
92 | speed: 1.0,
93 | blur: 0.70)
94 |
95 | VStack {
96 | HStack(spacing: 90) {
97 |
98 | if recordingState.status == .recording {
99 | LCDTextBigWithGradientHUD(timeString(recordingState.elapsedTimeRecording))
100 | .frame(maxWidth: 150, alignment: .leading)
101 | } else {
102 | LCDTextBigWithGradientHUD("STBY")
103 | .frame(maxWidth: 150, alignment: .leading)
104 | }
105 |
106 | VUMeter()
107 | .mask(LinearGradient(
108 | colors: [Color(hex: 0x220300, alpha: 0.02),
109 | Color(hex: 0x220300)],
110 | startPoint: .bottom,
111 | endPoint: .top
112 | ))
113 | .colorEffect(Shader(function: .init(library: .default, name: "dotMatrix"), arguments: []))
114 | .shadow(color: .black.opacity(0.35), radius: 1, x: 2, y: 4)
115 |
116 | .frame(width: 60, height: 135)
117 | .padding(.leading, -20)
118 | }
119 | Group {
120 | if showHintText {
121 | LCDTextCaptionWithGradient("Press ⌘ + ⌥ + G to stop recording")
122 | .transition(.blurReplace)
123 | }
124 | }
125 | .font(Font.tasaFont)
126 | .animation(.easeInOut, value: showHintText)
127 | }
128 | }
129 | .frame(width: 400, height: 250)
130 | .opacity(opacity)
131 | .onAppear() {
132 | withAnimation(.easeIn(duration: 0.3)) { opacity = 1.0 }
133 | DispatchQueue.main.asyncAfter(deadline: .now() + 4) {
134 | withAnimation {
135 | showHintText = true
136 | }
137 | }
138 | }
139 | .onDisappear() {
140 | withAnimation(.easeIn(duration: 0.3)) { opacity = 0.0 }
141 | }
142 | }
143 | }
144 |
145 | #Preview {
146 | FloatingGradientView()
147 | .environmentObject(TapeRecorderState())
148 | }
149 |
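A sketch of how the floating HUD might be presented from AppKit; the real presentation lives in AppKitWindowManagerDelegate.swift, which is outside this excerpt, so the helper below is an assumption for illustration:

import AppKit
import SwiftUI

@MainActor
func showHUDWindow(with recordingState: TapeRecorderState) -> FloatingWindow {
    let window = FloatingWindow(contentRect: NSRect(x: 0, y: 0, width: 400, height: 250))

    // Host the SwiftUI gradient view inside the borderless floating window.
    let hosting = NSHostingView(
        rootView: FloatingGradientView().environmentObject(recordingState)
    )
    hosting.frame = window.contentView?.bounds ?? .zero
    hosting.autoresizingMask = [.width, .height]
    window.contentView?.addSubview(hosting)

    window.center()
    window.makeKeyAndOrderFront(nil)
    return window
}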
--------------------------------------------------------------------------------
/rm2000/Views/Main Window/Buttons.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | struct UtilityButtons: View {
4 | @Environment(\.openWindow) var openWindow
5 | @Environment(\.openSettingsLegacy) private var openSettingsLegacy
6 | @State private var isPressed = false
7 |
8 | var body: some View {
9 | Button(action: { try? openSettingsLegacy() }) {
10 | Image("SettingsButton")
11 | }
12 | .buttonStyle(AnimatedButtonStyle())
13 |
14 | Button(action: { openWindow(id: "recordings-window") }) {
15 | Image("FolderButton")
16 | .renderingMode(.original)
17 | }.buttonStyle(AnimatedButtonStyle())
18 |
19 | Menu {
20 | Button("None of these work yet!", action: { print("Selected Microphone") }).disabled(true)
21 | Button("Microphone", action: { print("Selected Microphone") })
22 | Button("System Audio", action: { print("Selected System Audio") })
23 | Button("External Device", action: { print("Selected External Device") })
24 | } label: {
25 | Image("SourceButton")
26 | }
27 | .buttonStyle(AnimatedButtonStyle())
28 | }
29 | }
30 |
31 | struct AnimatedButtonStyle: ButtonStyle {
32 | func makeBody(configuration: Configuration) -> some View {
33 | configuration.label
34 | .background(.clear)
35 | .scaleEffect(configuration.isPressed ? 0.94 : 1.0)
36 | .animation(
37 | .spring(response: 0.3, dampingFraction: 0.6),
38 | value: configuration.isPressed)
39 | }
40 | }
41 |
42 | struct StandbyRecordButton: View {
43 | var onPress: () -> Void
44 |
45 | var body: some View {
46 | ZStack {
47 | Image("RecordButtonIndent")
48 | Image("RecordButtonTemp")
49 | Image("RecordButtonGlow")
50 | .resizable()
51 | .frame(width: 200, height: 200)
52 | .allowsHitTesting(false)
53 |
54 | Button(action: onPress) {
55 | Rectangle()
56 | // i can't have opacity(0) on a button, because then that disables it completely
57 | .fill(Color.white.opacity(0.001))
58 | .frame(width: 70, height: 70)
59 | }
60 | .buttonStyle(AnimatedButtonStyle())
61 | }
62 | .frame(height: 80)
63 | }
64 | }
65 |
66 | struct ActiveRecordButton: View {
67 | var onPress: () -> Void
68 |
69 | var body: some View {
70 | ZStack {
71 | Image("RecordButtonIndent")
72 | Image("RecordButtonActiveTemp")
73 | Image("RecordButtonTemp")
74 | .pulseEffect()
75 | Image("RecordButtonGlow")
76 | .resizable()
77 | .frame(width: 200, height: 200)
78 | .pulseEffect()
79 | .allowsHitTesting(false)
80 |
81 |
82 | Button(action: onPress) {
83 | Rectangle()
84 | .fill(Color.white.opacity(0.001)) //stupid hack again
85 | .frame(width: 70, height: 70)
86 | }
87 | .buttonStyle(AnimatedButtonStyle())
88 | }
89 | .frame(height: 80)
90 | }
91 | }
92 |
93 | // https://stackoverflow.com/questions/61778108/swiftui-how-to-pulsate-image-opacity
94 | struct PulseEffect: ViewModifier {
95 | @State private var pulseIsInMaxState: Bool = true
96 | private let range: ClosedRange<Double>
97 | private let duration: TimeInterval
98 |
99 | init(range: ClosedRange<Double>, duration: TimeInterval) {
100 | self.range = range
101 | self.duration = duration
102 | }
103 |
104 | func body(content: Content) -> some View {
105 | content
106 | .opacity(pulseIsInMaxState ? range.upperBound : range.lowerBound)
107 | .onAppear { pulseIsInMaxState.toggle() }
108 | .animation(
109 | .easeInOut(duration: duration).repeatForever(autoreverses: true),
110 | value: pulseIsInMaxState)
111 | }
112 | }
113 |
114 | extension View {
115 | public func pulseEffect(
116 | range: ClosedRange<Double> = 0.1...1, duration: TimeInterval = 1
117 | ) -> some View {
118 | modifier(PulseEffect(range: range, duration: duration))
119 | }
120 | }
121 |
122 |
--------------------------------------------------------------------------------
/rm2000/Views/Main Window/ContentView.swift:
--------------------------------------------------------------------------------
1 | import OSLog
2 | import SwiftUI
3 |
4 | struct ContentView: View {
5 | @Environment(\.openWindow) var openWindow
6 | @EnvironmentObject private var recordingState: TapeRecorderState
7 |
8 | var body: some View {
9 | ZStack {
10 | Image("BodyBackgroundTemp")
11 | .scaledToFill()
12 | .ignoresSafeArea(.all) // extend under the titlebar
13 | VStack(spacing: 10) {
14 | LCDScreenView()
15 | .frame(height: 225)
16 | .padding(.top, -45)
17 |
18 | HStack(spacing: 5) {
19 | UtilityButtons()
20 | }
21 | .padding(.top, -5)
22 |
23 | if recordingState.status == .recording {
24 | ActiveRecordButton(onPress: stopRecording)
25 | } else {
26 | StandbyRecordButton(onPress: startRecording)
27 | }
28 |
29 | }
30 |
31 | .sheet(isPresented: $recordingState.showRenameDialogInMainWindow) {
32 | if let newRecording = recordingState.currentActiveRecording {
33 | EditSampleView(recording: newRecording) { FileRepresentable, SampleMetadata, SampleEditConfiguration in
34 |
35 | // TODO - trainwreck. if i already have to pass in the shared.userdirectory, then this probably belongs in samplestorage itself, not sampledirectory
36 | SampleStorage.shared.UserDirectory.applySampleEdits(to: FileRepresentable, for: SampleMetadata, with: SampleEditConfiguration)
37 | recordingState.showRenameDialogInMainWindow = false
38 | }
39 | .frame(minWidth: 420, maxWidth: 500, minHeight: 320)
40 | .presentationBackground(.thinMaterial)
41 | }
42 | }
43 | }
44 | }
45 |
46 | private func startRecording() {
47 | recordingState.startRecording()
48 | }
49 |
50 | private func stopRecording() {
51 | recordingState.stopRecording()
52 | }
53 |
54 | }
55 |
56 |
57 | #Preview("Main Window") {
58 | ContentView()
59 | .environmentObject(TapeRecorderState())
60 | }
61 |
--------------------------------------------------------------------------------
/rm2000/Views/Main Window/Glyphs/DonutSpinner.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import Foundation
3 |
4 | enum SpinnerGlyphDirection {
5 | case clockwise, counterclockwise
6 | }
7 |
8 | struct DonutSpinner: View {
9 | let direction: SpinnerGlyphDirection
10 | var wedgeCount: Int = 11
11 | var gapAngle: Double = 2
12 | var strokeWidth: CGFloat = 1.0
13 | var innerRadiusRatio: CGFloat = 0.3
14 | let active: Bool
15 |
16 | @State private var activeWedgeIndex: Int = 0
17 |
18 | var body: some View {
19 | GeometryReader { geometry in
20 | let size = min(geometry.size.width, geometry.size.height)
21 | let outerRadius = (size / 2) - (strokeWidth / 2)
22 | let innerRadius = outerRadius * innerRadiusRatio
23 | let baseAngle = 360.0 / Double(wedgeCount)
24 | let offsetAngle = -105.0 // Make wedge 0 appear at the top
25 |
26 | ZStack {
27 | DonutShape(outerRadius: outerRadius, innerRadius: innerRadius)
28 | .stroke(.clear, lineWidth: strokeWidth)
29 |
30 | ForEach(0.. Path {
88 | let center = CGPoint(x: rect.midX, y: rect.midY)
89 |
90 | var path = Path()
91 |
92 | // Outer circle
93 | path.addArc(center: center,
94 | radius: outerRadius,
95 | startAngle: .degrees(0),
96 | endAngle: .degrees(360),
97 | clockwise: false)
98 |
99 | // Inner circle (hole)
100 | path.addArc(center: center,
101 | radius: innerRadius,
102 | startAngle: .degrees(0),
103 | endAngle: .degrees(360),
104 | clockwise: true)
105 |
106 | return path
107 | }
108 | }
109 |
110 | struct DonutWedgeShape: Shape {
111 | var outerRadius: CGFloat
112 | var innerRadius: CGFloat
113 | var startAngle: Angle
114 | var endAngle: Angle
115 |
116 | func path(in rect: CGRect) -> Path {
117 | let center = CGPoint(x: rect.midX, y: rect.midY)
118 |
119 | var path = Path()
120 |
121 | // Line from inner start to outer start
122 | path.move(to: CGPoint(
123 | x: center.x + innerRadius * CGFloat(cos(startAngle.radians)),
124 | y: center.y + innerRadius * CGFloat(sin(startAngle.radians))
125 | ))
126 |
127 | path.addLine(to: CGPoint(
128 | x: center.x + outerRadius * CGFloat(cos(startAngle.radians)),
129 | y: center.y + outerRadius * CGFloat(sin(startAngle.radians))
130 | ))
131 |
132 | // Outer arc
133 | path.addArc(
134 | center: center,
135 | radius: outerRadius,
136 | startAngle: startAngle,
137 | endAngle: endAngle,
138 | clockwise: false
139 | )
140 |
141 | // Line from outer end to inner end
142 | path.addLine(to: CGPoint(
143 | x: center.x + innerRadius * CGFloat(cos(endAngle.radians)),
144 | y: center.y + innerRadius * CGFloat(sin(endAngle.radians))
145 | ))
146 |
147 | // Inner arc
148 | path.addArc(
149 | center: center,
150 | radius: innerRadius,
151 | startAngle: endAngle,
152 | endAngle: startAngle,
153 | clockwise: true
154 | )
155 |
156 | path.closeSubpath()
157 |
158 | return path
159 | }
160 | }
161 |
162 | #Preview("Donut Spinner") {
163 | // DonutSpinner(direction: .clockwise)
164 | }
165 | #Preview("LCD Screen") {
166 | LCDScreenView()
167 | .environmentObject(TapeRecorderState())
168 | }
169 |
170 |
--------------------------------------------------------------------------------
/rm2000/Views/Main Window/Glyphs/ErrorGlyph.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ErrorGlyph.swift
3 | // rm2000
4 | //
5 | // Created by Marcelo Mendez on 4/19/25.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ErrorGlyph: View {
11 | var body: some View {
12 | Image(systemName: "exclamationmark.triangle")
13 | .fontWeight(.black)
14 | .foregroundColor(Color("LCDTextColor").opacity(0.25))
15 | }
16 | }
17 |
18 | #Preview {
19 | ErrorGlyph()
20 | }
21 |
--------------------------------------------------------------------------------
/rm2000/Views/Main Window/Glyphs/RecordingGlyph.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | // CLAUDE SUCKS
4 | struct SmoothPulseEffect: ViewModifier {
5 | let active: Bool
6 | @State private var opacity: Double = 1.0
7 | @State private var isAnimating: Bool = false
8 | private let minOpacity: Double
9 | private let maxOpacity: Double
10 | private let duration: TimeInterval
11 |
12 | init(active: Bool, range: ClosedRange<Double> = 0.2...1.0, duration: TimeInterval = 0.8) {
13 | self.active = active
14 | self.minOpacity = range.lowerBound
15 | self.maxOpacity = range.upperBound
16 | self.duration = duration
17 | }
18 |
19 | func body(content: Content) -> some View {
20 | content
21 | .opacity(opacity)
22 | .onChange(of: active) { wasActive, isActive in
23 | if isActive && !wasActive {
24 | startPulsing()
25 | } else if !isActive && wasActive {
26 | stopPulsing()
27 | }
28 | }
29 | .onAppear {
30 | opacity = active ? maxOpacity : minOpacity
31 | if active {
32 | startPulsing()
33 | }
34 | }
35 | }
36 |
37 | private func startPulsing() {
38 | isAnimating = true
39 | withAnimation(.easeIn(duration: 0.3)) {
40 | opacity = maxOpacity
41 | }
42 | withAnimation(.easeInOut(duration: duration).repeatForever(autoreverses: true)) {
43 | opacity = minOpacity
44 | }
45 | }
46 |
47 | private func stopPulsing() {
48 | isAnimating = false
49 | // smoothly transition to the idle opacity
50 | withAnimation(.easeOut(duration: 0.3)) {
51 | opacity = minOpacity
52 | }
53 | }
54 | }
55 |
56 | extension View {
57 | func smoothPulseEffect(
58 | active: Bool,
59 | range: ClosedRange<Double> = 0.2...1.0,
60 | duration: TimeInterval = 0.8
61 | ) -> some View {
62 | modifier(SmoothPulseEffect(active: active, range: range, duration: duration))
63 | }
64 | }
65 |
66 | struct RecordingGlyph: View {
67 | @EnvironmentObject private var recordingState: TapeRecorderState
68 |
69 | var body: some View {
70 | Image(systemName: "recordingtape")
71 | .rotationEffect(.degrees(180))
72 | .fontWeight(.black)
73 | .foregroundColor(Color("LCDTextColor"))
74 | .smoothPulseEffect(
75 | active: recordingState.status == .busy,
76 | range: recordingState.status == .busy ? 0.2...0.9 : 0.25...0.25)
77 | }
78 | }
79 |
80 | #Preview {
81 | RecordingGlyph()
82 | }
83 |
--------------------------------------------------------------------------------
/rm2000/Views/Main Window/Glyphs/SourceGlyph.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SourceGlyph.swift
3 | // rm2000
4 | //
5 | // Created by Marcelo Mendez on 4/19/25.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct SourceGlyph: View {
11 | var body: some View {
12 | Image(systemName: "desktopcomputer")
13 | .fontWeight(.black)
14 | .foregroundColor(Color("LCDTextColor").opacity(0.25))
15 | }
16 | }
17 |
18 | #Preview {
19 | SourceGlyph()
20 | }
21 |
--------------------------------------------------------------------------------
/rm2000/Views/Main Window/Glyphs/VUMeterView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | struct VUMeter: View {
4 |
5 | @State private var volumeLevel: Float = 0.0
6 |
7 | var body: some View {
8 | GeometryReader { geometry in
9 | ZStack(alignment: .bottom) {
10 |
11 | // level bar at the back of the ZStack, scaled by the current volume
12 | Rectangle()
13 | .fill(Color("LCDTextColor"))
14 | .frame(height: geometry.size.height * CGFloat(volumeLevel))
15 | .animation(.easeOut(duration: 0.05), value: volumeLevel)
16 |
17 | // dimmed overlay that reads as the idle meter blocks
18 | Rectangle()
19 | .fill(Color.black.opacity(0.2))
20 | }
21 | .padding(geometry.size.width * 0.2)
22 | .onReceive(NotificationCenter.default.publisher(for: .audioLevelUpdated)) { notification in
23 | if let level = notification.userInfo?["level"] as? Float {
24 | volumeLevel = level
25 | } else {
26 | volumeLevel = 0.0
27 | }
28 | }
29 | }
30 | }
31 | }
32 |
33 | #Preview {
34 | VUMeter()
35 | }
36 |
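37 | // Producer-side sketch for reference: VUMeter only consumes the `.audioLevelUpdated`
38 | // notification above, reading a Float under the "level" key and treating it as a
39 | // fraction of the meter height. The helper below is illustrative and not part of the
40 | // recording pipeline; the real posting site presumably lives in the tape mechanism code.
41 | func postAudioLevelUpdate(_ level: Float) {
42 | NotificationCenter.default.post(
43 | name: .audioLevelUpdated,
44 | object: nil,
45 | userInfo: ["level": level])
46 | }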
--------------------------------------------------------------------------------
/rm2000/Views/Main Window/LCDScreenView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | struct LCDScreenView: View {
4 | @EnvironmentObject private var recordingState: TapeRecorderState
5 |
6 | var body: some View {
7 | ZStack {
8 |
9 | if recordingState.status == .recording {
10 | Image("LCDScreenFrameRecording")
11 | .resizable()
12 | .scaledToFit()
13 | .frame(width: 300)
14 | .offset(x: 0, y: 0)
15 | } else {
16 | Image("LCDScreenFrameInactive")
17 | .resizable()
18 | .scaledToFit()
19 | .frame(width: 300)
20 | .offset(x: 0, y: 0)
21 | }
22 | LCDSymbolGlyphs()
23 |
24 | Image("LCDOuterGlow")
25 | .resizable()
26 | .frame(width: 330)
27 | }
28 | }
29 | }
30 |
31 | struct LCDSymbolGlyphs: View {
32 | @EnvironmentObject private var recordingState: TapeRecorderState
33 |
34 | var body: some View {
35 | HStack(alignment: .center) {
36 | VStack(alignment: .leading) {
37 | HStack { // top half
38 | VStack(alignment: .leading, spacing: 4) {
39 | LCDTextCaptionWithGradient("STEREO 44.1kHz")
40 |
41 | HStack(spacing: 6) {
42 |
43 | // TODO: give DonutSpinner its own @EnvironmentObject of recordingState instead of toggling it here
44 | if recordingState.status == .recording {
45 | DonutSpinner(direction: .counterclockwise, active: true)
46 | DonutSpinner(direction: .clockwise, active: true)
47 | } else {
48 | DonutSpinner(direction: .counterclockwise, active: false)
49 | DonutSpinner(direction: .clockwise, active: false)
50 | }
51 |
52 | RecordingGlyph()
53 | SourceGlyph()
54 | ErrorGlyph()
55 | }
56 | }.frame(width: 125, height: 40)
57 | .padding(.trailing, -20)
58 | }
59 |
60 | VStack(alignment: .leading) {
61 | LCDTextBig(recordingState.sampleRecordAudioFormat.asString.uppercased())
62 |
63 | if recordingState.status == .recording {
64 | LCDTextBigWithGradient(timeString(recordingState.elapsedTimeRecording))
65 | .frame(maxWidth: 150, alignment: .leading)
66 | } else {
67 | LCDTextBigWithGradient("STBY")
68 | .frame(maxWidth: 150, alignment: .leading)
69 | }
70 | }.padding(.leading, 3)
71 | }
72 |
73 | VUMeter()
74 | .mask(LinearGradient(
75 | colors: [Color(hex: 0x220300, alpha: 0.02),
76 | Color(hex: 0x220300)],
77 | startPoint: .bottom,
78 | endPoint: .top
79 | ))
80 | .colorEffect(Shader(function: .init(library: .default, name: "dotMatrix"), arguments: []))
81 | .shadow(color: .black.opacity(0.35), radius: 1, x: 2, y: 4)
82 |
83 | .frame(width: 60, height: 155)
84 | .padding(.leading, -20)
85 | // TODO: spacing is too tight here
86 | } .frame(width: 200, height: 168)
87 | }
88 | }
89 |
90 | struct LCDTextStyle: ViewModifier {
91 | func body(content: Content) -> some View {
92 | // passthrough for now; shared LCD text styling can live here later
93 | content
94 | }
95 | }
96 |
97 | extension Font {
98 | static let tachyoFont = Font.custom("Tachyo", size: 41)
99 | static let tachyoFontBig = Font.custom("Tachyo", size: 61)
100 | static let tasaFont = Font.custom("TASAExplorer-SemiBold", size: 14)
101 | }
102 |
103 | extension View {
104 | func LCDText() -> some View {
105 | modifier(LCDTextStyle())
106 | }
107 | }
108 |
109 | struct LCDTextCaption: View {
110 | var title: String
111 |
112 | init(_ title: String) {
113 | self.title = title
114 | }
115 |
116 | var body: some View {
117 | Text(title)
118 | .foregroundColor(Color("LCDTextColor"))
119 | .shadow(color: .black.opacity(0.25), radius: 1, x: 0, y: 4)
120 | .font(Font.tasaFont)
121 | }
122 | }
123 |
124 | struct LCDTextCaptionWithGradient: View {
125 | var title: String
126 |
127 | init(_ title: String) {
128 | self.title = title
129 | }
130 |
131 | var body: some View {
132 | Text(title)
133 | .foregroundStyle(LinearGradient(
134 | colors: [Color(hex: 0x220300, alpha: 0.32),
135 | Color(hex: 0x220300)],
136 | startPoint: .top,
137 | endPoint: .bottom
138 | ))
139 | .shadow(color: .black.opacity(0.25), radius: 1, x: 0, y: 4)
140 | .font(Font.tasaFont)
141 | }
142 | }
143 |
144 | struct LCDTextBig: View {
145 | var title: String
146 |
147 | init(_ title: String) {
148 | self.title = title
149 | }
150 |
151 | var body: some View {
152 | Text(" \(title) ")
153 | .foregroundColor(Color("LCDTextColor"))
154 | .shadow(color: .black.opacity(0.25), radius: 1, x: 0, y: 4)
155 | .font(Font.tachyoFont)
156 | .fontWeight(.medium)
157 | .fixedSize()
158 | .offset(x: -15)
159 | .kerning(-1.5)
160 | }
161 | }
162 |
163 | struct LCDTextBigWithGradient: View {
164 | var title: String
165 |
166 | init(_ title: String) {
167 | self.title = title
168 | }
169 |
170 | var body: some View {
171 | Text(" \(title) ")
172 | .foregroundStyle(LinearGradient(
173 | colors: [Color(hex: 0x220300, alpha: 0.32),
174 | Color(hex: 0x220300)],
175 | startPoint: .bottom,
176 | endPoint: .top
177 | ))
178 | .shadow(color: .black.opacity(0.25), radius: 1, x: 0, y: 4)
179 | .font(Font.tachyoFont)
180 | .fontWeight(.medium)
181 | .fixedSize()
182 | .offset(x: -15)
183 | .kerning(-1.5)
184 | }
185 | }
186 |
187 | struct LCDTextBigWithGradientHUD: View {
188 | var title: String
189 |
190 | init(_ title: String) {
191 | self.title = title
192 | }
193 |
194 | var body: some View {
195 | Text(" \(title) ")
196 | .foregroundStyle(LinearGradient(
197 | colors: [Color(hex: 0x220300, alpha: 0.32),
198 | Color(hex: 0x220300)],
199 | startPoint: .bottom,
200 | endPoint: .top
201 | ))
202 | .shadow(color: .black.opacity(0.25), radius: 1, x: 0, y: 4)
203 | .font(Font.tachyoFontBig)
204 | .fontWeight(.medium)
205 | .fixedSize()
206 | .offset(x: -15)
207 | .kerning(-1.5)
208 | }
209 | }
210 |
211 | #Preview("LCD Screen") {
212 | LCDScreenView()
213 | .environmentObject(TapeRecorderState())
214 | }
215 |
216 | #Preview("LCD Symbols") {
217 | LCDSymbolGlyphs()
218 | .environmentObject(TapeRecorderState())
219 | .border(.black)
220 | .padding()
221 | }
222 |
--------------------------------------------------------------------------------
/rm2000/Views/Main Window/Shaders.metal:
--------------------------------------------------------------------------------
1 | // https://stackoverflow.com/a/79176991
2 | #include <metal_stdlib>
3 | using namespace metal;
4 |
5 | [[ stitchable ]] half4 dotMatrix(float2 position, half4 color) {
6 | if (int(position.x) % 2 < 2 && int(position.y) % 4 < 2) { // x term is always true (n % 2 is at most 1), so this keeps two of every four rows
7 | return color;
8 | } else {
9 | return half4(0, 0, 0, 0);
10 | }
11 | }
12 |
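13 | // For context: SwiftUI's `.colorEffect` expects a [[ stitchable ]] function taking the
14 | // pixel position (float2, user space) and the current color (half4) and returning the
15 | // replacement color, which is the signature above. LCDScreenView applies it via
16 | // .colorEffect(Shader(function: .init(library: .default, name: "dotMatrix"), arguments: [])).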
--------------------------------------------------------------------------------
/rm2000/Views/Main Window/TitleBar.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftUI
3 | import AppKit
4 |
5 | class SkeuromorphicWindow: NSWindow {
6 | override init(contentRect: NSRect, styleMask style: NSWindow.StyleMask, backing backingStoreType: NSWindow.BackingStoreType, defer flag: Bool) {
7 | super.init(contentRect: contentRect, styleMask: style, backing: backingStoreType, defer: flag)
8 |
9 | // basic window customizations
10 | self.titlebarAppearsTransparent = true
11 | self.titleVisibility = .visible
12 |
13 | self.backgroundColor = .windowBackgroundColor
14 | self.isMovableByWindowBackground = true
15 |
16 | let toolbar = NSToolbar(identifier: "MainToolbar")
17 | self.toolbar = toolbar
18 | self.toolbarStyle = .unified
19 | self.toolbar?.showsBaselineSeparator = false
20 |
21 | if let zoomButton = standardWindowButton(.zoomButton) {
22 | zoomButton.isHidden = true
23 | }
24 |
25 | drawMicrophoneGrille()
26 | }
27 |
28 | private func drawMicrophoneGrille() {
29 |
30 | // skeuomorphism.
31 |
32 | let isDarkMode = self.effectiveAppearance.bestMatch(from: [.darkAqua, .aqua]) == .darkAqua
33 |
34 | let imageName = isDarkMode ? "MicGrilleDark" : "MicGrilleTemp"
35 | let imageView = NSImageView(frame: NSRect(x: -70, y: -14, width: 140, height: 28))
36 |
37 | if let image = NSImage(named: imageName) {
38 | image.size = NSSize(width: 130, height: 19)
39 | imageView.image = image
40 | imageView.setAccessibilityElement(false)
41 | imageView.setAccessibilityHidden(true)
42 | }
43 |
44 | let customView = NSView(frame: NSRect(x: 0, y: 30, width: 30, height: 20))
45 | customView.addSubview(imageView)
46 | customView.setAccessibilityElement(false)
47 | customView.setAccessibilityHidden(true)
48 |
49 | if let titlebarController = self.standardWindowButton(.closeButton)?.superview?.superview {
50 | titlebarController.addSubview(customView)
51 |
52 | customView.translatesAutoresizingMaskIntoConstraints = false
53 | NSLayoutConstraint.activate([
54 | customView.centerYAnchor.constraint(equalTo: titlebarController.centerYAnchor),
55 | customView.centerXAnchor.constraint(equalTo: titlebarController.centerXAnchor)
56 | ])
57 | }
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/rm2000/Views/Menu Bar/MenuBar.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftUI
3 | import OSLog
4 |
5 | struct MenuBarView: View {
6 | @EnvironmentObject private var recordingState: TapeRecorderState
7 | @EnvironmentObject private var sampleStorage: SampleStorage
8 | @Environment(\.openWindow) private var openWindow
9 |
10 | private var appDelegate = AppKitWindowManagerDelegate()
11 |
12 | var body: some View {
13 | VStack(spacing: 12) {
14 | // Header
15 | HStack {
16 | Text("RM2000 Tape Recorder")
17 | .font(.system(.headline))
18 | .fontWeight(.bold)
19 | Text("Beta")
20 | .font(.caption)
21 | .foregroundColor(.secondary)
22 | .padding(.horizontal, 4)
23 | .background(
24 | RoundedRectangle(cornerRadius: 4)
25 | .fill(Color.secondary.opacity(0.2))
26 | )
27 | }
28 | .padding(.top, 5)
29 |
30 | VStack() {
31 |
32 | Button(action: {
33 | if recordingState.status == .recording {
34 | recordingState.stopRecording()
35 | } else {
36 | recordingState.startRecording()
37 | }
38 | }) {
39 | HStack {
40 | Image(systemName: recordingState.status == .recording ? "stop.circle" : "record.circle")
41 | .contentTransition(.symbolEffect)
42 | .foregroundColor(recordingState.status == .recording ? .red.opacity(0.70) : .red)
43 | Text(recordingState.status == .recording ? "Stop Recording" : "Start Recording")
44 | .fontWeight(.medium)
45 | Spacer()
46 |
47 | if recordingState.status == .recording {
48 | ElapsedTime(textString: $recordingState.elapsedTimeRecording)
49 | .font(.system(.footnote, design: .monospaced))
50 | .padding(.horizontal, 6)
51 | .background(Color.secondary.opacity(0.15))
52 | .cornerRadius(4)
53 | }
54 | }
55 | .contentShape(Rectangle())
56 | }
57 | }
58 | .buttonStyle(MenuButtonStyle())
59 | .padding(.vertical, 3)
60 | .padding(.horizontal, 8)
61 | .background(
62 | RoundedRectangle(cornerRadius: 6)
63 | .fill(Color.secondary.opacity(0.07))
64 | )
65 |
66 | Divider()
67 |
68 | VStack() {
69 | Button(action: {
70 | appDelegate.showMainWindow()
71 | }) {
72 | HStack {
73 | Image(systemName: "macwindow")
74 | Text("Open Main Window...")
75 | .fontWeight(.medium)
76 | Spacer()
77 | }
78 | .contentShape(Rectangle())
79 | }
80 | .buttonStyle(MenuButtonStyle())
81 |
82 | Button(action: {
83 | openWindow(id: "recordings-window")
84 | }) {
85 | HStack {
86 | Image(systemName: "rectangle.split.3x1")
87 | Text("Open Sample Library...")
88 | .fontWeight(.medium)
89 | Spacer()
90 | }
91 | .contentShape(Rectangle())
92 | }
93 | .buttonStyle(MenuButtonStyle())
94 | }
95 |
96 | Divider()
97 |
98 | // Footer
99 | Button(action: {
100 | NSApplication.shared.terminate(nil)
101 | }) {
102 | HStack {
103 | Image(systemName: "power")
104 | .foregroundColor(.red.opacity(0.8))
105 | .fontWeight(.bold)
106 |
107 | Text("Quit RM2000")
108 | .fontWeight(.medium)
109 | Spacer()
110 | Text("⌘Q")
111 | .font(.caption2)
112 | .foregroundColor(.secondary)
113 | }
114 | .contentShape(Rectangle())
115 | }
116 | .buttonStyle(MenuButtonStyle())
117 | .keyboardShortcut("q")
118 | .padding(.bottom, 5)
119 | }
120 | .padding(.horizontal, 16)
121 | .padding(.vertical, 8)
122 | .frame(width: 240)
123 | }
124 | }
125 |
126 | struct MenuButtonStyle: ButtonStyle {
127 | func makeBody(configuration: Configuration) -> some View {
128 | configuration.label
129 | .padding(.vertical, 2)
130 | .padding(.horizontal, 8)
131 | .background(
132 | RoundedRectangle(cornerRadius: 6)
133 | .fill(configuration.isPressed ?
134 | Color.accentColor.opacity(0.15) :
135 | Color.clear)
136 | )
137 | }
138 | }
139 |
140 | struct ElapsedTime: View {
141 | @Binding var textString: TimeInterval
142 |
143 | var body: some View {
144 | Text(timeString(textString))
145 | }
146 | }
147 |
148 | #Preview {
149 | MenuBarView()
150 | .environmentObject(TapeRecorderState())
151 | .environmentObject(SampleStorage())
152 | }
153 |
--------------------------------------------------------------------------------
/rm2000/Views/Onboarding/OnboardingView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import UserNotifications
3 | import OSLog
4 |
5 |
6 | enum OnboardingStep {
7 | case welcome
8 | case settings
9 | case complete
10 | }
11 |
12 | class OnboardingViewModel: ObservableObject {
13 | @Published var currentStep: OnboardingStep = .welcome
14 | }
15 |
16 | struct FinalOnboardingCompleteView: View {
17 | @Environment(\.dismiss) var dismiss
18 | @ObservedObject var viewModel: OnboardingViewModel
19 | @EnvironmentObject var appState: AppState
20 |
21 | var body: some View {
22 | Text("Complete!")
23 |
24 | Text("App will now close. Please restart")
25 | HStack {
26 | Button("Back") {
27 | viewModel.currentStep = .settings
28 | }
29 |
30 | Button("Finish") {
31 | appState.hasCompletedOnboarding = true
32 | /*
33 | This has to stay AppKit-compatible, since the main window
34 | uses an AppKit-based lifecycle.
35 | */
36 | print("closing")
37 | exit(0)
38 | }
39 | .buttonStyle(.borderedProminent)
40 | }
41 | }
42 | }
43 |
44 |
45 | struct SettingsStepView: View {
46 |
47 | private let streamManager = SCStreamManager()
48 |
49 | @ObservedObject var viewModel: OnboardingViewModel
50 | @EnvironmentObject var appState: AppState
51 |
52 | @State private var showFileChooser: Bool = false
53 |
54 | var body: some View {
55 | Text("Set directory for all samples to get saved in")
56 | HStack {
57 | TextField("Set RM2000 Sample Directory", text: Binding(
58 | get: { appState.sampleDirectory?.path ?? "" },
59 | set: { appState.sampleDirectory = URL(fileURLWithPath: $0) }
60 | ))
61 | Button("Browse") {
62 | showFileChooser = true
63 | }
64 | .fileImporter(isPresented: $showFileChooser, allowedContentTypes: [.directory]) { result in
65 | switch result {
66 | case .success(let directory):
67 |
68 | // get security scoped bookmark
69 | guard directory.startAccessingSecurityScopedResource() else {
70 | Logger.appState.error("Could not get security scoped to the directory \(directory)")
71 | return
72 | }
73 | appState.sampleDirectory = directory
74 | Logger.viewModels.info("Set new sampleDirectory as \(directory)")
75 | case .failure(let error):
76 | Logger.viewModels.error("Could not set sampleDirectory: \(error)")
77 | }
78 | }
79 | }
80 | HStack {
81 | Button("Back") {
82 | viewModel.currentStep = .welcome
83 | }
84 |
85 | Button("Next") {
86 | viewModel.currentStep = .complete
87 | print(appState.sampleDirectory?.path ?? "No directory set")
88 | }
89 | .buttonStyle(.borderedProminent)
90 | }
91 | }
92 |
93 | private func invokeRecordingPermission() async {
94 | do {
95 | try await streamManager.setupAudioStream()
96 | }
97 | catch {
98 | Logger.viewModels.error("Recording permission declined")
99 |
100 | // https://stackoverflow.com/a/78740238
101 | // i seriously have to use NSAlert for this?
102 |
103 | let alert = showPermissionAlert()
104 | if alert.runModal() == .alertFirstButtonReturn {
105 | NSWorkspace.shared.open(URL(string: "x-apple.systempreferences:com.apple.preference.security?Privacy_ScreenCapture")!)
106 | }
107 | }
108 | }
109 |
110 | private func showPermissionAlert() -> NSAlert {
111 | let alert = NSAlert()
112 | alert.messageText = "Permission Request"
113 | alert.alertStyle = .informational
114 | alert.informativeText = "RM2000 requires permission to record the screen in order to grab system audio."
115 | alert.addButton(withTitle: "Open System Settings")
116 | alert.addButton(withTitle: "Quit")
117 | return alert
118 | }
119 | }
120 |
121 | struct WelcomeView:View {
122 |
123 | @ObservedObject var viewModel: OnboardingViewModel
124 | var body: some View {
125 | VStack {
126 | Image(nsImage: NSApp.applicationIconImage)
127 | Text("Welcome to RM2000")
128 | .font(.title)
129 | }
130 | Text("This build is considered ")
131 | + Text("incredibly fragile")
132 | .foregroundColor(.red)
133 |
134 | Text("Consider all the samples you record with this app as ephemeral")
135 |
136 | Text("More stable builds will follow in the next weeks")
137 | HStack {
138 | Button("Next") {
139 | viewModel.currentStep = .settings
140 | }
141 | .buttonStyle(.borderedProminent)
142 | }
143 | }
144 | }
145 |
146 | struct OnboardingView: View {
147 |
148 | @EnvironmentObject var appState: AppState
149 | @ObservedObject var viewModel: OnboardingViewModel
150 |
151 | var body: some View {
152 | VStack(spacing: 20) {
153 |
154 | switch viewModel.currentStep {
155 | case .welcome:
156 | WelcomeView(viewModel: viewModel)
157 | case .settings:
158 | SettingsStepView(viewModel: viewModel)
159 | case .complete:
160 | FinalOnboardingCompleteView(viewModel: viewModel)
161 | }
162 | }
163 | .frame(minWidth: 500, minHeight: 500)
164 | .padding()
165 |
166 | }
167 | }
168 |
169 | #Preview {
170 | OnboardingView(viewModel: OnboardingViewModel())
171 | .environmentObject(AppState.shared) // Ensure AppState is injected
172 | }
173 |
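174 | // Sketch: the "get security scoped bookmark" comment above only calls
175 | // startAccessingSecurityScopedResource(), which lasts for the current launch.
176 | // Persisting an actual bookmark would look roughly like this; the UserDefaults key
177 | // and helper name are assumptions, not existing API in AppState.
178 | private func persistSampleDirectoryBookmark(_ directory: URL) {
179 | do {
180 | let bookmark = try directory.bookmarkData(
181 | options: .withSecurityScope,
182 | includingResourceValuesForKeys: nil,
183 | relativeTo: nil)
184 | UserDefaults.standard.set(bookmark, forKey: "sampleDirectoryBookmark")
185 | } catch {
186 | Logger.appState.error("Could not create a security-scoped bookmark for \(directory): \(error)")
187 | }
188 | }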
--------------------------------------------------------------------------------
/rm2000/Views/Sample Editing/EditSampleView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import Combine
3 | import CoreMedia
4 |
5 | struct EditSampleView<Model: FileRepresentable>: View {
6 |
7 | let model: Model
8 | @State private var title: String
9 | @State private var tags: Set<String>
10 | @State private var description: String?
11 | @State private var forwardEndTime: CMTime? = nil
12 | @State private var reverseEndTime: CMTime? = nil
13 | @State private var sampleExists: Bool = false
14 | @State private var didErrorForOverride: Bool = false
15 | @State private var didErrorForCancel: Bool = false
16 | @Environment(\.dismiss) private var dismiss
17 | @FocusState private var focusedField: Bool
18 |
19 | private let onComplete: (FileRepresentable, SampleMetadata, SampleEditConfiguration) -> Void
20 |
21 | init(recording: Model, onComplete: @escaping (FileRepresentable, SampleMetadata, SampleEditConfiguration) -> Void) {
22 | self.onComplete = onComplete
23 | _title = State(initialValue: "")
24 | _tags = State(initialValue: Set())
25 | _description = State(initialValue: "")
26 | self.model = recording
27 | }
28 |
29 | var body: some View {
30 | ScrollView {
31 | VStack(alignment: .leading, spacing: 12) {
32 | Text("Rename Recording")
33 | .font(.headline)
34 | TrimmingPlayerView(
35 | recording: model,
36 | forwardEndTime: $forwardEndTime,
37 | reverseEndTime: $reverseEndTime)
38 |
39 | VStack(alignment: .leading, spacing: 4) {
40 |
41 | Text("Title")
42 | .font(.caption)
43 | .foregroundColor(.secondary)
44 |
45 | TextField("New Filename", text: $title)
46 | .textFieldStyle(RoundedBorderTextFieldStyle())
47 | .autocorrectionDisabled()
48 | .focused($focusedField)
49 | .onAppear {
50 | focusedField = true
51 | }
52 | .onChange(of: title) { formattedText in
53 | title = formattedText.replacingOccurrences(of: "-", with: " ")
54 | sampleExists = doesSampleAlreadyExist()
55 | }
56 | }
57 |
58 | VStack(alignment: .leading, spacing: 4) {
59 | Text("Tags (comma-separated)")
60 | .font(.caption)
61 | .foregroundColor(.secondary)
62 | TokenInputField(tags: $tags)
63 |
64 | .onChange(of: tags) { newValue in
65 | let forbiddenChars = CharacterSet(charactersIn: "_-/:*?\"<>|,;[]{}'&\t\n\r")
66 | tags = Set(newValue.map { tag in
67 | String(tag.unicodeScalars.filter { !forbiddenChars.contains($0) })
68 | })
69 | sampleExists = doesSampleAlreadyExist()
70 | }
71 |
72 | }
73 | DisclosureGroup("Additional Settings") {
74 | VStack(alignment: .leading, spacing: 4) {
75 | Text("Description (optional)")
76 | .font(.caption)
77 | .foregroundColor(.secondary)
78 | TextEditor(text: .constant("Placeholder"))
79 | .font(.system(size: 14, weight: .medium, design: .rounded)) // Uses a rounded, medium-weight system font
80 | .lineSpacing(10) // Sets the line spacing to 10 points
81 | .border(Color.gray, width: 1)
82 | }.padding(.top, 8)
83 | }
84 | VStack(alignment: .leading, spacing: 4) {
85 | Text("Preview:")
86 | .font(.caption)
87 | .foregroundColor(.secondary)
88 | PreviewFilenameView(title: $title, tags: $tags)
89 | }
90 | .padding(.top, 8)
91 |
92 | HStack {
93 | Button("Cancel", role: .cancel) {
94 | didErrorForCancel = true
95 | }.keyboardShortcut(.cancelAction)
96 |
97 | Spacer()
98 |
99 | if sampleExists {
100 | HStack {
101 | Label("Sample with same title and tags already exists", systemImage: "exclamationmark.triangle")
102 | .id(sampleExists)
103 | .foregroundColor(.red)
104 | .contentTransition(.opacity)
105 | .font(.caption)
106 | }
107 | }
108 |
109 | Button("Save Sample") {
110 | if (title.isEmpty && tags.isEmpty) {
111 | NSSound.beep()
112 | } else {
113 | if (sampleExists) {
114 | didErrorForOverride = true
115 | } else {
116 | gatherAndComplete()
117 | }
118 | }
119 | }
120 | .buttonStyle(.borderedProminent)
121 | .padding(.top, 8)
122 | }.keyboardShortcut(.defaultAction)
123 | }
124 | .padding()
125 | }
126 | .alert("Replace existing sample?", isPresented: $didErrorForOverride) {
127 | Button("Replace", role: .destructive) {
128 | gatherAndComplete()
129 | }
130 | Button("Cancel", role: .cancel) { }
131 | } message: {
132 | Text("Another sample with identical title and tags already exists.")
133 | }
134 | .alert("Cancel Editing?", isPresented: $didErrorForCancel) {
135 | Button("Confirm") {
136 | dismiss()
137 | }
138 | } message: {
139 | Text("This recording will be lost once the app is quit.")
140 | }
141 | }
142 |
143 | private func gatherAndComplete() {
144 | var configuration = SampleEditConfiguration()
145 | configuration.directoryDestination = SampleStorage.shared.UserDirectory
146 | configuration.forwardEndTime = forwardEndTime
147 | configuration.reverseEndTime = reverseEndTime
148 |
149 | var metadata = SampleMetadata()
150 | metadata.title = title
151 | metadata.tags = tags
152 | let createdSample = Sample(fileURL: model.fileURL, metadata: metadata)
153 | onComplete(createdSample, metadata, configuration)
154 | }
155 |
156 | @MainActor private func doesSampleAlreadyExist() -> Bool {
157 | for sample in SampleStorage.shared.UserDirectory.samplesInStorage {
158 | if sample.metadata.title == title && sample.metadata.tags == tags {
159 | return true
160 | }
161 | }
162 | return false
163 | }
164 | }
165 |
166 | struct TokenInputField: View {
167 |
168 | @Binding var tags: Set<String>
169 | let suggestions = SampleStorage.shared.UserDirectory.indexedTags
170 |
171 | var body: some View {
172 | TokenField(.init(get: { Array(tags) }, set: { tags = Set($0) })) // NSTokenField works with an ordered [String], so bridge the Set both ways
173 | .completions([String](suggestions))
174 | }
175 | }
176 |
177 | #Preview {
178 | let testFile = URL(fileURLWithPath: "/Users/marceloexc/Developer/replica/rm2000Tests/Example--sample.aac")
179 | let recording = TemporaryActiveRecording(fileURL: testFile)
180 | return EditSampleView(recording: recording) { _, _, _ in
181 | // Empty completion handler
182 | }
183 | }
184 |
--------------------------------------------------------------------------------
/rm2000/Views/Sample Editing/PreviewFilenameView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | struct PreviewFilenameView: View {
4 | @State var previewFilename: String = ""
5 | @State private var sortedTagsArray: [String] = []
6 | @Binding var title: String
7 | @Binding var tags: Set<String>
8 | let audioFormat = TapeRecorderState.shared.sampleRecordAudioFormat.asString
9 |
10 | var body: some View {
11 | Text(generatePreviewFilename())
12 | .font(.system(size: 12, weight: .regular, design: .monospaced))
13 | .foregroundColor(Color(red: 1, green: 0.6, blue: 0))
14 | .padding(4)
15 | .frame(maxWidth: .infinity)
16 | .background(Color.black)
17 | .contentTransition(.numericText())
18 | .animation(.easeInOut, value: title)
19 | .onChange(of: tags) { newTags in
20 | sortedTagsArray = newTags.sorted()
21 | }
22 | .onAppear {
23 | sortedTagsArray = tags.sorted()
24 | }
25 | }
26 |
27 | // TODO - hardcoded file extension string
28 | private func generatePreviewFilename() -> String {
29 | var taggedString = ""
30 |
31 | for tag in sortedTagsArray {
32 | taggedString.append("\(tag)-")
33 | }
34 |
35 | return "\(title)__\(taggedString).\(audioFormat)"
36 | }
37 | }
38 |
39 | #Preview {
40 | PreviewFilenameView(
41 | title: .constant("ExampleTitle"),
42 | tags: .constant(["tag1", "tag2", "tag3"])
43 | )
44 | }
45 |
46 |
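47 | // Worked example of generatePreviewFilename(): with title "ExampleTitle", tags
48 | // {"tag1", "tag2"}, and assuming the current format's asString is "aac", the preview
49 | // reads "ExampleTitle__tag1-tag2-.aac" (each sorted tag is appended with its own trailing dash).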
--------------------------------------------------------------------------------
/rm2000/Views/Sample Editing/TokenField/TokenFieldView.swift:
--------------------------------------------------------------------------------
1 | // Modified version of this library
2 | // https://github.com/fcanas/TokenField
3 |
4 | import SwiftUI
5 | import AppKit
6 | import OSLog
7 |
8 | fileprivate let Log = Logger(subsystem: "TokenField", category: "tokenfield")
9 |
10 | public struct TokenField<Data>: View, NSViewRepresentable where Data: RandomAccessCollection {
11 |
12 | @Binding private var data: Data
13 |
14 | private var conversion: (Data.Element) -> String
15 | var completions: [String] = []
16 |
17 | public init(_ data: Binding<Data>, _ tokenConversion: @escaping (Data.Element) -> String) {
18 | conversion = tokenConversion
19 | _data = data
20 | }
21 |
22 | public func makeCoordinator() -> Coordinator {
23 | Coordinator(self)
24 | }
25 |
26 | public final class Coordinator: NSObject, NSTokenFieldDelegate, ObservableObject where Data: RandomAccessCollection {
27 |
28 | var data: Binding<Data>?
29 | var parent: TokenField
30 | var completions: [String] = []
31 |
32 | internal init(_ parent: TokenField) {
33 | self.parent = parent
34 | self.conversion = parent.conversion
35 | self.completions = parent.completions
36 | }
37 |
38 | private final class RepresentedToken<E> where E: Identifiable {
39 | internal init(token: E, conversion: @escaping (E) -> String) {
40 | self.token = token
41 | self.conversion = conversion
42 | }
43 | var token: E
44 | var conversion: (E) -> String
45 | }
46 |
47 | var conversion: ((Data.Element) -> String)! = nil
48 |
49 | public func tokenField(_ tokenField: NSTokenField, displayStringForRepresentedObject representedObject: Any) -> String? {
50 | return representedObject as? String
51 | }
52 |
53 | public func tokenField(_ tokenField: NSTokenField, hasMenuForRepresentedObject representedObject: Any) -> Bool {
54 | return false
55 | }
56 |
57 | public func tokenField(_ tokenField: NSTokenField, styleForRepresentedObject representedObject: Any) -> NSTokenField.TokenStyle {
58 | return .rounded
59 | }
60 |
61 | public func tokenField(_ tokenField: NSTokenField, shouldAdd tokens: [Any], at index: Int) -> [Any] {
62 | guard let newTokens = tokens as? [AnyHashable] else {
63 | Log.debug("New tokens are not hashable")
64 | return tokens
65 | }
66 | guard let existingTokens = tokenField.objectValue as? [AnyHashable] else {
67 | Log.debug("Existing tokens are not hashable")
68 | return tokens
69 | }
70 | Log.debug("candidate: \(newTokens)")
71 | Log.debug("existing: \(existingTokens)")
72 | var set = Set<AnyHashable>()
73 |
74 | return newTokens.filter { t in
75 | defer {set.insert(t)}
76 | return !set.contains(t)
77 | }
78 | }
79 |
80 | public func tokenField(_ tokenField: NSTokenField, completionsForSubstring substring: String, indexOfToken tokenIndex: Int, indexOfSelectedItem selectedIndex: UnsafeMutablePointer<Int>?) -> [Any]? {
81 | // Only match suggestions that START with the typed substring
82 | return completions.filter { $0.lowercased().hasPrefix(substring.lowercased()) }
83 | }
84 |
85 | public func completions(_ completions: [String]) -> Self {
86 | var copy = self
87 | copy.completions = completions
88 | return copy
89 | }
90 |
91 | public func controlTextDidChange(_ obj: Notification) {
92 | guard let tf = obj.object as? NSTokenField else {
93 | Log.debug("Control text did change, but object not a token field")
94 | return
95 | }
96 | guard let data = tf.objectValue as? Data else {
97 | Log.debug("Control text did change, but object value data unexpected type: \(type(of: tf.objectValue))")
98 | return
99 | }
100 | self.data?.wrappedValue = data
101 | }
102 | }
103 |
104 | public func makeNSView(context: Context) -> NSTokenField {
105 | let tf = NSTokenField()
106 | tf.autoresizingMask = [.width, .height]
107 | tf.tokenStyle = .rounded
108 | tf.setContentHuggingPriority(.defaultLow, for: .vertical)
109 |
110 | tf.controlSize = .regular
111 | tf.font = NSFont.systemFont(ofSize: NSFont.systemFontSize)
112 | tf.placeholderString = "Enter tags here"
113 |
114 | tf.isBezeled = false // prevent ugly border style
115 | tf.isBordered = true
116 |
117 | // Layer-based styling
118 | tf.wantsLayer = true
119 | tf.layer?.cornerRadius = 6
120 | tf.layer?.masksToBounds = true
121 | tf.layer?.backgroundColor = NSColor.controlBackgroundColor.cgColor
122 |
123 | tf.appearance = NSApp.effectiveAppearance
124 |
125 | tf.translatesAutoresizingMaskIntoConstraints = false
126 | NSLayoutConstraint.activate([
127 | tf.heightAnchor.constraint(greaterThanOrEqualToConstant: 24)
128 | ])
129 |
130 | let cell = tf.cell as? NSTokenFieldCell
131 | cell?.tokenStyle = .rounded
132 |
133 | context.coordinator.data = _data
134 | context.coordinator.conversion = self.conversion
135 | tf.delegate = context.coordinator
136 |
137 | tf.lineBreakMode = .byTruncatingMiddle
138 | tf.objectValue = data
139 |
140 | return tf
141 | }
142 |
143 |
144 | public func updateNSView(_ nsView: NSTokenField, context: Context) {
145 | if let b = nsView.superview?.bounds {
146 | context.coordinator.completions = self.completions
147 | nsView.frame = b
148 | }
149 | }
150 | }
151 |
152 | extension TokenField where Data.Element == String {
153 | public init(_ data: Binding<Data>) {
154 | conversion = {$0}
155 | _data = data
156 | }
157 | }
158 |
159 | extension TokenField {
160 | public func completions(_ completions: [String]) -> Self {
161 | var copy = self
162 | copy.completions = completions
163 | return copy
164 | }
165 | }
166 |
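167 | // Usage sketch, mirroring TokenInputField in EditSampleView; the tag values here are
168 | // placeholders:
169 | //
170 | // @State private var tags: Set<String> = []
171 | // TokenField(.init(get: { Array(tags) }, set: { tags = Set($0) }))
172 | // .completions(["ambient", "drums"])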
--------------------------------------------------------------------------------
/rm2000/Views/Sample Editing/TrimmingPlayerView.swift:
--------------------------------------------------------------------------------
1 | // TrimmingPlayerView.swift
2 | import SwiftUI
3 | import AVKit
4 | import AVFoundation
5 | import Combine
6 | import CoreMedia
7 |
8 | class PlayerViewModel: ObservableObject {
9 | @Published var playerView: AVPlayerView?
10 | @Published var playerItem: AVPlayerItem? {
11 | didSet {
12 | setupAVPlayerObservations()
13 | }
14 | }
15 | var player: AVPlayer?
16 | var activeRecording: FileRepresentable
17 |
18 | private var cancellables = Set<AnyCancellable>()
19 |
20 | var forwardEndTime: Binding<CMTime?>
21 | var reverseEndTime: Binding<CMTime?>
22 |
23 | init(activeRecording: FileRepresentable, forwardEndTime: Binding<CMTime?>, reverseEndTime: Binding<CMTime?>) {
24 | self.activeRecording = activeRecording
25 | self.forwardEndTime = forwardEndTime
26 | self.reverseEndTime = reverseEndTime
27 | setupPlayer()
28 | }
29 |
30 | fileprivate func setupPlayer() {
31 | let fileURL = activeRecording.fileURL
32 |
33 | let asset = AVAsset(url: fileURL)
34 | let item = AVPlayerItem(asset: asset)
35 | self.playerItem = item
36 |
37 | player = AVPlayer(playerItem: playerItem)
38 |
39 | let view = AVPlayerView()
40 | view.player = player
41 | view.controlsStyle = .inline
42 | view.showsTimecodes = true
43 | view.showsSharingServiceButton = false
44 | view.showsFrameSteppingButtons = false
45 | view.showsFullScreenToggleButton = false
46 |
47 | playerView = view
48 | }
49 |
50 | private func setupAVPlayerObservations() {
51 | guard let playerItem = playerItem else { return }
52 |
53 | playerItem.publisher(for: \.status)
54 | .sink { [weak self] status in
55 |
56 | // .readyToPlay from the AVPlayerItem.Status enum
57 | if status == .readyToPlay {
58 | self?.objectWillChange.send()
59 | }
60 | }
61 | .store(in: &cancellables)
62 |
63 | playerItem.publisher(for: \.forwardPlaybackEndTime)
64 | .sink { [weak self] newTime in
65 | self?.forwardEndTime.wrappedValue = newTime
66 | self?.objectWillChange.send()
67 | }
68 | .store(in: &cancellables)
69 |
70 | playerItem.publisher(for: \.reversePlaybackEndTime)
71 | .sink { [weak self] newTime in
72 | self?.reverseEndTime.wrappedValue = newTime
73 | self?.objectWillChange.send()
74 | }
75 | .store(in: &cancellables)
76 | }
77 |
78 | func beginTrimming() {
79 | guard let playerView = playerView else { return }
80 |
81 | DispatchQueue.main.async {
82 | playerView.beginTrimming { result in
83 | switch result {
84 | case .okButton:
85 | print("Trim completed")
86 | case .cancelButton:
87 | print("Trim cancelled")
88 | @unknown default:
89 | print("Unknown trim result")
90 | }
91 | }
92 | }
93 | }
94 | }
95 |
96 | struct TrimmingPlayerView<Model: FileRepresentable>: View {
97 | @StateObject private var viewModel: PlayerViewModel
98 | // Optionals, since the CMTime values can be NaN
99 | @Binding var forwardEndTime: CMTime?
100 | @Binding var reverseEndTime: CMTime?
101 |
102 | let model: Model
103 |
104 | init(recording: Model, forwardEndTime: Binding<CMTime?>, reverseEndTime: Binding<CMTime?>) {
105 | self.model = recording
106 | _forwardEndTime = forwardEndTime
107 | _reverseEndTime = reverseEndTime
108 | _viewModel = StateObject(wrappedValue:
109 | PlayerViewModel(
110 | activeRecording: recording,
111 | forwardEndTime: forwardEndTime,
112 | reverseEndTime: reverseEndTime
113 | )
114 | )
115 | }
116 |
117 | var body: some View {
118 | VStack {
119 | if let playerView = viewModel.playerView {
120 | AudioPlayerView(playerView: playerView)
121 | .frame(height: 60)
122 | } else {
123 | Text("Player not available")
124 | .foregroundColor(.secondary)
125 | }
126 | }
127 | }
128 | }
129 |
130 | struct AudioPlayerView: NSViewRepresentable {
131 | let playerView: AVPlayerView
132 |
133 | func makeNSView(context: Context) -> AVPlayerView {
134 | Task {
135 | do {
136 | try await playerView.activateTrimming()
137 | playerView.hideTrimButtons()
138 | } catch {
139 | print("Failed to activate trimming: \(error)")
140 | }
141 | }
142 | return playerView
143 | }
144 |
145 | func updateNSView(_ nsView: AVPlayerView, context: Context) {}
146 | }
147 |
148 | extension AVPlayerView {
149 | /**
150 | Activates trim mode without waiting for trimming to finish.
151 | */
152 | func activateTrimming() async throws { // TODO: `throws(CancellationError)`.
153 | _ = await updates(for: \.canBeginTrimming).first { $0 }
154 |
155 | try Task.checkCancellation()
156 |
157 | Task {
158 | await beginTrimming()
159 | }
160 |
161 | await Task.yield()
162 | }
163 |
164 | func hideTrimButtons() {
165 | // This method is a collection of hacks, so it might be acting funky on different OS versions.
166 | guard
167 | let avTrimView = firstSubview(deep: true, where: { $0.simpleClassName == "AVTrimView" }),
168 | let superview = avTrimView.superview
169 | else {
170 | return
171 | }
172 |
173 | // First find the constraints for `avTrimView` that pins to the left edge of the button.
174 | // Then replace the left edge of a button with the right edge - this will stretch the trim view.
175 | if let constraint = superview.constraints.first(where: {
176 | ($0.firstItem as? NSView) == avTrimView && $0.firstAttribute == .right
177 | }) {
178 | superview.removeConstraint(constraint)
179 | constraint.changing(secondAttribute: .right).isActive = true
180 | }
181 |
182 | if let constraint = superview.constraints.first(where: {
183 | ($0.secondItem as? NSView) == avTrimView && $0.secondAttribute == .right
184 | }) {
185 | superview.removeConstraint(constraint)
186 | constraint.changing(firstAttribute: .right).isActive = true
187 | }
188 |
189 | // Now find buttons that are not images (images are playing controls) and hide them.
190 | superview.subviews
191 | .first { $0 != avTrimView }?
192 | .subviews
193 | .filter { ($0 as? NSButton)?.image == nil }
194 | .forEach {
195 | $0.isHidden = true
196 | }
197 | }
198 |
199 | open override func cancelOperation(_ sender: Any?) {}
200 |
201 | }
202 |
--------------------------------------------------------------------------------
/rm2000/Views/Sample Library/Audio Player/SampleLibraryAutoPlayer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SampleLibraryAutoPlayer.swift
3 | // rm2000
4 | //
5 | // Created by Marcelo Mendez on 5/4/25.
6 | //
7 | import SwiftUI
8 | import AVFoundation
9 | import Combine
10 |
11 |
12 | class SLAudioPlayer: ObservableObject {
13 | private var player: AVPlayer?
14 | @Published var isPlaying = false
15 | @Published var currentTime: Double = 0
16 | @Published var duration: Double = 1
17 | @Published var isAutoplay: Bool = false
18 |
19 | private var timeObserver: Any?
20 | private var timer: AnyCancellable?
21 |
22 | init() {
23 | // nothing
24 | }
25 |
26 | func loadAudio(from url: URL?) {
27 | // Clear any previous observer
28 | removeTimeObserver()
29 |
30 | // Reset state
31 | isPlaying = false
32 | currentTime = 0
33 |
34 | guard let url = url else { return }
35 |
36 | let playerItem = AVPlayerItem(url: url)
37 | player = AVPlayer(playerItem: playerItem)
38 |
39 | // Get duration
40 | if let duration = player?.currentItem?.asset.duration.seconds, !duration.isNaN {
41 | self.duration = duration
42 | }
43 |
44 | // Add time observer
45 | timeObserver = player?.addPeriodicTimeObserver(forInterval: CMTime(seconds: 0.05, preferredTimescale: 600), queue: .main) { [weak self] time in
46 | guard let self = self else { return }
47 | let currentSeconds = CMTimeGetSeconds(time)
48 | if currentSeconds.isFinite {
49 | self.currentTime = currentSeconds
50 | // Force object to update, which will refresh dependent views
51 | self.objectWillChange.send()
52 | }
53 | }
54 |
55 | // Listen for when the item finishes playing
56 | NotificationCenter.default.addObserver(
57 | forName: .AVPlayerItemDidPlayToEndTime,
58 | object: player?.currentItem,
59 | queue: .main) { [weak self] _ in
60 | self?.isPlaying = false
61 | self?.player?.seek(to: CMTime.zero)
62 | self?.currentTime = 0
63 | self?.objectWillChange.send()
64 | }
65 | }
66 |
67 | func playPause() {
68 | if isPlaying {
69 | player?.pause()
70 | } else {
71 | player?.play()
72 | }
73 | isPlaying.toggle()
74 | }
75 |
76 | func play() {
77 | if !isPlaying {
78 | player?.play()
79 | isPlaying = true
80 | }
81 | }
82 |
83 | func forcePause() {
84 | if isPlaying {
85 | player?.pause()
86 | isPlaying = false
87 | }
88 | }
89 |
90 | func seekTo(time: Double) {
91 | player?.seek(to: CMTime(seconds: time, preferredTimescale: 600))
92 | currentTime = time
93 | }
94 |
95 | private func removeTimeObserver() {
96 | if let timeObserver = timeObserver {
97 | player?.removeTimeObserver(timeObserver)
98 | self.timeObserver = nil
99 | }
100 | }
101 |
102 | deinit {
103 | removeTimeObserver()
104 | }
105 | }
106 |
107 |
108 | struct SampleLibraryAutoPlayer: View {
109 | var body: some View {
110 | Text("Hello, World!")
111 | }
112 | }
113 |
114 | #Preview {
115 | SampleLibraryAutoPlayer()
116 | }
117 |
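118 | // Usage sketch (the sample library drives this through SampleLibraryViewModel.slAudioPlayer);
119 | // the `sample` value below is a placeholder:
120 | //
121 | // let player = SLAudioPlayer()
122 | // player.loadAudio(from: sample.fileURL)
123 | // player.playPause()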
--------------------------------------------------------------------------------
/rm2000/Views/Sample Library/DetailView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | enum DetailViewType: Hashable {
4 | case all
5 | case tagged(String)
6 | case untagged
7 | }
8 |
9 | struct DetailView: View {
10 | @ObservedObject var viewModel: SampleLibraryViewModel
11 |
12 | private var currentViewType: DetailViewType {
13 | guard let selection = viewModel.sidebarSelection else {
14 | return .all
15 | }
16 |
17 | switch selection {
18 | case .allRecordings:
19 | return .all
20 | case .untaggedRecordings:
21 | return .untagged
22 | case .tag(let tagName):
23 | return .tagged(tagName)
24 | }
25 | }
26 |
27 | var body: some View {
28 | Group {
29 | switch currentViewType {
30 | case .all:
31 | RecordingsListView(viewModel: viewModel, viewType: .all)
32 | case .tagged(let tagName):
33 | RecordingsListView(viewModel: viewModel, viewType: .tagged(tagName))
34 | case .untagged:
35 | RecordingsListView(viewModel: viewModel, viewType: .untagged)
36 | }
37 | }
38 | }
39 | }
40 |
41 | private struct RecordingsListView: View {
42 | @ObservedObject var viewModel: SampleLibraryViewModel
43 | let viewType: DetailViewType
44 |
45 | private var filteredSamples: [Sample] {
46 | switch viewType {
47 | case .all:
48 | return viewModel.listOfAllSamples
49 | case .tagged(let tagName):
50 | return viewModel.listOfAllSamples.filter { $0.tags.contains(tagName) }
51 | case .untagged:
52 | return viewModel.listOfAllSamples.filter { $0.tags.isEmpty }
53 | }
54 | }
55 |
56 | var body: some View {
57 | Group {
58 | if viewModel.finishedProcessing {
59 | List(filteredSamples, id: \.id, selection: $viewModel.detailSelection) { sample in
60 | let itemModel = SampleListItemModel(file: sample)
61 | SampleIndividualListItem(viewModel: viewModel, sample: itemModel)
62 | .tag(sample.id)
63 | }
64 | } else {
65 | ProgressView("Loading recordings...")
66 | }
67 | }
68 | }
69 | }
70 |
71 | struct SampleIndividualListItem: View {
72 | @ObservedObject var viewModel: SampleLibraryViewModel
73 | @Environment(\.openWindow) var openWindow
74 | var sample: SampleListItemModel
75 |
76 | var body: some View {
77 | HStack {
78 | VStack(alignment: .leading, spacing: 4) {
79 | Text("\(sample.text)")
80 | .font(.title3)
81 | if let sampleObj = sample.file as? Sample{
82 | HStack(spacing: 8) {
83 | if (!sampleObj.tags.isEmpty) {
84 | ForEach(Array(sampleObj.tags), id: \.self) { tagName in
85 | TagComponent(string: tagName)
86 | }
87 | }
88 | }
89 | }
90 | }
91 |
92 | Spacer()
93 |
94 | StaticWaveformView(fileURL: sample.file.fileURL)
95 | .frame(maxWidth: 200, maxHeight: 20)
96 |
97 | Spacer()
98 | HStack {
99 | // show extension of the sample
100 | Text(sample.file.fileURL.pathExtension.uppercased())
101 | .font(.system(.caption, design: .monospaced))
102 | .fontWeight(.semibold)
103 | .foregroundColor(.secondary)
104 |
105 | Button {
106 | viewModel.detailSelection = sample.id
107 | viewModel.showInspector = true
108 | } label: {
109 | Image(systemName: "info.circle")
110 | }
111 | .buttonStyle(.borderless)
112 | }
113 | }
114 | .frame(minHeight: 40, maxHeight: 40)
115 | .draggable(sample) {
116 | // example view for now
117 | Label(sample.file.fileURL.lastPathComponent, systemImage: "waveform")
118 | .padding()
119 | .background(Color(NSColor.windowBackgroundColor))
120 | .cornerRadius(8)
121 | .shadow(radius: 2)
122 | }
123 | .contextMenu {
124 | Button("Open File") {
125 | NSWorkspace.shared.open(sample.file.fileURL)
126 | }
127 | }
128 | }
129 | }
130 |
131 | #Preview {
132 | SampleLibraryView()
133 | .environmentObject(SampleStorage.shared)
134 | }
135 |
--------------------------------------------------------------------------------
/rm2000/Views/Sample Library/InspectorView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | struct InspectorView: View {
4 | @ObservedObject var viewModel: SampleLibraryViewModel
5 |
6 | var body: some View {
7 | VStack(alignment: .leading, spacing: 0) {
8 | if let sample = viewModel.selectedSample {
9 | Form {
10 | Section(header: Text("Metadata")) {
11 | HStack {
12 | Text("Title")
13 | Spacer()
14 | Text(sample.title)
15 | .foregroundColor(.secondary)
16 | }
17 |
18 | HStack {
19 | Text("Tags")
20 | Spacer()
21 | ForEach(Array(sample.tags), id: \.self) { tagName in
22 | TagComponent(string: tagName)
23 | }
24 | }
25 |
26 | // if let desc = sample.description {
27 | // VStack(alignment: .leading) {
28 | // Text("Description")
29 | // Text(desc)
30 | // .foregroundColor(.secondary)
31 | // .font(.body)
32 | // .fixedSize(horizontal: false, vertical: true)
33 | // }
34 | // }
35 | }
36 | Section(header: Text("File Info")) {
37 | HStack {
38 | Text("Filename")
39 | Spacer()
40 | Text(sample.filename ?? "Unknown")
41 | .foregroundColor(.secondary)
42 | }
43 |
44 | HStack {
45 | Text("File path")
46 | Spacer()
47 | Text(sample.fileURL.path)
48 | .foregroundColor(.secondary)
49 | .truncationMode(.middle)
50 | }
51 | HStack {
52 | Text("File size")
53 | Spacer()
54 | let rawByteSize: Int64 = Int64(truncatingIfNeeded: sample.fileURL.fileSize ?? 0)
55 | let fileSizeWithUnit = ByteCountFormatter.string(fromByteCount: rawByteSize, countStyle: .file)
56 | Text(fileSizeWithUnit)
57 | .foregroundColor(.secondary)
58 | .truncationMode(.middle)
59 | }
60 | Button {
61 | NSWorkspace.shared.activateFileViewerSelecting([sample.fileURL])
62 | } label: {
63 | Image(nsImage: NSWorkspace.shared.icon(forFile: "/System/Library/CoreServices/Finder.app"))
64 | .resizable()
65 | .scaledToFit()
66 | .frame(width: 16, height: 16)
67 | Text("Reveal in Finder")
68 | }
69 |
70 | }
71 | }
72 | .formStyle(.grouped)
73 | } else {
74 | Text("No Sample selected")
75 | .padding()
76 | }
77 | }
78 | .padding(-10)
79 | }
80 | }
81 |
82 | #Preview {
83 | InspectorView(viewModel: SampleLibraryViewModel())
84 | }
85 |
--------------------------------------------------------------------------------
/rm2000/Views/Sample Library/SampleLibraryView.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import Combine
3 | import SwiftUI
4 | import OSLog
5 |
6 | extension ToolbarItemPlacement {
7 | static let favoritesBar = accessoryBar(id: "com.example.favorites")
8 | }
9 |
10 | struct SampleLibraryView: View {
11 | @StateObject private var viewModel: SampleLibraryViewModel
12 | @Environment(\.openURL) private var openURL
13 |
14 | @State private var currentSamplesInView: Int = 0
15 | @State private var selection = "Apple"
16 |
17 | init() {
18 | _viewModel = StateObject(wrappedValue: SampleLibraryViewModel())
19 | }
20 |
21 | var body: some View {
22 | NavigationSplitView {
23 | SidebarView(viewModel: viewModel)
24 | .toolbar(removing: .sidebarToggle)
25 | .navigationSplitViewColumnWidth(min: 200, ideal: 200, max: 300)
26 | } detail: {
27 | DetailView(viewModel: viewModel)
28 | .navigationSplitViewColumnWidth(min: 500, ideal: 500)
29 | }
30 | .toolbar(id: "rm2000.main-toolbar"){
31 |
32 | ToolbarItem(id: "rm2000.sidebar", placement: .navigation) {
33 | SidebarButton()
34 | }.customizationBehavior(.disabled)
35 | ToolbarItem(id: "rm2000.share.button", placement: .primaryAction) {
36 | ShareSampleButton()
37 | }
38 | ToolbarItem(id: "rm2000.import-sample-button", placement: .primaryAction) {
39 | ImportSampleButton()
40 | }
41 | ToolbarItem(id: "rm2000.open-in-finder-button", placement: .primaryAction) {
42 | OpenInFinderButton()
43 | }
44 |
45 | ToolbarItem(id: "rm2000.divider", placement: .primaryAction) {
46 | HStack {
47 | Divider()
48 | }
49 | }
50 | ToolbarItem(id: "rm2000.picker", placement: .primaryAction) {
51 | Picker("View", selection: $selection) {
52 | Label("Grid", systemImage: "square.grid.2x2")
53 | Label("List", systemImage: "list.bullet")
54 | }.pickerStyle(.menu)
55 | }
56 |
57 | }
58 | .toolbar(id: "rm2000.favorites-toolbar") {
59 | ToolbarItem(id: "rm2000.playpause", placement: .favoritesBar) {
60 | Button {
61 | viewModel.slAudioPlayer.playPause()
62 | } label: {
63 | Image(systemName: viewModel.slAudioPlayer.isPlaying ? "pause.fill" : "play.fill")
64 | }
65 | .disabled(viewModel.selectedSample == nil)
66 | }
67 |
68 | ToolbarItem(id: "rm2000.duration", placement: .favoritesBar) {
69 | if (viewModel.slAudioPlayer.isPlaying) {
70 | // https://stackoverflow.com/questions/33401388/get-minutes-and-seconds-from-double-in-swift
71 | let mins: Int = Int(viewModel.slAudioPlayer.currentTime) / 60
72 | let secs: Int = Int(viewModel.slAudioPlayer.currentTime - Double(mins * 60))
73 | Text(String(format: "%d:%02d", mins, secs))
74 | }
75 | else {
76 | Text("0:00")
77 | .disabled(viewModel.selectedSample == nil)
78 | }
79 | }
80 |
81 | ToolbarItem(id: "rm2000.slider", placement: .favoritesBar) {
82 | Slider(
83 | value: Binding(
84 | get: { viewModel.slAudioPlayer.currentTime },
85 | set: { viewModel.slAudioPlayer.seekTo(time: $0) }
86 | ),
87 | in: 0...viewModel.slAudioPlayer.duration
88 | )
89 | .disabled(viewModel.selectedSample == nil)
90 | }
91 |
92 | ToolbarItem(id: "rm2000.autoplay-toggle", placement: .favoritesBar) {
93 | Toggle( "Autoplay",
94 | isOn: $viewModel.slAudioPlayer.isAutoplay
95 | ).toggleStyle(.checkbox)
96 | }
97 |
98 | }
99 | .inspector(isPresented: $viewModel.showInspector) {
100 |
101 | InspectorView(viewModel: viewModel)
102 |
103 | .toolbar(id: "rm2000.inspector.toolbar") {
104 | ToolbarItem(id: "rm2000.spacer") {
105 | Spacer()
106 | }
107 | ToolbarItem(id: "rm2000.inspector.button") {
108 | Button {
109 | viewModel.showInspector.toggle()
110 | } label: {
111 | Label("Inspector", systemImage: "info.circle")
112 | .foregroundStyle(.cyan)
113 |
114 | }
115 | }
116 | }
117 | .inspectorColumnWidth(min: 300, ideal: 400, max: 500)
118 | }
119 | .toolbarRole(.editor)
120 | .navigationTitle("Sample Library")
121 | .navigationSubtitle("\(currentSamplesInView) Samples")
122 | .onAppear {
123 | // automatically set toolbar to "Icon and Label"
124 | setToolbarStyle()
125 | }
126 | .task {
127 | currentSamplesInView = viewModel.listOfAllSamples.count
128 | }
129 | .onReceive(NotificationCenter.default.publisher(for: NSWindow.willCloseNotification)) { newValue in
130 | // window is closed, stop audio playback
131 | if (viewModel.slAudioPlayer.isPlaying) {
132 | viewModel.slAudioPlayer.forcePause()
133 | }
134 | }
135 | .searchable(text: .constant(""), placement: .sidebar)
136 | }
137 | }
138 |
139 |
140 | #Preview {
141 | SampleLibraryView()
142 | .environmentObject(SampleStorage.shared)
143 | .frame(width: 900)
144 | }
145 |
--------------------------------------------------------------------------------
/rm2000/Views/Sample Library/SidebarView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | enum SidebarSelection: Hashable {
4 | case allRecordings
5 | case untaggedRecordings
6 | case tag(String)
7 | }
8 |
9 | struct SidebarView: View {
10 | @ObservedObject var viewModel: SampleLibraryViewModel
11 |
12 | var body: some View {
13 | List(selection: $viewModel.sidebarSelection) {
14 | Section(header: Text("Collections")) {
15 | NavigationLink(value: SidebarSelection.allRecordings) {
16 | Label("All Recordings", systemImage: "folder")
17 | }
18 | NavigationLink(value: SidebarSelection.untaggedRecordings) {
19 | HStack {
20 | Image("untagged")
21 | .symbolRenderingMode(.palette)
22 | .foregroundStyle(.red, Color.accentColor)
23 |
24 | Text("Untagged")
25 | }
26 | }
27 | }
28 | Section(header: Text("Available tags")) {
29 | ForEach(viewModel.indexedTags, id: \.self) { tagName in
30 | NavigationLink(value: SidebarSelection.tag(tagName)) {
31 | Label("\(tagName)", systemImage: "number")
32 | }
33 | }
34 | }
35 | }
36 | }
37 | }
38 |
39 | #Preview("Sidebar View") {
40 | let vm = SampleLibraryViewModel()
41 | return SidebarView(viewModel: vm)
42 | }
43 |
--------------------------------------------------------------------------------
/rm2000/Views/Sample Library/TagComponent.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TagComponent.swift
3 | // rm2000
4 | //
5 | // Created by Marcelo Mendez on 5/4/25.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct TagComponent: View {
11 | var string: String?
12 | var body: some View {
13 |
14 | if let tag = string {
15 | Text(tag)
16 | .font(.caption)
17 | .padding(2)
18 | .background(Color.gray.opacity(0.2))
19 | .cornerRadius(3)
20 | } else {
21 | Text("")
22 | .font(.caption)
23 | .padding(2)
24 | }
25 |
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/rm2000/Views/Sample Library/Toolbar Buttons/ToolbarButtons.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | struct SidebarButton: View {
4 | var body: some View {
5 | Button(action: toggleSidebar) {
6 | Label("Sidebar", systemImage: "sidebar.leading")
7 | .foregroundStyle(.teal)
8 | }
9 | }
10 | }
11 |
12 | struct OpenInFinderButton: View {
13 | var body: some View {
14 | Button(action: {
15 | NSWorkspace.shared.open(SampleStorage.shared.UserDirectory.directory)
16 | }) {
17 | Label {
18 | Text("Open in Finder")
19 | } icon: {
20 | Image(nsImage: NSWorkspace.shared.icon(forFile: "/System/Library/CoreServices/Finder.app"))
21 | .resizable()
22 | .scaledToFit()
23 | // .frame(width: 20, height: 20)
24 | }
25 | }
26 | .help("Open in Finder")
27 | }
28 | }
29 |
30 | struct ShareSampleButton: View {
31 | var body: some View {
32 | Button(action: {
33 | print("Shared button pressed")
34 | }) {
35 | Label("Share", systemImage: "square.and.arrow.up")
36 | // .fontWeight(.black)
37 | .foregroundStyle(.gray)
38 | }
39 | // .padding(.bottom, 3) // or else it looks weirdly positioned!
40 | }
41 | }
42 |
43 | struct ImportSampleButton: View {
44 | var body: some View {
45 | Button(action: {
46 | NSWorkspace.shared.open(SampleStorage.shared.UserDirectory.directory)
47 | }) {
48 | Label("Import", systemImage: "plus")
49 | // .fontWeight(.black)
50 | .foregroundStyle(.green)
51 | }
52 | .help("Import a Sample")
53 | }
54 | }
55 |
56 | func toggleSidebar() {
57 | #if os(macOS)
58 | NSApp.keyWindow?.firstResponder?.tryToPerform(#selector(NSSplitViewController.toggleSidebar(_:)), with: nil)
59 | #endif
60 | }
61 |
62 |
63 | func setToolbarStyle() {
64 | #if os(macOS)
65 | if let window = NSApp.windows.first(where: { $0.isKeyWindow }),
66 | let toolbar = window.toolbar {
67 | toolbar.displayMode = .iconAndLabel
68 | toolbar.allowsUserCustomization = true
69 | toolbar.autosavesConfiguration = true
70 | }
71 | #endif
72 | }
73 |
--------------------------------------------------------------------------------
/rm2000/Views/Sample Library/WaveformView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // WaveformView.swift
3 | // rm2000
4 | //
5 | // Created by Marcelo Mendez on 5/4/25.
6 | //
7 |
8 | import SwiftUI
9 | import DSWaveformImage
10 | import DSWaveformImageViews
11 |
12 | struct StaticWaveformView: View {
13 |
14 | var fileURL: URL
15 |
16 | var configuration: Waveform.Configuration = Waveform.Configuration(
17 | style: .striped(Waveform.Style.StripeConfig(color: .white.withAlphaComponent(0.6), width: 2, spacing: 1, lineCap: .butt)),
18 | verticalScalingFactor: 1.0
19 | )
20 |
21 | var body: some View {
22 | WaveformView(audioURL: fileURL, configuration: .init(style: .striped(.init(color: .gray, width: 2, spacing: 1, lineCap: .butt)), verticalScalingFactor: 1)) {
23 | ProgressView()
24 | .controlSize(.extraLarge)
25 | .progressViewStyle(.linear)
26 | }
27 |
28 |
29 | }
30 | }
31 |
32 | #Preview("Waveform") {
33 | let fileURL = URL(fileURLWithPath: "/Users/marceloexc/Music/MusicProd/rm_testing/jazz--ambient_sample.mp3")
34 | StaticWaveformView(fileURL: fileURL)
35 | }
36 |
37 | #Preview("Library") {
38 | SampleLibraryView()
39 | .environmentObject(SampleStorage.shared)
40 | .frame(width: 900)
41 | }
42 |
--------------------------------------------------------------------------------
/rm2000/Views/Settings/GeneralTabView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import OSLog
3 |
4 | struct GeneralTabView: View {
5 |
6 |
7 | var body: some View {
8 | Form {
9 | Section { // start-at-login options below are stubbed out until autostart support is implemented
10 | // Toggle("Start at Login", isOn: $autostartAtLogin)
11 | // .onChange(of: autostartAtLogin) { newValue in
12 | // autoStartAtLogin()
13 | // }
14 | // Toggle("Minimize to Toolbar", isOn: $minimizeToToolbar)
15 | // .disabled(!autostartAtLogin)
16 | }
17 |
18 | Section {
19 | Toggle("Show File Extensions", isOn: .constant(true))
20 | Toggle("Keep unsaved samples", isOn: .constant(true))
21 | }
22 | }
23 | }
24 | }
25 |
26 | #Preview {
27 | SettingsView()
28 | .environmentObject(AppState.shared)
29 | }
30 |
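The two visible toggles above are bound to `.constant(true)`, so they render but never persist anything. If they are meant to be real preferences, `@AppStorage` is the usual minimal backing; the UserDefaults keys below are assumptions, not keys the app is known to use.

import SwiftUI

// Sketch only: the defaults keys are hypothetical.
struct GeneralTabPreferences: View {
	@AppStorage("showFileExtensions") private var showFileExtensions = true
	@AppStorage("keepUnsavedSamples") private var keepUnsavedSamples = true

	var body: some View {
		Form {
			Section {
				Toggle("Show File Extensions", isOn: $showFileExtensions)
				Toggle("Keep unsaved samples", isOn: $keepUnsavedSamples)
			}
		}
	}
}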
--------------------------------------------------------------------------------
/rm2000/Views/Settings/RecordingTabView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import OSLog
3 |
4 | struct RecordingTabView: View {
5 | @State private var selectedFileType = AudioFormat.aac
6 | @State private var showFileChooser: Bool = false
7 | @Binding var workingDirectory: URL?
8 | @EnvironmentObject var appState: AppState
9 | @EnvironmentObject var recordingState: TapeRecorderState
10 |
11 | var body: some View {
12 | Form {
13 | HStack {
14 | GroupBox(
15 | label:
16 | Label("Saved Directory", systemImage: "books.vertical")
17 | ) {
18 | HStack {
19 | Text(
20 | "Currently set to \"\(workingDirectory?.lastPathComponent ?? "nil")\""
21 | )
22 | .font(.caption)
23 |
24 | Spacer()
25 |
26 | Button("Browse") {
27 | showFileChooser = true
28 | }
29 | .fileImporter(
30 | isPresented: $showFileChooser,
31 | allowedContentTypes: [.directory]
32 | ) { result in
33 | switch result {
34 | case .success(let directory):
35 |
36 | // start security-scoped access to the user-picked directory
37 | guard directory.startAccessingSecurityScopedResource() else {
38 | Logger.appState.error("Could not start security-scoped access to directory \(directory)")
39 | return
40 | }
41 | appState.sampleDirectory = directory
42 | workingDirectory = directory
43 | Logger.appState.info(
44 | "Settings set new sample directory to \(directory)"
45 | )
46 | case .failure(let error):
47 | Logger.appState.error(
48 | "Could not set new sampleDirectory from settings view: \(error)"
49 | )
50 | }
51 | }
52 | }
53 | }
54 | }
55 |
56 |
57 | GroupBox(
58 | label:
59 | Label("Recording", systemImage: "recordingtape")
60 | ) {
61 | HStack {
62 | Text("Audio Format")
63 | .font(.caption)
64 |
65 | Spacer()
66 | Picker("Sample File Type", selection: $selectedFileType) {
67 | Text("AAC").tag(AudioFormat.aac)
68 | Text("MP3").tag(AudioFormat.mp3)
69 | Text("WAV").tag(AudioFormat.wav)
70 | Text("FLAC").tag(AudioFormat.flac)
71 |
72 | }
73 | .frame(width: 200)
74 | .labelsHidden() // misbehaves otherwise
75 | .pickerStyle(.segmented)
76 | .clipped()
77 | .onChange(of: selectedFileType) { newValue in
78 | // Logger().debug("New audio format of \(newValue) selected")
79 | //
80 | // Uncommenting the line above currently produces the compiler error
81 | // "Failed to produce diagnostic for expression; please submit a bug report (https://swift.org/contributing/#reporting-bugs)".
82 | // Interpolating String(describing: newValue) instead may work around it.
83 | recordingState.sampleRecordAudioFormat = newValue
84 | }
85 | }
86 | }
87 |
88 | }.onAppear {
89 | selectedFileType = recordingState.sampleRecordAudioFormat
90 | }
91 | }
92 | }
93 |
94 | #Preview {
95 | RecordingTabView(workingDirectory: .constant(URL(string: "file:///Users/user/Documents")!))
96 | .environmentObject(AppState.shared)
97 | .environmentObject(TapeRecorderState())
98 | .padding()
99 | .frame(width: 350)
100 | }
101 |
102 |
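The fileImporter handler above starts security-scoped access to the picked directory, but nothing in this view saves an app-scoped bookmark, even though the entitlements enable com.apple.security.files.bookmarks.app-scope. If AppState does not already persist one, a minimal sketch looks like the following; the UserDefaults key and the helper itself are assumptions, not existing code.

import Foundation

// Sketch only: "sampleDirectoryBookmark" is a hypothetical defaults key.
enum DirectoryBookmark {
	static let key = "sampleDirectoryBookmark"

	// Save an app-scoped, security-scoped bookmark after the user picks a directory.
	static func save(_ directory: URL) throws {
		let data = try directory.bookmarkData(
			options: .withSecurityScope,
			includingResourceValuesForKeys: nil,
			relativeTo: nil)
		UserDefaults.standard.set(data, forKey: key)
	}

	// Resolve the bookmark on launch. The caller is responsible for balancing
	// startAccessingSecurityScopedResource() with stopAccessingSecurityScopedResource().
	static func restore() -> URL? {
		guard let data = UserDefaults.standard.data(forKey: key) else { return nil }
		var isStale = false
		guard
			let url = try? URL(
				resolvingBookmarkData: data,
				options: .withSecurityScope,
				relativeTo: nil,
				bookmarkDataIsStale: &isStale),
			url.startAccessingSecurityScopedResource()
		else { return nil }
		if isStale { try? save(url) }
		return url
	}
}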
--------------------------------------------------------------------------------
/rm2000/Views/Settings/SettingsView.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import OSLog
3 | import SwiftUI
4 |
5 | struct SettingsView: View {
6 |
7 | @EnvironmentObject private var appState: AppState
8 | @State private var workingDirectory: URL? = nil
9 | @State private var autostartAtLogin = false // not yet used; the login toggles in GeneralTabView are still commented out
10 | @State private var minimizeToToolbar = false // not yet used
11 | @State private var selectedTab = "General"
12 |
13 | var body: some View {
14 | TabView(selection: $selectedTab) {
15 | GeneralTabView()
16 | .tabItem {
17 | Label("General", systemImage: "gear")
18 | }
19 | .padding()
20 | .frame(width: 450)
21 | .tag("General")
22 |
23 | RecordingTabView(workingDirectory: $workingDirectory)
24 | .tabItem {
25 | Label {
26 | Text("Recording")
27 | } icon: {
28 | Image(systemName: "recordingtape")
29 | .rotationEffect(.degrees(180))
30 | .fontWeight(.black)
31 | }
32 | }
33 | .padding()
34 | .frame(width: 450)
35 | .tag("Recording")
36 | }
37 | .onAppear {
38 | workingDirectory = appState.sampleDirectory
39 | }
40 | }
41 |
42 | private func autoStartAtLogin() {
43 | Logger.viewModels.warning("Not implemented yet")
44 | }
45 | }
46 |
47 | #Preview {
48 | SettingsView()
49 | .environmentObject(AppState.shared)
50 | }
51 |
--------------------------------------------------------------------------------
/rm2000/Views/UserNotifications.swift:
--------------------------------------------------------------------------------
1 | import UserNotifications
2 |
3 | func displayTestingGlobalNotication() async {
4 | let center = UNUserNotificationCenter.current()
5 |
6 | do {
7 | _ = try await center.requestAuthorization(options: [.alert, .criticalAlert, .provisional])
8 | } catch {
9 | print("User Notifications authorization request failed: \(error)")
10 | }
11 |
12 | let content = UNMutableNotificationContent()
13 |
14 | content.title = "RM2000 Tape Recorder"
15 | content.body = "Sample Recorded!"
16 | let uuid = UUID().uuidString
17 |
18 | let trigger = UNTimeIntervalNotificationTrigger(timeInterval: 0.1, repeats: false)
19 | print(trigger)
20 | let request = UNNotificationRequest(identifier: uuid, content: content, trigger: trigger)
21 |
22 | do {
23 | try await center.add(request)
24 | } catch {
25 | print("Could not schedule notification: \(error)")
26 | }
27 | }
28 |
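requestAuthorization(options:) returns a Bool that the helper above discards, so a caller cannot tell whether the prompt was actually granted. A sketch of a possible call site that checks it before scheduling anything; the function name is hypothetical and not part of this repository.

import UserNotifications

// Sketch only: a possible call site, not code from this repository.
func postSampleRecordedNotificationIfAllowed() async {
	let granted = (try? await UNUserNotificationCenter.current()
		.requestAuthorization(options: [.alert])) ?? false
	guard granted else { return }
	await displayTestingGlobalNotication()
}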
--------------------------------------------------------------------------------
/rm2000/rm2000.entitlements:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>com.apple.security.app-sandbox</key>
6 | <true/>
7 | <key>com.apple.security.files.user-selected.read-write</key>
8 | <true/>
9 | <key>com.apple.security.files.bookmarks.app-scope</key>
10 | <true/>
11 | </dict>
12 | </plist>
13 |
--------------------------------------------------------------------------------
/rm2000Tests/rm2000Tests.swift:
--------------------------------------------------------------------------------
1 | //import XCTest
2 | //@testable import rm2000
3 | //
4 | //final class rm2000Tests: XCTestCase {
5 | //
6 | // override func setUpWithError() throws {
7 | // // Put setup code here. This method is called before the invocation of each test method in the class.
8 | // }
9 | //
10 | // override func tearDownWithError() throws {
11 | // // Put teardown code here. This method is called after the invocation of each test method in the class.
12 | // }
13 | //
14 | // func testExample() throws {
15 | // // This is an example of a functional test case.
16 | // // Use XCTAssert and related functions to verify your tests produce the correct results.
17 | // // Any test you write for XCTest can be annotated as throws and async.
18 | // // Mark your test throws to produce an unexpected failure when your test encounters an uncaught error.
19 | // // Mark your test async to allow awaiting for asynchronous code to complete. Check the results with assertions afterwards.
20 | // }
21 | //
22 | // func testPerformanceExample() throws {
23 | // // This is an example of a performance test case.
24 | // self.measure {
25 | // // Put the code you want to measure the time of here.
26 | // }
27 | // }
28 | //
29 | //}
30 |
--------------------------------------------------------------------------------
/rm2000UITests/rm2000UITests.swift:
--------------------------------------------------------------------------------
1 | //import XCTest
2 | //
3 | //final class rm2000UITests: XCTestCase {
4 | //
5 | // override func setUpWithError() throws {
6 | // // Put setup code here. This method is called before the invocation of each test method in the class.
7 | //
8 | // // In UI tests it is usually best to stop immediately when a failure occurs.
9 | // continueAfterFailure = false
10 | //
11 | // // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this.
12 | // }
13 | //
14 | // override func tearDownWithError() throws {
15 | // // Put teardown code here. This method is called after the invocation of each test method in the class.
16 | // }
17 | //
18 | // func testExample() throws {
19 | // // UI tests must launch the application that they test.
20 | // let app = XCUIApplication()
21 | // app.launch()
22 | //
23 | // // Use XCTAssert and related functions to verify your tests produce the correct results.
24 | // }
25 | //
26 | // func testLaunchPerformance() throws {
27 | // if #available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 7.0, *) {
28 | // // This measures how long it takes to launch your application.
29 | // measure(metrics: [XCTApplicationLaunchMetric()]) {
30 | // XCUIApplication().launch()
31 | // }
32 | // }
33 | // }
34 | //}
35 |
--------------------------------------------------------------------------------
/rm2000UITests/rm2000UITestsLaunchTests.swift:
--------------------------------------------------------------------------------
1 | //import XCTest
2 | //
3 | //final class rm2000UITestsLaunchTests: XCTestCase {
4 | //
5 | // override class var runsForEachTargetApplicationUIConfiguration: Bool {
6 | // true
7 | // }
8 | //
9 | // override func setUpWithError() throws {
10 | // continueAfterFailure = false
11 | // }
12 | //
13 | // func testLaunch() throws {
14 | // let app = XCUIApplication()
15 | // app.launch()
16 | //
17 | // // Insert steps here to perform after app launch but before taking a screenshot,
18 | // // such as logging into a test account or navigating somewhere in the app
19 | //
20 | // let attachment = XCTAttachment(screenshot: app.screenshot())
21 | // attachment.name = "Launch Screen"
22 | // attachment.lifetime = .keepAlways
23 | // add(attachment)
24 | // }
25 | //}
26 |
--------------------------------------------------------------------------------