├── CaptureSample
│   ├── Assets.xcassets
│   │   ├── Contents.json
│   │   ├── AccentColor.colorset
│   │   │   └── Contents.json
│   │   └── AppIcon.appiconset
│   │       └── Contents.json
│   ├── Audio
│   │   └── Synth.aif
│   ├── Preview Content
│   │   └── Preview Assets.xcassets
│   │       └── Contents.json
│   ├── CaptureSample.entitlements
│   ├── CaptureSampleApp.swift
│   ├── Views
│   │   ├── MaterialView.swift
│   │   ├── AudioLevelsView.swift
│   │   ├── CapturePreview.swift
│   │   ├── PickerSettingsView.swift
│   │   └── ConfigurationView.swift
│   ├── AudioPlayer.swift
│   ├── ContentView.swift
│   ├── PowerMeter.swift
│   ├── CaptureEngine.swift
│   └── ScreenRecorder.swift
├── CaptureSample.xcodeproj
│   ├── .xcodesamplecode.plist
│   ├── project.xcworkspace
│   │   └── xcshareddata
│   │       └── WorkspaceSettings.xcsettings
│   ├── xcshareddata
│   │   └── xcschemes
│   │       └── CaptureSample.xcscheme
│   └── project.pbxproj
├── Configuration
│   └── SampleCode.xcconfig
├── .gitignore
├── LICENSE
│   └── LICENSE.txt
└── README.md
/CaptureSample/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/CaptureSample/Audio/Synth.aif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Fidetro/CapturingScreenContentInMacOS/HEAD/CaptureSample/Audio/Synth.aif
--------------------------------------------------------------------------------
/CaptureSample/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/CaptureSample/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/CaptureSample.xcodeproj/.xcodesamplecode.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/CaptureSample.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>BuildSystemType</key>
6 | 	<string>Latest</string>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/CaptureSample/CaptureSample.entitlements:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>com.apple.security.app-sandbox</key>
6 | 	<true/>
7 | 	<key>com.apple.security.files.user-selected.read-only</key>
8 | 	<true/>
9 | </dict>
10 | </plist>
11 |
--------------------------------------------------------------------------------
/CaptureSample/CaptureSampleApp.swift:
--------------------------------------------------------------------------------
/*
See the LICENSE.txt file for this sample’s licensing information.

Abstract:
The entry point into this app.
*/

import SwiftUI

/// The app's single scene: a resizable window hosting `ContentView`
/// over a black backdrop.
@main
struct CaptureSampleApp: App {
    var body: some Scene {
        WindowGroup {
            ContentView()
                .frame(minWidth: 960, minHeight: 724)
                .background(.black)
        }
    }
}
19 |
--------------------------------------------------------------------------------
/CaptureSample/Views/MaterialView.swift:
--------------------------------------------------------------------------------
/*
See the LICENSE.txt file for this sample’s licensing information.

Abstract:
A wrapper view around NSVisualEffectView.
*/

import SwiftUI

/// Bridges AppKit's `NSVisualEffectView` into SwiftUI so views can sit on a
/// translucent "material" background.
struct MaterialView: NSViewRepresentable {

    /// Creates the effect view once; it blurs whatever is behind the window.
    func makeNSView(context: Context) -> NSVisualEffectView {
        let effectView = NSVisualEffectView()
        effectView.blendingMode = .behindWindow
        return effectView
    }

    /// Nothing to update — the view is configured entirely at creation time.
    func updateNSView(_ nsView: NSVisualEffectView, context: Context) {}
}
20 |
--------------------------------------------------------------------------------
/Configuration/SampleCode.xcconfig:
--------------------------------------------------------------------------------
1 | //
2 | // See the LICENSE.txt file for this sample’s licensing information.
3 | //
4 | // SampleCode.xcconfig
5 | //
6 |
7 | // The `SAMPLE_CODE_DISAMBIGUATOR` configuration is to make it easier to build
8 | // and run a sample code project. Once you set your project's development team,
9 | // you'll have a unique bundle identifier. This is because the bundle identifier
10 | // is derived based on the 'SAMPLE_CODE_DISAMBIGUATOR' value. Do not use this
11 | // approach in your own projects—it's only useful for sample code projects because
12 | // they are frequently downloaded and don't have a development team set.
13 | SAMPLE_CODE_DISAMBIGUATOR=${DEVELOPMENT_TEAM}
14 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See LICENSE folder for this sample’s licensing information.
2 | #
3 | # Apple sample code gitignore configuration.
4 |
5 | # Finder
6 | .DS_Store
7 |
8 | # Xcode - User files
9 | xcuserdata/
10 |
11 | **/*.xcodeproj/project.xcworkspace/*
12 | !**/*.xcodeproj/project.xcworkspace/xcshareddata
13 |
14 | **/*.xcodeproj/project.xcworkspace/xcshareddata/*
15 | !**/*.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
16 |
17 | **/*.playground/playground.xcworkspace/*
18 | !**/*.playground/playground.xcworkspace/xcshareddata
19 |
20 | **/*.playground/playground.xcworkspace/xcshareddata/*
21 | !**/*.playground/playground.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
22 |
--------------------------------------------------------------------------------
/CaptureSample/AudioPlayer.swift:
--------------------------------------------------------------------------------
/*
See the LICENSE.txt file for this sample’s licensing information.

Abstract:
An object that holds an AVAudioPlayer that plays an AIFF file.
*/

import Foundation
import AVFoundation

/// Loops the bundled `Synth.aif` file and publishes whether playback is active.
class AudioPlayer: ObservableObject {

    let audioPlayer: AVAudioPlayer

    /// True while the synth loop is playing; drives the Play/Stop button label.
    @Published var isPlaying = false

    init() {
        // The audio file ships in the app bundle; its absence is a packaging
        // error, so failing fast with a clear message is appropriate here.
        guard let url = Bundle.main.url(forResource: "Synth", withExtension: "aif") else {
            fatalError("Couldn't find Synth.aif in the app bundle.")
        }
        do {
            audioPlayer = try AVAudioPlayer(contentsOf: url, fileTypeHint: AVFileType.aiff.rawValue)
        } catch {
            // Report the underlying error instead of crashing via an opaque `try!`.
            fatalError("Couldn't load Synth.aif: \(error)")
        }
        audioPlayer.numberOfLoops = -1 // Loop indefinitely.
        audioPlayer.prepareToPlay()
    }

    /// Starts (or resumes) the loop and updates the published state.
    func play() {
        audioPlayer.play()
        isPlaying = true
    }

    /// Stops the loop and updates the published state.
    func stop() {
        audioPlayer.stop()
        isPlaying = false
    }
}
36 |
--------------------------------------------------------------------------------
/CaptureSample/Views/AudioLevelsView.swift:
--------------------------------------------------------------------------------
/*
See the LICENSE.txt file for this sample’s licensing information.

Abstract:
A view that renders an audio level meter.
*/

import Foundation
import SwiftUI

/// Wraps AppKit's `NSLevelIndicator` to display the capture stream's audio level.
struct AudioLevelsView: NSViewRepresentable {

    @StateObject var audioLevelsProvider: AudioLevelsProvider

    func makeNSView(context: Context) -> NSLevelIndicator {
        let indicator = NSLevelIndicator(frame: .zero)
        // The meter spans 0–10, warning from 6 and critical from 8.
        indicator.minValue = 0
        indicator.maxValue = 10
        indicator.warningValue = 6
        indicator.criticalValue = 8
        indicator.levelIndicatorStyle = .continuousCapacity
        indicator.heightAnchor.constraint(equalToConstant: 5).isActive = true
        return indicator
    }

    func updateNSView(_ levelMeter: NSLevelIndicator, context: Context) {
        // Scale the provider's level into the indicator's 0–10 range.
        levelMeter.floatValue = audioLevelsProvider.audioLevels.level * 10
    }
}
30 |
--------------------------------------------------------------------------------
/LICENSE/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright © 2023 Apple Inc.
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4 |
5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
6 |
7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
8 |
9 |
--------------------------------------------------------------------------------
/CaptureSample/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "mac",
5 | "scale" : "1x",
6 | "size" : "16x16"
7 | },
8 | {
9 | "idiom" : "mac",
10 | "scale" : "2x",
11 | "size" : "16x16"
12 | },
13 | {
14 | "idiom" : "mac",
15 | "scale" : "1x",
16 | "size" : "32x32"
17 | },
18 | {
19 | "idiom" : "mac",
20 | "scale" : "2x",
21 | "size" : "32x32"
22 | },
23 | {
24 | "idiom" : "mac",
25 | "scale" : "1x",
26 | "size" : "128x128"
27 | },
28 | {
29 | "idiom" : "mac",
30 | "scale" : "2x",
31 | "size" : "128x128"
32 | },
33 | {
34 | "idiom" : "mac",
35 | "scale" : "1x",
36 | "size" : "256x256"
37 | },
38 | {
39 | "idiom" : "mac",
40 | "scale" : "2x",
41 | "size" : "256x256"
42 | },
43 | {
44 | "idiom" : "mac",
45 | "scale" : "1x",
46 | "size" : "512x512"
47 | },
48 | {
49 | "idiom" : "mac",
50 | "scale" : "2x",
51 | "size" : "512x512"
52 | }
53 | ],
54 | "info" : {
55 | "author" : "xcode",
56 | "version" : 1
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/CaptureSample/Views/CapturePreview.swift:
--------------------------------------------------------------------------------
/*
See the LICENSE.txt file for this sample’s licensing information.

Abstract:
A view that renders a video frame.
*/

import SwiftUI

/// Hosts a `CALayer` whose contents are swapped for each captured video frame.
struct CapturePreview: NSViewRepresentable {

    // A layer that renders the video contents.
    private let contentLayer = CALayer()

    init() {
        contentLayer.contentsGravity = .resizeAspect
    }

    func makeNSView(context: Context) -> CaptureVideoPreview {
        CaptureVideoPreview(layer: contentLayer)
    }

    /// Called by `ScreenRecorder` as it receives new video frames.
    func updateFrame(_ frame: CapturedFrame) {
        contentLayer.contents = frame.surface
    }

    /// The view isn't updatable; frame updates flow through `updateFrame(_:)`.
    func updateNSView(_ nsView: CaptureVideoPreview, context: Context) {}

    /// An `NSView` that hosts the video layer as its backing layer.
    class CaptureVideoPreview: NSView {
        init(layer: CALayer) {
            super.init(frame: .zero)
            // Make this a layer-hosting view: set the layer first, then wantsLayer.
            self.layer = layer
            wantsLayer = true
        }

        required init?(coder: NSCoder) {
            fatalError("init(coder:) has not been implemented")
        }
    }
}
45 |
--------------------------------------------------------------------------------
/CaptureSample/ContentView.swift:
--------------------------------------------------------------------------------
/*
See the LICENSE.txt file for this sample’s licensing information.

Abstract:
The app's main view.
*/

import SwiftUI
import ScreenCaptureKit
import OSLog
import Combine

struct ContentView: View {

    @State var userStopped = false
    @State var disableInput = false
    @State var isUnauthorized = false

    @StateObject var screenRecorder = ScreenRecorder()

    var body: some View {
        HSplitView {
            ConfigurationView(screenRecorder: screenRecorder, userStopped: $userStopped)
                .frame(minWidth: 280, maxWidth: 280)
                .disabled(disableInput)
            screenRecorder.capturePreview
                .frame(maxWidth: .infinity, maxHeight: .infinity)
                .aspectRatio(screenRecorder.contentSize, contentMode: .fit)
                .padding(8)
                .overlay {
                    if userStopped { stoppedOverlay }
                }
        }
        .overlay {
            if isUnauthorized { unauthorizedBanner }
        }
        .navigationTitle("Screen Capture Sample")
        .onAppear {
            Task {
                if await screenRecorder.canRecord {
                    await screenRecorder.start()
                } else {
                    // Without recording permission, show the banner and lock
                    // the configuration controls.
                    isUnauthorized = true
                    disableInput = true
                }
            }
        }
    }

    /// The dimmed "no entry" symbol shown when the user stops the capture.
    private var stoppedOverlay: some View {
        Image(systemName: "nosign")
            .font(.system(size: 250, weight: .bold))
            .foregroundColor(Color(white: 0.3, opacity: 1.0))
            .frame(maxWidth: .infinity, maxHeight: .infinity)
            .background(Color(white: 0.0, opacity: 0.5))
    }

    /// A red banner directing the user to grant screen-recording permission.
    private var unauthorizedBanner: some View {
        VStack {
            Spacer()
            VStack {
                Text("No screen recording permission.")
                    .font(.largeTitle)
                    .padding(.top)
                Text("Open System Settings and go to Privacy & Security > Screen Recording to grant permission.")
                    .font(.title2)
                    .padding(.bottom)
            }
            .frame(maxWidth: .infinity)
            .background(.red)
        }
    }
}

struct ContentView_Previews: PreviewProvider {
    static var previews: some View {
        ContentView()
    }
}
77 |
--------------------------------------------------------------------------------
/CaptureSample.xcodeproj/xcshareddata/xcschemes/CaptureSample.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
24 |
25 |
30 |
31 |
32 |
33 |
43 |
45 |
51 |
52 |
53 |
54 |
60 |
62 |
68 |
69 |
70 |
71 |
73 |
74 |
77 |
78 |
79 |
--------------------------------------------------------------------------------
/CaptureSample/PowerMeter.swift:
--------------------------------------------------------------------------------
1 | /*
2 | See the LICENSE.txt file for this sample’s licensing information.
3 |
4 | Abstract:
5 | An object that calculates the average and peak power levels for the captured audio samples.
6 | */
7 |
8 | import Foundation
9 | import AVFoundation
10 | import Accelerate
11 |
/// A snapshot of the average and peak display levels for the captured audio.
struct AudioLevels {
    /// A convenience value representing silence on both meters.
    static let zero = AudioLevels(level: 0, peakLevel: 0)
    /// The average (RMS-derived) display level.
    let level: Float
    /// The peak display level.
    let peakLevel: Float
}

// The protocol for the object that provides peak and average power levels to adopt.
protocol AudioLevelProvider {
    var levels: AudioLevels { get }
}
22 |
23 | class PowerMeter: AudioLevelProvider {
24 | private let kMinLevel: Float = 0.000_000_01 // -160 dB
25 |
26 | private struct PowerLevels {
27 | let average: Float
28 | let peak: Float
29 | }
30 |
31 | private var values = [PowerLevels]()
32 |
33 | private var meterTableAverage = MeterTable()
34 | private var meterTablePeak = MeterTable()
35 |
36 | var levels: AudioLevels {
37 | if values.isEmpty { return AudioLevels(level: 0.0, peakLevel: 0.0) }
38 | return AudioLevels(level: meterTableAverage.valueForPower(values[0].average),
39 | peakLevel: meterTablePeak.valueForPower(values[0].peak))
40 | }
41 |
42 | func processSilence() {
43 | if values.isEmpty { return }
44 | values = []
45 | }
46 |
47 | // Calculates the average (rms) and peak level of each channel in the PCM buffer and caches data.
48 | func process(buffer: AVAudioPCMBuffer) {
49 | var powerLevels = [PowerLevels]()
50 | let channelCount = Int(buffer.format.channelCount)
51 | let length = vDSP_Length(buffer.frameLength)
52 |
53 | if let floatData = buffer.floatChannelData {
54 | for channel in 0.., strideFrames: Int, length: vDSP_Length) -> PowerLevels {
82 | var max: Float = 0.0
83 | vDSP_maxv(data, strideFrames, &max, length)
84 | if max < kMinLevel {
85 | max = kMinLevel
86 | }
87 |
88 | var rms: Float = 0.0
89 | vDSP_rmsqv(data, strideFrames, &rms, length)
90 | if rms < kMinLevel {
91 | rms = kMinLevel
92 | }
93 |
94 | return PowerLevels(average: 20.0 * log10(rms), peak: 20.0 * log10(max))
95 | }
96 | }
97 |
98 | private struct MeterTable {
99 |
100 | // The decibel value of the minimum displayed amplitude.
101 | private let kMinDB: Float = -60.0
102 |
103 | // The table needs to be large enough so that there are no large gaps in the response.
104 | private let tableSize = 300
105 |
106 | private let scaleFactor: Float
107 | private var meterTable = [Float]()
108 |
109 | init() {
110 | let dbResolution = kMinDB / Float(tableSize - 1)
111 | scaleFactor = 1.0 / dbResolution
112 |
113 | // This controls the curvature of the response.
114 | // 2.0 is the square root, 3.0 is the cube root.
115 | let root: Float = 2.0
116 |
117 | let rroot = 1.0 / root
118 | let minAmp = dbToAmp(dBValue: kMinDB)
119 | let ampRange = 1.0 - minAmp
120 | let invAmpRange = 1.0 / ampRange
121 |
122 | for index in 0.. Float {
131 | return powf(10.0, 0.05 * dBValue)
132 | }
133 |
134 | func valueForPower(_ power: Float) -> Float {
135 | if power < kMinDB {
136 | return 0.0
137 | } else if power >= 0.0 {
138 | return 1.0
139 | } else {
140 | let index = Int(power) * Int(scaleFactor)
141 | return meterTable[index]
142 | }
143 | }
144 | }
145 |
--------------------------------------------------------------------------------
/CaptureSample/Views/PickerSettingsView.swift:
--------------------------------------------------------------------------------
/*
See the LICENSE.txt file for this sample’s licensing information.

Abstract:
A view for content picker configuration.
*/

import SwiftUI
import ScreenCaptureKit

/// A sheet that configures the system content-sharing picker: maximum stream
/// count, allowed picking modes, and excluded windows/bundle IDs.
struct PickerSettingsView: View {

    private let verticalLabelSpacing: CGFloat = 8

    @Environment(\.presentationMode) var presentation
    @ObservedObject var screenRecorder: ScreenRecorder
    @State private var bundleIDToExclude = ""
    @State private var maxStreamCount = 3

    /// Adds the typed bundle ID to the exclusion list, then clears the field.
    func addBundleID() {
        guard !bundleIDToExclude.isEmpty else { return }
        screenRecorder.excludedBundleIDsList.insert(bundleIDToExclude, at: 0)
        bundleIDToExclude = ""
    }

    /// Empties the bundle-ID exclusion list.
    func clearBundleIDs() {
        screenRecorder.excludedBundleIDsList = []
    }

    /// Returns a Boolean binding that inserts or removes `mode` from the
    /// recorder's allowed picking modes. Each Toggle below requires a
    /// `Binding<Bool>`.
    private func bindingForPickingModes(_ mode: SCContentSharingPickerMode) -> Binding<Bool> {
        Binding {
            screenRecorder.allowedPickingModes.contains(mode)
        } set: { isOn in
            if isOn {
                screenRecorder.allowedPickingModes.insert(mode)
            } else {
                screenRecorder.allowedPickingModes.remove(mode)
            }
        }
    }

    var body: some View {
        Group {
            VStack(alignment: .leading, spacing: verticalLabelSpacing) {

                // Picker property: Maximum stream count.
                HeaderView("Maximum Stream Count")
                TextField("Maximum Stream Count", value: $maxStreamCount, format: .number)
                    .frame(maxWidth: 150)
                    .onSubmit {
                        screenRecorder.maximumStreamCount = maxStreamCount
                    }

                // Picker configuration: Allowed picking modes.
                HeaderView("Allowed Picking Modes")
                Toggle("Single Window", isOn: bindingForPickingModes(.singleWindow))
                Toggle("Multiple Windows", isOn: bindingForPickingModes(.multipleWindows))
                Toggle("Single Application", isOn: bindingForPickingModes(.singleApplication))
                Toggle("Multiple Applications", isOn: bindingForPickingModes(.multipleApplications))
                Toggle("Single Display", isOn: bindingForPickingModes(.singleDisplay))

                // Picker configuration: Excluded Window IDs.
                HeaderView("Excluded Window IDs")
                Text("Select window below to exclude it:")
                    .font(.subheadline)
                    .foregroundStyle(.primary)
                List(screenRecorder.availableWindows, id: \.self, selection: $screenRecorder.excludedWindowIDsSelection) { window in
                    let windowID = Int(window.windowID)
                    // Derived from the selection set each time the row renders;
                    // mutating a view-local copy (as the previous code did with
                    // `.toggle()`) has no effect on rendering, so the button only
                    // updates the recorder's selection.
                    let windowIsExcluded = screenRecorder.excludedWindowIDsSelection.contains(windowID)
                    Button {
                        if !windowIsExcluded {
                            screenRecorder.excludedWindowIDsSelection.insert(windowID)
                        } else {
                            screenRecorder.excludedWindowIDsSelection.remove(windowID)
                        }
                    } label: {
                        Image(systemName: windowIsExcluded ? "x.circle.fill" : "checkmark.circle.fill")
                            .foregroundStyle(.white, windowIsExcluded ? .red : .green)
                        Text(window.displayName)
                    }
                    .cornerRadius(5)
                }
                .onAppear {
                    Task {
                        await screenRecorder.monitorAvailableContent()
                    }
                }

                // Picker configuration: Excluded Bundle IDs.
                HeaderView("Excluded Bundle IDs")
                HStack {
                    TextField("\(Bundle.main.bundleIdentifier!)", text: $bundleIDToExclude)
                        .frame(maxWidth: 300)
                        .onSubmit {
                            addBundleID()
                        }
                }
                if !screenRecorder.excludedBundleIDsList.isEmpty {
                    ScrollView {
                        BundleIDsListView(screenRecorder: screenRecorder)
                    }
                    .frame(maxWidth: 300, maxHeight: 50)
                    .background(MaterialView())
                    .clipShape(.rect(cornerSize: CGSize(width: 1, height: 1)))
                    Button("Clear All Bundle IDs") {
                        clearBundleIDs()
                    }
                }

                // Picker configuration: Allows Repicking.
                Toggle("Allows Repicking", isOn: $screenRecorder.allowsRepicking)
                    .toggleStyle(.switch)
            }
            // Dismiss the PickerSettingsView.
            HStack {
                Button {
                    presentation.wrappedValue.dismiss()
                } label: {
                    Text("Dismiss")
                }
            }
        }
        .padding()
    }
}
127 |
/// A list of the bundle IDs the user excluded from the picker, with a
/// per-row removal button.
struct BundleIDsListView: View {
    @ObservedObject var screenRecorder: ScreenRecorder

    var body: some View {
        Section {
            ForEach(Array(screenRecorder.excludedBundleIDsList.enumerated()), id: \.element) { index, element in
                HStack {
                    Text("\(element)")
                        .padding(.leading, 5)
                        .foregroundColor(.gray)
                    Spacer()
                    Button {
                        // The captured index can go stale if the list mutates after
                        // this row renders; validate it (falling back to a lookup)
                        // instead of removing blindly, which could trap or delete
                        // the wrong entry.
                        if screenRecorder.excludedBundleIDsList.indices.contains(index),
                           screenRecorder.excludedBundleIDsList[index] == element {
                            screenRecorder.excludedBundleIDsList.remove(at: index)
                        } else if let fallback = screenRecorder.excludedBundleIDsList.firstIndex(of: element) {
                            screenRecorder.excludedBundleIDsList.remove(at: fallback)
                        }
                    } label: {
                        Image(systemName: "xmark.circle")
                    }
                    .padding(.trailing, 10)
                    .foregroundStyle(.secondary)
                    .buttonStyle(.plain)
                }
            }
        }
    }
}
152 |
--------------------------------------------------------------------------------
/CaptureSample/Views/ConfigurationView.swift:
--------------------------------------------------------------------------------
/*
See the LICENSE.txt file for this sample’s licensing information.

Abstract:
A view that provides the UI to configure screen capture.
*/

import SwiftUI
import ScreenCaptureKit

/// The app's configuration user interface.
struct ConfigurationView: View {

    // Vertical gap between the Video, Audio, and Picker sections. The spacers
    // below use this constant instead of hard-coding the same value.
    private let sectionSpacing: CGFloat = 20
    private let verticalLabelSpacing: CGFloat = 8

    @StateObject private var audioPlayer = AudioPlayer()
    @ObservedObject var screenRecorder: ScreenRecorder
    @Binding var userStopped: Bool
    @State var showPickerSettingsView = false

    var body: some View {
        VStack {
            Form {
                HeaderView("Video")
                    .padding(EdgeInsets(top: 0, leading: 0, bottom: 1, trailing: 0))

                // A group that hides view labels.
                Group {
                    VStack(alignment: .leading, spacing: verticalLabelSpacing) {
                        Text("Capture Type")
                        Picker("Capture", selection: $screenRecorder.captureType) {
                            Text("Display")
                                .tag(ScreenRecorder.CaptureType.display)
                            Text("Window")
                                .tag(ScreenRecorder.CaptureType.window)
                        }
                    }

                    VStack(alignment: .leading, spacing: verticalLabelSpacing) {
                        Text("Screen Content")
                        switch screenRecorder.captureType {
                        case .display:
                            Picker("Display", selection: $screenRecorder.selectedDisplay) {
                                ForEach(screenRecorder.availableDisplays, id: \.self) { display in
                                    Text(display.displayName)
                                        .tag(SCDisplay?.some(display))
                                }
                            }

                        case .window:
                            Picker("Window", selection: $screenRecorder.selectedWindow) {
                                ForEach(screenRecorder.availableWindows, id: \.self) { window in
                                    Text(window.displayName)
                                        .tag(SCWindow?.some(window))
                                }
                            }
                        }
                    }
                }
                .labelsHidden()

                Toggle("Exclude sample app from stream", isOn: $screenRecorder.isAppExcluded)
                    .disabled(screenRecorder.captureType == .window)
                    .onChange(of: screenRecorder.isAppExcluded) {
                        // Capturing app audio is only possible when the sample is included in the stream.
                        // Ensure the audio stops playing if the user enables the "Exclude app from stream" checkbox.
                        if screenRecorder.isAppExcluded {
                            audioPlayer.stop()
                        }
                    }

                // Add some space between the Video and Audio sections.
                Spacer()
                    .frame(height: sectionSpacing)

                HeaderView("Audio")

                Toggle("Capture audio", isOn: $screenRecorder.isAudioCaptureEnabled)
                Toggle("Exclude app audio", isOn: $screenRecorder.isAppAudioExcluded)
                    .disabled(screenRecorder.isAppExcluded)
                AudioLevelsView(audioLevelsProvider: screenRecorder.audioLevelsProvider)
                Button {
                    if !audioPlayer.isPlaying {
                        audioPlayer.play()
                    } else {
                        audioPlayer.stop()
                    }
                } label: {
                    Text("\(!audioPlayer.isPlaying ? "Play" : "Stop") App Audio")
                }
                .disabled(screenRecorder.isAppExcluded)

                // Picker section.
                Spacer()
                    .frame(height: sectionSpacing)

                HeaderView("Content Picker")
                Toggle("Activate Picker", isOn: $screenRecorder.isPickerActive)
                Group {
                    Button {
                        showPickerSettingsView = true
                    } label: {
                        Image(systemName: "text.badge.plus")
                        Text("Picker Configuration")
                    }
                    Button {
                        screenRecorder.presentPicker()
                    } label: {
                        Image(systemName: "sparkles.tv")
                        Text("Present Picker")
                    }
                }
                .disabled(!screenRecorder.isPickerActive)
            }
            .padding()

            Spacer()
            HStack {
                Button {
                    Task { await screenRecorder.start() }
                    // Fades the paused screen out.
                    withAnimation(Animation.easeOut(duration: 0.25)) {
                        userStopped = false
                    }
                } label: {
                    Text("Start Capture")
                }
                .disabled(screenRecorder.isRunning)
                Button {
                    Task { await screenRecorder.stop() }
                    // Fades the paused screen in.
                    withAnimation(Animation.easeOut(duration: 0.25)) {
                        userStopped = true
                    }
                } label: {
                    Text("Stop Capture")
                }
                .disabled(!screenRecorder.isRunning)
            }
            .frame(maxWidth: .infinity, minHeight: 60)
            .onChange(of: screenRecorder.pickerUpdate) {
                // Restart a stopped capture when the picker selection changes.
                // (The original had an empty `else` branch; a running stream
                // needs no action here.)
                if !screenRecorder.isRunning {
                    Task { await screenRecorder.start() }
                    // Fades the paused screen out.
                    withAnimation(Animation.easeOut(duration: 0.25)) {
                        userStopped = false
                    }
                }
            }
        }
        .background(MaterialView())
        .sheet(isPresented: $showPickerSettingsView) {
            PickerSettingsView(screenRecorder: screenRecorder)
                .frame(minWidth: 500.0, maxWidth: .infinity, minHeight: 600.0, maxHeight: .infinity)
                .padding(.top, 7)
                .padding(.leading, 25)
        }
    }
}
167 |
/// A view that displays a styled header for the Video and Audio sections.
struct HeaderView: View {

    private let title: String
    private let headerOffset: CGFloat = 10.0

    init(_ title: String) {
        self.title = title
    }

    var body: some View {
        Text(title)
            .font(.headline)
            .foregroundColor(.secondary)
            // Nudge the header relative to the leading alignment guide.
            .alignmentGuide(.leading) { _ in headerOffset }
    }
}
185 |
--------------------------------------------------------------------------------
/CaptureSample/CaptureEngine.swift:
--------------------------------------------------------------------------------
1 | /*
2 | See the LICENSE.txt file for this sample’s licensing information.
3 |
4 | Abstract:
5 | An object that captures a stream of captured sample buffers containing screen and audio content.
6 | */
7 | import Foundation
8 | import AVFAudio
9 | import ScreenCaptureKit
10 | import OSLog
11 | import Combine
12 |
/// A structure that contains the video data to render.
struct CapturedFrame {
    /// A placeholder frame with no backing surface.
    static let invalid = CapturedFrame(surface: nil, contentRect: .zero, contentScale: 0, scaleFactor: 0)

    /// The surface holding the frame's image data, if any.
    let surface: IOSurface?
    let contentRect: CGRect
    let contentScale: CGFloat
    let scaleFactor: CGFloat
    /// Convenience accessor for the content rectangle's size.
    var size: CGSize { contentRect.size }
}
23 |
/// An object that wraps an instance of `SCStream`, and returns its results as an `AsyncThrowingStream`.
/// (The `AsyncThrowingStream<CapturedFrame, Error>` generic parameters were
/// stripped from this copy of the file and are restored here.)
class CaptureEngine: NSObject, @unchecked Sendable {

    private let logger = Logger()

    private(set) var stream: SCStream?
    private var streamOutput: CaptureEngineStreamOutput?
    // Separate queues keep video and audio sample handling independent.
    private let videoSampleBufferQueue = DispatchQueue(label: "com.example.apple-samplecode.VideoSampleBufferQueue")
    private let audioSampleBufferQueue = DispatchQueue(label: "com.example.apple-samplecode.AudioSampleBufferQueue")

    // Performs average and peak power calculations on the audio samples.
    private let powerMeter = PowerMeter()
    var audioLevels: AudioLevels { powerMeter.levels }

    // Store the startCapture continuation, so that you can cancel it when you call stopCapture().
    private var continuation: AsyncThrowingStream<CapturedFrame, Error>.Continuation?

    /// - Tag: StartCapture
    func startCapture(configuration: SCStreamConfiguration, filter: SCContentFilter) -> AsyncThrowingStream<CapturedFrame, Error> {
        AsyncThrowingStream<CapturedFrame, Error> { continuation in
            // Keep the continuation so stopCapture() can finish the stream.
            // (The comment above promised this, but the original never assigned it,
            // leaving `continuation?.finish()` in stopCapture a no-op.)
            self.continuation = continuation
            // The stream output object. Avoid reassigning it to a new object every time startCapture is called.
            let streamOutput = CaptureEngineStreamOutput(continuation: continuation)
            self.streamOutput = streamOutput
            streamOutput.capturedFrameHandler = { continuation.yield($0) }
            streamOutput.pcmBufferHandler = { self.powerMeter.process(buffer: $0) }

            do {
                stream = SCStream(filter: filter, configuration: configuration, delegate: streamOutput)

                // Add a stream output to capture screen content.
                try stream?.addStreamOutput(streamOutput, type: .screen, sampleHandlerQueue: videoSampleBufferQueue)
                try stream?.addStreamOutput(streamOutput, type: .audio, sampleHandlerQueue: audioSampleBufferQueue)
                stream?.startCapture()
            } catch {
                continuation.finish(throwing: error)
            }
        }
    }

    /// Stops the stream and finishes the frame sequence.
    func stopCapture() async {
        do {
            try await stream?.stopCapture()
            continuation?.finish()
        } catch {
            continuation?.finish(throwing: error)
        }
        // Clear cached levels so the UI doesn't show a stale meter after stopping.
        powerMeter.processSilence()
    }

    /// - Tag: UpdateStreamConfiguration
    func update(configuration: SCStreamConfiguration, filter: SCContentFilter) async {
        do {
            try await stream?.updateConfiguration(configuration)
            try await stream?.updateContentFilter(filter)
        } catch {
            logger.error("Failed to update the stream session: \(String(describing: error))")
        }
    }
}
83 |
/// A class that handles output from an SCStream, and handles stream errors.
private class CaptureEngineStreamOutput: NSObject, SCStreamOutput, SCStreamDelegate {

    // The engine installs this closure to receive decoded audio buffers.
    var pcmBufferHandler: ((AVAudioPCMBuffer) -> Void)?
    // The engine installs this closure to receive completed video frames.
    var capturedFrameHandler: ((CapturedFrame) -> Void)?

    // Store the startCapture continuation, so you can cancel it if an error occurs.
    private var continuation: AsyncThrowingStream<CapturedFrame, Error>.Continuation?

    init(continuation: AsyncThrowingStream<CapturedFrame, Error>.Continuation?) {
        self.continuation = continuation
    }

    /// - Tag: DidOutputSampleBuffer
    /// Receives each captured sample buffer and routes it by media type.
    func stream(_ stream: SCStream, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of outputType: SCStreamOutputType) {

        // Return early if the sample buffer is invalid.
        guard sampleBuffer.isValid else { return }

        // Determine which type of data the sample buffer contains.
        switch outputType {
        case .screen:
            // Create a CapturedFrame structure for a video sample buffer.
            guard let frame = createFrame(for: sampleBuffer) else { return }
            capturedFrameHandler?(frame)
        case .audio:
            // Process audio as an AVAudioPCMBuffer for level calculation.
            handleAudio(for: sampleBuffer)
        @unknown default:
            fatalError("Encountered unknown stream output type: \(outputType)")
        }
    }

    /// Create a `CapturedFrame` for the video sample buffer.
    /// Returns nil when the buffer lacks a complete frame or required metadata.
    private func createFrame(for sampleBuffer: CMSampleBuffer) -> CapturedFrame? {

        // Retrieve the array of metadata attachments from the sample buffer.
        guard let attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer,
                                                                             createIfNecessary: false) as? [[SCStreamFrameInfo: Any]],
              let attachments = attachmentsArray.first else { return nil }

        // Validate the status of the frame. If it isn't `.complete`, return nil.
        guard let statusRawValue = attachments[SCStreamFrameInfo.status] as? Int,
              let status = SCFrameStatus(rawValue: statusRawValue),
              status == .complete else { return nil }

        // Get the pixel buffer that contains the image data.
        guard let pixelBuffer = sampleBuffer.imageBuffer else { return nil }

        // Get the backing IOSurface.
        guard let surfaceRef = CVPixelBufferGetIOSurface(pixelBuffer)?.takeUnretainedValue() else { return nil }
        // Bridge the Core Foundation IOSurfaceRef to its toll-free Objective-C counterpart.
        let surface = unsafeBitCast(surfaceRef, to: IOSurface.self)

        // Retrieve the content rectangle, scale, and scale factor.
        guard let contentRectDict = attachments[.contentRect],
              let contentRect = CGRect(dictionaryRepresentation: contentRectDict as! CFDictionary),
              let contentScale = attachments[.contentScale] as? CGFloat,
              let scaleFactor = attachments[.scaleFactor] as? CGFloat else { return nil }

        // Create a new frame with the relevant data.
        let frame = CapturedFrame(surface: surface,
                                  contentRect: contentRect,
                                  contentScale: contentScale,
                                  scaleFactor: scaleFactor)
        return frame
    }

    // NOTE: The `Void?` return comes from `try?` around a Void-returning throwing call;
    // callers ignore the result, using it only to swallow conversion failures.
    private func handleAudio(for buffer: CMSampleBuffer) -> Void? {
        // Create an AVAudioPCMBuffer from an audio sample buffer.
        try? buffer.withAudioBufferList { audioBufferList, blockBuffer in
            guard let description = buffer.formatDescription?.audioStreamBasicDescription,
                  let format = AVAudioFormat(standardFormatWithSampleRate: description.mSampleRate, channels: description.mChannelsPerFrame),
                  let samples = AVAudioPCMBuffer(pcmFormat: format, bufferListNoCopy: audioBufferList.unsafePointer)
            else { return }
            pcmBufferHandler?(samples)
        }
    }

    /// Propagates a stream failure to the frame-stream consumer.
    func stream(_ stream: SCStream, didStopWithError error: Error) {
        continuation?.finish(throwing: error)
    }
}
166 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Capturing screen content in macOS
2 | Stream desktop content like displays, apps, and windows by adopting screen capture in your app.
3 |
4 | ## Overview
5 | This sample shows how to add high-performance screen capture to your Mac app by using [`ScreenCaptureKit`][1]. The sample explores how to create content filters to capture the displays, apps, and windows you choose. It then shows how to configure your stream output, retrieve video frames and audio samples, and update a running stream.
6 |
7 | - Note: This sample code project is associated with WWDC22 [Session 10156: Meet ScreenCaptureKit](https://developer.apple.com/wwdc22/10156) and [Session 10155: Take ScreenCaptureKit to the next level](https://developer.apple.com/wwdc22/10155)
8 |
9 | ## Configure the sample code project
10 | To run this sample app, you need the following:
11 |
12 | - A Mac with macOS 13 beta or later
13 | - Xcode 14 beta or later
14 |
15 | The first time you run this sample, the system prompts you to grant the app screen recording permission. After you grant permission, restart the app to enable capture.
16 |
17 | ## Create a content filter
18 | Displays, running apps, and windows are the shareable content on a device. The sample uses the [`SCShareableContent`][2] class to retrieve these items in the form of [`SCDisplay`][3], [`SCRunningApplication`][4], and [`SCWindow`][5] instances, respectively.
19 |
20 | ``` swift
21 | // Retrieve the available screen content to capture.
22 | let availableContent = try await SCShareableContent.excludingDesktopWindows(false,
23 | onScreenWindowsOnly: true)
24 | ```
25 | [View in Source][6]
26 |
27 | Before the sample begins capture, it creates an [`SCContentFilter`][7] object to specify the content to capture. The sample provides two options that allow for capturing either a single window or an entire display. When the capture type is set to capture a window, the app creates a content filter that only includes that window.
28 |
29 | ``` swift
30 | // Create a content filter that includes a single window.
31 | filter = SCContentFilter(desktopIndependentWindow: window)
32 | ```
33 | [View in Source][8]
34 |
35 | When a user specifies to capture the entire display, the sample creates a filter to capture only content from the main display. To illustrate filtering a running app, the sample contains a toggle to specify whether to exclude the sample app from the stream.
36 |
37 | ``` swift
38 | var excludedApps = [SCRunningApplication]()
39 | // If a user chooses to exclude the app from the stream,
40 | // exclude it by matching its bundle identifier.
41 | if isAppExcluded {
42 | excludedApps = availableApps.filter { app in
43 | Bundle.main.bundleIdentifier == app.bundleIdentifier
44 | }
45 | }
46 | // Create a content filter with excluded apps.
47 | filter = SCContentFilter(display: display,
48 | excludingApplications: excludedApps,
49 | exceptingWindows: [])
50 | ```
51 | [View in Source][9]
52 |
53 | ## Create a stream configuration
54 | An [`SCStreamConfiguration`][10] object provides properties to configure the stream’s output size, pixel format, audio capture settings, and more. The app’s configuration throttles frame updates to 60 fps and queues five frames. Specifying more frames uses more memory, but may allow for processing frame data without stalling the display stream. The default value is three frames and shouldn't exceed eight.
55 |
56 | ``` swift
57 | let streamConfig = SCStreamConfiguration()
58 |
59 | // Configure audio capture.
60 | streamConfig.capturesAudio = isAudioCaptureEnabled
61 | streamConfig.excludesCurrentProcessAudio = isAppAudioExcluded
62 |
63 | // Configure the display content width and height.
64 | if captureType == .display, let display = selectedDisplay {
65 | streamConfig.width = display.width * scaleFactor
66 | streamConfig.height = display.height * scaleFactor
67 | }
68 |
69 | // Configure the window content width and height.
70 | if captureType == .window, let window = selectedWindow {
71 | streamConfig.width = Int(window.frame.width) * 2
72 | streamConfig.height = Int(window.frame.height) * 2
73 | }
74 |
75 | // Set the capture interval at 60 fps.
76 | streamConfig.minimumFrameInterval = CMTime(value: 1, timescale: 60)
77 |
78 | // Increase the depth of the frame queue to ensure high fps at the expense of increasing
79 | // the memory footprint of WindowServer.
80 | streamConfig.queueDepth = 5
81 | ```
82 | [View in Source][11]
83 |
84 | ## Start the capture session
85 | The sample uses the content filter and stream configuration to initialize a new instance of `SCStream`. To retrieve audio and video sample data, the app adds stream outputs that capture media of the specified type. When the stream captures new sample buffers, it delivers them to its stream output object on the indicated dispatch queues.
86 |
87 | ``` swift
88 | stream = SCStream(filter: filter, configuration: configuration, delegate: streamOutput)
89 |
90 | // Add a stream output to capture screen content.
91 | try stream?.addStreamOutput(streamOutput, type: .screen, sampleHandlerQueue: videoSampleBufferQueue)
92 | try stream?.addStreamOutput(streamOutput, type: .audio, sampleHandlerQueue: audioSampleBufferQueue)
93 | stream?.startCapture()
94 | ```
95 | [View in Source][12]
96 |
97 | After the stream starts, further changes to its configuration and content filter don’t require restarting it. Instead, after you update the capture configuration in the user interface, the sample creates new stream configuration and content filter objects and applies them to the running stream to update its state.
98 | ``` swift
99 | try await stream?.updateConfiguration(configuration)
100 | try await stream?.updateContentFilter(filter)
101 | ```
102 | [View in Source][13]
103 |
104 | ## Process the output
105 | When a stream captures a new audio or video sample buffer, it calls the stream output’s [stream(\_:didOutputSampleBuffer:of:)][14] method, passing it the captured data and an indicator of its type. The stream output evaluates and processes the sample buffer as shown below.
106 | ``` swift
107 | func stream(_ stream: SCStream, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of outputType: SCStreamOutputType) {
108 |
109 | // Return early if the sample buffer is invalid.
110 | guard sampleBuffer.isValid else { return }
111 |
112 | // Determine which type of data the sample buffer contains.
113 | switch outputType {
114 | case .screen:
115 | // Create a CapturedFrame structure for a video sample buffer.
116 | guard let frame = createFrame(for: sampleBuffer) else { return }
117 | capturedFrameHandler?(frame)
118 | case .audio:
119 | // Process audio as an AVAudioPCMBuffer for level calculation.
120 | handleAudio(for: sampleBuffer)
121 | @unknown default:
122 | fatalError("Encountered unknown stream output type: \(outputType)")
123 | }
124 | }
125 | ```
126 | [View in Source][27]
127 |
128 | ## Process a video sample buffer
129 | If the sample buffer contains video data, it retrieves the sample buffer attachments that describe the output video frame.
130 |
131 | ``` swift
132 | // Retrieve the array of metadata attachments from the sample buffer.
133 | guard let attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer,
134 | createIfNecessary: false) as? [[SCStreamFrameInfo: Any]],
135 | let attachments = attachmentsArray.first else { return nil }
136 | ```
137 | [View in Source][15]
138 |
139 | An [`SCStreamFrameInfo`][16] structure defines dictionary keys that the sample uses to retrieve metadata attached to a sample buffer. Metadata includes information about the frame's display time, scale factor, status, and more. To determine whether a frame is available for processing, the sample inspects the status for [`SCFrameStatus.complete`][17].
140 |
141 | ``` swift
142 | // Validate the status of the frame. If it isn't `.complete`, return nil.
143 | guard let statusRawValue = attachments[SCStreamFrameInfo.status] as? Int,
144 | let status = SCFrameStatus(rawValue: statusRawValue),
145 | status == .complete else { return nil }
146 | ```
147 | [View in Source][18]
148 |
149 | The sample buffer wraps a [`CVPixelBuffer`][19] that’s backed by an [`IOSurface`][20]. The sample casts the surface reference to an `IOSurface` that it later sets as the layer content of an [`NSView`][21].
150 |
151 | ``` swift
152 | // Get the pixel buffer that contains the image data.
153 | guard let pixelBuffer = sampleBuffer.imageBuffer else { return nil }
154 |
155 | // Get the backing IOSurface.
156 | guard let surfaceRef = CVPixelBufferGetIOSurface(pixelBuffer)?.takeUnretainedValue() else { return nil }
157 | let surface = unsafeBitCast(surfaceRef, to: IOSurface.self)
158 |
159 | // Retrieve the content rectangle, scale, and scale factor.
160 | guard let contentRectDict = attachments[.contentRect],
161 | let contentRect = CGRect(dictionaryRepresentation: contentRectDict as! CFDictionary),
162 | let contentScale = attachments[.contentScale] as? CGFloat,
163 | let scaleFactor = attachments[.scaleFactor] as? CGFloat else { return nil }
164 |
165 | // Create a new frame with the relevant data.
166 | let frame = CapturedFrame(surface: surface,
167 | contentRect: contentRect,
168 | contentScale: contentScale,
169 | scaleFactor: scaleFactor)
170 | ```
171 | [View in Source][22]
172 |
173 | ## Process an audio sample buffer
174 |
175 | If the sample buffer contains audio, it processes the data as an [AudioBufferList][23] as shown below.
176 |
177 | ``` swift
178 | private func handleAudio(for buffer: CMSampleBuffer) -> Void? {
179 | // Create an AVAudioPCMBuffer from an audio sample buffer.
180 | try? buffer.withAudioBufferList { audioBufferList, blockBuffer in
181 | guard let description = buffer.formatDescription?.audioStreamBasicDescription,
182 | let format = AVAudioFormat(standardFormatWithSampleRate: description.mSampleRate, channels: description.mChannelsPerFrame),
183 | let samples = AVAudioPCMBuffer(pcmFormat: format, bufferListNoCopy: audioBufferList.unsafePointer)
184 | else { return }
185 | pcmBufferHandler?(samples)
186 | }
187 | }
188 | ```
189 | [View in Source][24]
190 |
191 | The app retrieves the audio stream basic description that it uses to create an [AVAudioFormat][25]. It then uses the format and the audio buffer list to create a new instance of [AVAudioPCMBuffer][26]. If you enable audio capture in the user interface, the sample uses the buffer to calculate average levels for the captured audio to display in a simple level meter.
192 |
193 | - Important: When calling methods like [`withAudioBufferList(blockBufferMemoryAllocator:flags:body:)`][28], letting an instance of an `Unsafe` type or the pointer it refers to escape the closure can cause undefined behavior. For more information on working with unsafe instances in Swift, see [UnsafePointer][29] and [WWDC20 - 10648: Unsafe Swift][30].
194 |
195 | ## Manage capture with the screen capture picker
196 |
197 | macOS can manage a capture filter directly through [`SCContentSharingPicker.shared`][31]. Selecting the Activate Picker toggle in the app sets the `ScreenRecorder.isPickerActive` property.
198 |
199 | ``` swift
200 | @Published var isPickerActive = false {
201 | didSet {
202 | if isPickerActive {
203 | logger.info("Picker is active")
204 | self.initializePickerConfiguration()
205 | self.screenRecorderPicker.isActive = true
206 | self.screenRecorderPicker.add(self)
207 | } else {
208 | logger.info("Picker is inactive")
209 | self.screenRecorderPicker.isActive = false
210 | self.screenRecorderPicker.remove(self)
211 | }
212 | }
213 | }
214 | ```
215 | [View in Source][32]
216 |
217 | In order to get messages from the system picker, the `ScreenRecorder` class conforms to [`SCContentSharingPickerObserver`][33] and is added as an observer for the shared content picker. When the app user changes their streaming source through the picker, or ends streaming, the app handles it in the following code.
218 |
219 | ``` swift
220 | nonisolated func contentSharingPicker(_ picker: SCContentSharingPicker, didCancelFor stream: SCStream?) {
221 | logger.info("Picker canceled for stream \(stream)")
222 | }
223 |
224 | nonisolated func contentSharingPicker(_ picker: SCContentSharingPicker, didUpdateWith filter: SCContentFilter, for stream: SCStream?) {
225 | Task { @MainActor in
226 | logger.info("Picker updated with filter=\(filter) for stream=\(stream)")
227 | pickerContentFilter = filter
228 | shouldUsePickerFilter = true
229 | setPickerUpdate(true)
230 | updateEngine()
231 | }
232 | }
233 | ```
234 | [View in Source][34]
235 |
236 | [1]: https://developer.apple.com/documentation/screencapturekit
237 | [2]: https://developer.apple.com/documentation/screencapturekit/scshareablecontent
238 | [3]: https://developer.apple.com/documentation/screencapturekit/scdisplay
239 | [4]: https://developer.apple.com/documentation/screencapturekit/scrunningapplication
240 | [5]: https://developer.apple.com/documentation/screencapturekit/scwindow
241 | [6]: x-source-tag://GetAvailableContent
242 | [7]: https://developer.apple.com/documentation/screencapturekit/sccontentfilter
243 | [8]: x-source-tag://UpdateFilter
244 | [9]: x-source-tag://UpdateFilter
245 | [10]: https://developer.apple.com/documentation/screencapturekit/scstreamconfiguration
246 | [11]: x-source-tag://CreateStreamConfiguration
247 | [12]: x-source-tag://StartCapture
248 | [13]: x-source-tag://UpdateStreamConfiguration
249 | [14]: https://developer.apple.com/documentation/screencapturekit/scstreamoutput/3928182-stream
250 | [15]: x-source-tag://DidOutputSampleBuffer
251 | [16]: https://developer.apple.com/documentation/screencapturekit/scstreamframeinfo
252 | [17]: https://developer.apple.com/documentation/screencapturekit/scframestatus/complete
253 | [18]: x-source-tag://DidOutputSampleBuffer
254 | [19]: https://developer.apple.com/documentation/corevideo/cvpixelbuffer-q2e
255 | [20]: https://developer.apple.com/documentation/iosurface
256 | [21]: https://developer.apple.com/documentation/appkit/nsview
257 | [22]: x-source-tag://DidOutputSampleBuffer
258 | [23]: https://developer.apple.com/documentation/coreaudiotypes/audiobufferlist
259 | [24]: x-source-tag://ProcessAudioSampleBuffer
260 | [25]: https://developer.apple.com/documentation/avfaudio/avaudioformat
261 | [26]: https://developer.apple.com/documentation/avfaudio/avaudiopcmbuffer
262 | [27]: x-source-tag://DidOutputSampleBuffer
263 | [28]: https://developer.apple.com/documentation/coremedia/cmsamplebuffer/3242577-withaudiobufferlist
264 | [29]: https://developer.apple.com/documentation/swift/unsafepointer
265 | [30]: https://developer.apple.com/videos/play/wwdc2020/10648
266 | [31]: https://developer.apple.com/documentation/screencapturekit/sccontentsharingpicker/4161033-shared
267 | [32]: x-source-tag://TogglePicker
268 | [33]: https://developer.apple.com/documentation/screencapturekit/sccontentsharingpickerobserver
269 |
270 | [34]: x-source-tag://HandlePicker
271 |
--------------------------------------------------------------------------------
/CaptureSample/ScreenRecorder.swift:
--------------------------------------------------------------------------------
1 | /*
2 | See the LICENSE.txt file for this sample’s licensing information.
3 |
4 | Abstract:
5 | A model object that provides the interface to capture screen content and system audio.
6 | */
7 | import Foundation
8 | import ScreenCaptureKit
9 | import Combine
10 | import OSLog
11 | import SwiftUI
12 |
/// A provider of audio levels from the captured samples.
class AudioLevelsProvider: ObservableObject {
    // The most recent audio levels; ScreenRecorder refreshes this on a timer while metering runs.
    @Published var audioLevels = AudioLevels.zero
}
17 |
@MainActor
class ScreenRecorder: NSObject,
                      ObservableObject,
                      SCContentSharingPickerObserver {
    /// The supported capture types.
    enum CaptureType {
        case display
        case window
    }

    private let logger = Logger()

    /// Whether a capture stream is currently running.
    @Published var isRunning = false

    // MARK: - Video Properties

    /// The kind of content to capture; changing it reconfigures the running stream.
    @Published var captureType: CaptureType = .display {
        didSet { updateEngine() }
    }

    @Published var selectedDisplay: SCDisplay? {
        didSet { updateEngine() }
    }

    @Published var selectedWindow: SCWindow? {
        didSet { updateEngine() }
    }

    /// Whether to exclude this app's own windows from display capture.
    @Published var isAppExcluded = true {
        didSet { updateEngine() }
    }

    // MARK: - SCContentSharingPicker Properties
    @Published var maximumStreamCount = Int() {
        didSet { updatePickerConfiguration() }
    }

    // Window IDs the system picker excludes from its content list.
    @Published var excludedWindowIDsSelection = Set<Int>() {
        didSet { updatePickerConfiguration() }
    }

    @Published var excludedBundleIDsList = [String]() {
        didSet { updatePickerConfiguration() }
    }

    @Published var allowsRepicking = true {
        didSet { updatePickerConfiguration() }
    }

    @Published var allowedPickingModes = SCContentSharingPickerMode() {
        didSet { updatePickerConfiguration() }
    }
    @Published var contentSize = CGSize(width: 1, height: 1)
    private var scaleFactor: Int { Int(NSScreen.main?.backingScaleFactor ?? 2) }

    /// A view that renders the screen content.
    lazy var capturePreview: CapturePreview = {
        CapturePreview()
    }()
    private let screenRecorderPicker = SCContentSharingPicker.shared
    private var availableApps = [SCRunningApplication]()
    @Published private(set) var availableDisplays = [SCDisplay]()
    @Published private(set) var availableWindows = [SCWindow]()
    @Published private(set) var pickerUpdate: Bool = false // Update the running stream immediately with picker selection
    private var pickerContentFilter: SCContentFilter?
    private var shouldUsePickerFilter = false
    /// - Tag: TogglePicker
    @Published var isPickerActive = false {
        didSet {
            if isPickerActive {
                logger.info("Picker is active")
                self.initializePickerConfiguration()
                self.screenRecorderPicker.isActive = true
                self.screenRecorderPicker.add(self)
            } else {
                logger.info("Picker is inactive")
                self.screenRecorderPicker.isActive = false
                self.screenRecorderPicker.remove(self)
            }
        }
    }

    // MARK: - Audio Properties
    @Published var isAudioCaptureEnabled = true {
        didSet {
            updateEngine()
            if isAudioCaptureEnabled {
                startAudioMetering()
            } else {
                stopAudioMetering()
            }
        }
    }
    @Published var isAppAudioExcluded = false { didSet { updateEngine() } }
    @Published private(set) var audioLevelsProvider = AudioLevelsProvider()
    // A value that specifies how often to retrieve calculated audio levels.
    private let audioLevelRefreshRate: TimeInterval = 0.1
    private var audioMeterCancellable: AnyCancellable?

    // The object that manages the SCStream.
    private let captureEngine = CaptureEngine()

    private var isSetup = false

    // Combine subscribers.
    private var subscriptions = Set<AnyCancellable>()

    /// Whether the app holds screen recording permission.
    var canRecord: Bool {
        get async {
            do {
                // If the app doesn't have screen recording permission, this call generates an exception.
                try await SCShareableContent.excludingDesktopWindows(false, onScreenWindowsOnly: true)
                return true
            } catch {
                return false
            }
        }
    }

    /// Polls the shareable-content lists every few seconds while the picker is inactive.
    func monitorAvailableContent() async {
        guard !isSetup || !isPickerActive else { return }
        // Refresh the lists of capturable content.
        await self.refreshAvailableContent()
        Timer.publish(every: 3, on: .main, in: .common).autoconnect().sink { [weak self] _ in
            guard let self = self else { return }
            Task {
                await self.refreshAvailableContent()
            }
        }
        .store(in: &subscriptions)
    }

    /// Starts capturing screen content.
    func start() async {
        // Exit early if already running.
        guard !isRunning else { return }

        if !isSetup {
            // Starting polling for available screen content.
            await monitorAvailableContent()
            isSetup = true
        }

        // If the user enables audio capture, start monitoring the audio stream.
        if isAudioCaptureEnabled {
            startAudioMetering()
        }

        do {
            let config = streamConfiguration
            let filter = contentFilter
            // Update the running state.
            isRunning = true
            setPickerUpdate(false)
            // Start the stream and await new video frames.
            for try await frame in captureEngine.startCapture(configuration: config, filter: filter) {
                capturePreview.updateFrame(frame)
                if contentSize != frame.size {
                    // Update the content size if it changed.
                    contentSize = frame.size
                }
            }
        } catch {
            logger.error("\(error.localizedDescription)")
            // Unable to start the stream. Set the running state to false.
            isRunning = false
        }
    }

    /// Stops capturing screen content.
    func stop() async {
        guard isRunning else { return }
        await captureEngine.stopCapture()
        stopAudioMetering()
        isRunning = false
    }

    private func startAudioMetering() {
        // Refresh the published audio levels at the configured metering rate.
        audioMeterCancellable = Timer.publish(every: audioLevelRefreshRate, on: .main, in: .common).autoconnect().sink { [weak self] _ in
            guard let self = self else { return }
            self.audioLevelsProvider.audioLevels = self.captureEngine.audioLevels
        }
    }

    private func stopAudioMetering() {
        audioMeterCancellable?.cancel()
        audioLevelsProvider.audioLevels = AudioLevels.zero
    }

    /// - Tag: UpdateCaptureConfig
    /// Pushes the current configuration and filter to the running capture engine.
    private func updateEngine() {
        guard isRunning else { return }
        Task {
            let filter = contentFilter
            await captureEngine.update(configuration: streamConfiguration, filter: filter)
            setPickerUpdate(false)
        }
    }

    // MARK: - Content-sharing Picker
    private func initializePickerConfiguration() {
        var initialConfiguration = SCContentSharingPickerConfiguration()
        // Set the allowedPickerModes from the app.
        initialConfiguration.allowedPickerModes = [
            .singleWindow,
            .multipleWindows,
            .singleApplication,
            .multipleApplications,
            .singleDisplay
        ]
        self.allowedPickingModes = initialConfiguration.allowedPickerModes
    }

    private func updatePickerConfiguration() {
        self.screenRecorderPicker.maximumStreamCount = maximumStreamCount
        // Update the default picker configuration to pass to Control Center.
        self.screenRecorderPicker.defaultConfiguration = pickerConfiguration
    }

    /// - Tag: HandlePicker
    nonisolated func contentSharingPicker(_ picker: SCContentSharingPicker, didCancelFor stream: SCStream?) {
        logger.info("Picker canceled for stream \(stream)")
    }

    nonisolated func contentSharingPicker(_ picker: SCContentSharingPicker, didUpdateWith filter: SCContentFilter, for stream: SCStream?) {
        Task { @MainActor in
            logger.info("Picker updated with filter=\(filter) for stream=\(stream)")
            pickerContentFilter = filter
            shouldUsePickerFilter = true
            setPickerUpdate(true)
            updateEngine()
        }
    }

    nonisolated func contentSharingPickerStartDidFailWithError(_ error: Error) {
        logger.error("Error starting picker! \(error)")
    }

    func setPickerUpdate(_ update: Bool) {
        Task { @MainActor in
            self.pickerUpdate = update
        }
    }

    /// Presents the system content-sharing picker, attached to the running stream when one exists.
    func presentPicker() {
        if let stream = captureEngine.stream {
            SCContentSharingPicker.shared.present(for: stream)
        } else {
            SCContentSharingPicker.shared.present()
        }
    }

    private var pickerConfiguration: SCContentSharingPickerConfiguration {
        var config = SCContentSharingPickerConfiguration()
        config.allowedPickerModes = allowedPickingModes
        config.excludedWindowIDs = Array(excludedWindowIDsSelection)
        config.excludedBundleIDs = excludedBundleIDsList
        config.allowsChangingSelectedContent = allowsRepicking
        return config
    }

    /// - Tag: UpdateFilter
    private var contentFilter: SCContentFilter {
        var filter: SCContentFilter
        switch captureType {
        case .display:
            guard let display = selectedDisplay else { fatalError("No display selected.") }
            var excludedApps = [SCRunningApplication]()
            // If a user chooses to exclude the app from the stream,
            // exclude it by matching its bundle identifier.
            if isAppExcluded {
                excludedApps = availableApps.filter { app in
                    Bundle.main.bundleIdentifier == app.bundleIdentifier
                }
            }
            // Create a content filter with excluded apps.
            filter = SCContentFilter(display: display,
                                     excludingApplications: excludedApps,
                                     exceptingWindows: [])
        case .window:
            guard let window = selectedWindow else { fatalError("No window selected.") }

            // Create a content filter that includes a single window.
            filter = SCContentFilter(desktopIndependentWindow: window)
        }
        // Use filter from content picker, if active.
        if shouldUsePickerFilter {
            guard let pickerFilter = pickerContentFilter else { return filter }
            filter = pickerFilter
            shouldUsePickerFilter = false
        }
        return filter
    }

    private var streamConfiguration: SCStreamConfiguration {

        let streamConfig = SCStreamConfiguration()

        // Configure audio capture.
        streamConfig.capturesAudio = isAudioCaptureEnabled
        streamConfig.excludesCurrentProcessAudio = isAppAudioExcluded

        // Configure the display content width and height.
        if captureType == .display, let display = selectedDisplay {
            streamConfig.width = display.width * scaleFactor
            streamConfig.height = display.height * scaleFactor
        }

        // Configure the window content width and height.
        if captureType == .window, let window = selectedWindow {
            streamConfig.width = Int(window.frame.width) * 2
            streamConfig.height = Int(window.frame.height) * 2
        }

        // Set the capture interval at 60 fps.
        streamConfig.minimumFrameInterval = CMTime(value: 1, timescale: 60)

        // Increase the depth of the frame queue to ensure high fps at the expense of increasing
        // the memory footprint of WindowServer.
        streamConfig.queueDepth = 5

        return streamConfig
    }

    /// - Tag: GetAvailableContent
    private func refreshAvailableContent() async {
        do {
            // Retrieve the available screen content to capture.
            let availableContent = try await SCShareableContent.excludingDesktopWindows(false,
                                                                                        onScreenWindowsOnly: true)
            availableDisplays = availableContent.displays

            let windows = filterWindows(availableContent.windows)
            if windows != availableWindows {
                availableWindows = windows
            }
            availableApps = availableContent.applications

            if selectedDisplay == nil {
                selectedDisplay = availableDisplays.first
            }
            if selectedWindow == nil {
                selectedWindow = availableWindows.first
            }
        } catch {
            logger.error("Failed to get the shareable content: \(error.localizedDescription)")
        }
    }

    private func filterWindows(_ windows: [SCWindow]) -> [SCWindow] {
        windows
        // Sort the windows by app name.
            .sorted { $0.owningApplication?.applicationName ?? "" < $1.owningApplication?.applicationName ?? "" }
        // Remove windows that don't have an associated .app bundle.
            .filter { $0.owningApplication != nil && $0.owningApplication?.applicationName != "" }
        // Remove this app's window from the list.
            .filter { $0.owningApplication?.bundleIdentifier != Bundle.main.bundleIdentifier }
    }
}
375 |
extension SCWindow {
    /// A human-readable name combining the owning app's name with the window
    /// title, falling back to the window ID (or an empty string) when parts
    /// are missing.
    var displayName: String {
        guard let application = owningApplication else {
            // No owning app: show the bare title, or nothing at all.
            return title ?? ""
        }
        guard let title = title else {
            // App but no title: identify the window by its ID.
            return "\(application.applicationName): \(windowID)"
        }
        return "\(application.applicationName): \(title)"
    }
}
390 |
extension SCDisplay {
    /// A display-friendly label built from the display's pixel dimensions.
    var displayName: String {
        return "Display: \(width) x \(height)"
    }
}
396 |
--------------------------------------------------------------------------------
/CaptureSample.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 56;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 122776ED2A82BC6F0083737C /* PickerSettingsView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 122776EC2A82BC6F0083737C /* PickerSettingsView.swift */; };
11 | C44093732816EC8800FB8386 /* Synth.aif in Resources */ = {isa = PBXBuildFile; fileRef = C44093722816EC8800FB8386 /* Synth.aif */; };
12 | C44093752816EC9C00FB8386 /* AudioPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = C44093742816EC9C00FB8386 /* AudioPlayer.swift */; };
13 | C470F0812811C5CB00D29309 /* ScreenRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = C470F0802811C5CB00D29309 /* ScreenRecorder.swift */; };
14 | C471DFFB2809F440001D24C9 /* PowerMeter.swift in Sources */ = {isa = PBXBuildFile; fileRef = C471DFF92809F440001D24C9 /* PowerMeter.swift */; };
15 | C471DFFE280A0968001D24C9 /* AudioLevelsView.swift in Sources */ = {isa = PBXBuildFile; fileRef = C471DFFD280A0968001D24C9 /* AudioLevelsView.swift */; };
16 | C4729DA52821BFAD00AAC477 /* MaterialView.swift in Sources */ = {isa = PBXBuildFile; fileRef = C4729DA42821BFAD00AAC477 /* MaterialView.swift */; };
17 | C4B0DAAF276BA4480015082A /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = C4B0DAAE276BA4480015082A /* Assets.xcassets */; };
18 | C4B0DAB2276BA4480015082A /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = C4B0DAB1276BA4480015082A /* Preview Assets.xcassets */; };
19 | C4B0DABA276BA49F0015082A /* CaptureSampleApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = C4B0DAB9276BA49F0015082A /* CaptureSampleApp.swift */; };
20 | C4B0DABE276BA4B50015082A /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = C4B0DABB276BA4B50015082A /* ContentView.swift */; };
21 | C4B0DABF276BA4B50015082A /* CapturePreview.swift in Sources */ = {isa = PBXBuildFile; fileRef = C4B0DABC276BA4B50015082A /* CapturePreview.swift */; };
22 | C4B0DAC0276BA4B50015082A /* CaptureEngine.swift in Sources */ = {isa = PBXBuildFile; fileRef = C4B0DABD276BA4B50015082A /* CaptureEngine.swift */; };
23 | C4EB90D428108656006A595C /* ConfigurationView.swift in Sources */ = {isa = PBXBuildFile; fileRef = C4EB90D328108656006A595C /* ConfigurationView.swift */; };
24 | /* End PBXBuildFile section */
25 |
26 | /* Begin PBXFileReference section */
27 | 		122776EC2A82BC6F0083737C /* PickerSettingsView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PickerSettingsView.swift; sourceTree = "<group>"; };
28 | 		7C6C99F1D4B6E3EBA3A7B7DF /* LICENSE.txt */ = {isa = PBXFileReference; includeInIndex = 1; path = LICENSE.txt; sourceTree = "<group>"; };
29 | 		8D89E0E7125AFE6B4E2E6500 /* README.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; path = README.md; sourceTree = "<group>"; };
30 | 		9AEEB9169F5D7FC75E6B35A2 /* SampleCode.xcconfig */ = {isa = PBXFileReference; name = SampleCode.xcconfig; path = Configuration/SampleCode.xcconfig; sourceTree = "<group>"; };
31 | 		C44093722816EC8800FB8386 /* Synth.aif */ = {isa = PBXFileReference; lastKnownFileType = file; path = Synth.aif; sourceTree = "<group>"; };
32 | 		C44093742816EC9C00FB8386 /* AudioPlayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioPlayer.swift; sourceTree = "<group>"; };
33 | 		C470F0802811C5CB00D29309 /* ScreenRecorder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenRecorder.swift; sourceTree = "<group>"; };
34 | 		C471DFF92809F440001D24C9 /* PowerMeter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PowerMeter.swift; sourceTree = "<group>"; };
35 | 		C471DFFD280A0968001D24C9 /* AudioLevelsView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioLevelsView.swift; sourceTree = "<group>"; };
36 | 		C4729DA42821BFAD00AAC477 /* MaterialView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MaterialView.swift; sourceTree = "<group>"; };
37 | C4B0DAA7276BA4460015082A /* CaptureSample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = CaptureSample.app; sourceTree = BUILT_PRODUCTS_DIR; };
38 | 		C4B0DAAE276BA4480015082A /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; name = Assets.xcassets; path = ../CaptureSample/Assets.xcassets; sourceTree = "<group>"; };
39 | 		C4B0DAB1276BA4480015082A /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = "<group>"; };
40 | 		C4B0DAB3276BA4480015082A /* CaptureSample.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; name = CaptureSample.entitlements; path = ../CaptureSample/CaptureSample.entitlements; sourceTree = "<group>"; };
41 | 		C4B0DAB9276BA49F0015082A /* CaptureSampleApp.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = CaptureSampleApp.swift; path = ../CaptureSample/CaptureSampleApp.swift; sourceTree = "<group>"; };
42 | 		C4B0DABB276BA4B50015082A /* ContentView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = "<group>"; };
43 | 		C4B0DABC276BA4B50015082A /* CapturePreview.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CapturePreview.swift; sourceTree = "<group>"; };
44 | 		C4B0DABD276BA4B50015082A /* CaptureEngine.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CaptureEngine.swift; sourceTree = "<group>"; };
45 | 		C4EB90D328108656006A595C /* ConfigurationView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConfigurationView.swift; sourceTree = "<group>"; };
46 | /* End PBXFileReference section */
47 |
48 | /* Begin PBXFrameworksBuildPhase section */
49 | C4B0DAA4276BA4460015082A /* Frameworks */ = {
50 | isa = PBXFrameworksBuildPhase;
51 | buildActionMask = 2147483647;
52 | files = (
53 | );
54 | runOnlyForDeploymentPostprocessing = 0;
55 | };
56 | /* End PBXFrameworksBuildPhase section */
57 |
58 | /* Begin PBXGroup section */
59 | 58C6EBE29CE5FC0542EA3228 /* LICENSE */ = {
60 | isa = PBXGroup;
61 | children = (
62 | 7C6C99F1D4B6E3EBA3A7B7DF /* LICENSE.txt */,
63 | );
64 | name = LICENSE;
65 | path = LICENSE;
66 | 			sourceTree = "<group>";
67 | };
68 | C41D1B1D2814BD230033613F /* Views */ = {
69 | isa = PBXGroup;
70 | children = (
71 | C4B0DABC276BA4B50015082A /* CapturePreview.swift */,
72 | C4EB90D328108656006A595C /* ConfigurationView.swift */,
73 | C471DFFD280A0968001D24C9 /* AudioLevelsView.swift */,
74 | C4729DA42821BFAD00AAC477 /* MaterialView.swift */,
75 | 122776EC2A82BC6F0083737C /* PickerSettingsView.swift */,
76 | );
77 | path = Views;
78 | 			sourceTree = "<group>";
79 | };
80 | C44093712816EC7800FB8386 /* Audio */ = {
81 | isa = PBXGroup;
82 | children = (
83 | C44093722816EC8800FB8386 /* Synth.aif */,
84 | );
85 | path = Audio;
86 | 			sourceTree = "<group>";
87 | };
88 | C4B0DA9E276BA4460015082A = {
89 | isa = PBXGroup;
90 | children = (
91 | 8D89E0E7125AFE6B4E2E6500 /* README.md */,
92 | C4B0DAA9276BA4460015082A /* CaptureSample */,
93 | C4B0DAA8276BA4460015082A /* Products */,
94 | CFC39354E2BF335FE5D2CDFE /* Configuration */,
95 | 58C6EBE29CE5FC0542EA3228 /* LICENSE */,
96 | );
97 | 			sourceTree = "<group>";
98 | };
99 | C4B0DAA8276BA4460015082A /* Products */ = {
100 | isa = PBXGroup;
101 | children = (
102 | C4B0DAA7276BA4460015082A /* CaptureSample.app */,
103 | );
104 | name = Products;
105 | 			sourceTree = "<group>";
106 | };
107 | C4B0DAA9276BA4460015082A /* CaptureSample */ = {
108 | isa = PBXGroup;
109 | children = (
110 | C470F0802811C5CB00D29309 /* ScreenRecorder.swift */,
111 | C4B0DABD276BA4B50015082A /* CaptureEngine.swift */,
112 | C471DFF92809F440001D24C9 /* PowerMeter.swift */,
113 | C44093742816EC9C00FB8386 /* AudioPlayer.swift */,
114 | C4B0DABB276BA4B50015082A /* ContentView.swift */,
115 | C41D1B1D2814BD230033613F /* Views */,
116 | C4B0DAB9276BA49F0015082A /* CaptureSampleApp.swift */,
117 | C44093712816EC7800FB8386 /* Audio */,
118 | C4B0DAAE276BA4480015082A /* Assets.xcassets */,
119 | C4B0DAB3276BA4480015082A /* CaptureSample.entitlements */,
120 | C4B0DAB0276BA4480015082A /* Preview Content */,
121 | );
122 | path = CaptureSample;
123 | 			sourceTree = "<group>";
124 | };
125 | C4B0DAB0276BA4480015082A /* Preview Content */ = {
126 | isa = PBXGroup;
127 | children = (
128 | C4B0DAB1276BA4480015082A /* Preview Assets.xcassets */,
129 | );
130 | path = "Preview Content";
131 | 			sourceTree = "<group>";
132 | };
133 | CFC39354E2BF335FE5D2CDFE /* Configuration */ = {
134 | isa = PBXGroup;
135 | children = (
136 | 9AEEB9169F5D7FC75E6B35A2 /* SampleCode.xcconfig */,
137 | );
138 | name = Configuration;
139 | 			sourceTree = "<group>";
140 | };
141 | /* End PBXGroup section */
142 |
143 | /* Begin PBXNativeTarget section */
144 | C4B0DAA6276BA4460015082A /* CaptureSample */ = {
145 | isa = PBXNativeTarget;
146 | buildConfigurationList = C4B0DAB6276BA4480015082A /* Build configuration list for PBXNativeTarget "CaptureSample" */;
147 | buildPhases = (
148 | C4B0DAA3276BA4460015082A /* Sources */,
149 | C4B0DAA4276BA4460015082A /* Frameworks */,
150 | C4B0DAA5276BA4460015082A /* Resources */,
151 | );
152 | buildRules = (
153 | );
154 | dependencies = (
155 | );
156 | name = CaptureSample;
157 | productName = CaptureIt;
158 | productReference = C4B0DAA7276BA4460015082A /* CaptureSample.app */;
159 | productType = "com.apple.product-type.application";
160 | };
161 | /* End PBXNativeTarget section */
162 |
163 | /* Begin PBXProject section */
164 | C4B0DA9F276BA4460015082A /* Project object */ = {
165 | isa = PBXProject;
166 | attributes = {
167 | BuildIndependentTargetsInParallel = 1;
168 | LastSwiftUpdateCheck = 1330;
169 | LastUpgradeCheck = 1400;
170 | ORGANIZATIONNAME = Apple;
171 | TargetAttributes = {
172 | C4B0DAA6276BA4460015082A = {
173 | CreatedOnToolsVersion = 13.3;
174 | LastSwiftMigration = 1330;
175 | };
176 | };
177 | };
178 | buildConfigurationList = C4B0DAA2276BA4460015082A /* Build configuration list for PBXProject "CaptureSample" */;
179 | compatibilityVersion = "Xcode 14.0";
180 | developmentRegion = en;
181 | hasScannedForEncodings = 0;
182 | knownRegions = (
183 | en,
184 | Base,
185 | );
186 | mainGroup = C4B0DA9E276BA4460015082A;
187 | productRefGroup = C4B0DAA8276BA4460015082A /* Products */;
188 | projectDirPath = "";
189 | projectRoot = "";
190 | targets = (
191 | C4B0DAA6276BA4460015082A /* CaptureSample */,
192 | );
193 | };
194 | /* End PBXProject section */
195 |
196 | /* Begin PBXResourcesBuildPhase section */
197 | C4B0DAA5276BA4460015082A /* Resources */ = {
198 | isa = PBXResourcesBuildPhase;
199 | buildActionMask = 2147483647;
200 | files = (
201 | C4B0DAB2276BA4480015082A /* Preview Assets.xcassets in Resources */,
202 | C4B0DAAF276BA4480015082A /* Assets.xcassets in Resources */,
203 | C44093732816EC8800FB8386 /* Synth.aif in Resources */,
204 | );
205 | runOnlyForDeploymentPostprocessing = 0;
206 | };
207 | /* End PBXResourcesBuildPhase section */
208 |
209 | /* Begin PBXSourcesBuildPhase section */
210 | C4B0DAA3276BA4460015082A /* Sources */ = {
211 | isa = PBXSourcesBuildPhase;
212 | buildActionMask = 2147483647;
213 | files = (
214 | C4B0DABF276BA4B50015082A /* CapturePreview.swift in Sources */,
215 | C4B0DAC0276BA4B50015082A /* CaptureEngine.swift in Sources */,
216 | C44093752816EC9C00FB8386 /* AudioPlayer.swift in Sources */,
217 | C470F0812811C5CB00D29309 /* ScreenRecorder.swift in Sources */,
218 | C4B0DABA276BA49F0015082A /* CaptureSampleApp.swift in Sources */,
219 | C471DFFE280A0968001D24C9 /* AudioLevelsView.swift in Sources */,
220 | 122776ED2A82BC6F0083737C /* PickerSettingsView.swift in Sources */,
221 | C4EB90D428108656006A595C /* ConfigurationView.swift in Sources */,
222 | C4729DA52821BFAD00AAC477 /* MaterialView.swift in Sources */,
223 | C4B0DABE276BA4B50015082A /* ContentView.swift in Sources */,
224 | C471DFFB2809F440001D24C9 /* PowerMeter.swift in Sources */,
225 | );
226 | runOnlyForDeploymentPostprocessing = 0;
227 | };
228 | /* End PBXSourcesBuildPhase section */
229 |
230 | /* Begin XCBuildConfiguration section */
231 | C4B0DAB4276BA4480015082A /* Debug */ = {
232 | isa = XCBuildConfiguration;
233 | baseConfigurationReference = 9AEEB9169F5D7FC75E6B35A2 /* SampleCode.xcconfig */;
234 | buildSettings = {
235 | ALWAYS_SEARCH_USER_PATHS = NO;
236 | CLANG_ANALYZER_NONNULL = YES;
237 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
238 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++17";
239 | CLANG_CXX_LIBRARY = "libc++";
240 | CLANG_ENABLE_MODULES = YES;
241 | CLANG_ENABLE_OBJC_ARC = YES;
242 | CLANG_ENABLE_OBJC_WEAK = YES;
243 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
244 | CLANG_WARN_BOOL_CONVERSION = YES;
245 | CLANG_WARN_COMMA = YES;
246 | CLANG_WARN_CONSTANT_CONVERSION = YES;
247 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
248 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
249 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
250 | CLANG_WARN_EMPTY_BODY = YES;
251 | CLANG_WARN_ENUM_CONVERSION = YES;
252 | CLANG_WARN_INFINITE_RECURSION = YES;
253 | CLANG_WARN_INT_CONVERSION = YES;
254 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
255 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
256 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
257 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
258 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
259 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
260 | CLANG_WARN_STRICT_PROTOTYPES = YES;
261 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
262 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
263 | CLANG_WARN_UNREACHABLE_CODE = YES;
264 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
265 | COPY_PHASE_STRIP = NO;
266 | DEAD_CODE_STRIPPING = YES;
267 | DEBUG_INFORMATION_FORMAT = dwarf;
268 | ENABLE_STRICT_OBJC_MSGSEND = YES;
269 | ENABLE_TESTABILITY = YES;
270 | GCC_C_LANGUAGE_STANDARD = gnu11;
271 | GCC_DYNAMIC_NO_PIC = NO;
272 | GCC_NO_COMMON_BLOCKS = YES;
273 | GCC_OPTIMIZATION_LEVEL = 0;
274 | GCC_PREPROCESSOR_DEFINITIONS = (
275 | "DEBUG=1",
276 | "$(inherited)",
277 | );
278 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
279 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
280 | GCC_WARN_UNDECLARED_SELECTOR = YES;
281 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
282 | GCC_WARN_UNUSED_FUNCTION = YES;
283 | GCC_WARN_UNUSED_VARIABLE = YES;
284 | MACOSX_DEPLOYMENT_TARGET = 14.0;
285 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
286 | MTL_FAST_MATH = YES;
287 | ONLY_ACTIVE_ARCH = YES;
288 | SDKROOT = macosx;
289 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
290 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
291 | };
292 | name = Debug;
293 | };
294 | C4B0DAB5276BA4480015082A /* Release */ = {
295 | isa = XCBuildConfiguration;
296 | baseConfigurationReference = 9AEEB9169F5D7FC75E6B35A2 /* SampleCode.xcconfig */;
297 | buildSettings = {
298 | ALWAYS_SEARCH_USER_PATHS = NO;
299 | CLANG_ANALYZER_NONNULL = YES;
300 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
301 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++17";
302 | CLANG_CXX_LIBRARY = "libc++";
303 | CLANG_ENABLE_MODULES = YES;
304 | CLANG_ENABLE_OBJC_ARC = YES;
305 | CLANG_ENABLE_OBJC_WEAK = YES;
306 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
307 | CLANG_WARN_BOOL_CONVERSION = YES;
308 | CLANG_WARN_COMMA = YES;
309 | CLANG_WARN_CONSTANT_CONVERSION = YES;
310 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
311 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
312 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
313 | CLANG_WARN_EMPTY_BODY = YES;
314 | CLANG_WARN_ENUM_CONVERSION = YES;
315 | CLANG_WARN_INFINITE_RECURSION = YES;
316 | CLANG_WARN_INT_CONVERSION = YES;
317 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
318 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
319 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
320 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
321 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
322 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
323 | CLANG_WARN_STRICT_PROTOTYPES = YES;
324 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
325 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
326 | CLANG_WARN_UNREACHABLE_CODE = YES;
327 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
328 | COPY_PHASE_STRIP = NO;
329 | DEAD_CODE_STRIPPING = YES;
330 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
331 | ENABLE_NS_ASSERTIONS = NO;
332 | ENABLE_STRICT_OBJC_MSGSEND = YES;
333 | GCC_C_LANGUAGE_STANDARD = gnu11;
334 | GCC_NO_COMMON_BLOCKS = YES;
335 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
336 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
337 | GCC_WARN_UNDECLARED_SELECTOR = YES;
338 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
339 | GCC_WARN_UNUSED_FUNCTION = YES;
340 | GCC_WARN_UNUSED_VARIABLE = YES;
341 | MACOSX_DEPLOYMENT_TARGET = 14.0;
342 | MTL_ENABLE_DEBUG_INFO = NO;
343 | MTL_FAST_MATH = YES;
344 | SDKROOT = macosx;
345 | SWIFT_COMPILATION_MODE = wholemodule;
346 | SWIFT_OPTIMIZATION_LEVEL = "-O";
347 | };
348 | name = Release;
349 | };
350 | C4B0DAB7276BA4480015082A /* Debug */ = {
351 | isa = XCBuildConfiguration;
352 | baseConfigurationReference = 9AEEB9169F5D7FC75E6B35A2 /* SampleCode.xcconfig */;
353 | buildSettings = {
354 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
355 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
356 | CLANG_ENABLE_MODULES = YES;
357 | CODE_SIGN_ENTITLEMENTS = CaptureSample/CaptureSample.entitlements;
358 | CODE_SIGN_IDENTITY = "Mac Developer";
359 | "CODE_SIGN_IDENTITY[sdk=macosx*]" = "Mac Developer";
360 | CODE_SIGN_STYLE = Automatic;
361 | COMBINE_HIDPI_IMAGES = YES;
362 | CURRENT_PROJECT_VERSION = 1;
363 | DEAD_CODE_STRIPPING = YES;
364 | DEVELOPMENT_ASSET_PATHS = "\"CaptureSample/Preview Content\"";
365 | DEVELOPMENT_TEAM = "";
366 | ENABLE_HARDENED_RUNTIME = YES;
367 | ENABLE_PREVIEWS = YES;
368 | GENERATE_INFOPLIST_FILE = YES;
369 | INFOPLIST_KEY_NSHumanReadableCopyright = "";
370 | LD_RUNPATH_SEARCH_PATHS = (
371 | "$(inherited)",
372 | "@executable_path/../Frameworks",
373 | );
374 | MACOSX_DEPLOYMENT_TARGET = 14.0;
375 | MARKETING_VERSION = 1.0;
376 | PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.CaptureSample${SAMPLE_CODE_DISAMBIGUATOR}";
377 | PRODUCT_NAME = "$(TARGET_NAME)";
378 | PROVISIONING_PROFILE_SPECIFIER = "";
379 | SWIFT_EMIT_LOC_STRINGS = YES;
380 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
381 | SWIFT_VERSION = 5.0;
382 | };
383 | name = Debug;
384 | };
385 | C4B0DAB8276BA4480015082A /* Release */ = {
386 | isa = XCBuildConfiguration;
387 | baseConfigurationReference = 9AEEB9169F5D7FC75E6B35A2 /* SampleCode.xcconfig */;
388 | buildSettings = {
389 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
390 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
391 | CLANG_ENABLE_MODULES = YES;
392 | CODE_SIGN_ENTITLEMENTS = CaptureSample/CaptureSample.entitlements;
393 | CODE_SIGN_IDENTITY = "Mac Developer";
394 | "CODE_SIGN_IDENTITY[sdk=macosx*]" = "Mac Developer";
395 | CODE_SIGN_STYLE = Automatic;
396 | COMBINE_HIDPI_IMAGES = YES;
397 | CURRENT_PROJECT_VERSION = 1;
398 | DEAD_CODE_STRIPPING = YES;
399 | DEVELOPMENT_ASSET_PATHS = "\"CaptureSample/Preview Content\"";
400 | DEVELOPMENT_TEAM = "";
401 | ENABLE_HARDENED_RUNTIME = YES;
402 | ENABLE_PREVIEWS = YES;
403 | GENERATE_INFOPLIST_FILE = YES;
404 | INFOPLIST_KEY_NSHumanReadableCopyright = "";
405 | LD_RUNPATH_SEARCH_PATHS = (
406 | "$(inherited)",
407 | "@executable_path/../Frameworks",
408 | );
409 | MACOSX_DEPLOYMENT_TARGET = 14.0;
410 | MARKETING_VERSION = 1.0;
411 | PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.CaptureSample${SAMPLE_CODE_DISAMBIGUATOR}";
412 | PRODUCT_NAME = "$(TARGET_NAME)";
413 | PROVISIONING_PROFILE_SPECIFIER = "";
414 | SWIFT_EMIT_LOC_STRINGS = YES;
415 | SWIFT_VERSION = 5.0;
416 | };
417 | name = Release;
418 | };
419 | /* End XCBuildConfiguration section */
420 |
421 | /* Begin XCConfigurationList section */
422 | C4B0DAA2276BA4460015082A /* Build configuration list for PBXProject "CaptureSample" */ = {
423 | isa = XCConfigurationList;
424 | buildConfigurations = (
425 | C4B0DAB4276BA4480015082A /* Debug */,
426 | C4B0DAB5276BA4480015082A /* Release */,
427 | );
428 | defaultConfigurationIsVisible = 0;
429 | defaultConfigurationName = Release;
430 | };
431 | C4B0DAB6276BA4480015082A /* Build configuration list for PBXNativeTarget "CaptureSample" */ = {
432 | isa = XCConfigurationList;
433 | buildConfigurations = (
434 | C4B0DAB7276BA4480015082A /* Debug */,
435 | C4B0DAB8276BA4480015082A /* Release */,
436 | );
437 | defaultConfigurationIsVisible = 0;
438 | defaultConfigurationName = Release;
439 | };
440 | /* End XCConfigurationList section */
441 | };
442 | rootObject = C4B0DA9F276BA4460015082A /* Project object */;
443 | }
444 |
--------------------------------------------------------------------------------