├── .gitignore
├── Example
│   ├── Example.xcodeproj
│   │   ├── project.pbxproj
│   │   └── project.xcworkspace
│   │       ├── contents.xcworkspacedata
│   │       └── xcshareddata
│   │           └── IDEWorkspaceChecks.plist
│   ├── Example
│   │   ├── ExampleApp.swift
│   │   ├── ExampleView.swift
│   │   ├── Examples.swift
│   │   ├── Supporting Files
│   │   │   ├── Assets.xcassets
│   │   │   │   ├── AccentColor.colorset
│   │   │   │   │   └── Contents.json
│   │   │   │   ├── AppIcon.appiconset
│   │   │   │   │   └── Contents.json
│   │   │   │   └── Contents.json
│   │   │   ├── Info.plist
│   │   │   ├── Preview Content
│   │   │   │   └── Preview Assets.xcassets
│   │   │   │       └── Contents.json
│   │   │   ├── lookup_amatorka.png
│   │   │   └── lookup_miss_etikate.png
│   │   └── Views
│   │       ├── CameraSampleView.swift
│   │       ├── LookupFilterView.swift
│   │       ├── LookupFilterViewModel.swift
│   │       ├── MPSSampleView.swift
│   │       └── SegmentationSampleView.swift
│   ├── ExampleTests
│   │   ├── ExampleTests.swift
│   │   └── Info.plist
│   └── ExampleUITests
│       ├── ExampleUITests.swift
│       └── Info.plist
├── LICENSE
├── MetalCamera.xcodeproj
│   ├── MetalCameraTests_Info.plist
│   ├── MetalCamera_Info.plist
│   ├── project.pbxproj
│   ├── project.xcworkspace
│   │   ├── contents.xcworkspacedata
│   │   └── xcshareddata
│   │       ├── IDEWorkspaceChecks.plist
│   │       └── WorkspaceSettings.xcsettings
│   └── xcshareddata
│       └── xcschemes
│           └── MetalCamera-Package.xcscheme
├── Package.swift
├── README.md
├── Sources
│   └── MetalCamera
│       ├── CoreML
│       │   ├── CoreMLClassifierHandler.swift
│       │   ├── CoreMLLoader.swift
│       │   └── PoseNet
│       │       ├── CoreMLPoseNetHandler.swift
│       │       ├── Joint.swift
│       │       ├── Pose.swift
│       │       └── PoseNetOutput.swift
│       ├── MetalCamera.swift
│       ├── MetalRenderingDevice.swift
│       ├── MetalVideoLoader.swift
│       ├── MetalVideoView.swift
│       ├── MetalVideoWriter.swift
│       ├── OperationChain.swift
│       ├── SwiftUI
│       │   └── VideoPreview.swift
│       ├── Texture.swift
│       ├── audio
│       │   ├── AudioBuffer.swift
│       │   ├── AudioCompositor.swift
│       │   └── AudioStreamPlayer.swift
│       ├── operations
│       │   ├── AlphaBlend.swift
│       │   ├── Gray.swift
│       │   ├── ImageCompositor.swift
│       │   ├── Kernel.swift
│       │   ├── Lookup.swift
│       │   ├── Mask.swift
│       │   ├── MetalKernel.swift
│       │   └── Rotation.swift
│       ├── shader
│       │   └── Shaders.metal
│       └── utils
│           ├── Colors.swift
│           ├── Maths.swift
│           └── extensions.swift
├── Tests
│   └── MetalCameraTests
│       └── MetalCameraTests.swift
└── build.sh
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
4 |
5 | ## User settings
6 | xcuserdata/
7 |
8 | ## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9)
9 | *.xcscmblueprint
10 | *.xccheckout
11 |
12 | ## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4)
13 | build/
14 | DerivedData/
15 | *.moved-aside
16 | *.pbxuser
17 | !default.pbxuser
18 | *.mode1v3
19 | !default.mode1v3
20 | *.mode2v3
21 | !default.mode2v3
22 | *.perspectivev3
23 | !default.perspectivev3
24 |
25 | ## Obj-C/Swift specific
26 | *.hmap
27 |
28 | ## App packaging
29 | *.ipa
30 | *.dSYM.zip
31 | *.dSYM
32 |
33 | ## Playgrounds
34 | timeline.xctimeline
35 | playground.xcworkspace
36 |
37 | # Swift Package Manager
38 | #
39 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
40 | # Packages/
41 | # Package.pins
42 | # Package.resolved
43 | # *.xcodeproj
44 | #
45 | # Xcode automatically generates this directory with a .xcworkspacedata file and xcuserdata
46 | # hence it is not needed unless you have added a package configuration file to your project
47 | # .swiftpm
48 |
49 | .build/
50 |
51 | # CocoaPods
52 | #
53 | # We recommend against adding the Pods directory to your .gitignore. However
54 | # you should judge for yourself, the pros and cons are mentioned at:
55 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
56 | #
57 | # Pods/
58 | #
59 | # Add this line if you want to avoid checking in source code from the Xcode workspace
60 | # *.xcworkspace
61 |
62 | # Carthage
63 | #
64 | # Add this line if you want to avoid checking in source code from Carthage dependencies.
65 | # Carthage/Checkouts
66 |
67 | Carthage/Build/
68 |
69 | # Accio dependency management
70 | Dependencies/
71 | .accio/
72 |
73 | # fastlane
74 | #
75 | # It is recommended to not store the screenshots in the git repo.
76 | # Instead, use fastlane to re-generate the screenshots whenever they are needed.
77 | # For more information about the recommended setup visit:
78 | # https://docs.fastlane.tools/best-practices/source-control/#source-control
79 |
80 | fastlane/report.xml
81 | fastlane/Preview.html
82 | fastlane/screenshots/**/*.png
83 | fastlane/test_output
84 |
85 | # Code Injection
86 | #
87 | # After new code Injection tools there's a generated folder /iOSInjectionProject
88 | # https://github.com/johnno1962/injectionforxcode
89 |
90 | iOSInjectionProject/
91 |
--------------------------------------------------------------------------------
/Example/Example.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 | 
--------------------------------------------------------------------------------
/Example/Example.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 | 
--------------------------------------------------------------------------------
/Example/Example/ExampleApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ExampleApp.swift
3 | // Example
4 | //
5 | // Created by Eunchul Jeon on 2021/09/05.
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct ExampleApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | ExampleView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/Example/Example/ExampleView.swift:
--------------------------------------------------------------------------------
1 | //
2 | //  ExampleView.swift
3 | // Example
4 | //
5 | // Created by Eunchul Jeon on 2021/09/05.
6 | //
7 |
8 | import SwiftUI
9 | import MetalCamera
10 |
11 | struct ExampleView: View {
12 | var body: some View {
13 | NavigationView {
14 | List(Examples.allCases) { item in
15 | NavigationLink(destination: item.view) {
16 | HStack {
17 | Text("\(item.name)")
18 | Spacer()
19 | }
20 | }
21 | }
22 | .navigationTitle(MetalCamera.libraryName)
23 | }
24 | }
25 | }
26 |
27 | struct ExampleView_Previews: PreviewProvider {
28 | static var previews: some View {
29 | ExampleView()
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/Example/Example/Examples.swift:
--------------------------------------------------------------------------------
1 | //
2 | //  Examples.swift
3 | // Example
4 | //
5 | // Created by Eunchul Jeon on 2021/09/05.
6 | //
7 |
8 | import Foundation
9 | import MetalCamera
10 | import SwiftUI
11 |
12 | enum Examples: Int, CaseIterable, Identifiable {
13 | case camera
14 | case recording
15 | case segmentation
16 | case lookup
17 | case MPS
18 |
19 | var id: String {
20 | return "\(self.rawValue)"
21 | }
22 |
23 | var name: String {
24 | switch self {
25 | case .camera:
26 | return "Camera"
27 | case .recording:
28 | return "Composition and Recording"
29 | case .segmentation:
30 | return "Segmentation"
31 | case .lookup:
32 | return "Lookup Table"
33 | case .MPS:
34 | return "Metal Performance Shader"
35 | }
36 | }
37 | }
38 |
39 | extension Examples {
40 | var view: some View {
41 | switch self {
42 | case .camera:
43 | return AnyView(CameraSampleView())
44 | case .segmentation:
45 | return AnyView(SegmentationSampleView())
46 | case .MPS:
47 | return AnyView(MPSSampleView())
48 | case .lookup:
49 | return AnyView(LookupFilterView())
50 | default:
51 | return AnyView(Text(self.name))
52 | }
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/Example/Example/Supporting Files/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/Example/Example/Supporting Files/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "scale" : "2x",
6 | "size" : "20x20"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "scale" : "3x",
11 | "size" : "20x20"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "scale" : "2x",
16 | "size" : "29x29"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "scale" : "3x",
21 | "size" : "29x29"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "scale" : "2x",
26 | "size" : "40x40"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "scale" : "3x",
31 | "size" : "40x40"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "scale" : "2x",
36 | "size" : "60x60"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "scale" : "3x",
41 | "size" : "60x60"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "scale" : "1x",
46 | "size" : "20x20"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "scale" : "2x",
51 | "size" : "20x20"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "scale" : "1x",
56 | "size" : "29x29"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "scale" : "2x",
61 | "size" : "29x29"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "scale" : "1x",
66 | "size" : "40x40"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "scale" : "2x",
71 | "size" : "40x40"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "scale" : "1x",
76 | "size" : "76x76"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "scale" : "2x",
81 | "size" : "76x76"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "scale" : "2x",
86 | "size" : "83.5x83.5"
87 | },
88 | {
89 | "idiom" : "ios-marketing",
90 | "scale" : "1x",
91 | "size" : "1024x1024"
92 | }
93 | ],
94 | "info" : {
95 | "author" : "xcode",
96 | "version" : 1
97 | }
98 | }
99 |
--------------------------------------------------------------------------------
/Example/Example/Supporting Files/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Example/Example/Supporting Files/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>CFBundleDevelopmentRegion</key>
6 | 	<string>$(DEVELOPMENT_LANGUAGE)</string>
7 | 	<key>CFBundleExecutable</key>
8 | 	<string>$(EXECUTABLE_NAME)</string>
9 | 	<key>CFBundleIdentifier</key>
10 | 	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | 	<key>CFBundleInfoDictionaryVersion</key>
12 | 	<string>6.0</string>
13 | 	<key>CFBundleName</key>
14 | 	<string>$(PRODUCT_NAME)</string>
15 | 	<key>CFBundlePackageType</key>
16 | 	<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
17 | 	<key>CFBundleShortVersionString</key>
18 | 	<string>1.0</string>
19 | 	<key>CFBundleVersion</key>
20 | 	<string>1</string>
21 | 	<key>LSRequiresIPhoneOS</key>
22 | 	<true/>
23 | 	<key>NSCameraUsageDescription</key>
24 | 	<string>$(PRODUCT_NAME) Requires camera access in order to capture and transmit video</string>
25 | 	<key>NSMicrophoneUsageDescription</key>
26 | 	<string>$(PRODUCT_NAME) Requires microphone access in order to capture and transmit audio</string>
27 | 	<key>UIApplicationSceneManifest</key>
28 | 	<dict>
29 | 		<key>UIApplicationSupportsMultipleScenes</key>
30 | 		<false/>
31 | 	</dict>
32 | 	<key>UIApplicationSupportsIndirectInputEvents</key>
33 | 	<true/>
34 | 	<key>UILaunchScreen</key>
35 | 	<dict/>
36 | 	<key>UIRequiredDeviceCapabilities</key>
37 | 	<array>
38 | 		<string>armv7</string>
39 | 	</array>
40 | 	<key>UISupportedInterfaceOrientations</key>
41 | 	<array>
42 | 		<string>UIInterfaceOrientationPortrait</string>
43 | 		<string>UIInterfaceOrientationLandscapeLeft</string>
44 | 		<string>UIInterfaceOrientationLandscapeRight</string>
45 | 	</array>
46 | 	<key>UISupportedInterfaceOrientations~ipad</key>
47 | 	<array>
48 | 		<string>UIInterfaceOrientationPortrait</string>
49 | 		<string>UIInterfaceOrientationPortraitUpsideDown</string>
50 | 		<string>UIInterfaceOrientationLandscapeLeft</string>
51 | 		<string>UIInterfaceOrientationLandscapeRight</string>
52 | 	</array>
53 | </dict>
54 | </plist>
55 | 
--------------------------------------------------------------------------------
/Example/Example/Supporting Files/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Example/Example/Supporting Files/lookup_amatorka.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jsharp83/MetalCamera/af4029355eecfaa97ebc42a08b9c0aec75ab1cab/Example/Example/Supporting Files/lookup_amatorka.png
--------------------------------------------------------------------------------
/Example/Example/Supporting Files/lookup_miss_etikate.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jsharp83/MetalCamera/af4029355eecfaa97ebc42a08b9c0aec75ab1cab/Example/Example/Supporting Files/lookup_miss_etikate.png
--------------------------------------------------------------------------------
/Example/Example/Views/CameraSampleView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CameraSampleView.swift
3 | // Example
4 | //
5 | // Created by Eunchul Jeon on 2021/09/07.
6 | //
7 |
8 | import SwiftUI
9 | import MetalCamera
10 |
11 | struct CameraSampleView: View {
12 | let camera = try! MetalCamera(videoOrientation: .portrait, isVideoMirrored: true)
13 | var body: some View {
14 | VideoPreview(operation: camera)
15 | .onAppear {
16 | camera.startCapture()
17 | }
18 | .onDisappear {
19 | camera.stopCapture()
20 | }
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/Example/Example/Views/LookupFilterView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LookupFilterView.swift
3 | // Example
4 | //
5 | // Created by Eunchul Jeon on 2021/09/07.
6 | //
7 |
8 | import MetalCamera
9 | import SwiftUI
10 |
11 | struct LookupFilterView: View {
12 | @StateObject var viewModel = LookupFilterViewModel()
13 |
14 | var body: some View {
15 | if let operation = viewModel.operationChain {
16 | VideoPreview(operation: operation)
17 | .onAppear {
18 | viewModel.camera.startCapture()
19 | }
20 | .onDisappear {
21 | viewModel.camera.stopCapture()
22 | }
23 | .gesture(DragGesture(minimumDistance: 20, coordinateSpace: .global)
24 | .onEnded({ _ in
25 | viewModel.changeFilter()
26 | })
27 | )
28 | } else {
29 | Text("Preparing...")
30 | }
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/Example/Example/Views/LookupFilterViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LookupFilterViewModel.swift
3 | // Example
4 | //
5 | // Created by Eunchul Jeon on 2021/09/07.
6 | //
7 |
8 | import UIKit
9 | import MetalCamera
10 |
11 | enum LookupFilter: CaseIterable {
12 | case none
13 | case amatorka
14 | case miss_etikate
15 |
16 | var imageFileName: String {
17 | switch self {
18 | case .amatorka:
19 | return "lookup_amatorka.png"
20 | case .miss_etikate:
21 | return "lookup_miss_etikate.png"
22 | case .none:
23 | return ""
24 | }
25 | }
26 |
27 | var cgImage: CGImage? {
28 | if imageFileName.count > 0 {
29 | return UIImage(named: imageFileName)?.cgImage
30 | } else {
31 | return nil
32 | }
33 | }
34 | }
35 |
36 | class LookupFilterViewModel: ObservableObject {
37 | let camera = try! MetalCamera(videoOrientation: .portrait, isVideoMirrored: true)
38 | let lookupFilters = LookupFilter.allCases
39 | .compactMap { $0.cgImage }
40 | .compactMap { Lookup($0) }
41 |
42 | @Published var operationChain: OperationChain?
43 | var currentIndex = 0
44 |
45 | init() {
46 | camera-->lookupFilters.first!
47 | operationChain = lookupFilters.first
48 | }
49 |
50 | func changeFilter() {
51 | camera.removeAllTargets()
52 | operationChain?.removeAllTargets()
53 | // Cycle through the lookup filters; the index equal to
54 | // `lookupFilters.count` is a pass-through state with no filter applied.
55 | currentIndex = currentIndex + 1 <= lookupFilters.count ? currentIndex + 1 : 0
56 | if currentIndex < lookupFilters.count {
57 | camera-->lookupFilters[currentIndex]
58 | operationChain = lookupFilters[currentIndex]
59 | } else {
60 | operationChain = camera
61 | }
62 | }
63 | }
64 | 
--------------------------------------------------------------------------------
/Example/Example/Views/MPSSampleView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MPSSampleView.swift
3 | // Example
4 | //
5 | // Created by Eunchul Jeon on 2021/09/07.
6 | //
7 |
8 | import MetalCamera
9 | import MetalPerformanceShaders
10 | import SwiftUI
11 |
12 | struct MPSSampleView: View {
13 | let camera = try! MetalCamera(videoOrientation: .portrait, isVideoMirrored: true)
14 | let sobel = MPSImageSobel(device: sharedMetalRenderingDevice.device)
15 | let kernel: Kernel?
16 |
17 | init() {
18 | let kernel = Kernel(sobel)
19 | camera-->kernel
20 | self.kernel = kernel
21 | }
22 |
23 | var body: some View {
24 | if let kernel = kernel {
25 | VideoPreview(operation: kernel)
26 | .onAppear {
27 | camera.startCapture()
28 | }
29 | .onDisappear {
30 | camera.stopCapture()
31 | }
32 | } else {
33 | Text("Preparing...")
34 | }
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/Example/Example/Views/SegmentationSampleView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SegmentationSampleView.swift
3 | // Example
4 | //
5 | // Created by Eunchul Jeon on 2021/09/07.
6 | //
7 |
8 | import CoreML
9 | import MetalCamera
10 | import SwiftUI
11 | import Vision
12 |
13 | struct SegmentationSampleView: View {
14 | let camera = try! MetalCamera(videoOrientation: .portrait, isVideoMirrored: true)
15 | @State var operationChain: OperationChain?
16 |
17 | let modelURL = URL(string: "https://ml-assets.apple.com/coreml/models/Image/ImageSegmentation/DeepLabV3/DeepLabV3Int8LUT.mlmodel")!
18 |
19 | var body: some View {
20 | if let operationChain = operationChain {
21 | VideoPreview(operation: operationChain)
22 | .onAppear {
23 | camera.startCapture()
24 | }
25 | .onDisappear {
26 | camera.stopCapture()
27 | }
28 | } else {
29 | Text("Preparing...")
30 | .onAppear() {
31 | loadCoreML()
32 | }
33 | }
34 | }
35 | }
36 |
37 | extension SegmentationSampleView {
38 | func loadCoreML() {
39 | do {
40 | let loader = try CoreMLLoader(url: modelURL)
41 | loader.load { (model, error) in
42 | if let model = model {
43 | setupModelHandler(model)
44 | } else if let error = error {
45 | debugPrint(error)
46 | }
47 | }
48 | } catch {
49 | debugPrint(error)
50 | }
51 | }
52 |
53 | func setupModelHandler(_ model: MLModel) {
54 | do {
55 | let modelHandler = try CoreMLClassifierHandler(model)
56 | camera.removeAllTargets()
57 | camera-->modelHandler
58 | operationChain = modelHandler
59 | } catch {
60 | debugPrint(error)
61 | }
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/Example/ExampleTests/ExampleTests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ExampleTests.swift
3 | // ExampleTests
4 | //
5 | // Created by Eunchul Jeon on 2021/09/05.
6 | //
7 |
8 | import XCTest
9 | @testable import Example
10 |
11 | class ExampleTests: XCTestCase {
12 |
13 | override func setUpWithError() throws {
14 | // Put setup code here. This method is called before the invocation of each test method in the class.
15 | }
16 |
17 | override func tearDownWithError() throws {
18 | // Put teardown code here. This method is called after the invocation of each test method in the class.
19 | }
20 |
21 | func testExample() throws {
22 | // This is an example of a functional test case.
23 | // Use XCTAssert and related functions to verify your tests produce the correct results.
24 | }
25 |
26 | func testPerformanceExample() throws {
27 | // This is an example of a performance test case.
28 | self.measure {
29 | // Put the code you want to measure the time of here.
30 | }
31 | }
32 |
33 | }
34 |
--------------------------------------------------------------------------------
/Example/ExampleTests/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>CFBundleDevelopmentRegion</key>
6 | 	<string>$(DEVELOPMENT_LANGUAGE)</string>
7 | 	<key>CFBundleExecutable</key>
8 | 	<string>$(EXECUTABLE_NAME)</string>
9 | 	<key>CFBundleIdentifier</key>
10 | 	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | 	<key>CFBundleInfoDictionaryVersion</key>
12 | 	<string>6.0</string>
13 | 	<key>CFBundleName</key>
14 | 	<string>$(PRODUCT_NAME)</string>
15 | 	<key>CFBundlePackageType</key>
16 | 	<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
17 | 	<key>CFBundleShortVersionString</key>
18 | 	<string>1.0</string>
19 | 	<key>CFBundleVersion</key>
20 | 	<string>1</string>
21 | </dict>
22 | </plist>
23 | 
--------------------------------------------------------------------------------
/Example/ExampleUITests/ExampleUITests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ExampleUITests.swift
3 | // ExampleUITests
4 | //
5 | // Created by Eunchul Jeon on 2021/09/05.
6 | //
7 |
8 | import XCTest
9 |
10 | class ExampleUITests: XCTestCase {
11 |
12 | override func setUpWithError() throws {
13 | // Put setup code here. This method is called before the invocation of each test method in the class.
14 |
15 | // In UI tests it is usually best to stop immediately when a failure occurs.
16 | continueAfterFailure = false
17 |
18 | // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this.
19 | }
20 |
21 | override func tearDownWithError() throws {
22 | // Put teardown code here. This method is called after the invocation of each test method in the class.
23 | }
24 |
25 | func testExample() throws {
26 | // UI tests must launch the application that they test.
27 | let app = XCUIApplication()
28 | app.launch()
29 |
30 | // Use recording to get started writing UI tests.
31 | // Use XCTAssert and related functions to verify your tests produce the correct results.
32 | }
33 |
34 | func testLaunchPerformance() throws {
35 | if #available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 7.0, *) {
36 | // This measures how long it takes to launch your application.
37 | measure(metrics: [XCTApplicationLaunchMetric()]) {
38 | XCUIApplication().launch()
39 | }
40 | }
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/Example/ExampleUITests/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>CFBundleDevelopmentRegion</key>
6 | 	<string>$(DEVELOPMENT_LANGUAGE)</string>
7 | 	<key>CFBundleExecutable</key>
8 | 	<string>$(EXECUTABLE_NAME)</string>
9 | 	<key>CFBundleIdentifier</key>
10 | 	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | 	<key>CFBundleInfoDictionaryVersion</key>
12 | 	<string>6.0</string>
13 | 	<key>CFBundleName</key>
14 | 	<string>$(PRODUCT_NAME)</string>
15 | 	<key>CFBundlePackageType</key>
16 | 	<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
17 | 	<key>CFBundleShortVersionString</key>
18 | 	<string>1.0</string>
19 | 	<key>CFBundleVersion</key>
20 | 	<string>1</string>
21 | </dict>
22 | </plist>
23 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2020 jsharp83
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in
11 | all copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19 | THE SOFTWARE.
20 |
--------------------------------------------------------------------------------
/MetalCamera.xcodeproj/MetalCameraTests_Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <plist version="1.0">
3 | <dict>
4 | <key>CFBundleDevelopmentRegion</key>
5 | <string>en</string>
6 | <key>CFBundleExecutable</key>
7 | <string>$(EXECUTABLE_NAME)</string>
8 | <key>CFBundleIdentifier</key>
9 | <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
10 | <key>CFBundleInfoDictionaryVersion</key>
11 | <string>6.0</string>
12 | <key>CFBundleName</key>
13 | <string>$(PRODUCT_NAME)</string>
14 | <key>CFBundlePackageType</key>
15 | <string>BNDL</string>
16 | <key>CFBundleShortVersionString</key>
17 | <string>1.0</string>
18 | <key>CFBundleSignature</key>
19 | <string>????</string>
20 | <key>CFBundleVersion</key>
21 | <string>$(CURRENT_PROJECT_VERSION)</string>
22 | <key>NSPrincipalClass</key>
23 | <string></string>
24 | </dict>
25 | </plist>
26 | 
--------------------------------------------------------------------------------
/MetalCamera.xcodeproj/MetalCamera_Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <plist version="1.0">
3 | <dict>
4 | <key>CFBundleDevelopmentRegion</key>
5 | <string>en</string>
6 | <key>CFBundleExecutable</key>
7 | <string>$(EXECUTABLE_NAME)</string>
8 | <key>CFBundleIdentifier</key>
9 | <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
10 | <key>CFBundleInfoDictionaryVersion</key>
11 | <string>6.0</string>
12 | <key>CFBundleName</key>
13 | <string>$(PRODUCT_NAME)</string>
14 | <key>CFBundlePackageType</key>
15 | <string>FMWK</string>
16 | <key>CFBundleShortVersionString</key>
17 | <string>1.0</string>
18 | <key>CFBundleSignature</key>
19 | <string>????</string>
20 | <key>CFBundleVersion</key>
21 | <string>$(CURRENT_PROJECT_VERSION)</string>
22 | <key>NSPrincipalClass</key>
23 | <string></string>
24 | </dict>
25 | </plist>
26 | 
--------------------------------------------------------------------------------
/MetalCamera.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 46;
7 | objects = {
8 |
9 | /* Begin PBXAggregateTarget section */
10 | "MetalCamera::MetalCameraPackageTests::ProductTarget" /* MetalCameraPackageTests */ = {
11 | isa = PBXAggregateTarget;
12 | buildConfigurationList = OBJ_90 /* Build configuration list for PBXAggregateTarget "MetalCameraPackageTests" */;
13 | buildPhases = (
14 | );
15 | dependencies = (
16 | OBJ_93 /* PBXTargetDependency */,
17 | );
18 | name = MetalCameraPackageTests;
19 | productName = MetalCameraPackageTests;
20 | };
21 | /* End PBXAggregateTarget section */
22 |
23 | /* Begin PBXBuildFile section */
24 | 077F00EA26E4744B008C6607 /* VideoPreview.swift in Sources */ = {isa = PBXBuildFile; fileRef = 077F00E926E4744B008C6607 /* VideoPreview.swift */; };
25 | 077F00F426E66FC2008C6607 /* Shaders.metal in Sources */ = {isa = PBXBuildFile; fileRef = 077F00F326E66FC2008C6607 /* Shaders.metal */; };
26 | OBJ_101 /* MetalCamera.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = "MetalCamera::MetalCamera::Product" /* MetalCamera.framework */; };
27 | OBJ_55 /* CoreMLClassifierHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_10 /* CoreMLClassifierHandler.swift */; };
28 | OBJ_56 /* CoreMLLoader.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_11 /* CoreMLLoader.swift */; };
29 | OBJ_57 /* CoreMLPoseNetHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_13 /* CoreMLPoseNetHandler.swift */; };
30 | OBJ_58 /* Joint.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_14 /* Joint.swift */; };
31 | OBJ_59 /* Pose.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_15 /* Pose.swift */; };
32 | OBJ_60 /* PoseNetOutput.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_16 /* PoseNetOutput.swift */; };
33 | OBJ_61 /* MetalCamera.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_17 /* MetalCamera.swift */; };
34 | OBJ_62 /* MetalRenderingDevice.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_18 /* MetalRenderingDevice.swift */; };
35 | OBJ_63 /* MetalVideoLoader.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_19 /* MetalVideoLoader.swift */; };
36 | OBJ_64 /* MetalVideoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_20 /* MetalVideoView.swift */; };
37 | OBJ_65 /* MetalVideoWriter.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_21 /* MetalVideoWriter.swift */; };
38 | OBJ_66 /* OperationChain.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_22 /* OperationChain.swift */; };
39 | OBJ_67 /* Texture.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_23 /* Texture.swift */; };
40 | OBJ_68 /* AudioBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_25 /* AudioBuffer.swift */; };
41 | OBJ_69 /* AudioCompositor.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_26 /* AudioCompositor.swift */; };
42 | OBJ_70 /* AudioStreamPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_27 /* AudioStreamPlayer.swift */; };
43 | OBJ_71 /* AlphaBlend.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_29 /* AlphaBlend.swift */; };
44 | OBJ_72 /* Gray.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_30 /* Gray.swift */; };
45 | OBJ_73 /* ImageCompositor.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_31 /* ImageCompositor.swift */; };
46 | OBJ_74 /* Lookup.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_32 /* Lookup.swift */; };
47 | OBJ_75 /* Kernel.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_33 /* Kernel.swift */; };
48 | OBJ_76 /* Mask.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_34 /* Mask.swift */; };
49 | OBJ_77 /* MetalKernel.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_35 /* MetalKernel.swift */; };
50 | OBJ_78 /* Rotation.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_36 /* Rotation.swift */; };
51 | OBJ_79 /* Colors.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_38 /* Colors.swift */; };
52 | OBJ_80 /* Maths.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_39 /* Maths.swift */; };
53 | OBJ_81 /* extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_40 /* extensions.swift */; };
54 | OBJ_88 /* Package.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_6 /* Package.swift */; };
55 | OBJ_99 /* MetalCameraTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = OBJ_43 /* MetalCameraTests.swift */; };
56 | /* End PBXBuildFile section */
57 |
58 | /* Begin PBXContainerItemProxy section */
59 | 077F00DB26E45271008C6607 /* PBXContainerItemProxy */ = {
60 | isa = PBXContainerItemProxy;
61 | containerPortal = OBJ_1 /* Project object */;
62 | proxyType = 1;
63 | remoteGlobalIDString = "MetalCamera::MetalCameraTests";
64 | remoteInfo = MetalCameraTests;
65 | };
66 | 077F00DC26E45271008C6607 /* PBXContainerItemProxy */ = {
67 | isa = PBXContainerItemProxy;
68 | containerPortal = OBJ_1 /* Project object */;
69 | proxyType = 1;
70 | remoteGlobalIDString = "MetalCamera::MetalCamera";
71 | remoteInfo = MetalCamera;
72 | };
73 | /* End PBXContainerItemProxy section */
74 |
75 | /* Begin PBXFileReference section */
76 | 077F00E926E4744B008C6607 /* VideoPreview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoPreview.swift; sourceTree = "<group>"; };
77 | 077F00F326E66FC2008C6607 /* Shaders.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; path = Shaders.metal; sourceTree = "<group>"; };
78 | "MetalCamera::MetalCamera::Product" /* MetalCamera.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = MetalCamera.framework; sourceTree = BUILT_PRODUCTS_DIR; };
79 | "MetalCamera::MetalCameraTests::Product" /* MetalCameraTests.xctest */ = {isa = PBXFileReference; lastKnownFileType = file; path = MetalCameraTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
80 | OBJ_10 /* CoreMLClassifierHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CoreMLClassifierHandler.swift; sourceTree = "<group>"; };
81 | OBJ_11 /* CoreMLLoader.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CoreMLLoader.swift; sourceTree = "<group>"; };
82 | OBJ_13 /* CoreMLPoseNetHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CoreMLPoseNetHandler.swift; sourceTree = "<group>"; };
83 | OBJ_14 /* Joint.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Joint.swift; sourceTree = "<group>"; };
84 | OBJ_15 /* Pose.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Pose.swift; sourceTree = "<group>"; };
85 | OBJ_16 /* PoseNetOutput.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PoseNetOutput.swift; sourceTree = "<group>"; };
86 | OBJ_17 /* MetalCamera.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalCamera.swift; sourceTree = "<group>"; };
87 | OBJ_18 /* MetalRenderingDevice.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalRenderingDevice.swift; sourceTree = "<group>"; };
88 | OBJ_19 /* MetalVideoLoader.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalVideoLoader.swift; sourceTree = "<group>"; };
89 | OBJ_20 /* MetalVideoView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalVideoView.swift; sourceTree = "<group>"; };
90 | OBJ_21 /* MetalVideoWriter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalVideoWriter.swift; sourceTree = "<group>"; };
91 | OBJ_22 /* OperationChain.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OperationChain.swift; sourceTree = "<group>"; };
92 | OBJ_23 /* Texture.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Texture.swift; sourceTree = "<group>"; };
93 | OBJ_25 /* AudioBuffer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioBuffer.swift; sourceTree = "<group>"; };
94 | OBJ_26 /* AudioCompositor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioCompositor.swift; sourceTree = "<group>"; };
95 | OBJ_27 /* AudioStreamPlayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioStreamPlayer.swift; sourceTree = "<group>"; };
96 | OBJ_29 /* AlphaBlend.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AlphaBlend.swift; sourceTree = "<group>"; };
97 | OBJ_30 /* Gray.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Gray.swift; sourceTree = "<group>"; };
98 | OBJ_31 /* ImageCompositor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageCompositor.swift; sourceTree = "<group>"; };
99 | OBJ_32 /* Lookup.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Lookup.swift; sourceTree = "<group>"; };
100 | OBJ_33 /* Kernel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Kernel.swift; sourceTree = "<group>"; };
101 | OBJ_34 /* Mask.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Mask.swift; sourceTree = "<group>"; };
102 | OBJ_35 /* MetalKernel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalKernel.swift; sourceTree = "<group>"; };
103 | OBJ_36 /* Rotation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Rotation.swift; sourceTree = "<group>"; };
104 | OBJ_38 /* Colors.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Colors.swift; sourceTree = "<group>"; };
105 | OBJ_39 /* Maths.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Maths.swift; sourceTree = "<group>"; };
106 | OBJ_40 /* extensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = extensions.swift; sourceTree = "<group>"; };
107 | OBJ_43 /* MetalCameraTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MetalCameraTests.swift; sourceTree = "<group>"; };
108 | OBJ_48 /* LICENSE */ = {isa = PBXFileReference; lastKnownFileType = text; path = LICENSE; sourceTree = "<group>"; };
109 | OBJ_49 /* README.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; path = README.md; sourceTree = "<group>"; };
110 | OBJ_6 /* Package.swift */ = {isa = PBXFileReference; explicitFileType = sourcecode.swift; path = Package.swift; sourceTree = "<group>"; };
111 | /* End PBXFileReference section */
112 |
113 | /* Begin PBXFrameworksBuildPhase section */
114 | OBJ_100 /* Frameworks */ = {
115 | isa = PBXFrameworksBuildPhase;
116 | buildActionMask = 0;
117 | files = (
118 | OBJ_101 /* MetalCamera.framework in Frameworks */,
119 | );
120 | runOnlyForDeploymentPostprocessing = 0;
121 | };
122 | OBJ_82 /* Frameworks */ = {
123 | isa = PBXFrameworksBuildPhase;
124 | buildActionMask = 0;
125 | files = (
126 | );
127 | runOnlyForDeploymentPostprocessing = 0;
128 | };
129 | /* End PBXFrameworksBuildPhase section */
130 |
131 | /* Begin PBXGroup section */
132 | 077F00E826E469CC008C6607 /* SwiftUI */ = {
133 | isa = PBXGroup;
134 | children = (
135 | 077F00E926E4744B008C6607 /* VideoPreview.swift */,
136 | );
137 | path = SwiftUI;
138 | sourceTree = "<group>";
139 | };
140 | 077F00F226E66FC2008C6607 /* shader */ = {
141 | isa = PBXGroup;
142 | children = (
143 | 077F00F326E66FC2008C6607 /* Shaders.metal */,
144 | );
145 | path = shader;
146 | sourceTree = "<group>";
147 | };
148 | OBJ_12 /* PoseNet */ = {
149 | isa = PBXGroup;
150 | children = (
151 | OBJ_13 /* CoreMLPoseNetHandler.swift */,
152 | OBJ_14 /* Joint.swift */,
153 | OBJ_15 /* Pose.swift */,
154 | OBJ_16 /* PoseNetOutput.swift */,
155 | );
156 | path = PoseNet;
157 | sourceTree = "<group>";
158 | };
159 | OBJ_24 /* audio */ = {
160 | isa = PBXGroup;
161 | children = (
162 | OBJ_25 /* AudioBuffer.swift */,
163 | OBJ_26 /* AudioCompositor.swift */,
164 | OBJ_27 /* AudioStreamPlayer.swift */,
165 | );
166 | path = audio;
167 | sourceTree = "<group>";
168 | };
169 | OBJ_28 /* operations */ = {
170 | isa = PBXGroup;
171 | children = (
172 | OBJ_29 /* AlphaBlend.swift */,
173 | OBJ_30 /* Gray.swift */,
174 | OBJ_31 /* ImageCompositor.swift */,
175 | OBJ_32 /* Lookup.swift */,
176 | OBJ_33 /* Kernel.swift */,
177 | OBJ_34 /* Mask.swift */,
178 | OBJ_35 /* MetalKernel.swift */,
179 | OBJ_36 /* Rotation.swift */,
180 | );
181 | path = operations;
182 | sourceTree = "<group>";
183 | };
184 | OBJ_37 /* utils */ = {
185 | isa = PBXGroup;
186 | children = (
187 | OBJ_38 /* Colors.swift */,
188 | OBJ_39 /* Maths.swift */,
189 | OBJ_40 /* extensions.swift */,
190 | );
191 | path = utils;
192 | sourceTree = "<group>";
193 | };
194 | OBJ_41 /* Tests */ = {
195 | isa = PBXGroup;
196 | children = (
197 | OBJ_42 /* MetalCameraTests */,
198 | );
199 | name = Tests;
200 | sourceTree = SOURCE_ROOT;
201 | };
202 | OBJ_42 /* MetalCameraTests */ = {
203 | isa = PBXGroup;
204 | children = (
205 | OBJ_43 /* MetalCameraTests.swift */,
206 | );
207 | name = MetalCameraTests;
208 | path = Tests/MetalCameraTests;
209 | sourceTree = SOURCE_ROOT;
210 | };
211 | OBJ_44 /* Products */ = {
212 | isa = PBXGroup;
213 | children = (
214 | "MetalCamera::MetalCamera::Product" /* MetalCamera.framework */,
215 | "MetalCamera::MetalCameraTests::Product" /* MetalCameraTests.xctest */,
216 | );
217 | name = Products;
218 | sourceTree = BUILT_PRODUCTS_DIR;
219 | };
220 | OBJ_5 = {
221 | isa = PBXGroup;
222 | children = (
223 | OBJ_6 /* Package.swift */,
224 | OBJ_7 /* Sources */,
225 | OBJ_41 /* Tests */,
226 | OBJ_44 /* Products */,
227 | OBJ_48 /* LICENSE */,
228 | OBJ_49 /* README.md */,
229 | );
230 | sourceTree = "<group>";
231 | };
232 | OBJ_7 /* Sources */ = {
233 | isa = PBXGroup;
234 | children = (
235 | OBJ_8 /* MetalCamera */,
236 | );
237 | name = Sources;
238 | sourceTree = SOURCE_ROOT;
239 | };
240 | OBJ_8 /* MetalCamera */ = {
241 | isa = PBXGroup;
242 | children = (
243 | 077F00E826E469CC008C6607 /* SwiftUI */,
244 | OBJ_9 /* CoreML */,
245 | OBJ_24 /* audio */,
246 | OBJ_28 /* operations */,
247 | OBJ_37 /* utils */,
248 | 077F00F226E66FC2008C6607 /* shader */,
249 | OBJ_17 /* MetalCamera.swift */,
250 | OBJ_18 /* MetalRenderingDevice.swift */,
251 | OBJ_19 /* MetalVideoLoader.swift */,
252 | OBJ_20 /* MetalVideoView.swift */,
253 | OBJ_21 /* MetalVideoWriter.swift */,
254 | OBJ_22 /* OperationChain.swift */,
255 | OBJ_23 /* Texture.swift */,
256 | );
257 | name = MetalCamera;
258 | path = Sources/MetalCamera;
259 | sourceTree = SOURCE_ROOT;
260 | };
261 | OBJ_9 /* CoreML */ = {
262 | isa = PBXGroup;
263 | children = (
264 | OBJ_10 /* CoreMLClassifierHandler.swift */,
265 | OBJ_11 /* CoreMLLoader.swift */,
266 | OBJ_12 /* PoseNet */,
267 | );
268 | path = CoreML;
269 | sourceTree = "<group>";
270 | };
271 | /* End PBXGroup section */
272 |
273 | /* Begin PBXNativeTarget section */
274 | "MetalCamera::MetalCamera" /* MetalCamera */ = {
275 | isa = PBXNativeTarget;
276 | buildConfigurationList = OBJ_51 /* Build configuration list for PBXNativeTarget "MetalCamera" */;
277 | buildPhases = (
278 | OBJ_54 /* Sources */,
279 | OBJ_82 /* Frameworks */,
280 | );
281 | buildRules = (
282 | );
283 | dependencies = (
284 | );
285 | name = MetalCamera;
286 | productName = MetalCamera;
287 | productReference = "MetalCamera::MetalCamera::Product" /* MetalCamera.framework */;
288 | productType = "com.apple.product-type.framework";
289 | };
290 | "MetalCamera::MetalCameraTests" /* MetalCameraTests */ = {
291 | isa = PBXNativeTarget;
292 | buildConfigurationList = OBJ_95 /* Build configuration list for PBXNativeTarget "MetalCameraTests" */;
293 | buildPhases = (
294 | OBJ_98 /* Sources */,
295 | OBJ_100 /* Frameworks */,
296 | );
297 | buildRules = (
298 | );
299 | dependencies = (
300 | OBJ_102 /* PBXTargetDependency */,
301 | );
302 | name = MetalCameraTests;
303 | productName = MetalCameraTests;
304 | productReference = "MetalCamera::MetalCameraTests::Product" /* MetalCameraTests.xctest */;
305 | productType = "com.apple.product-type.bundle.unit-test";
306 | };
307 | "MetalCamera::SwiftPMPackageDescription" /* MetalCameraPackageDescription */ = {
308 | isa = PBXNativeTarget;
309 | buildConfigurationList = OBJ_84 /* Build configuration list for PBXNativeTarget "MetalCameraPackageDescription" */;
310 | buildPhases = (
311 | OBJ_87 /* Sources */,
312 | );
313 | buildRules = (
314 | );
315 | dependencies = (
316 | );
317 | name = MetalCameraPackageDescription;
318 | productName = MetalCameraPackageDescription;
319 | productType = "com.apple.product-type.framework";
320 | };
321 | /* End PBXNativeTarget section */
322 |
323 | /* Begin PBXProject section */
324 | OBJ_1 /* Project object */ = {
325 | isa = PBXProject;
326 | attributes = {
327 | LastSwiftMigration = 9999;
328 | LastUpgradeCheck = 9999;
329 | };
330 | buildConfigurationList = OBJ_2 /* Build configuration list for PBXProject "MetalCamera" */;
331 | compatibilityVersion = "Xcode 3.2";
332 | developmentRegion = en;
333 | hasScannedForEncodings = 0;
334 | knownRegions = (
335 | en,
336 | );
337 | mainGroup = OBJ_5;
338 | productRefGroup = OBJ_44 /* Products */;
339 | projectDirPath = "";
340 | projectRoot = "";
341 | targets = (
342 | "MetalCamera::MetalCamera" /* MetalCamera */,
343 | "MetalCamera::SwiftPMPackageDescription" /* MetalCameraPackageDescription */,
344 | "MetalCamera::MetalCameraPackageTests::ProductTarget" /* MetalCameraPackageTests */,
345 | "MetalCamera::MetalCameraTests" /* MetalCameraTests */,
346 | );
347 | };
348 | /* End PBXProject section */
349 |
350 | /* Begin PBXSourcesBuildPhase section */
351 | OBJ_54 /* Sources */ = {
352 | isa = PBXSourcesBuildPhase;
353 | buildActionMask = 0;
354 | files = (
355 | 077F00F426E66FC2008C6607 /* Shaders.metal in Sources */,
356 | OBJ_55 /* CoreMLClassifierHandler.swift in Sources */,
357 | OBJ_56 /* CoreMLLoader.swift in Sources */,
358 | OBJ_57 /* CoreMLPoseNetHandler.swift in Sources */,
359 | OBJ_58 /* Joint.swift in Sources */,
360 | OBJ_59 /* Pose.swift in Sources */,
361 | OBJ_60 /* PoseNetOutput.swift in Sources */,
362 | OBJ_61 /* MetalCamera.swift in Sources */,
363 | OBJ_62 /* MetalRenderingDevice.swift in Sources */,
364 | OBJ_63 /* MetalVideoLoader.swift in Sources */,
365 | 077F00EA26E4744B008C6607 /* VideoPreview.swift in Sources */,
366 | OBJ_64 /* MetalVideoView.swift in Sources */,
367 | OBJ_65 /* MetalVideoWriter.swift in Sources */,
368 | OBJ_66 /* OperationChain.swift in Sources */,
369 | OBJ_67 /* Texture.swift in Sources */,
370 | OBJ_68 /* AudioBuffer.swift in Sources */,
371 | OBJ_69 /* AudioCompositor.swift in Sources */,
372 | OBJ_70 /* AudioStreamPlayer.swift in Sources */,
373 | OBJ_71 /* AlphaBlend.swift in Sources */,
374 | OBJ_72 /* Gray.swift in Sources */,
375 | OBJ_73 /* ImageCompositor.swift in Sources */,
376 | OBJ_74 /* Lookup.swift in Sources */,
377 | OBJ_75 /* Kernel.swift in Sources */,
378 | OBJ_76 /* Mask.swift in Sources */,
379 | OBJ_77 /* MetalKernel.swift in Sources */,
380 | OBJ_78 /* Rotation.swift in Sources */,
381 | OBJ_79 /* Colors.swift in Sources */,
382 | OBJ_80 /* Maths.swift in Sources */,
383 | OBJ_81 /* extensions.swift in Sources */,
384 | );
385 | runOnlyForDeploymentPostprocessing = 0;
386 | };
387 | OBJ_87 /* Sources */ = {
388 | isa = PBXSourcesBuildPhase;
389 | buildActionMask = 0;
390 | files = (
391 | OBJ_88 /* Package.swift in Sources */,
392 | );
393 | runOnlyForDeploymentPostprocessing = 0;
394 | };
395 | OBJ_98 /* Sources */ = {
396 | isa = PBXSourcesBuildPhase;
397 | buildActionMask = 0;
398 | files = (
399 | OBJ_99 /* MetalCameraTests.swift in Sources */,
400 | );
401 | runOnlyForDeploymentPostprocessing = 0;
402 | };
403 | /* End PBXSourcesBuildPhase section */
404 |
405 | /* Begin PBXTargetDependency section */
406 | OBJ_102 /* PBXTargetDependency */ = {
407 | isa = PBXTargetDependency;
408 | target = "MetalCamera::MetalCamera" /* MetalCamera */;
409 | targetProxy = 077F00DC26E45271008C6607 /* PBXContainerItemProxy */;
410 | };
411 | OBJ_93 /* PBXTargetDependency */ = {
412 | isa = PBXTargetDependency;
413 | target = "MetalCamera::MetalCameraTests" /* MetalCameraTests */;
414 | targetProxy = 077F00DB26E45271008C6607 /* PBXContainerItemProxy */;
415 | };
416 | /* End PBXTargetDependency section */
417 |
418 | /* Begin XCBuildConfiguration section */
419 | OBJ_3 /* Debug */ = {
420 | isa = XCBuildConfiguration;
421 | buildSettings = {
422 | CLANG_ENABLE_OBJC_ARC = YES;
423 | COMBINE_HIDPI_IMAGES = YES;
424 | COPY_PHASE_STRIP = NO;
425 | DEBUG_INFORMATION_FORMAT = dwarf;
426 | DYLIB_INSTALL_NAME_BASE = "@rpath";
427 | ENABLE_NS_ASSERTIONS = YES;
428 | GCC_OPTIMIZATION_LEVEL = 0;
429 | GCC_PREPROCESSOR_DEFINITIONS = (
430 | "$(inherited)",
431 | "SWIFT_PACKAGE=1",
432 | "DEBUG=1",
433 | );
434 | MACOSX_DEPLOYMENT_TARGET = 10.10;
435 | ONLY_ACTIVE_ARCH = YES;
436 | OTHER_SWIFT_FLAGS = "$(inherited) -DXcode";
437 | PRODUCT_NAME = "$(TARGET_NAME)";
438 | SDKROOT = macosx;
439 | SUPPORTED_PLATFORMS = "$(AVAILABLE_PLATFORMS)";
440 | SUPPORTS_MACCATALYST = YES;
441 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) SWIFT_PACKAGE DEBUG";
442 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
443 | USE_HEADERMAP = NO;
444 | };
445 | name = Debug;
446 | };
447 | OBJ_4 /* Release */ = {
448 | isa = XCBuildConfiguration;
449 | buildSettings = {
450 | CLANG_ENABLE_OBJC_ARC = YES;
451 | COMBINE_HIDPI_IMAGES = YES;
452 | COPY_PHASE_STRIP = YES;
453 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
454 | DYLIB_INSTALL_NAME_BASE = "@rpath";
455 | GCC_OPTIMIZATION_LEVEL = s;
456 | GCC_PREPROCESSOR_DEFINITIONS = (
457 | "$(inherited)",
458 | "SWIFT_PACKAGE=1",
459 | );
460 | MACOSX_DEPLOYMENT_TARGET = 10.10;
461 | OTHER_SWIFT_FLAGS = "$(inherited) -DXcode";
462 | PRODUCT_NAME = "$(TARGET_NAME)";
463 | SDKROOT = macosx;
464 | SUPPORTED_PLATFORMS = "$(AVAILABLE_PLATFORMS)";
465 | SUPPORTS_MACCATALYST = YES;
466 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited) SWIFT_PACKAGE";
467 | SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
468 | USE_HEADERMAP = NO;
469 | };
470 | name = Release;
471 | };
472 | OBJ_52 /* Debug */ = {
473 | isa = XCBuildConfiguration;
474 | buildSettings = {
475 | CURRENT_PROJECT_VERSION = 1;
476 | ENABLE_TESTABILITY = YES;
477 | FRAMEWORK_SEARCH_PATHS = (
478 | "$(inherited)",
479 | "$(PLATFORM_DIR)/Developer/Library/Frameworks",
480 | );
481 | HEADER_SEARCH_PATHS = "$(inherited)";
482 | INFOPLIST_FILE = MetalCamera.xcodeproj/MetalCamera_Info.plist;
483 | IPHONEOS_DEPLOYMENT_TARGET = 14.0;
484 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) $(TOOLCHAIN_DIR)/usr/lib/swift/macosx";
485 | OTHER_CFLAGS = "$(inherited)";
486 | OTHER_LDFLAGS = "$(inherited)";
487 | OTHER_SWIFT_FLAGS = "$(inherited)";
488 | PRODUCT_BUNDLE_IDENTIFIER = MetalCamera;
489 | PRODUCT_MODULE_NAME = "$(TARGET_NAME:c99extidentifier)";
490 | PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)";
491 | SDKROOT = iphoneos;
492 | SKIP_INSTALL = YES;
493 | SUPPORTED_PLATFORMS = "iphonesimulator iphoneos";
494 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited)";
495 | SWIFT_VERSION = 5.0;
496 | TARGET_NAME = MetalCamera;
497 | TVOS_DEPLOYMENT_TARGET = 9.0;
498 | WATCHOS_DEPLOYMENT_TARGET = 2.0;
499 | };
500 | name = Debug;
501 | };
502 | OBJ_53 /* Release */ = {
503 | isa = XCBuildConfiguration;
504 | buildSettings = {
505 | CURRENT_PROJECT_VERSION = 1;
506 | ENABLE_TESTABILITY = YES;
507 | FRAMEWORK_SEARCH_PATHS = (
508 | "$(inherited)",
509 | "$(PLATFORM_DIR)/Developer/Library/Frameworks",
510 | );
511 | HEADER_SEARCH_PATHS = "$(inherited)";
512 | INFOPLIST_FILE = MetalCamera.xcodeproj/MetalCamera_Info.plist;
513 | IPHONEOS_DEPLOYMENT_TARGET = 14.0;
514 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) $(TOOLCHAIN_DIR)/usr/lib/swift/macosx";
515 | OTHER_CFLAGS = "$(inherited)";
516 | OTHER_LDFLAGS = "$(inherited)";
517 | OTHER_SWIFT_FLAGS = "$(inherited)";
518 | PRODUCT_BUNDLE_IDENTIFIER = MetalCamera;
519 | PRODUCT_MODULE_NAME = "$(TARGET_NAME:c99extidentifier)";
520 | PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)";
521 | SDKROOT = iphoneos;
522 | SKIP_INSTALL = YES;
523 | SUPPORTED_PLATFORMS = "iphonesimulator iphoneos";
524 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited)";
525 | SWIFT_VERSION = 5.0;
526 | TARGET_NAME = MetalCamera;
527 | TVOS_DEPLOYMENT_TARGET = 9.0;
528 | WATCHOS_DEPLOYMENT_TARGET = 2.0;
529 | };
530 | name = Release;
531 | };
532 | OBJ_85 /* Debug */ = {
533 | isa = XCBuildConfiguration;
534 | buildSettings = {
535 | LD = /usr/bin/true;
536 | OTHER_SWIFT_FLAGS = "-swift-version 5 -I $(TOOLCHAIN_DIR)/usr/lib/swift/pm/4_2 -sdk /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.3.sdk -package-description-version 5.3.0";
537 | SDKROOT = iphoneos;
538 | SUPPORTED_PLATFORMS = "iphonesimulator iphoneos";
539 | SWIFT_VERSION = 5.0;
540 | };
541 | name = Debug;
542 | };
543 | OBJ_86 /* Release */ = {
544 | isa = XCBuildConfiguration;
545 | buildSettings = {
546 | LD = /usr/bin/true;
547 | OTHER_SWIFT_FLAGS = "-swift-version 5 -I $(TOOLCHAIN_DIR)/usr/lib/swift/pm/4_2 -sdk /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.3.sdk -package-description-version 5.3.0";
548 | SDKROOT = iphoneos;
549 | SUPPORTED_PLATFORMS = "iphonesimulator iphoneos";
550 | SWIFT_VERSION = 5.0;
551 | };
552 | name = Release;
553 | };
554 | OBJ_91 /* Debug */ = {
555 | isa = XCBuildConfiguration;
556 | buildSettings = {
557 | };
558 | name = Debug;
559 | };
560 | OBJ_92 /* Release */ = {
561 | isa = XCBuildConfiguration;
562 | buildSettings = {
563 | };
564 | name = Release;
565 | };
566 | OBJ_96 /* Debug */ = {
567 | isa = XCBuildConfiguration;
568 | buildSettings = {
569 | CLANG_ENABLE_MODULES = YES;
570 | CURRENT_PROJECT_VERSION = 1;
571 | EMBEDDED_CONTENT_CONTAINS_SWIFT = YES;
572 | FRAMEWORK_SEARCH_PATHS = (
573 | "$(inherited)",
574 | "$(PLATFORM_DIR)/Developer/Library/Frameworks",
575 | );
576 | HEADER_SEARCH_PATHS = "$(inherited)";
577 | INFOPLIST_FILE = MetalCamera.xcodeproj/MetalCameraTests_Info.plist;
578 | IPHONEOS_DEPLOYMENT_TARGET = 14.0;
579 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @loader_path/../Frameworks @loader_path/Frameworks";
580 | MACOSX_DEPLOYMENT_TARGET = 10.15;
581 | OTHER_CFLAGS = "$(inherited)";
582 | OTHER_LDFLAGS = "$(inherited)";
583 | OTHER_SWIFT_FLAGS = "$(inherited)";
584 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited)";
585 | SWIFT_VERSION = 5.0;
586 | TARGET_NAME = MetalCameraTests;
587 | TVOS_DEPLOYMENT_TARGET = 9.0;
588 | WATCHOS_DEPLOYMENT_TARGET = 7.0;
589 | };
590 | name = Debug;
591 | };
592 | OBJ_97 /* Release */ = {
593 | isa = XCBuildConfiguration;
594 | buildSettings = {
595 | CLANG_ENABLE_MODULES = YES;
596 | CURRENT_PROJECT_VERSION = 1;
597 | EMBEDDED_CONTENT_CONTAINS_SWIFT = YES;
598 | FRAMEWORK_SEARCH_PATHS = (
599 | "$(inherited)",
600 | "$(PLATFORM_DIR)/Developer/Library/Frameworks",
601 | );
602 | HEADER_SEARCH_PATHS = "$(inherited)";
603 | INFOPLIST_FILE = MetalCamera.xcodeproj/MetalCameraTests_Info.plist;
604 | IPHONEOS_DEPLOYMENT_TARGET = 14.0;
605 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @loader_path/../Frameworks @loader_path/Frameworks";
606 | MACOSX_DEPLOYMENT_TARGET = 10.15;
607 | OTHER_CFLAGS = "$(inherited)";
608 | OTHER_LDFLAGS = "$(inherited)";
609 | OTHER_SWIFT_FLAGS = "$(inherited)";
610 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = "$(inherited)";
611 | SWIFT_VERSION = 5.0;
612 | TARGET_NAME = MetalCameraTests;
613 | TVOS_DEPLOYMENT_TARGET = 9.0;
614 | WATCHOS_DEPLOYMENT_TARGET = 7.0;
615 | };
616 | name = Release;
617 | };
618 | /* End XCBuildConfiguration section */
619 |
620 | /* Begin XCConfigurationList section */
621 | OBJ_2 /* Build configuration list for PBXProject "MetalCamera" */ = {
622 | isa = XCConfigurationList;
623 | buildConfigurations = (
624 | OBJ_3 /* Debug */,
625 | OBJ_4 /* Release */,
626 | );
627 | defaultConfigurationIsVisible = 0;
628 | defaultConfigurationName = Release;
629 | };
630 | OBJ_51 /* Build configuration list for PBXNativeTarget "MetalCamera" */ = {
631 | isa = XCConfigurationList;
632 | buildConfigurations = (
633 | OBJ_52 /* Debug */,
634 | OBJ_53 /* Release */,
635 | );
636 | defaultConfigurationIsVisible = 0;
637 | defaultConfigurationName = Release;
638 | };
639 | OBJ_84 /* Build configuration list for PBXNativeTarget "MetalCameraPackageDescription" */ = {
640 | isa = XCConfigurationList;
641 | buildConfigurations = (
642 | OBJ_85 /* Debug */,
643 | OBJ_86 /* Release */,
644 | );
645 | defaultConfigurationIsVisible = 0;
646 | defaultConfigurationName = Release;
647 | };
648 | OBJ_90 /* Build configuration list for PBXAggregateTarget "MetalCameraPackageTests" */ = {
649 | isa = XCConfigurationList;
650 | buildConfigurations = (
651 | OBJ_91 /* Debug */,
652 | OBJ_92 /* Release */,
653 | );
654 | defaultConfigurationIsVisible = 0;
655 | defaultConfigurationName = Release;
656 | };
657 | OBJ_95 /* Build configuration list for PBXNativeTarget "MetalCameraTests" */ = {
658 | isa = XCConfigurationList;
659 | buildConfigurations = (
660 | OBJ_96 /* Debug */,
661 | OBJ_97 /* Release */,
662 | );
663 | defaultConfigurationIsVisible = 0;
664 | defaultConfigurationName = Release;
665 | };
666 | /* End XCConfigurationList section */
667 | };
668 | rootObject = OBJ_1 /* Project object */;
669 | }
670 |
--------------------------------------------------------------------------------
/MetalCamera.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
--------------------------------------------------------------------------------
/MetalCamera.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 | 
--------------------------------------------------------------------------------
/MetalCamera.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEWorkspaceSharedSettings_AutocreateContextsIfNeeded</key>
6 | 	<false/>
7 | </dict>
8 | </plist>
--------------------------------------------------------------------------------
/MetalCamera.xcodeproj/xcshareddata/xcschemes/MetalCamera-Package.xcscheme:
--------------------------------------------------------------------------------
(Shared Xcode scheme "MetalCamera-Package" with the build, test, profile, and archive actions for the MetalCamera package targets; the XML markup was stripped in this export.)
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version:5.3
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 |
4 | import PackageDescription
5 |
6 | let package = Package(
7 | name: "MetalCamera",
8 | platforms: [
9 | .iOS(.v15)
10 | ],
11 | products: [
12 | // Products define the executables and libraries a package produces, and make them visible to other packages.
13 | .library(
14 | name: "MetalCamera",
15 | targets: ["MetalCamera"]),
16 | ],
17 | dependencies: [
18 | // Dependencies declare other packages that this package depends on.
19 | // .package(url: /* package url */, from: "1.0.0"),
20 | ],
21 | targets: [
22 | // Targets are the basic building blocks of a package. A target can define a module or a test suite.
23 | // Targets can depend on other targets in this package, and on products in packages this package depends on.
24 | .target(
25 | name: "MetalCamera",
26 | dependencies: []),
27 | .testTarget(
28 | name: "MetalCameraTests",
29 | dependencies: ["MetalCamera"]),
30 | ]
31 | )
32 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # MetalCamera
2 |
7 | ## Motivation
8 | MetalCamera is an open source project for performing GPU-accelerated image and video processing on Mac and iOS.
9 |
10 | There are many ways to use the GPU on iOS, such as CIFilter, but many of them are either not open source or difficult to extend and contribute to.
11 |
12 | The main goal of this repository is to provide an easy-to-use interface, together with performance measurements, so that when you have an idea about image processing or machine learning on iOS, you can develop it and apply it to real services more easily.
13 |
14 | At this stage, the aim is to provide the following features in a simple form:
15 | * SwiftUI support
16 | * Camera input/output handling
17 | * Saving image frames to video
18 | * Basic image processing and filters
19 | * Downloading and running CoreML models
20 | * Visualizing CoreML model results
21 | * Benchmarking algorithms
22 |
23 |
24 | There are still a lot of bugs and many things left to implement,
25 | but I created this repository because I wanted to develop camera and vision features on iOS together with many people.
26 |
27 | Feel free to use it, and open an issue or a PR when you have an idea.
28 |
29 | Thanks.
30 |
31 | ## Example
32 |
33 | To run the example project, clone the repo and open Example.xcodeproj in the Example directory.
34 |
35 | ### Camera
36 | * SwiftUI case
37 | ```swift
38 | import SwiftUI
39 | import MetalCamera
40 |
41 | struct CameraSampleView: View {
42 | let camera = try! MetalCamera(videoOrientation: .portrait, isVideoMirrored: true)
43 | var body: some View {
44 | VideoPreview(operation: camera)
45 | .onAppear {
46 | camera.startCapture()
47 | }
48 | .onDisappear {
49 | camera.stopCapture()
50 | }
51 | }
52 | }
53 | ```
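
Note: camera access requires an `NSCameraUsageDescription` entry in the app's Info.plist (plus `NSMicrophoneUsageDescription` when `useMic` is enabled); otherwise iOS terminates the app as soon as the capture session starts.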
54 |
55 | * UIKit case (shown inside a `UIViewController` subclass; `useMic` is a `Bool` property)
56 | ```swift
57 | import MetalCamera
58 | @IBOutlet weak var preview: MetalVideoView!
59 | var camera: MetalCamera!
60 |
61 | override func viewDidLoad() {
62 | super.viewDidLoad()
63 | guard let camera = try? MetalCamera(useMic: useMic) else { return }
64 | camera-->preview
65 | self.camera = camera
66 | }
67 |
68 | override func viewWillAppear(_ animated: Bool) {
69 | super.viewWillAppear(animated)
70 | camera?.startCapture()
71 | }
72 |
73 | override func viewDidDisappear(_ animated: Bool) {
74 | super.viewDidDisappear(animated)
75 | camera?.stopCapture()
76 | }
77 | ```
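
The `-->` operator is declared alongside `OperationChain` in the sources; it registers the right-hand operation as a render target of the left-hand one and returns it, which is what lets longer chains such as `camera-->modelHandler-->preview` below read left to right.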
78 |
79 | ### Download and load a CoreML model from a web URL
80 | ```swift
81 | var coreMLLoader: CoreMLLoader?
82 | import MetalCamera
83 |
84 | let url = URL(string: "https://ml-assets.apple.com/coreml/models/Image/ImageSegmentation/DeepLabV3/DeepLabV3Int8LUT.mlmodel")!
85 |
86 | do {
87 | coreMLLoader = try CoreMLLoader(url: url, isForcedDownload: true)
88 | coreMLLoader?.load({ (progress) in
89 | debugPrint("Model downloading.... \(progress)")
90 | }, { (loadedModel, error) in
91 | if let loadedModel = loadedModel {
92 | debugPrint(loadedModel)
93 | } else if let error = error {
94 | debugPrint(error)
95 | }
96 | })
97 | } catch {
98 | debugPrint(error)
99 | }
100 | ```
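
Under the hood (see `CoreMLLoader.swift`), the `.mlmodel` file is downloaded into the app's Documents directory, under `CoreMLModel/` unless a custom `filePath` is passed, and then compiled with `MLModel.compileModel(at:)` before the completion handler is called. If the file already exists and `isForcedDownload` is false, the download is skipped and the cached file is compiled directly.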
101 |
102 | ### Segmentation Test (DeepLabV3Int8LUT model, iPhone XS, avg 63ms)
103 | 
104 |
105 | ```swift
106 | func loadCoreML() {
107 | do {
108 | let modelURL = URL(string: "https://ml-assets.apple.com/coreml/models/Image/ImageSegmentation/DeepLabV3/DeepLabV3Int8LUT.mlmodel")!
109 | let loader = try CoreMLLoader(url: modelURL)
110 | loader.load { [weak self](model, error) in
111 | if let model = model {
112 | self?.setupModelHandler(model)
113 | } else if let error = error {
114 | debugPrint(error)
115 | }
116 | }
117 | } catch {
118 | debugPrint(error)
119 | }
120 | }
121 |
122 | func setupModelHandler(_ model: MLModel) {
123 | do {
124 | let modelHandler = try CoreMLClassifierHandler(model)
125 | camera.removeTarget(preview)
126 | camera-->modelHandler-->preview
127 | } catch{
128 | debugPrint(error)
129 | }
130 | }
131 | ```
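
`CoreMLClassifierHandler` runs the Vision request on a background queue, renders the returned segmentation map into a Metal texture, and combines it with the current camera frame using either `Mask` or `AlphaBlend`, depending on the `resultType` passed to its initializer.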
132 |
133 | ### Compositing images or video, and rotation
134 | 
135 |
136 | ```swift
137 | let rotation90 = RotationOperation(.degree90_flip)
138 |
139 | let imageCompositor = ImageCompositor(baseTextureKey: camera.sourceKey)
140 | guard let testImage = UIImage(named: "sampleImage") else {
141 | fatalError("Check image resource")
142 | }
143 |
144 | let gray = Gray()
145 |
146 | let compositeFrame = CGRect(x: 50, y: 100, width: 250, height: 250)
147 | imageCompositor.addCompositeImage(testImage)
148 | imageCompositor.sourceFrame = compositeFrame
149 |
150 | videoCompositor = ImageCompositor(baseTextureKey: camera.sourceKey)
151 | videoCompositor.sourceFrame = CGRect(x: 320, y: 100, width: 450, height: 250)
152 |
153 | camera-->rotation90-->gray-->imageCompositor-->videoCompositor-->preview
154 |
155 | ```
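
Each `ImageCompositor` composites its overlay on top of the texture identified by `baseTextureKey` (here the camera's source key), and `sourceFrame` places the overlay in the base texture's coordinate space.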
156 |
157 | ### Filter
158 |
159 | * Lookup Filter
160 |
161 | 
162 |
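The original docs show only the demo image here, so below is a minimal sketch of applying a lookup filter; the exact `Lookup` initializer is an assumption, so check `Sources/MetalCamera/operations/Lookup.swift` and `Example/Views/LookupFilterView.swift` for the real API.

```swift
// Hypothetical sketch: the Lookup initializer shown here is assumed, not verbatim API.
guard let lookupImage = UIImage(named: "lookup_amatorka") else {
    fatalError("Check image resource")
}
let lookupFilter = Lookup(lookupImage)
camera-->lookupFilter-->preview
```
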
163 | ### Recording video and audio
164 | ```swift
165 | do {
166 | if FileManager.default.fileExists(atPath: recordingURL.path) {
167 | try FileManager.default.removeItem(at: recordingURL)
168 | }
169 |
170 | recorder = try MetalVideoWriter(url: recordingURL, videoSize: CGSize(width: 720, height: 1280), recordAudio: useMic)
171 | if let recorder = recorder {
172 | preview-->recorder
173 | if useMic {
174 | camera==>recorder
175 | }
176 |
177 | recorder.startRecording()
178 |
179 | }
180 | } catch {
181 | debugPrint(error)
182 | }
183 | ```
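
Here `-->` feeds the rendered video textures into the writer, while `==>` is its audio counterpart (see `AudioOperationChain` in the sources) and routes the microphone sample buffers to the recorder when `useMic` is true.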
184 |
185 | ## Requirements
186 | * Swift 5
187 | * Xcode 13.0 or higher on Mac
188 | * iOS 15.0 or higher (matching the `platforms` setting in `Package.swift`)
189 |
190 | ## Installation
191 |
192 | The [Swift Package Manager](https://swift.org/package-manager/) is a tool for automating the distribution of Swift code and is integrated into the `swift` compiler. MetalCamera supports its use on all of its supported platforms.
193 | 
194 | Once you have your Swift package set up, adding MetalCamera as a dependency is as easy as adding it to the `dependencies` value of your `Package.swift`.
195 |
196 | ```swift
197 | dependencies: [
198 | .package(url: "https://github.com/jsharp83/MetalCamera.git", .upToNextMinor(from: "0.2.0"))
199 | ]
200 | ```
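
Then add the `MetalCamera` product to every target that uses it; `"YourTarget"` below is a placeholder for your own target name:

```swift
.target(
    name: "YourTarget",
    dependencies: ["MetalCamera"])
```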
201 |
202 | ## References
203 |
204 | While creating this repository, I referenced the following repositories a lot.
205 | First of all, thanks to those who worked on and opened up so many parts in advance. Please let me know if there are any problems.
206 |
207 | * [GPUImage3](https://github.com/BradLarson/GPUImage3)
208 | * [MaLiang](https://github.com/Harley-xk/MaLiang)
209 | * [CoreMLHelpers](https://github.com/hollance/CoreMLHelpers)
210 | * [MetalPetal](https://github.com/MetalPetal/MetalPetal)
211 |
212 | ## Author
213 |
214 | jsharp83, jsharp83@gmail.com
215 |
216 | ## License
217 |
218 | MetalCamera is available under the MIT license. See the LICENSE file for more info.
219 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/CoreML/CoreMLClassifierHandler.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CoreMLHandler.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/12.
6 | //
7 |
8 | import Foundation
9 | import AVFoundation
10 | import CoreML
11 | import Vision
12 |
13 | public enum ClassificationResultType {
14 | case mask
15 | case alphaBlend
16 | }
17 |
18 | public class CoreMLClassifierHandler: CMSampleChain {
19 | public var targets = TargetContainer()
20 | let visionModel: VNCoreMLModel
21 | let imageCropAndScaleOption: VNImageCropAndScaleOption
22 | var request: VNCoreMLRequest?
23 | let dropFrame: Bool
24 | var isProcessing: Bool = false
25 | var currentTime = CMTime.zero
26 | var startTime: CFAbsoluteTime = 0
27 | var frameTexture: Texture?
28 |
29 | let resultOperation: TwoTextureOperationChain
30 |
31 | // TODO: I need to make benchmark module.
32 | public var runBenchmark = true
33 |
34 | var colorBuffer: MTLBuffer?
35 | private var pipelineState: MTLRenderPipelineState!
36 | private var render_target_vertex: MTLBuffer!
37 | private var render_target_uniform: MTLBuffer!
38 |
39 | public init(_ model: MLModel, imageCropAndScaleOption: VNImageCropAndScaleOption = .centerCrop, dropFrame: Bool = true, maxClasses: Int = 255, resultType: ClassificationResultType = .alphaBlend) throws {
40 | self.visionModel = try VNCoreMLModel(for: model)
41 | self.imageCropAndScaleOption = imageCropAndScaleOption
42 | self.dropFrame = dropFrame
43 |
44 | resultOperation = resultType == .mask ? Mask() : AlphaBlend()
45 |
46 | if maxClasses > randomColors.count {
47 | randomColors = generateRandomColors(maxClasses)
48 | }
49 | }
50 |
51 | private func setupPiplineState(_ colorPixelFormat: MTLPixelFormat = .bgra8Unorm, width: Int, height: Int) {
52 | do {
53 | let rpd = try sharedMetalRenderingDevice.generateRenderPipelineDescriptor("vertex_render_target", "segmentation_render_target", colorPixelFormat)
54 | pipelineState = try sharedMetalRenderingDevice.device.makeRenderPipelineState(descriptor: rpd)
55 |
56 | render_target_vertex = sharedMetalRenderingDevice.makeRenderVertexBuffer(size: CGSize(width: width, height: height))
57 | render_target_uniform = sharedMetalRenderingDevice.makeRenderUniformBuffer(CGSize(width: width, height: height))
58 | } catch {
59 | debugPrint(error)
60 | }
61 | }
62 |
63 | public func newBufferAvailable(_ sampleBuffer: CMSampleBuffer) {
64 | if dropFrame, isProcessing {
65 | if runBenchmark {
66 | debugPrint("Drop the frame....")
67 | }
68 |
69 | return
70 | }
71 |
72 | isProcessing = true
73 |
74 | currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
75 | startTime = CFAbsoluteTimeGetCurrent()
76 |
77 | guard let cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
78 |
79 | // FIXME: Refactoring is needed. I'm not sure whether the request needs to be kept around.
80 | let request = self.request != nil ? self.request! : createRequest()
81 | self.request = request
82 |
83 | let handler = VNImageRequestHandler(cvPixelBuffer: cameraFrame, options: [:])
84 | DispatchQueue.global().async {
85 | try? handler.perform([request])
86 | }
87 | }
88 |
89 | func generateTexture(_ segmentationMap: MLMultiArray, _ row: Int, _ col: Int, _ targetClass: Int) -> Texture? {
90 | if pipelineState == nil {
91 | setupPiplineState(width: col, height: row)
92 | }
93 |
94 | let outputTexture = Texture(col, row, timestamp: currentTime, textureKey: "segmentation")
95 |
96 | let renderPassDescriptor = MTLRenderPassDescriptor()
97 | let attachment = renderPassDescriptor.colorAttachments[0]
98 | attachment?.clearColor = MTLClearColorMake(1, 0, 0, 1)
99 | attachment?.texture = outputTexture.texture
100 | attachment?.loadAction = .clear
101 | attachment?.storeAction = .store
102 |
103 | let commandBuffer = sharedMetalRenderingDevice.commandQueue.makeCommandBuffer()
104 | let commandEncoder = commandBuffer?.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
105 |
106 | commandEncoder?.setRenderPipelineState(pipelineState)
107 |
108 | commandEncoder?.setVertexBuffer(render_target_vertex, offset: 0, index: 0)
109 | commandEncoder?.setVertexBuffer(render_target_uniform, offset: 0, index: 1)
110 |
111 | let segmentationBuffer = sharedMetalRenderingDevice.device.makeBuffer(bytes: segmentationMap.dataPointer,
112 | length: segmentationMap.count * MemoryLayout<Int32>.size,
113 | options: [])!
114 | commandEncoder?.setFragmentBuffer(segmentationBuffer, offset: 0, index: 0)
115 |
116 | let uniformBuffer = sharedMetalRenderingDevice.device.makeBuffer(bytes: [Int32(targetClass), Int32(col), Int32(row)] as [Int32],
117 | length: 3 * MemoryLayout<Int32>.size,
118 | options: [])!
119 | commandEncoder?.setFragmentBuffer(uniformBuffer, offset: 0, index: 1)
120 |
121 | commandEncoder?.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
122 | commandEncoder?.endEncoding()
123 | commandBuffer?.commit()
124 |
125 | return outputTexture
126 | }
127 |
128 | func visionRequestDidComplete(request: VNRequest, error: Error?) {
129 | let inferenceTime = CFAbsoluteTimeGetCurrent() - startTime
130 |
131 | if let observations = request.results as? [VNCoreMLFeatureValueObservation],
132 | let segmentationMap = observations.first?.featureValue.multiArrayValue {
133 |
134 | if runBenchmark {
135 | debugPrint("Request Complete")
136 | }
137 |
138 | guard let row = segmentationMap.shape[0] as? Int,
139 | let col = segmentationMap.shape[1] as? Int else {
140 | return
141 | }
142 |
143 | guard let frameTexture = frameTexture else { return }
144 |
145 | let targetClass = 15 // Human
146 |
147 | guard let outputTexture = generateTexture(segmentationMap, row, col, targetClass) else { return }
148 |
149 | resultOperation.newTextureAvailable(frameTexture, outputTexture) { [weak self](texture) in
150 | self?.operationFinished(texture)
151 | }
152 | }
153 |
154 | let totalTime = CFAbsoluteTimeGetCurrent() - startTime
155 | if runBenchmark {
156 | debugPrint("Current inferenceTime: \(1000.0 * inferenceTime)ms, totalTime: \(1000.0 * totalTime)ms")
157 | }
158 |
159 | self.isProcessing = false
160 | }
161 |
162 | func createRequest() -> VNCoreMLRequest {
163 | let request = VNCoreMLRequest(model: visionModel, completionHandler: visionRequestDidComplete(request:error:))
164 | request.imageCropAndScaleOption = imageCropAndScaleOption
165 | return request
166 | }
167 |
168 | public func newTextureAvailable(_ texture: Texture) {
169 | if currentTime == texture.timestamp {
170 | frameTexture = texture
171 | }
172 | }
173 | }
174 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/CoreML/CoreMLLoader.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CoreMLLoader.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/10.
6 | //
7 |
8 | import Foundation
9 | import CoreML
10 |
11 | enum CoreMLLoaderError: Error {
12 | case invalidFileName
13 | case compileFailed
14 | case loadFailed
15 | case removeExistFileFailed
16 | }
17 |
18 | public class CoreMLLoader {
19 | private let url: URL
20 | private let filePath: String
21 | private let isForcedDownload: Bool
22 | private var fileURL: URL {
23 | let documentsDir = try? FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true)
24 | let fileURL = URL(string: filePath, relativeTo: documentsDir)!
25 | return fileURL
26 | }
27 | private var progressObservation: NSKeyValueObservation?
28 |
29 | public init(url: URL, filePath: String? = nil, isForcedDownload: Bool = false) throws {
30 | self.url = url
31 | self.isForcedDownload = isForcedDownload
32 |
33 | let lastComponent = url.lastPathComponent
34 | guard lastComponent.hasSuffix(".mlmodel") else {
35 | throw CoreMLLoaderError.invalidFileName
36 | }
37 |
38 | if let filePath = filePath {
39 | self.filePath = filePath
40 | } else {
41 | self.filePath = "CoreMLModel/\(lastComponent)"
42 | }
43 | }
44 |
45 | // TODO: Cancel and handling background process are needed.
46 | public func load(_ progressHandler: ((Double) -> Void)? = nil,
47 | _ completionHandler: @escaping ((MLModel?, Error?) -> Void)) {
48 | if isForcedDownload {
49 | do {
50 | try FileManager.default.removeItem(atPath: fileURL.path)
51 | } catch {
52 | completionHandler(nil, CoreMLLoaderError.removeExistFileFailed)
53 | return
54 | }
55 | }
56 |
57 | if FileManager.default.fileExists(atPath: fileURL.path) {
58 | loadCoreML(completionHandler)
59 | } else {
60 | prepareDownloadFolder()
61 |
62 | let task = URLSession.shared.downloadTask(with: url) { (url, response, error) in
63 | if let path = url?.path {
64 | try! FileManager.default.moveItem(atPath: path, toPath: self.fileURL.path)
65 | }
66 | self.progressObservation?.invalidate()
67 | self.progressObservation = nil
68 |
69 | self.loadCoreML(completionHandler)
70 | }
71 |
72 | progressObservation = task.progress.observe(\.fractionCompleted) { (progress, value) in
73 | progressHandler?(progress.fractionCompleted)
74 | }
75 |
76 | task.resume()
77 | }
78 | }
79 |
80 | private func loadCoreML(_ completionHandler: ((MLModel?, Error?) -> Void)) {
81 | guard let compiledModelURL = try? MLModel.compileModel(at: fileURL) else {
82 | completionHandler(nil, CoreMLLoaderError.compileFailed)
83 | return
84 | }
85 |
86 | if let model = try? MLModel(contentsOf: compiledModelURL) {
87 | completionHandler(model, nil)
88 | } else {
89 | completionHandler(nil, CoreMLLoaderError.loadFailed)
90 | }
91 | }
92 |
93 | private func prepareDownloadFolder() {
94 | let directoryURL = fileURL.deletingLastPathComponent()
95 |
96 | if !FileManager.default.fileExists(atPath: directoryURL.path) {
97 | try? FileManager.default.createDirectory(at: directoryURL, withIntermediateDirectories: true, attributes: nil)
98 | }
99 | }
100 | }
101 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/CoreML/PoseNet/CoreMLPoseNetHandler.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CoreMLPoseNetHandler.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/29.
6 | //
7 |
8 | import Foundation
9 | import AVFoundation
10 | import CoreML
11 | import Vision
12 |
13 | public class CoreMLPoseNetHandler: CMSampleChain {
14 | public var targets = TargetContainer()
15 | let visionModel: VNCoreMLModel
16 | let imageCropAndScaleOption: VNImageCropAndScaleOption
17 | var request: VNCoreMLRequest?
18 | let dropFrame: Bool
19 | var isProcessing: Bool = false
20 | var currentTime = CMTime.zero
21 | var startTime: CFAbsoluteTime = 0
22 | var frameTexture: Texture?
23 |
24 | // TODO: I need to make benchmark module.
25 | public var runBenchmark = true
26 |
27 | var colorBuffer: MTLBuffer?
28 | private var pipelineState: MTLRenderPipelineState!
29 | private var render_target_vertex: MTLBuffer!
30 | private var render_target_uniform: MTLBuffer!
31 |
32 | private var computePipelineState: MTLComputePipelineState!
33 |
34 | private var mBufferA: MTLBuffer!
35 | private var mBufferB: MTLBuffer!
36 | private var mBufferResult: MTLBuffer!
37 |
38 | public init(_ model: MLModel, imageCropAndScaleOption: VNImageCropAndScaleOption = .scaleFill, dropFrame: Bool = true) throws {
39 | self.visionModel = try VNCoreMLModel(for: model)
40 | self.imageCropAndScaleOption = imageCropAndScaleOption
41 | self.dropFrame = dropFrame
42 | setupPiplineState(width: 512, height: 512)
43 | }
44 |
45 | private func setupPiplineState(_ colorPixelFormat: MTLPixelFormat = .bgra8Unorm, width: Int, height: Int) {
46 | do {
47 | let rpd = try sharedMetalRenderingDevice.generateRenderPipelineDescriptor("vertex_render_target", "fragment_render_target", colorPixelFormat)
48 | pipelineState = try sharedMetalRenderingDevice.device.makeRenderPipelineState(descriptor: rpd)
49 |
50 | render_target_vertex = sharedMetalRenderingDevice.makeRenderVertexBuffer(size: CGSize(width: width, height: height))
51 | render_target_uniform = sharedMetalRenderingDevice.makeRenderUniformBuffer(CGSize(width: width, height: height))
52 |
53 | computePipelineState = try sharedMetalRenderingDevice.makeComputePipelineState("add_arrays")
54 |
55 | let mA: [Float32] = [0,1,2,3,4]
56 | let mB: [Float32] = [10,11,12,13,14]
57 |
58 | mBufferA = sharedMetalRenderingDevice.device.makeBuffer(bytes: mA, length: 5 * MemoryLayout<Float32>.size, options: .storageModeShared)
59 | mBufferB = sharedMetalRenderingDevice.device.makeBuffer(bytes: mB, length: 5 * MemoryLayout<Float32>.size, options: .storageModeShared)
60 | mBufferResult = sharedMetalRenderingDevice.device.makeBuffer(length: 5 * MemoryLayout<Float32>.size, options: .storageModeShared)
61 | } catch {
62 | debugPrint(error)
63 | }
64 | }
65 |
66 | public func newBufferAvailable(_ sampleBuffer: CMSampleBuffer) {
67 | if dropFrame, isProcessing {
68 | if runBenchmark {
69 | debugPrint("Drop the frame....")
70 | }
71 |
72 | return
73 | }
74 |
75 | isProcessing = true
76 |
77 | currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
78 | startTime = CFAbsoluteTimeGetCurrent()
79 |
80 | guard let cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
81 |
82 | // FIXME: Refactoring is needed. I'm not sure whether the request needs to be kept around.
83 | let request = self.request != nil ? self.request! : createRequest()
84 | self.request = request
85 |
86 | let handler = VNImageRequestHandler(cvPixelBuffer: cameraFrame, options: [:])
87 | DispatchQueue.global().async {
88 | try? handler.perform([request])
89 | }
90 | }
91 |
92 | func generateTexture(_ posenet: PoseNetOutput) -> Texture? {
93 | guard let frameTexture = frameTexture else { return nil }
94 | if pipelineState == nil {
95 | setupPiplineState(width: frameTexture.texture.width, height: frameTexture.texture.height)
96 | }
97 |
98 | let outputTexture = Texture(frameTexture.texture.width, frameTexture.texture.height, timestamp: currentTime, textureKey: "posenet")
99 |
100 | let renderPassDescriptor = MTLRenderPassDescriptor()
101 | let attachment = renderPassDescriptor.colorAttachments[0]
102 | attachment?.clearColor = MTLClearColorMake(1, 0, 0, 1)
103 | attachment?.texture = outputTexture.texture
104 | attachment?.loadAction = .clear
105 | attachment?.storeAction = .store
106 |
107 | let commandBuffer = sharedMetalRenderingDevice.commandQueue.makeCommandBuffer()
108 | let commandEncoder = commandBuffer?.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
109 |
110 | commandEncoder?.setRenderPipelineState(pipelineState)
111 |
112 | commandEncoder?.setVertexBuffer(render_target_vertex, offset: 0, index: 0)
113 | commandEncoder?.setVertexBuffer(render_target_uniform, offset: 0, index: 1)
114 |
115 | commandEncoder?.setFragmentTexture(frameTexture.texture, index: 0)
116 | commandEncoder?.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
117 |
118 | commandEncoder?.endEncoding()
119 | commandBuffer?.commit()
120 |
121 |
122 |
123 | // let posenetBuffer = sharedMetalRenderingDevice.device.makeBuffer(bytes: posenet.heatmap.dataPointer,
124 | // length: posenet.heatmap.count * MemoryLayout.size,
125 | // options: [])!
126 | // commandEncoder?.setFragmentBuffer(posenetBuffer, offset: 0, index: 0)
127 | //
128 | // let shape = posenet.heatmap.shape
129 | //
130 | // let uniformBuffer = sharedMetalRenderingDevice.device.makeBuffer(bytes: [Int32(shape[0]), Int32(shape[1]), Int32(shape[2])] as [Int32],
131 | // length: 3 * MemoryLayout.size,
132 | // options: [])!
133 | // commandEncoder?.setFragmentBuffer(uniformBuffer, offset: 0, index: 1)
134 | // commandEncoder?.setFragmentTexture(frameTexture.texture, index: 0)
135 | //
136 | // commandEncoder?.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
137 | // commandEncoder?.endEncoding()
138 | // commandBuffer?.commit()
139 | //
140 | let startTime2 = CFAbsoluteTimeGetCurrent()
141 | let pose = Pose()
142 |
143 | for name in Joint.Name.allCases {
144 | let joint = pose.joints[name]!
145 |
146 | var bestCell = PoseNetOutput.Cell(0, 0)
147 | var bestConfidence: Float = 0.0
147 | for yIndex in 0..<posenet.height {
148 | for xIndex in 0..<posenet.width {
149 | let currentCell = PoseNetOutput.Cell(yIndex, xIndex)
150 | let currentConfidence = posenet.confidence(for: joint.name, at: currentCell)
151 | 
152 | // Keep track of the cell with the greatest confidence.
153 | // The joint's position is later derived from this cell.
154 | if currentConfidence > bestConfidence {
155 | bestConfidence = currentConfidence
156 | bestCell = currentCell
157 | }
158 | }
159 | }
160 | // print("\(bestCell), \(bestConfidence)")
161 | }
162 |
163 | let totalTime = CFAbsoluteTimeGetCurrent() - startTime2
164 | debugPrint("Current totalTime: \(1000.0 * totalTime)ms")
165 |
166 | return outputTexture
167 | }
168 |
169 | func visionRequestDidComplete(request: VNRequest, error: Error?) {
170 | let inferenceTime = CFAbsoluteTimeGetCurrent() - startTime
171 |
172 | if let observations = request.results as? [VNCoreMLFeatureValueObservation] {
173 | let output = PoseNetOutput(observations)
174 |
175 | handleOutput(output)
176 |
177 |
178 |
179 | // guard let outputTexture = generateTexture(output) else { return }
180 |
181 | // operationFinished(outputTexture)
182 |
183 | // let totalTime = CFAbsoluteTimeGetCurrent() - startTime
184 |
185 | // if runBenchmark {
186 | // debugPrint("Current inferenceTime: \(1000.0 * inferenceTime)ms, totalTime: \(1000.0 * totalTime)ms")
187 | // }
188 |
189 | self.isProcessing = false
190 | }
191 | }
192 |
193 | func handleOutput(_ output: PoseNetOutput) {
194 |
195 | let commandBuffer = sharedMetalRenderingDevice.commandQueue.makeCommandBuffer()
196 | guard let commandEncoder = commandBuffer?.makeComputeCommandEncoder(dispatchType: .serial) else { return }
197 | commandEncoder.setComputePipelineState(computePipelineState)
198 | commandEncoder.setBuffer(mBufferA, offset: 0, index: 0)
199 | commandEncoder.setBuffer(mBufferB, offset: 0, index: 1)
200 | commandEncoder.setBuffer(mBufferResult, offset: 0, index: 2)
201 |
202 | let gridSize = MTLSizeMake(5, 1, 1)
203 | let w = computePipelineState.threadExecutionWidth
204 | let h = computePipelineState.maxTotalThreadsPerThreadgroup / w
205 | let threadgroupSize = MTLSizeMake(w, h, 1)
206 | commandEncoder.dispatchThreads(gridSize, threadsPerThreadgroup: threadgroupSize)
207 |
208 | commandEncoder.endEncoding()
209 | commandBuffer?.commit()
210 |
211 | let rawPointer = mBufferResult.contents()
212 | let typePointer = rawPointer.bindMemory(to: Float32.self, capacity: 5)
213 | let bufferPointer = UnsafeBufferPointer(start: typePointer, count: 5)
214 | for item in bufferPointer {
215 | print(item)
216 | }
217 |
218 |
219 | let startTime2 = CFAbsoluteTimeGetCurrent()
220 | let pose = Pose()
221 |
222 | for name in Joint.Name.allCases {
223 | let joint = pose.joints[name]!
224 |
225 | var bestCell = PoseNetOutput.Cell(0, 0)
226 | var bestConfidence: Float = 0.0
227 | for yIndex in 0..<output.height {
228 | for xIndex in 0..<output.width {
229 | let currentCell = PoseNetOutput.Cell(yIndex, xIndex)
230 | let currentConfidence = output.confidence(for: joint.name, at: currentCell)
231 | 
232 | // Keep track of the cell with the greatest confidence.
233 | if currentConfidence > bestConfidence {
234 | bestConfidence = currentConfidence
235 | bestCell = currentCell
236 | }
237 | }
238 | }
239 | print("\(bestCell), \(bestConfidence)")
240 | }
241 |
242 | let totalTime = CFAbsoluteTimeGetCurrent() - startTime2
243 | debugPrint("Current totalTime: \(1000.0 * totalTime)ms")
244 | }
245 |
246 | func createRequest() -> VNCoreMLRequest {
247 | let request = VNCoreMLRequest(model: visionModel, completionHandler: visionRequestDidComplete(request:error:))
248 | request.imageCropAndScaleOption = imageCropAndScaleOption
249 | return request
250 | }
251 |
252 | public func newTextureAvailable(_ texture: Texture) {
253 | if currentTime == texture.timestamp {
254 | frameTexture = texture
255 | }
256 |
257 | operationFinished(texture)
258 | }
259 | }
260 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/CoreML/PoseNet/Joint.swift:
--------------------------------------------------------------------------------
1 |
2 | import UIKit
3 |
4 | class Joint {
5 | enum Name: Int, CaseIterable {
6 | case nose
7 | case leftEye
8 | case rightEye
9 | case leftEar
10 | case rightEar
11 | case leftShoulder
12 | case rightShoulder
13 | case leftElbow
14 | case rightElbow
15 | case leftWrist
16 | case rightWrist
17 | case leftHip
18 | case rightHip
19 | case leftKnee
20 | case rightKnee
21 | case leftAnkle
22 | case rightAnkle
23 | }
24 |
25 | /// The total number of joints available.
26 | static var numberOfJoints: Int {
27 | return Name.allCases.count
28 | }
29 |
30 | /// The name used to identify the joint.
31 | let name: Name
32 |
33 | /// The position of the joint relative to the image.
34 | ///
35 | /// The position is initially relative to the model's input image size and then mapped to the original image
36 | /// size after constructing the associated pose.
37 | var position: CGPoint
38 |
39 | /// The confidence score associated with this joint.
40 | ///
41 | /// The joint confidence is obtained from the `heatmap` array output by the PoseNet model.
42 | var confidence: Double
43 |
44 | /// A boolean value that indicates if the joint satisfies the joint threshold defined in the configuration.
45 | var isValid: Bool
46 |
47 | init(name: Name,
48 | position: CGPoint = .zero,
49 | confidence: Double = 0,
50 | isValid: Bool = false) {
51 | self.name = name
52 | self.position = position
53 | self.confidence = confidence
54 | self.isValid = isValid
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/CoreML/PoseNet/Pose.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Pose.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/08/14.
6 | //
7 |
8 | import Foundation
9 |
10 | struct Pose {
11 |
12 | /// A structure used to describe a parent-child relationship between two joints.
13 | struct Edge {
14 | let index: Int
15 | let parent: Joint.Name
16 | let child: Joint.Name
17 |
18 | init(from parent: Joint.Name, to child: Joint.Name, index: Int) {
19 | self.index = index
20 | self.parent = parent
21 | self.child = child
22 | }
23 | }
24 |
25 | /// An array of edges used to define the connections between the joints.
26 | ///
27 | /// The index relates to the index used to access the associated value within the displacement maps
28 | /// output by the PoseNet model.
29 | static let edges = [
30 | Edge(from: .nose, to: .leftEye, index: 0),
31 | Edge(from: .leftEye, to: .leftEar, index: 1),
32 | Edge(from: .nose, to: .rightEye, index: 2),
33 | Edge(from: .rightEye, to: .rightEar, index: 3),
34 | Edge(from: .nose, to: .leftShoulder, index: 4),
35 | Edge(from: .leftShoulder, to: .leftElbow, index: 5),
36 | Edge(from: .leftElbow, to: .leftWrist, index: 6),
37 | Edge(from: .leftShoulder, to: .leftHip, index: 7),
38 | Edge(from: .leftHip, to: .leftKnee, index: 8),
39 | Edge(from: .leftKnee, to: .leftAnkle, index: 9),
40 | Edge(from: .nose, to: .rightShoulder, index: 10),
41 | Edge(from: .rightShoulder, to: .rightElbow, index: 11),
42 | Edge(from: .rightElbow, to: .rightWrist, index: 12),
43 | Edge(from: .rightShoulder, to: .rightHip, index: 13),
44 | Edge(from: .rightHip, to: .rightKnee, index: 14),
45 | Edge(from: .rightKnee, to: .rightAnkle, index: 15)
46 | ]
47 |
48 | /// The joints that make up a pose.
49 | private(set) var joints: [Joint.Name: Joint] = [
50 | .nose: Joint(name: .nose),
51 | .leftEye: Joint(name: .leftEye),
52 | .leftEar: Joint(name: .leftEar),
53 | .leftShoulder: Joint(name: .leftShoulder),
54 | .leftElbow: Joint(name: .leftElbow),
55 | .leftWrist: Joint(name: .leftWrist),
56 | .leftHip: Joint(name: .leftHip),
57 | .leftKnee: Joint(name: .leftKnee),
58 | .leftAnkle: Joint(name: .leftAnkle),
59 | .rightEye: Joint(name: .rightEye),
60 | .rightEar: Joint(name: .rightEar),
61 | .rightShoulder: Joint(name: .rightShoulder),
62 | .rightElbow: Joint(name: .rightElbow),
63 | .rightWrist: Joint(name: .rightWrist),
64 | .rightHip: Joint(name: .rightHip),
65 | .rightKnee: Joint(name: .rightKnee),
66 | .rightAnkle: Joint(name: .rightAnkle)
67 | ]
68 |
69 | /// The confidence score associated with this pose.
70 | var confidence: Double = 0.0
71 |
72 | /// Accesses the joint with the specified name.
73 | subscript(jointName: Joint.Name) -> Joint {
74 | get {
75 | assert(joints[jointName] != nil)
76 | return joints[jointName]!
77 | }
78 | set {
79 | joints[jointName] = newValue
80 | }
81 | }
82 |
83 | /// Returns all edges that link **from** or **to** the specified joint.
84 | ///
85 | /// - parameters:
86 | /// - jointName: Query joint name.
87 | /// - returns: All edges that connect to or from `jointName`.
88 | static func edges(for jointName: Joint.Name) -> [Edge] {
89 | return Pose.edges.filter {
90 | $0.parent == jointName || $0.child == jointName
91 | }
92 | }
93 |
94 | /// Returns the edge having the specified parent and child joint names.
95 | ///
96 | /// - parameters:
97 | /// - parentJointName: Edge's parent joint name.
98 | /// - childJointName: Edge's child joint name.
99 | /// - returns: All edges that connect to or from `jointName`.
100 | static func edge(from parentJointName: Joint.Name, to childJointName: Joint.Name) -> Edge? {
101 | return Pose.edges.first(where: { $0.parent == parentJointName && $0.child == childJointName })
102 | }
103 | }
104 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/CoreML/PoseNet/PoseNetOutput.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PoseNetOutput.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/08/14.
6 | //
7 |
8 | import Foundation
9 | import CoreML
10 | import Vision
11 |
12 | public struct PoseNetOutput {
13 | enum Feature: String {
14 | case heatmap = "heatmap"
15 | case offsets = "offsets"
16 | case backwardDisplacementMap = "displacementBwd"
17 | case forwardDisplacementMap = "displacementFwd"
18 | }
19 |
20 | private(set) var heatmap: MLMultiArray!
21 | private(set) var offsets: MLMultiArray!
22 | private(set) var backwardDisplacementMap: MLMultiArray!
23 | private(set) var forwardDisplacementMap: MLMultiArray!
24 |
25 | var height: Int {
26 | return heatmap.shape[1].intValue
27 | }
28 |
29 | /// Returns the **width** of the output array (`heatmap.shape[2]`).
30 | var width: Int {
31 | return heatmap.shape[2].intValue
32 | }
33 |
34 | /// The PoseNet model's output stride.
35 | ///
36 | /// Valid strides are 16 and 8 and define the resolution of the grid output by the model. Smaller strides
37 | /// result in higher-resolution grids with an expected increase in accuracy but require more computation. Larger
38 | /// strides provide a more coarse grid and typically less accurate but are computationally cheaper in comparison.
39 | ///
40 | /// - Note: The output stride is dependent on the chosen model and specified in the metadata. Other variants of the
41 | /// PoseNet models are available from the Model Gallery.
42 | let modelOutputStride: Int = 16
43 |
44 | init(_ predictionResult: [VNCoreMLFeatureValueObservation]) {
45 | for result in predictionResult {
46 | if let feature = Feature(rawValue: result.featureName) {
47 | switch feature {
48 | case .heatmap:
49 | self.heatmap = result.featureValue.multiArrayValue
50 | case .offsets:
51 | self.offsets = result.featureValue.multiArrayValue
52 | case .backwardDisplacementMap:
53 | self.backwardDisplacementMap = result.featureValue.multiArrayValue
54 | case .forwardDisplacementMap:
55 | self.forwardDisplacementMap = result.featureValue.multiArrayValue
56 | }
57 | }
58 | }
59 | }
60 |
61 | struct Cell {
62 | let yIndex: Int
63 | let xIndex: Int
64 |
65 | init(_ yIndex: Int, _ xIndex: Int) {
66 | self.yIndex = yIndex
67 | self.xIndex = xIndex
68 | }
69 |
70 | static var zero: Cell {
71 | return Cell(0, 0)
72 | }
73 | }
74 |
75 | func offset(for jointName: Joint.Name, at cell: Cell) -> CGVector {
76 | // Create the index for the y and x component of the offset.
77 | let yOffsetIndex = [jointName.rawValue, cell.yIndex, cell.xIndex]
78 | let xOffsetIndex = [jointName.rawValue + Joint.numberOfJoints, cell.yIndex, cell.xIndex]
79 |
80 | // Obtain y and x component of the offset from the offsets array.
81 | let offsetY: Float = offsets[yOffsetIndex].floatValue
82 | let offsetX: Float = offsets[xOffsetIndex].floatValue
83 |
84 | return CGVector(dx: CGFloat(offsetX), dy: CGFloat(offsetY))
85 | }
86 |
87 | func position(for jointName: Joint.Name, at cell: Cell) -> CGPoint {
88 | let jointOffset = offset(for: jointName, at: cell)
89 |
90 | // First, calculate the joint’s coarse position.
91 | var jointPosition = CGPoint(x: cell.xIndex * modelOutputStride,
92 | y: cell.yIndex * modelOutputStride)
93 |
94 | // Then, add the offset to get a precise position.
95 | jointPosition += jointOffset
96 |
97 | return jointPosition
98 | }
99 |
100 | func confidence(for jointName: Joint.Name, at cell: Cell) -> Float {
101 | let multiArrayIndex = [jointName.rawValue, cell.yIndex, cell.xIndex]
102 | return heatmap[multiArrayIndex].floatValue
103 | }
104 | }
105 |
106 | extension MLMultiArray {
107 | subscript(index: [Int]) -> NSNumber {
108 | return self[index.map { NSNumber(value: $0) } ]
109 | }
110 | }
111 |
112 | extension CGPoint {
113 | /// Calculates and returns the result of an element-wise addition.
114 | static func + (_ lhs: CGPoint, _ rhs: CGVector) -> CGPoint {
115 | return CGPoint(x: lhs.x + rhs.dx, y: lhs.y + rhs.dy)
116 | }
117 |
118 | /// Performs element-wise addition.
119 | static func += (lhs: inout CGPoint, _ rhs: CGVector) {
120 | lhs.x += rhs.dx
121 | lhs.y += rhs.dy
122 | }
123 |
124 | /// Calculates and returns the result of an element-wise multiplication.
125 | static func * (_ lhs: CGPoint, _ scale: CGFloat) -> CGPoint {
126 | return CGPoint(x: lhs.x * scale, y: lhs.y * scale)
127 | }
128 |
129 | /// Calculates and returns the result of an element-wise multiplication.
130 | static func * (_ lhs: CGPoint, _ rhs: CGSize) -> CGPoint {
131 | return CGPoint(x: lhs.x * rhs.width, y: lhs.y * rhs.height)
132 | }
133 | }
134 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/MetalCamera.swift:
--------------------------------------------------------------------------------
1 | import AVFoundation
2 |
3 | enum MetalCameraError: Error {
4 | case noVideoDevice
5 | case noAudioDevice
6 | case deviceInputInitialize
7 | }
8 |
9 | public class MetalCamera: NSObject, OperationChain, AudioOperationChain {
10 | public static let libraryName = "Metal Camera"
11 | public var runBenchmark = false
12 | public var logFPS = false
13 |
14 | public let captureSession: AVCaptureSession
15 | public var inputCamera: AVCaptureDevice!
16 |
17 | var videoInput: AVCaptureDeviceInput!
18 | let videoOutput: AVCaptureVideoDataOutput!
19 | var videoTextureCache: CVMetalTextureCache?
20 |
21 | var audioInput: AVCaptureDeviceInput?
22 | var audioOutput: AVCaptureAudioDataOutput?
23 |
24 | let cameraProcessingQueue = DispatchQueue.global()
25 | let cameraFrameProcessingQueue = DispatchQueue(label: "MetalCamera.cameraFrameProcessingQueue", attributes: [])
26 |
27 | let frameRenderingSemaphore = DispatchSemaphore(value: 1)
28 |
29 | var numberOfFramesCaptured = 0
30 | var totalFrameTimeDuringCapture: Double = 0.0
31 | var framesSinceLastCheck = 0
32 | var lastCheckTime = CFAbsoluteTimeGetCurrent()
33 |
34 | public let sourceKey: String
35 | public var targets = TargetContainer()
36 | public var audioTargets = TargetContainer()
37 |
38 | let useMic: Bool
39 | var currentPosition = AVCaptureDevice.Position.front
40 | var videoOrientation: AVCaptureVideoOrientation?
41 | var isVideoMirrored: Bool?
42 |
43 | public init(sessionPreset: AVCaptureSession.Preset = .hd1280x720,
44 | position: AVCaptureDevice.Position = .front,
45 | sourceKey: String = "camera",
46 | useMic: Bool = false,
47 | videoOrientation: AVCaptureVideoOrientation? = nil,
48 | isVideoMirrored: Bool? = nil) throws {
49 | self.sourceKey = sourceKey
50 | self.useMic = useMic
51 |
52 | captureSession = AVCaptureSession()
53 | captureSession.beginConfiguration()
54 |
55 | videoOutput = AVCaptureVideoDataOutput()
56 | videoOutput.videoSettings = [kCVPixelBufferMetalCompatibilityKey as String: true,
57 | kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
58 |
59 | if captureSession.canAddOutput(videoOutput) {
60 | captureSession.addOutput(videoOutput)
61 | }
62 |
63 | self.videoOrientation = videoOrientation
64 | self.isVideoMirrored = isVideoMirrored
65 |
66 | super.init()
67 |
68 | defer {
69 | captureSession.commitConfiguration()
70 | }
71 |
72 | try updateVideoInput(position: position)
73 |
74 | if useMic {
75 | guard let audio = AVCaptureDevice.default(for: .audio),
76 | let audioInput = try? AVCaptureDeviceInput(device: audio) else {
77 | throw MetalCameraError.noAudioDevice
78 | }
79 |
80 | let audioDataOutput = AVCaptureAudioDataOutput()
81 |
82 | if captureSession.canAddInput(audioInput) {
83 | captureSession.addInput(audioInput)
84 | }
85 |
86 | if captureSession.canAddOutput(audioDataOutput) {
87 | captureSession.addOutput(audioDataOutput)
88 | }
89 |
90 | self.audioInput = audioInput
91 | self.audioOutput = audioDataOutput
92 | }
93 |
94 | captureSession.sessionPreset = sessionPreset
95 | // The defer above already commits the configuration; an extra commitConfiguration() here would be unbalanced.
96 |
97 | CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, sharedMetalRenderingDevice.device, nil, &videoTextureCache)
98 |
99 | videoOutput.setSampleBufferDelegate(self, queue: cameraProcessingQueue)
100 | audioOutput?.setSampleBufferDelegate(self, queue: cameraProcessingQueue)
101 | }
102 |
103 | deinit {
104 | cameraFrameProcessingQueue.sync {
105 | stopCapture()
106 | videoOutput?.setSampleBufferDelegate(nil, queue:nil)
107 | }
108 | }
109 |
110 | public func startCapture() {
111 | guard captureSession.isRunning == false else { return }
112 |
113 | let _ = frameRenderingSemaphore.wait(timeout:DispatchTime.distantFuture)
114 | numberOfFramesCaptured = 0
115 | totalFrameTimeDuringCapture = 0
116 | frameRenderingSemaphore.signal()
117 |
118 | captureSession.startRunning()
119 | }
120 |
121 | public func stopCapture() {
122 | guard captureSession.isRunning else { return }
123 |
124 | let _ = frameRenderingSemaphore.wait(timeout:DispatchTime.distantFuture)
125 | captureSession.stopRunning()
126 | self.frameRenderingSemaphore.signal()
127 | }
128 |
129 | private func updateVideoInput(position: AVCaptureDevice.Position) throws {
130 | guard let device = position.device() else {
131 | throw MetalCameraError.noVideoDevice
132 | }
133 |
134 | inputCamera = device
135 |
136 | if videoInput != nil {
137 | captureSession.removeInput(videoInput)
138 | }
139 |
140 | do {
141 | self.videoInput = try AVCaptureDeviceInput(device: inputCamera)
142 | } catch {
143 | throw MetalCameraError.deviceInputInitialize
144 | }
145 |
146 | if (captureSession.canAddInput(videoInput)) {
147 | captureSession.addInput(videoInput)
148 | }
149 |
150 | if let orientation = videoOrientation {
151 | videoOutput.connection(with: .video)?.videoOrientation = orientation
152 | }
153 |
154 | if let isVideoMirrored = isVideoMirrored, position == .front {
155 | videoOutput.connection(with: .video)?.isVideoMirrored = isVideoMirrored
156 | }
157 |
158 | currentPosition = position
159 | }
160 |
161 | public func switchPosition() throws {
162 | captureSession.beginConfiguration()
163 | try updateVideoInput(position: currentPosition == .front ? .back : .front)
164 | captureSession.commitConfiguration()
165 | }
166 |
167 | public func newTextureAvailable(_ texture: Texture) {}
168 | public func newAudioAvailable(_ sampleBuffer: AudioBuffer) {}
169 | }
170 |
171 | extension MetalCamera: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
172 | public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
173 | if connection == videoOutput?.connection(with: .video) {
174 | for target in targets {
175 | if let target = target as? CMSampleChain {
176 | target.newBufferAvailable(sampleBuffer)
177 | }
178 | }
179 |
180 | handleVideo(sampleBuffer)
181 |
182 | } else if connection == audioOutput?.connection(with: .audio) {
183 | handleAudio(sampleBuffer)
184 | }
185 | }
186 |
187 | private func handleVideo(_ sampleBuffer: CMSampleBuffer) {
188 | guard (frameRenderingSemaphore.wait(timeout:DispatchTime.now()) == DispatchTimeoutResult.success) else { return }
189 | guard let cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
190 | guard let videoTextureCache = videoTextureCache else { return }
191 |
192 | let startTime = CFAbsoluteTimeGetCurrent()
193 | let bufferWidth = CVPixelBufferGetWidth(cameraFrame)
194 | let bufferHeight = CVPixelBufferGetHeight(cameraFrame)
195 | let currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
196 |
197 | CVPixelBufferLockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0)))
198 |
199 | cameraFrameProcessingQueue.async {
200 | CVPixelBufferUnlockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0)))
201 |
202 | let texture: Texture?
203 |
204 | var textureRef: CVMetalTexture? = nil
205 | let _ = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, videoTextureCache, cameraFrame, nil, .bgra8Unorm, bufferWidth, bufferHeight, 0, &textureRef)
206 | if let concreteTexture = textureRef,
207 | let cameraTexture = CVMetalTextureGetTexture(concreteTexture) {
208 | texture = Texture(texture: cameraTexture, timestamp: currentTime, textureKey: self.sourceKey)
209 | } else {
210 | texture = nil
211 | }
212 |
213 | if let texture = texture {
214 | self.operationFinished(texture)
215 | }
216 |
217 | if self.runBenchmark {
218 | self.numberOfFramesCaptured += 1
219 |
220 | let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime)
221 | self.totalFrameTimeDuringCapture += currentFrameTime
222 | debugPrint("Average frame time : \(1000.0 * self.totalFrameTimeDuringCapture / Double(self.numberOfFramesCaptured)) ms")
223 | debugPrint("Current frame time : \(1000.0 * currentFrameTime) ms")
224 | }
225 |
226 | if self.logFPS {
227 | if ((CFAbsoluteTimeGetCurrent() - self.lastCheckTime) > 1.0) {
228 | self.lastCheckTime = CFAbsoluteTimeGetCurrent()
229 | debugPrint("FPS: \(self.framesSinceLastCheck)")
230 | self.framesSinceLastCheck = 0
231 | }
232 | self.framesSinceLastCheck += 1
233 | }
234 |
235 | self.frameRenderingSemaphore.signal()
236 | }
237 | }
238 |
239 | private func handleAudio(_ sampleBuffer: CMSampleBuffer) {
240 | audioOperationFinished(AudioBuffer(sampleBuffer, sourceKey))
241 | }
242 | }
243 |
244 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/MetalRenderingDevice.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MetalRenderingDevice.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/04.
6 | //
7 |
8 | import UIKit
9 | import Metal
10 |
11 | public let sharedMetalRenderingDevice = MetalRenderingDevice()
12 |
13 | public class MetalRenderingDevice {
14 | public let device: MTLDevice
15 | public let commandQueue: MTLCommandQueue
16 |
17 | init() {
18 | guard let device = MTLCreateSystemDefaultDevice() else { fatalError("Could not create Metal Device") }
19 | self.device = device
20 |
21 | guard let queue = self.device.makeCommandQueue() else { fatalError("Could not create command queue") }
22 | self.commandQueue = queue
23 | }
24 |
25 | func generateRenderPipelineDescriptor(_ vertexFuncName: String, _ fragmentFuncName: String, _ colorPixelFormat: MTLPixelFormat = .bgra8Unorm) throws -> MTLRenderPipelineDescriptor {
26 | let framework = Bundle(for: MetalCamera.self)
27 | let resource = framework.path(forResource: "default", ofType: "metallib")!
28 | let library = try self.device.makeLibrary(filepath: resource)
29 |
30 | let vertex_func = library.makeFunction(name: vertexFuncName)
31 | let fragment_func = library.makeFunction(name: fragmentFuncName)
32 | let rpd = MTLRenderPipelineDescriptor()
33 | rpd.vertexFunction = vertex_func
34 | rpd.fragmentFunction = fragment_func
35 | rpd.colorAttachments[0].pixelFormat = colorPixelFormat
36 |
37 | return rpd
38 | }
39 |
40 | func makeRenderVertexBuffer(_ origin: CGPoint = .zero, size: CGSize) -> MTLBuffer? {
41 | let w = size.width, h = size.height
42 | let vertices = [
43 | Vertex(position: CGPoint(x: origin.x , y: origin.y), textCoord: CGPoint(x: 0, y: 0)),
44 | Vertex(position: CGPoint(x: origin.x + w , y: origin.y), textCoord: CGPoint(x: 1, y: 0)),
45 | Vertex(position: CGPoint(x: origin.x + 0 , y: origin.y + h), textCoord: CGPoint(x: 0, y: 1)),
46 | Vertex(position: CGPoint(x: origin.x + w , y: origin.y + h), textCoord: CGPoint(x: 1, y: 1)),
47 | ]
48 | return makeRenderVertexBuffer(vertices)
49 | }
50 |
51 | func makeRenderVertexBuffer(_ vertices: [Vertex]) -> MTLBuffer? {
52 | return self.device.makeBuffer(bytes: vertices, length: MemoryLayout.stride * vertices.count, options: .cpuCacheModeWriteCombined)
53 | }
54 |
55 | func makeRenderUniformBuffer(_ size: CGSize) -> MTLBuffer? {
56 | let matrix = Matrix.identity
57 | matrix.scaling(x: 2 / Float(size.width), y: -2 / Float(size.height), z: 1)
58 | matrix.translation(x: -1, y: 1, z: 0)
59 | return self.device.makeBuffer(bytes: matrix.m, length: MemoryLayout<Float>.size * 16, options: [])
60 | }
61 |
62 | func makeComputePipelineState(_ funcName: String) throws -> MTLComputePipelineState? {
63 | let framework = Bundle(for: MetalCamera.self)
64 | let resource = framework.path(forResource: "default", ofType: "metallib")!
65 | let library = try self.device.makeLibrary(filepath: resource)
66 | guard let mtlFunc = library.makeFunction(name: funcName) else { return nil }
67 | let pipelineState = try self.device.makeComputePipelineState(function: mtlFunc)
68 |
69 | return pipelineState
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/MetalVideoLoader.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MetalVideoLoader.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/06.
6 | //
7 |
8 | import Foundation
9 | import AVFoundation
10 |
11 | public class MetalVideoLoader: OperationChain, AudioOperationChain {
12 | public let sourceKey: String
13 | public var targets = TargetContainer()
14 | public var audioTargets = TargetContainer()
15 |
16 | private let asset: AVAsset
17 | private var assetReader: AVAssetReader!
18 | private var videoTrackOutput: AVAssetReaderTrackOutput?
19 | private var audioTrackOutput: AVAssetReaderTrackOutput?
20 | private let loop: Bool
21 | private let playAtActualSpeed: Bool
22 | private let useAudio: Bool
23 |
24 | private var previousFrameTime = CMTime.zero
25 | private var previousActualFrameTime = CFAbsoluteTimeGetCurrent()
26 |
27 | private var videoTextureCache: CVMetalTextureCache?
28 |
29 | public convenience init(url: URL, playAtActualSpeed: Bool = true, loop: Bool = true, sourceKey: String = "video", useAudio: Bool = false) throws {
30 | let asset = AVURLAsset(url: url, options: [AVURLAssetPreferPreciseDurationAndTimingKey: true])
31 | try self.init(asset: asset, playAtActualSpeed: playAtActualSpeed, loop: loop, useAudio: useAudio)
32 | }
33 |
34 | public init(asset: AVAsset, playAtActualSpeed: Bool = true, loop: Bool = true, sourceKey: String = "video", useAudio: Bool = false) throws {
35 | self.asset = asset
36 | self.loop = loop
37 | self.playAtActualSpeed = playAtActualSpeed
38 | self.sourceKey = sourceKey
39 | self.useAudio = useAudio
40 |
41 | let _ = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, sharedMetalRenderingDevice.device, nil, &videoTextureCache)
42 | try createAssetReader()
43 | }
44 |
45 | func createAssetReader() throws {
46 | assetReader = try AVAssetReader(asset: self.asset)
47 |
48 | let outputSettings: [String: Any] = [kCVPixelBufferMetalCompatibilityKey as String: true,
49 | kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
50 |
51 | if let videoTrack = asset.tracks(withMediaType: .video).first {
52 | let videoTrackOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: outputSettings)
53 | assetReader.add(videoTrackOutput)
54 | self.videoTrackOutput = videoTrackOutput
55 | } else {
56 | self.videoTrackOutput = nil
57 | }
58 |
59 | guard useAudio else { return }
60 |
61 | if let audioTrack = asset.tracks(withMediaType: .audio).first {
62 | let outputSettings: [String: Any] = [AVFormatIDKey: kAudioFormatLinearPCM,
63 | AVNumberOfChannelsKey: 1,
64 | AVSampleRateKey: 44100,
65 | ]
66 |
67 | let audioTrackOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: outputSettings)
68 | assetReader.add(audioTrackOutput)
69 | self.audioTrackOutput = audioTrackOutput
70 | } else {
71 | self.audioTrackOutput = nil
72 | }
73 | }
74 |
75 | public func start() {
76 | if assetReader.status == .cancelled {
77 | restart()
78 | return
79 | }
80 |
81 | asset.loadValuesAsynchronously(forKeys: ["tracks"]) {
82 | DispatchQueue.global().async {
83 | guard (self.asset.statusOfValue(forKey: "tracks", error: nil) == .loaded) else {
84 | return
85 | }
86 |
87 | guard self.assetReader.startReading() else {
88 | debugPrint("Couldn't start reading")
89 | return
90 | }
91 |
92 | self.processReadingTrack()
93 | }
94 | }
95 | }
96 |
97 | public func stop() {
98 | assetReader.cancelReading()
99 | }
100 |
101 | private func processReadingTrack() {
102 | while assetReader.status == .reading {
103 | if let videoTrackOutput = videoTrackOutput {
104 | readNextVideoFrame(from: videoTrackOutput)
105 | }
106 | if let audioTrackOutput = audioTrackOutput {
107 | readNextAudioFrame(from: audioTrackOutput)
108 | }
109 | }
110 |
111 | if assetReader.status == .completed && loop {
112 | assetReader.cancelReading()
113 | restart()
114 | }
115 | }
116 |
117 | private func restart() {
118 | do {
119 | try createAssetReader()
120 | start()
121 | } catch {
122 | debugPrint(error)
123 | }
124 | }
125 |
126 | private func readNextVideoFrame(from videoTrackOutput: AVAssetReaderOutput) {
127 | guard assetReader.status == .reading else { return }
128 |
129 | if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() {
130 | if playAtActualSpeed {
131 | let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
132 | let differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime)
133 | let currentActualTime = CFAbsoluteTimeGetCurrent()
134 |
135 | let frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame)
136 | let actualTimeDifference = currentActualTime - previousActualFrameTime
137 |
138 | if (frameTimeDifference > actualTimeDifference) {
139 | usleep(UInt32(round(1000000.0 * (frameTimeDifference - actualTimeDifference))))
140 | }
141 |
142 | previousFrameTime = currentSampleTime
143 | previousActualFrameTime = CFAbsoluteTimeGetCurrent()
144 | }
145 |
146 | debugPrint("Read Video frame")
147 | process(sampleBuffer)
148 | CMSampleBufferInvalidate(sampleBuffer)
149 | }
150 | }
151 |
152 | private func readNextAudioFrame(from audioTrackOutput: AVAssetReaderOutput) {
153 | guard assetReader.status == .reading else { return }
154 |
155 | if let sampleBuffer = audioTrackOutput.copyNextSampleBuffer() {
156 | debugPrint("Read Audio frame")
157 | audioOperationFinished(AudioBuffer(sampleBuffer, sourceKey))
158 | CMSampleBufferInvalidate(sampleBuffer)
159 | }
160 | }
161 |
162 | private func process(_ frame:CMSampleBuffer) {
163 | guard let videoTextureCache = videoTextureCache else { return }
164 |
165 | let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(frame)
166 | let movieFrame = CMSampleBufferGetImageBuffer(frame)!
167 |
168 | let bufferHeight = CVPixelBufferGetHeight(movieFrame)
169 | let bufferWidth = CVPixelBufferGetWidth(movieFrame)
170 |
171 | CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0)))
172 | let texture:Texture?
173 | var textureRef: CVMetalTexture? = nil
174 |
175 | let _ = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, videoTextureCache, movieFrame, nil, .bgra8Unorm, bufferWidth, bufferHeight, 0, &textureRef)
176 | if let concreteTexture = textureRef,
177 | let cameraTexture = CVMetalTextureGetTexture(concreteTexture) {
178 | texture = Texture(texture: cameraTexture, timestamp: currentSampleTime, textureKey: self.sourceKey)
179 | } else {
180 | texture = nil
181 | }
182 |
183 | CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0)))
184 |
185 | if let texture = texture {
186 | operationFinished(texture)
187 | }
188 | }
189 |
190 | public func newTextureAvailable(_ texture: Texture) {}
191 | public func newAudioAvailable(_ sampleBuffer: AudioBuffer) {}
192 | }
193 |
194 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/MetalVideoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MetalVideoView.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/04.
6 | //
7 |
8 | import Foundation
9 | import MetalKit
10 |
11 | public class MetalVideoView: MTKView {
12 | public let targets = TargetContainer()
13 | var currentTexture: Texture?
14 |
15 | private var pipelineState: MTLRenderPipelineState!
16 | private var render_target_vertex: MTLBuffer!
17 | private var render_target_uniform: MTLBuffer!
18 |
19 | public init() {
20 | super.init(frame: .zero, device: sharedMetalRenderingDevice.device)
21 | setup()
22 | }
23 |
24 | public required init(coder: NSCoder) {
25 | super.init(coder: coder)
26 |
27 | setup()
28 | }
29 |
30 | private func setup() {
31 | self.device = sharedMetalRenderingDevice.device
32 |
33 | isOpaque = false
34 | setupTargetUniforms()
35 |
36 | do {
37 | try setupPipelineState()
38 | } catch {
39 | fatalError("Metal initialize failed: \(error.localizedDescription)")
40 | }
41 | }
42 |
43 | func setupTargetUniforms() {
44 | let size = UIScreen.main.bounds.size
45 | render_target_vertex = sharedMetalRenderingDevice.makeRenderVertexBuffer(size: size)
46 | render_target_uniform = sharedMetalRenderingDevice.makeRenderUniformBuffer(size)
47 | }
48 |
49 | private func setupPipelineState() throws {
50 | let rpd = try sharedMetalRenderingDevice.generateRenderPipelineDescriptor("vertex_render_target", "fragment_render_target", colorPixelFormat)
51 | pipelineState = try sharedMetalRenderingDevice.device.makeRenderPipelineState(descriptor: rpd)
52 | }
53 |
54 | public override func draw(_ rect:CGRect) {
55 | if let currentDrawable = self.currentDrawable, let imageTexture = currentTexture {
56 | let renderPassDescriptor = MTLRenderPassDescriptor()
57 | let attachment = renderPassDescriptor.colorAttachments[0]
58 | attachment?.clearColor = clearColor
59 | attachment?.texture = currentDrawable.texture
60 | attachment?.loadAction = .clear
61 | attachment?.storeAction = .store
62 |
63 | let commandBuffer = sharedMetalRenderingDevice.commandQueue.makeCommandBuffer()
64 | let commandEncoder = commandBuffer?.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
65 |
66 | commandEncoder?.setRenderPipelineState(pipelineState)
67 |
68 | commandEncoder?.setVertexBuffer(render_target_vertex, offset: 0, index: 0)
69 | commandEncoder?.setVertexBuffer(render_target_uniform, offset: 0, index: 1)
70 | commandEncoder?.setFragmentTexture(imageTexture.texture, index: 0)
71 | commandEncoder?.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
72 |
73 | commandEncoder?.endEncoding()
74 | commandBuffer?.present(currentDrawable)
75 | commandBuffer?.commit()
76 | }
77 | }
78 | }
79 |
80 | extension MetalVideoView: OperationChain {
81 | public func newTextureAvailable(_ texture: Texture) {
82 | self.drawableSize = CGSize(width: texture.texture.width, height: texture.texture.height)
83 | currentTexture = texture
84 | operationFinished(texture)
85 | }
86 | }
87 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/MetalVideoWriter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MetalVideoWriter.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/06.
6 | //
7 |
8 | import Foundation
9 | import AVFoundation
10 | import CoreImage
11 |
12 | public class MetalVideoWriter: OperationChain, AudioOperationChain {
13 | public var targets = TargetContainer<OperationChain>()
14 | public var audioTargets = TargetContainer<AudioOperationChain>()
15 |
16 | private var isRecording = false
17 | private var startTime: CMTime?
18 | private var previousFrameTime = CMTime.negativeInfinity
19 | private var previousAudioTime = CMTime.negativeInfinity
20 |
21 | private let assetWriter: AVAssetWriter
22 | private let assetWriterVideoInput: AVAssetWriterInput
23 | private let assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor
24 |
25 | private let assetWriterAudioInput: AVAssetWriterInput?
26 |
27 | private let url: URL
28 | private let videoSize: CGSize
29 |
30 | private var pipelineState: MTLRenderPipelineState!
31 | private var render_target_vertex: MTLBuffer!
32 | private var render_target_uniform: MTLBuffer!
33 |
34 | private let textureInputSemaphore = DispatchSemaphore(value:1)
35 |
36 | let ciContext = CIContext(mtlDevice: sharedMetalRenderingDevice.device, options: nil)
37 |
38 | private let recordAudio: Bool
39 |
40 | public init(url: URL, videoSize: CGSize, fileType: AVFileType = .mov, settings: [String: Any]? = nil, recordAudio: Bool = false) throws {
41 | assetWriter = try AVAssetWriter(url: url, fileType: fileType)
42 | self.videoSize = videoSize
43 | self.url = url
44 | self.recordAudio = recordAudio
45 |
46 | // Setup Video
47 | let localSettings: [String: Any]
48 |
49 | if let settings = settings {
50 | localSettings = settings
51 | } else {
52 | localSettings = [AVVideoCodecKey: AVVideoCodecType.h264,
53 | AVVideoWidthKey: videoSize.width,
54 | AVVideoHeightKey: videoSize.height ]
55 | }
56 |
57 | assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaType.video, outputSettings:localSettings)
58 | assetWriterVideoInput.expectsMediaDataInRealTime = true
59 |
60 | let sourcePixelBufferAttributes: [String: Any] = [
61 | kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
62 | kCVPixelBufferWidthKey as String: videoSize.width,
63 | kCVPixelBufferHeightKey as String: videoSize.height,
64 | AVVideoCompressionPropertiesKey: [
65 | AVVideoAverageBitRateKey: 2300000
66 | ]]
67 |
68 | assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput,
69 | sourcePixelBufferAttributes: sourcePixelBufferAttributes)
70 | assetWriter.add(assetWriterVideoInput)
71 |
72 | // Setup Audio
73 | if recordAudio {
74 | let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: [
75 | AVFormatIDKey: kAudioFormatMPEG4AAC,
76 | AVNumberOfChannelsKey: 1,
77 | AVSampleRateKey: 44100,
78 | AVEncoderBitRateKey: 64000
79 | ])
80 |
81 | audioInput.expectsMediaDataInRealTime = true
82 | if assetWriter.canAdd(audioInput) {
83 | assetWriter.add(audioInput)
84 | }
85 |
86 | assetWriterAudioInput = audioInput
87 | } else {
88 | assetWriterAudioInput = nil
89 | }
90 |
91 | setupPipelineState()
92 | loadRenderTargetVertex(videoSize)
93 | }
94 |
95 | public func startRecording() {
96 | self.startTime = nil
97 | self.isRecording = self.assetWriter.startWriting()
98 | }
99 |
100 | public func finishRecording(_ completionCallback:(() -> Void)? = nil) {
101 | self.isRecording = false
102 |
103 | if (self.assetWriter.status == .completed || self.assetWriter.status == .cancelled || self.assetWriter.status == .unknown) {
104 | DispatchQueue.global().async{
105 | completionCallback?()
106 | }
107 | return
108 | }
109 |
110 | self.assetWriterVideoInput.markAsFinished()
111 | self.assetWriter.finishWriting {
112 | completionCallback?()
113 | debugPrint("Write finished!!")
114 | }
115 | }
116 |
117 | private func setupPipelineState(_ colorPixelFormat: MTLPixelFormat = .bgra8Unorm) {
118 | do {
119 | let rpd = try sharedMetalRenderingDevice.generateRenderPipelineDescriptor("vertex_render_target", "fragment_render_target", colorPixelFormat)
120 | pipelineState = try sharedMetalRenderingDevice.device.makeRenderPipelineState(descriptor: rpd)
121 | } catch {
122 | debugPrint(error)
123 | }
124 | }
125 |
126 | private func loadRenderTargetVertex(_ baseTextureSize: CGSize) {
127 | render_target_vertex = sharedMetalRenderingDevice.makeRenderVertexBuffer(size: baseTextureSize)
128 | render_target_uniform = sharedMetalRenderingDevice.makeRenderUniformBuffer(baseTextureSize)
129 | }
130 | }
131 |
132 | // MARK: Video processing
133 | extension MetalVideoWriter {
134 | public func newTextureAvailable(_ texture: Texture) {
135 | DispatchQueue.main.sync {
136 |
137 | guard isRecording else { return }
138 |
139 | guard let frameTime = texture.timestamp else { return }
140 |
141 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
142 | defer {
143 | textureInputSemaphore.signal()
144 | }
145 |
146 | if startTime == nil {
147 | assetWriter.startSession(atSourceTime: frameTime)
148 | startTime = frameTime
149 | }
150 |
151 | guard assetWriterVideoInput.isReadyForMoreMediaData,
152 | let inputPixelBufferPool = assetWriterPixelBufferInput.pixelBufferPool else {
153 | debugPrint("Had to drop a frame at time \(frameTime)")
154 | return
155 | }
156 |
157 | var pixelBufferFromPool: CVPixelBuffer? = nil
158 | let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, inputPixelBufferPool, &pixelBufferFromPool)
159 | guard let pixelBuffer = pixelBufferFromPool, (pixelBufferStatus == kCVReturnSuccess) else { return }
160 |
161 | CVPixelBufferLockBaseAddress(pixelBuffer, [])
162 |
163 | // FIXME: Has problem with Color format and orientation.
164 | let kciOptions = [CIImageOption.colorSpace: CGColorSpaceCreateDeviceRGB()] as [CIImageOption : Any]
165 | var ciImage = CIImage(mtlTexture: texture.texture, options: kciOptions)
166 | ciImage = ciImage?.oriented(.downMirrored)
167 | ciContext.render(ciImage!, to: pixelBuffer)
168 |
169 | // FIXME: Want to fix renderIntoPixelBuffer func rather than using CIFilter.
170 | // renderIntoPixelBuffer(pixelBuffer, texture:texture)
171 | if (!assetWriterPixelBufferInput.append(pixelBuffer, withPresentationTime:frameTime)) {
172 | debugPrint("Problem appending pixel buffer at time: \(frameTime)")
173 | }
174 |
175 | CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
176 |
177 | textureInputSemaphore.signal()
178 | operationFinished(texture)
179 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
180 | }
181 | }
182 |
183 | func renderIntoPixelBuffer(_ pixelBuffer: CVPixelBuffer, texture: Texture) {
184 | guard let pixelBufferBytes = CVPixelBufferGetBaseAddress(pixelBuffer) else {
185 | debugPrint("Could not get buffer bytes")
186 | return
187 | }
188 |
189 | let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
190 |
191 | let outputTexture: Texture
192 | if (Int(round(self.videoSize.width)) != texture.texture.width) && (Int(round(self.videoSize.height)) != texture.texture.height) {
193 | outputTexture = Texture(Int(videoSize.width), Int(videoSize.height), timestamp: texture.timestamp, textureKey: texture.textureKey)
194 |
195 | let renderPassDescriptor = MTLRenderPassDescriptor()
196 | let attachment = renderPassDescriptor.colorAttachments[0]
197 | attachment?.clearColor = MTLClearColorMake(1, 0, 0, 1)
198 | attachment?.texture = outputTexture.texture
199 | attachment?.loadAction = .clear
200 | attachment?.storeAction = .store
201 |
202 | let commandBuffer = sharedMetalRenderingDevice.commandQueue.makeCommandBuffer()
203 | let commandEncoder = commandBuffer?.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
204 |
205 | commandEncoder?.setRenderPipelineState(pipelineState)
206 |
207 | commandEncoder?.setVertexBuffer(render_target_vertex, offset: 0, index: 0)
208 | commandEncoder?.setVertexBuffer(render_target_uniform, offset: 0, index: 1)
209 | commandEncoder?.setFragmentTexture(texture.texture, index: 0)
210 | commandEncoder?.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
211 |
212 | commandEncoder?.endEncoding()
213 | commandBuffer?.commit()
214 | commandBuffer?.waitUntilCompleted()
215 | } else {
216 | outputTexture = texture
217 | }
218 |
219 | let region = MTLRegionMake2D(0, 0, outputTexture.texture.width, outputTexture.texture.height)
220 | outputTexture.texture.getBytes(pixelBufferBytes, bytesPerRow: bytesPerRow, from: region, mipmapLevel: 0)
221 | }
222 | }
223 |
224 | // MARK: Audio processing
225 | extension MetalVideoWriter {
226 | public func newAudioAvailable(_ sampleBuffer: AudioBuffer) {
227 | handleAudio(sampleBuffer)
228 | audioOperationFinished(sampleBuffer)
229 | }
230 |
231 | private func handleAudio(_ sampleBuffer: AudioBuffer) {
232 | guard isRecording, startTime != nil,
233 | let audioInput = assetWriterAudioInput else { return }
234 |
235 | if audioInput.isReadyForMoreMediaData {
236 | audioInput.append(sampleBuffer.buffer)
237 | }
238 | }
239 | }
240 |
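
Usage sketch (not from the original sources): creating a writer and recording until done. The `camera` chaining lines are left as comments because they assume a hypothetical upstream source conforming to OperationChain and AudioOperationChain.

import AVFoundation
import CoreGraphics

func makeWriter() throws -> MetalVideoWriter {
    let outputURL = FileManager.default.temporaryDirectory.appendingPathComponent("out.mov")
    let writer = try MetalVideoWriter(url: outputURL,
                                      videoSize: CGSize(width: 720, height: 1280),
                                      recordAudio: true)
    // camera --> writer   // route processed video textures into the writer
    // camera ==> writer   // route audio sample buffers into the writer
    writer.startRecording()
    return writer
}
// ... later, when finished: writer.finishRecording { debugPrint("Write finished") }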
--------------------------------------------------------------------------------
/Sources/MetalCamera/OperationChain.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OperationChain.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/04.
6 | //
7 |
8 | import Foundation
9 | import AVFoundation
10 |
11 | public protocol OperationChain: AnyObject {
12 | var targets: TargetContainer<OperationChain> { get }
13 | func newTextureAvailable(_ texture: Texture)
14 | func operationFinished(_ texture: Texture)
15 | }
16 |
17 | extension OperationChain {
18 | func addTarget(_ target: OperationChain) {
19 | targets.append(target)
20 | }
21 |
22 | public func removeTarget(_ target: OperationChain) {
23 | targets.remove(target)
24 | }
25 |
26 | public func removeAllTargets() {
27 | targets.removeAll()
28 | }
29 |
30 | public func operationFinished(_ texture: Texture) {
31 | for target in targets {
32 | target?.newTextureAvailable(texture)
33 | }
34 | }
35 | }
36 |
37 | public protocol TwoTextureOperationChain: OperationChain {
38 | func newTextureAvailable(_ source1: Texture, _ source2: Texture, completion: @escaping ((_ texture: Texture) -> Void))
39 | }
40 |
41 | extension TwoTextureOperationChain {
42 | func newTextureAvailable(_ texture: Texture) {
43 | fatalError("Should be use newTextureAvailable(_ source1: Texture, source2: Texture, completion: @escaping (() -> Void)) func")
44 | }
45 | }
46 |
47 | public protocol CMSampleChain: OperationChain {
48 | func newBufferAvailable(_ sampleBuffer: CMSampleBuffer)
49 | }
50 |
51 | public protocol AudioOperationChain: AnyObject {
52 | var audioTargets: TargetContainer<AudioOperationChain> { get }
53 | func newAudioAvailable(_ sampleBuffer: AudioBuffer)
54 | func audioOperationFinished(_ sampleBuffer: AudioBuffer)
55 | }
56 |
57 | extension AudioOperationChain {
58 | func addAudioTarget(_ target: AudioOperationChain) {
59 | audioTargets.append(target)
60 | }
61 |
62 | public func removeAudioTarget(_ target: AudioOperationChain) {
63 | audioTargets.remove(target)
64 | }
65 |
66 | func removeAllAudioTargets() {
67 | audioTargets.removeAll()
68 | }
69 |
70 | public func audioOperationFinished(_ sampleBuffer: AudioBuffer) {
71 | for target in audioTargets {
72 | target?.newAudioAvailable(sampleBuffer)
73 | }
74 | }
75 | }
76 |
77 |
78 | infix operator --> : AdditionPrecedence
79 | infix operator ==> : AdditionPrecedence
80 | //precedencegroup ProcessingOperationPrecedence {
81 | // associativity: left
82 | //// higherThan: Multiplicative
83 | //}
84 | @discardableResult public func --> <T: OperationChain>(source: OperationChain, destination: T) -> T {
85 | source.addTarget(destination)
86 | return destination
87 | }
88 |
89 | @discardableResult public func ==> <T: AudioOperationChain>(source: AudioOperationChain, destination: T) -> T {
90 | source.addAudioTarget(destination)
91 | return destination
92 | }
93 |
94 | public class TargetContainer<T>: Sequence {
95 | var targets = [T]()
96 | var count: Int { get { return targets.count }}
97 | let dispatchQueue = DispatchQueue(label:"MetalCamera.targetContainerQueue", attributes: [])
98 |
99 | public init() {
100 | }
101 |
102 | public func append(_ target: T) {
103 | dispatchQueue.async{
104 | self.targets.append(target)
105 | }
106 | }
107 |
108 | public func remove(_ target: T) {
109 | dispatchQueue.async {
110 | self.targets.removeAll {
111 | $0 as AnyObject === target as AnyObject
112 | }
113 | }
114 | }
115 |
116 | public func makeIterator() -> AnyIterator<T?> {
117 | var index = 0
118 | 
119 | return AnyIterator { () -> T?? in
120 | return self.dispatchQueue.sync{
121 | if (index >= self.targets.count) {
122 | return nil
123 | }
124 |
125 | index += 1
126 | return self.targets[index - 1]
127 | }
128 | }
129 | }
130 |
131 | public func removeAll() {
132 | dispatchQueue.async{
133 | self.targets.removeAll()
134 | }
135 | }
136 | }
137 |
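
Sketch of the minimum a custom processing node needs: hold a TargetContainer and forward textures with operationFinished(_:); the --> operator then splices it into a chain. The `camera` and `videoView` names in the trailing comment are hypothetical stand-ins.

final class LoggingNode: OperationChain {
    let targets = TargetContainer<OperationChain>()

    func newTextureAvailable(_ texture: Texture) {
        debugPrint("Received \(texture.texture.width)x\(texture.texture.height) texture")
        operationFinished(texture)   // pass the texture through unchanged
    }
}

// camera --> LoggingNode() --> videoView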
--------------------------------------------------------------------------------
/Sources/MetalCamera/SwiftUI/VideoPreview.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VideoPreview.swift
3 | // MetalCamera
4 | //
5 | // Created by Eunchul Jeon on 2021/09/05.
6 | //
7 |
8 | import SwiftUI
9 |
10 | public struct VideoPreview: UIViewRepresentable {
11 | let prevChain: OperationChain
12 | public init(operation: OperationChain) {
13 | prevChain = operation
14 | }
15 |
16 | public func makeUIView(context: Context) -> MetalVideoView {
17 | let view = MetalVideoView()
18 | prevChain.addTarget(view)
19 | return view
20 | }
21 |
22 | public func updateUIView(_ uiView: MetalVideoView, context: Context) {
23 | // Intentionally empty: the view was already attached to the chain in makeUIView(context:); adding it again would register a duplicate target.
24 | }
25 | }
26 |
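
Sketch of embedding the preview in SwiftUI, assuming the app owns some OperationChain source (named `camera` here purely for illustration):

import SwiftUI

struct ContentView: View {
    let camera: OperationChain   // any upstream texture producer

    var body: some View {
        VideoPreview(operation: camera)
            .ignoresSafeArea()   // full-screen preview
    }
}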
--------------------------------------------------------------------------------
/Sources/MetalCamera/Texture.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Texture.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/06.
6 | //
7 |
8 | import Foundation
9 | import AVFoundation
10 |
11 | public class Texture {
12 | let texture: MTLTexture
13 | let timestamp: CMTime?
14 | let textureKey: String
15 |
16 | public init(texture: MTLTexture, timestamp: CMTime?, textureKey: String = "") {
17 | self.texture = texture
18 | self.timestamp = timestamp
19 | self.textureKey = textureKey
20 | }
21 |
22 | public init(_ width: Int, _ height: Int, pixelFormat: MTLPixelFormat = .bgra8Unorm, timestamp: CMTime?, textureKey: String = "") {
23 | let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: pixelFormat,
24 | width: width,
25 | height: height,
26 | mipmapped: false)
27 | textureDescriptor.usage = [.renderTarget, .shaderRead, .shaderWrite]
28 |
29 | guard let newTexture = sharedMetalRenderingDevice.device.makeTexture(descriptor: textureDescriptor) else {
30 | fatalError("Could not create texture of size: (\(width), \(height))")
31 | }
32 |
33 | self.texture = newTexture
34 | self.timestamp = timestamp
35 | self.textureKey = textureKey
36 | }
37 | }
38 |
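
A short sketch allocating an offscreen render target with the convenience initializer (the texture is created on the shared rendering device):

// 1280x720 BGRA texture usable as a render target and for shader read/write.
let offscreen = Texture(1280, 720, timestamp: nil, textureKey: "offscreen")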
--------------------------------------------------------------------------------
/Sources/MetalCamera/audio/AudioBuffer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioBuffer.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/08.
6 | //
7 |
8 | import Foundation
9 | import AVFoundation
10 |
11 | public class AudioBuffer {
12 | let buffer: CMSampleBuffer
13 | let key: String
14 |
15 | public init(_ buffer: CMSampleBuffer, _ key: String = "") {
16 | self.buffer = buffer
17 | self.key = key
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/audio/AudioCompositor.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioCompositor.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/08.
6 | //
7 |
8 | import Foundation
9 | import AVFoundation
10 |
11 | class AudioCompositor: AudioOperationChain {
12 | public var audioTargets = TargetContainer<AudioOperationChain>()
13 | public let mainSourceKey: String
14 | public var preservedBuffer: AudioBuffer?
15 |
16 |
17 | public init(_ mainSourceKey: String) {
18 | self.mainSourceKey = mainSourceKey
19 | }
20 |
21 | public func newAudioAvailable(_ sampleBuffer: AudioBuffer) {
22 | if sampleBuffer.key == mainSourceKey {
23 | audioOperationFinished(sampleBuffer)
24 | // playSampleBuffer(sampleBuffer)
25 | // if let preservedBuffer = preservedBuffer {
26 | // audioOperationFinished(sampleBuffer)
27 | // self.preservedBuffer = nil
28 | // } else {
29 | // audioOperationFinished(sampleBuffer)
30 | // }
31 | } else {
32 | // preservedBuffer = sampleBuffer
33 | // playSampleBuffer(sampleBuffer)
34 | }
35 | }
36 |
37 | // func playSampleBuffer(_ sampleBuffer: AudioBuffer) {
38 | // let blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer.buffer)
39 | // let blockBufferDataLength = CMBlockBufferGetDataLength(blockBuffer!)
40 | //
41 | // var blockBufferData = [UInt8](repeating: 0, count: blockBufferDataLength)
42 | // let status = CMBlockBufferCopyDataBytes(blockBuffer!, 0, blockBufferDataLength, &blockBufferData)
43 | // guard status == noErr else { return }
44 | // let data = Data(bytes: blockBufferData, count: blockBufferDataLength)
45 | //
46 | // }
47 | }
48 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/audio/AudioStreamPlayer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioStreamPlayer.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/08.
6 | //
7 |
8 | import Foundation
9 | import AVFoundation
10 | import AudioToolbox
11 |
12 |
13 | // FIXME: This class is not fully implemented yet. Don't use this or fixMe :)
14 | class AudioStreamPlayer: NSObject, AudioOperationChain {
15 | public var audioTargets = TargetContainer<AudioOperationChain>()
16 | // public var preservedBuffer: AudioBuffer?
17 | //
18 | // var outputQueue: UnsafeMutablePointer<AudioQueueRef?> = UnsafeMutablePointer<AudioQueueRef?>.allocate(capacity: 1)
19 | // var streamDescription: AudioStreamBasicDescription?
20 | //
21 | // let engine = AVAudioEngine()
22 | // let playerNode = AVAudioPlayerNode()
23 | // let audioFormat = AVAudioFormat(standardFormatWithSampleRate: 44100.0, channels: 1)
24 | //
25 | // let audioQueue = DispatchQueue(label: "MetalCamera.AudioStreamPlayer", attributes: [])
26 | // let audioSemaphore = DispatchSemaphore(value: 1)
27 | //
28 | // var isPlaying = false
29 |
30 | public override init() {
31 | // engine.attach(playerNode)
32 | // engine.connect(playerNode, to: engine.mainMixerNode, format: audioFormat)
33 | // engine.prepare()
34 | //
35 | // do {
36 | // try engine.start()
37 | // } catch {
38 | // debugPrint(error)
39 | // }
40 | }
41 |
42 | // private func createAudioQueue(audioStreamDescription: AudioStreamBasicDescription) {
43 | // var audioStreamDescription = audioStreamDescription
44 | // self.streamDescription = audioStreamDescription
45 | //
46 | // var status: OSStatus = 0
47 | // let selfPointer = unsafeBitCast(self, to: UnsafeMutableRawPointer.self)
48 | //
49 | // status = AudioQueueNewOutput(&audioStreamDescription, { (pointer, aq, bufferRef) in
50 | // print("New output")
51 | // }, selfPointer, CFRunLoopGetCurrent(), CFRunLoopMode.commonModes.rawValue, 0, self.outputQueue)
52 | //
53 | // assert(noErr == status)
54 | //
55 | // guard let audioQueueRef = outputQueue.pointee else { return }
56 | //
57 | // status = AudioQueueAddPropertyListener(audioQueueRef, kAudioQueueProperty_IsRunning, { (pointer, aq, propertyID) in
58 | // print("Add Listner")
59 | // }, selfPointer)
60 | //
61 | // assert(noErr == status)
62 | //
63 | // AudioQueuePrime(audioQueueRef, 0, nil)
64 | // AudioQueueStart(audioQueueRef, nil)
65 | // }
66 |
67 | public func newAudioAvailable(_ sampleBuffer: AudioBuffer) {
68 | audioOperationFinished(sampleBuffer)
69 | }
70 |
71 | // func playSampleBuffer(_ sampleBuffer: AudioBuffer) {
72 | // if engine.isRunning == false {
73 | // print("Engine is not runing")
74 | // engine.prepare()
75 | //
76 | // do {
77 | // try engine.start()
78 | // } catch {
79 | // debugPrint(error)
80 | // }
81 | // }
82 | //
83 | //// guard isPlaying == false else {
84 | //// return
85 | //// }
86 | ////
87 | //// isPlaying = true
88 | //
89 | //// guard let data = convert(sampleBuffer) else { return }
90 | //
91 | //// if isPlaying == false {
92 | //// isPlaying = true
93 | //
94 | //// let asbd = createAudioDescription(sampleRate: 44100.0)
95 | //// createAudioQueue(audioStreamDescription: asbd.pointee)
96 | //
97 | //
98 | //
99 | // guard let desc = CMSampleBufferGetFormatDescription(sampleBuffer.buffer) else {
100 | // debugPrint("Check SampleBufferFormatDescription")
101 | // return
102 | // }
103 | //
104 | // let numOfSamples = CMSampleBufferGetNumSamples(sampleBuffer.buffer)
105 | // let audioFormat = AVAudioFormat(cmAudioFormatDescription: desc)
106 | //
107 | // guard let pcmBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: AVAudioFrameCount(numOfSamples)) else {
108 | // debugPrint("Check convert CMSampleBuffer to AVAudioPCMBuffer")
109 | // return
110 | // }
111 | //
112 | // CMSampleBufferCopyPCMDataIntoAudioBufferList(sampleBuffer.buffer, 0, Int32(numOfSamples), pcmBuffer.mutableAudioBufferList)
113 | //
114 | // playerNode.scheduleBuffer(pcmBuffer, completionCallbackType: .dataConsumed) { (type) in
115 | // print("here!")
116 | // }
117 | //
118 | // if playerNode.isPlaying == false {
119 | // playerNode.play()
120 | // print("PlayerNode play")
121 | // }
122 | //
123 | //
124 | //
125 | //
126 | //// playerNode.scheduleBuffer(pcmBuffer, completionHandler: nil)
127 | //// playerNode.scheduleBuffer(pcmBuffer) {
128 | //// print("here!")
129 | //// }
130 | ////
131 | //// self.playerNode.play()
132 | //// self.isPlaying = false
133 | //
134 | //
135 | //
136 | //// playerNode.play()
137 | //// playerNode.scheduleBuffer(pcmBuffer) {
138 | //// print("Play Complete")
139 | //// self.isPlaying = false
140 | //// }
141 | //
142 | //// playPCMBuffer(
143 | //// } else {
144 | //// debugPrint("Error: audio is already playing back.")
145 | //// }
146 | // }
147 |
148 | func playPCMBuffer(_ pcmBuffer: AVAudioPCMBuffer) {
149 |
150 | }
151 |
152 | func scheduleBuffer(_ sampleBuffer: CMSampleBuffer) {
153 |
154 | }
155 |
156 | func convert(_ sampleBuffer: AudioBuffer) -> Data? {
157 | let blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer.buffer)
158 | let blockBufferDataLength = CMBlockBufferGetDataLength(blockBuffer!)
159 |
160 | var blockBufferData = [UInt8](repeating: 0, count: blockBufferDataLength)
161 | let status = CMBlockBufferCopyDataBytes(blockBuffer!, atOffset: 0, dataLength: blockBufferDataLength, destination: &blockBufferData)
162 | guard status == noErr else { return nil }
163 | let data = Data(bytes: blockBufferData, count: blockBufferDataLength)
164 | return data
165 | }
166 |
167 | func createAudioDescription(sampleRate: Float64) -> UnsafeMutablePointer<AudioStreamBasicDescription> {
168 | let descRef = UnsafeMutablePointer<AudioStreamBasicDescription>.allocate(capacity: 1)
169 |
170 | descRef.pointee.mSampleRate = sampleRate
171 | descRef.pointee.mFormatID = kAudioFormatLinearPCM
172 | descRef.pointee.mFormatFlags = (kAudioFormatFlagIsFloat | kAudioFormatFlagsNativeEndian | kLinearPCMFormatFlagIsPacked)
173 | descRef.pointee.mBitsPerChannel = 32
174 | descRef.pointee.mChannelsPerFrame = 1
175 | descRef.pointee.mBytesPerFrame = descRef.pointee.mChannelsPerFrame * descRef.pointee.mBitsPerChannel >> 3
176 | descRef.pointee.mFramesPerPacket = 1
177 | descRef.pointee.mBytesPerPacket = descRef.pointee.mFramesPerPacket * descRef.pointee.mBytesPerFrame
178 | descRef.pointee.mReserved = 0
179 |
180 | return descRef
181 | }
182 |
183 | // func playTestAudio() {
184 | // var audioDesc = AudioComponentDescription()
185 | // audioDesc.componentType = kAudioUnitType_Output
186 | // audioDesc.componentSubType = kAudioUnitSubType_VoiceProcessingIO
187 | // audioDesc.componentManufacturer = kAudioUnitManufacturer_Apple
188 | // audioDesc.componentFlags = 0
189 | // audioDesc.componentFlagsMask = 0
190 | //
191 | // guard let inputComponent = AudioComponentFindNext(nil, &audioDesc) else {
192 | // debugPrint("AudioInputComponent is not found. Check AudioComponentDescription")
193 | // return
194 | // }
195 | // AudioComponentInstanceNew(inputComponent, &audioUnit)
196 | //
197 | // let bufferListRef = UnsafeMutablePointer<AudioBufferList>.allocate(capacity: 1)
198 | // bufferListRef.pointee.mNumberBuffers = 1
199 | //
200 | // let kOutputBus: UInt32 = 0
201 | // let kInputBus: UInt32 = 1
202 | // var flag = 1
203 | //
204 | // var status = AudioUnitSetProperty(audioUnit!, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &flag, UInt32(MemoryLayout<UInt32>.size))
205 | //
206 | // print(status)
207 | //
208 | // status = AudioUnitSetProperty(audioUnit!, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &flag, UInt32(MemoryLayout<UInt32>.size))
209 | //
210 | // print(status)
211 | //
212 | // let channel = 1
213 | //
214 | // var format = AudioStreamBasicDescription(
215 | // mSampleRate : Double(44100),
216 | // mFormatID : kAudioFormatLinearPCM,
217 | // mFormatFlags : kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked,
218 | // mBytesPerPacket : UInt32( channel * 2 ), // 16bit
219 | // mFramesPerPacket : 1,
220 | // mBytesPerFrame : UInt32( channel * 2),
221 | // mChannelsPerFrame : UInt32( channel ),
222 | // mBitsPerChannel : UInt32( 8 * 2),
223 | // mReserved: UInt32(0))
224 | //
225 | // status = AudioUnitSetProperty(audioUnit!, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, kInputBus, &format, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
226 | //
227 | // print(status)
228 | //
229 | // status = AudioUnitSetProperty(audioUnit!, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, kOutputBus, &format, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
230 | //
231 | // print(status)
232 | //
233 | // var inputCallback = AURenderCallbackStruct()
234 | // inputCallback.inputProc = { (inRefCon, flags, timestamp, inBusNumber, inNumberFrames, ioData) -> OSStatus in
235 | // return 0
236 | // }
237 | // inputCallback.inputProcRefCon = Unmanaged.passUnretained(self).toOpaque()
238 | //
239 | // status = AudioUnitSetProperty(audioUnit!, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, kInputBus, &inputCallback, UInt32(MemoryLayout<AURenderCallbackStruct>.size))
240 | //
241 | // print(status)
242 | //
243 | // var renderCallback = AURenderCallbackStruct()
244 | // renderCallback.inputProc = { (inRefCon, flags, timestamp, inBusNumber, inNumberFrames, ioData) -> OSStatus in
245 | // print("Output Calback!!!")
246 | // var status = AudioUnitRender(audioUnit!, flags, timestamp, inBusNumber, inNumberFrames, ioData!)
247 | // print(status)
248 | // return status
249 | // }
250 | // renderCallback.inputProcRefCon = Unmanaged.passUnretained(self).toOpaque()
251 | //
252 | // status = AudioUnitSetProperty(audioUnit!, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, kOutputBus, &renderCallback, UInt32(MemoryLayout<AURenderCallbackStruct>.size))
253 | //
254 | // print(status)
255 | //
256 | // status = AudioUnitInitialize(audioUnit!)
257 | // print(status)
258 | //
259 | // AudioOutputUnitStart(audioUnit!)
260 | // }
261 |
262 | // func setupAudio() {
263 | // // 1
264 | // let bundleURL = Bundle.main.resourceURL!
265 | // let movieURL = URL(string: "bunny.mp4", relativeTo: bundleURL)!
266 | //
267 | // audioFileURL = movieURL
268 | //
269 | // // 2
270 | // engine.attach(player)
271 | // engine.connect(player, to: engine.mainMixerNode, format: audioFormat)
272 | // engine.prepare()
273 | //
274 | // do {
275 | // // 3
276 | // try engine.start()
277 | // } catch let error {
278 | // print(error.localizedDescription)
279 | // }
280 | //
281 | // guard let audioFile = audioFile else { return }
282 | //
283 | // // skipFrame = 0
284 | // player.scheduleFile(audioFile, at: nil) { [weak self] in
285 | // // self?.needsFileScheduled = true
286 | //
287 | // }
288 | //
289 | // self.player.play()
290 | //
291 | // }
292 | }
293 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/operations/AlphaBlend.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AlphaBlend.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/16.
6 | //
7 |
8 | import UIKit
9 |
10 | public let standardImageVertices: [Float] = [-1.0, 1.0, 1.0, 1.0, -1.0, -1.0, 1.0, -1.0]
11 | public let standardTextureCoordinate: [Float] = [0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0]
12 |
13 | public class AlphaBlend: TwoTextureOperationChain {
14 | public var targets = TargetContainer<OperationChain>()
15 |
16 | private var pipelineState: MTLRenderPipelineState!
17 | private let textureInputSemaphore = DispatchSemaphore(value:1)
18 |
19 | private var textureBuffer1: MTLBuffer?
20 | private var textureBuffer2: MTLBuffer?
21 |
22 | public var alphaValue: Float = 0.5
23 |
24 | public init() {
25 | setup()
26 | }
27 |
28 | private func setup() {
29 | setupPipelineState()
30 | }
31 |
32 | private func setupPipelineState(_ colorPixelFormat: MTLPixelFormat = .bgra8Unorm) {
33 | do {
34 | let rpd = try sharedMetalRenderingDevice.generateRenderPipelineDescriptor("two_vertex_render_target", "alphaBlendFragment", colorPixelFormat)
35 | pipelineState = try sharedMetalRenderingDevice.device.makeRenderPipelineState(descriptor: rpd)
36 | } catch {
37 | debugPrint(error)
38 | }
39 | }
40 |
41 | private func generateTextureBuffer(_ width: Int, _ height: Int, _ targetWidth: Int, _ targetHeight: Int) -> MTLBuffer? {
42 | let targetRatio = Float(targetWidth)/Float(targetHeight)
43 | let curRatio = Float(width)/Float(height)
44 |
45 | let coordinates: [Float]
46 |
47 | if targetRatio > curRatio {
48 | let remainHeight = (Float(height) - Float(width) * targetRatio)/2.0
49 | let remainRatio = remainHeight/Float(height)
50 | coordinates = [0.0, remainRatio, 1.0, remainRatio, 0.0, 1.0 - remainRatio, 1.0, 1.0 - remainRatio]
51 | } else {
52 | let remainWidth = (Float(width) - Float(height) * targetRatio)/2.0
53 | let remainRatio = remainWidth/Float(width)
54 | coordinates = [remainRatio, 0.0, 1.0 - remainRatio, 0.0, remainRatio, 1.0, 1.0 - remainRatio, 1.0]
55 | }
56 |
57 | let textureBuffer = sharedMetalRenderingDevice.device.makeBuffer(bytes: coordinates,
58 | length: coordinates.count * MemoryLayout<Float>.size,
59 | options: [])!
60 | return textureBuffer
61 | }
62 |
63 | public func newTextureAvailable(_ source1: Texture, _ source2: Texture, completion: @escaping ((_ texture: Texture) -> Void)) {
64 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
65 | defer {
66 | textureInputSemaphore.signal()
67 | }
68 |
69 | let minX = min(source1.texture.width, source2.texture.width)
70 | let minY = min(source1.texture.height, source2.texture.height)
71 |
72 | if textureBuffer1 == nil {
73 | textureBuffer1 = generateTextureBuffer(source1.texture.width, source1.texture.height, minX, minY)
74 | }
75 | if textureBuffer2 == nil {
76 | textureBuffer2 = generateTextureBuffer(source2.texture.width, source2.texture.height, minX, minY)
77 | }
78 |
79 | let outputTexture = Texture(minX, minY, timestamp: source1.timestamp, textureKey: source1.textureKey)
80 |
81 | let renderPassDescriptor = MTLRenderPassDescriptor()
82 | let attachment = renderPassDescriptor.colorAttachments[0]
83 | attachment?.clearColor = MTLClearColorMake(1, 0, 0, 1)
84 | attachment?.texture = outputTexture.texture
85 | attachment?.loadAction = .clear
86 | attachment?.storeAction = .store
87 |
88 | let commandBuffer = sharedMetalRenderingDevice.commandQueue.makeCommandBuffer()
89 | let commandEncoder = commandBuffer?.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
90 |
91 | commandEncoder?.setFrontFacing(.counterClockwise)
92 | commandEncoder?.setRenderPipelineState(pipelineState)
93 |
94 | let vertexBuffer = sharedMetalRenderingDevice.device.makeBuffer(bytes: standardImageVertices,
95 | length: standardImageVertices.count * MemoryLayout<Float>.size,
96 | options: [])!
97 | vertexBuffer.label = "Vertices"
98 | commandEncoder?.setVertexBuffer(vertexBuffer, offset: 0, index: 0)
99 | commandEncoder?.setVertexBuffer(textureBuffer1, offset: 0, index: 1)
100 | commandEncoder?.setVertexBuffer(textureBuffer2, offset: 0, index: 2)
101 |
102 | commandEncoder?.setFragmentTexture(source1.texture, index: 0)
103 | commandEncoder?.setFragmentTexture(source2.texture, index: 1)
104 | let uniformBuffer = sharedMetalRenderingDevice.device.makeBuffer(bytes: [alphaValue],
105 | length: 1 * MemoryLayout<Float>.size,
106 | options: [])!
107 | commandEncoder?.setFragmentBuffer(uniformBuffer, offset: 0, index: 1)
108 |
109 | commandEncoder?.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
110 | commandEncoder?.endEncoding()
111 | commandBuffer?.commit()
112 |
113 | textureInputSemaphore.signal()
114 | completion(outputTexture)
115 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
116 | }
117 |
118 | public func newTextureAvailable(_ texture: Texture) {
119 | fatalError("Should be use newTextureAvailable(_ base: Texture, overlay: Texture, completion: @escaping (() -> Void)) func")
120 | }
121 | }
122 |
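
A sketch of driving the blend directly; unlike single-input operations, the result is delivered through the completion handler. The `base`, `overlay`, and `display` values are hypothetical.

func blend(_ base: Texture, _ overlay: Texture, into display: MetalVideoView) {
    let blend = AlphaBlend()
    blend.alphaValue = 0.3   // mix weight between the two inputs (defaults to 0.5)
    blend.newTextureAvailable(base, overlay) { blended in
        display.newTextureAvailable(blended)   // hand the result to the sink
    }
}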
--------------------------------------------------------------------------------
/Sources/MetalCamera/operations/Gray.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Gray.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/06.
6 | //
7 |
8 | import UIKit
9 |
10 | public class Gray: OperationChain {
11 | public let targets = TargetContainer<OperationChain>()
12 |
13 | private var pipelineState: MTLRenderPipelineState!
14 | private var render_target_vertex: MTLBuffer!
15 | private var render_target_uniform: MTLBuffer!
16 |
17 | private let textureInputSemaphore = DispatchSemaphore(value:1)
18 |
19 | public init() {
20 | setup()
21 | }
22 |
23 | private func setup() {
24 | setupPipelineState()
25 | }
26 |
27 | private func loadRenderTargetVertex(_ baseTextureSize: CGSize) {
28 | render_target_vertex = sharedMetalRenderingDevice.makeRenderVertexBuffer(size: baseTextureSize)
29 | render_target_uniform = sharedMetalRenderingDevice.makeRenderUniformBuffer(baseTextureSize)
30 | }
31 |
32 | private func setupPipelineState(_ colorPixelFormat: MTLPixelFormat = .bgra8Unorm) {
33 | do {
34 | let rpd = try sharedMetalRenderingDevice.generateRenderPipelineDescriptor("vertex_render_target", "gray_fragment_render_target", colorPixelFormat)
35 | pipelineState = try sharedMetalRenderingDevice.device.makeRenderPipelineState(descriptor: rpd)
36 | } catch {
37 | debugPrint(error)
38 | }
39 | }
40 |
41 | public func newTextureAvailable(_ texture: Texture) {
42 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
43 | defer {
44 | textureInputSemaphore.signal()
45 | }
46 |
47 | if render_target_vertex == nil {
48 | let baseTextureSize = CGSize(width: texture.texture.width, height: texture.texture.height)
49 | loadRenderTargetVertex(baseTextureSize)
50 | }
51 |
52 | let outputTexture = Texture(Int(texture.texture.width), Int(texture.texture.height), timestamp: texture.timestamp, textureKey: texture.textureKey)
53 |
54 | let renderPassDescriptor = MTLRenderPassDescriptor()
55 | let attachment = renderPassDescriptor.colorAttachments[0]
56 | attachment?.clearColor = MTLClearColorMake(1, 0, 0, 1)
57 | attachment?.texture = outputTexture.texture
58 | attachment?.loadAction = .clear
59 | attachment?.storeAction = .store
60 |
61 | let commandBuffer = sharedMetalRenderingDevice.commandQueue.makeCommandBuffer()
62 | let commandEncoder = commandBuffer?.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
63 |
64 | commandEncoder?.setRenderPipelineState(pipelineState)
65 |
66 | commandEncoder?.setVertexBuffer(render_target_vertex, offset: 0, index: 0)
67 | commandEncoder?.setVertexBuffer(render_target_uniform, offset: 0, index: 1)
68 | commandEncoder?.setFragmentTexture(texture.texture, index: 0)
69 | commandEncoder?.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
70 |
71 | commandEncoder?.endEncoding()
72 | commandBuffer?.commit()
73 |
74 | textureInputSemaphore.signal()
75 | operationFinished(outputTexture)
76 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
77 | }
78 | }
79 |
80 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/operations/ImageCompositor.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageCompositor.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/06.
6 | //
7 |
8 | import Foundation
9 | import MetalKit
10 |
11 | public class ImageCompositor: OperationChain {
12 | public let targets = TargetContainer<OperationChain>()
13 |
14 | public var sourceTextureKey: String = ""
15 | public var sourceFrame: CGRect?
16 |
17 | private let baseTextureKey: String
18 | private var sourceTexture: MTLTexture?
19 |
20 | private var pipelineState: MTLRenderPipelineState!
21 | private var render_target_vertex: MTLBuffer!
22 | private var render_target_uniform: MTLBuffer!
23 |
24 | private let textureInputSemaphore = DispatchSemaphore(value:1)
25 |
26 | public init(baseTextureKey: String) {
27 | self.baseTextureKey = baseTextureKey
28 | setup()
29 | }
30 |
31 | private func setup() {
32 | setupPipelineState()
33 | }
34 |
35 | private func setupPipelineState(_ colorPixelFormat: MTLPixelFormat = .bgra8Unorm) {
36 | do {
37 | let rpd = try sharedMetalRenderingDevice.generateRenderPipelineDescriptor("vertex_render_target", "fragment_render_target", colorPixelFormat)
38 | pipelineState = try sharedMetalRenderingDevice.device.makeRenderPipelineState(descriptor: rpd)
39 | } catch {
40 | debugPrint(error)
41 | }
42 | }
43 |
44 | public func addCompositeImage(_ image: UIImage) {
45 | sourceTexture = image.loadTexture(device: sharedMetalRenderingDevice.device)
46 | }
47 |
48 | public func newTextureAvailable(_ texture: Texture) {
49 | if texture.textureKey == self.baseTextureKey {
50 | baseTextureAvailable(texture)
51 | } else if texture.textureKey == self.sourceTextureKey {
52 | sourceTexture = texture.texture
53 | }
54 | }
55 |
56 | private func loadRenderTargetVertex(_ baseTextureSize: CGSize) {
57 | guard let sourceFrame = sourceFrame else { return }
58 | render_target_vertex = sharedMetalRenderingDevice.makeRenderVertexBuffer(sourceFrame.origin, size: sourceFrame.size)
59 | render_target_uniform = sharedMetalRenderingDevice.makeRenderUniformBuffer(baseTextureSize)
60 | }
61 |
62 | private func baseTextureAvailable(_ texture: Texture) {
63 | guard let sourceTexture = sourceTexture else {
64 | // Bypass received texture if there is no source texture.
65 | operationFinished(texture)
66 | return
67 | }
68 |
69 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
70 | defer {
71 | textureInputSemaphore.signal()
72 | }
73 |
74 | if render_target_vertex == nil {
75 | let baseTextureSize = CGSize(width: texture.texture.width, height: texture.texture.height)
76 | loadRenderTargetVertex(baseTextureSize)
77 | }
78 |
79 | let renderPassDescriptor = MTLRenderPassDescriptor()
80 | let attachment = renderPassDescriptor.colorAttachments[0]
81 | attachment?.texture = texture.texture
82 | attachment?.loadAction = .load
83 | attachment?.storeAction = .store
84 |
85 | let commandBuffer = sharedMetalRenderingDevice.commandQueue.makeCommandBuffer()
86 | let commandEncoder = commandBuffer?.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
87 |
88 | commandEncoder?.setRenderPipelineState(pipelineState)
89 |
90 | commandEncoder?.setVertexBuffer(render_target_vertex, offset: 0, index: 0)
91 | commandEncoder?.setVertexBuffer(render_target_uniform, offset: 0, index: 1)
92 | commandEncoder?.setFragmentTexture(sourceTexture, index: 0)
93 | commandEncoder?.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
94 |
95 | commandEncoder?.endEncoding()
96 | commandBuffer?.commit()
97 |
98 | textureInputSemaphore.signal()
99 | operationFinished(texture)
100 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
101 |
102 | }
103 | }
104 |
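
Usage sketch overlaying a static image on a stream, assuming the upstream source tags its textures with the key "camera"; the image name is a hypothetical asset.

import UIKit

let compositor = ImageCompositor(baseTextureKey: "camera")
compositor.sourceFrame = CGRect(x: 20, y: 20, width: 200, height: 80)
if let watermark = UIImage(named: "watermark") {   // hypothetical asset
    compositor.addCompositeImage(watermark)
}
// camera --> compositor --> videoView   (hypothetical chain)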
--------------------------------------------------------------------------------
/Sources/MetalCamera/operations/Kernel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Kernel.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/07/21.
6 | //
7 |
8 | import Foundation
9 | import MetalPerformanceShaders
10 |
11 | public class Kernel: OperationChain {
12 | public let targets = TargetContainer<OperationChain>()
13 | private let textureInputSemaphore = DispatchSemaphore(value:1)
14 | private let kernel: MPSUnaryImageKernel
15 |
16 | public init(_ kernel: MPSUnaryImageKernel) {
17 | self.kernel = kernel
18 | }
19 |
20 | public func newTextureAvailable(_ texture: Texture) {
21 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
22 | defer {
23 | textureInputSemaphore.signal()
24 | }
25 |
26 | let outputTexture = Texture(Int(texture.texture.width), Int(texture.texture.height), timestamp: texture.timestamp, textureKey: texture.textureKey)
27 |
28 | if let commandBuffer = sharedMetalRenderingDevice.commandQueue.makeCommandBuffer() {
29 | kernel.encode(commandBuffer: commandBuffer, sourceTexture: texture.texture, destinationTexture: outputTexture.texture)
30 | commandBuffer.commit()
31 | commandBuffer.waitUntilCompleted()
32 | }
33 |
34 | textureInputSemaphore.signal()
35 | operationFinished(outputTexture)
36 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
37 | }
38 | }
39 |
40 |
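
A sketch wrapping a Metal Performance Shaders kernel as a chain node; `camera` and `videoView` are hypothetical stand-ins for a source and a sink.

import MetalPerformanceShaders

let blur = MPSImageGaussianBlur(device: sharedMetalRenderingDevice.device, sigma: 3.0)
let blurNode = Kernel(blur)   // runs the MPS kernel on every incoming texture
// camera --> blurNode --> videoView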
--------------------------------------------------------------------------------
/Sources/MetalCamera/operations/Lookup.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Lookup.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/21.
6 | //
7 |
8 | import Foundation
9 | import MetalKit
10 |
11 | public class Lookup: OperationChain {
12 | public let targets = TargetContainer<OperationChain>()
13 |
14 | private var pipelineState: MTLRenderPipelineState!
15 | private var render_target_vertex: MTLBuffer!
16 | private var render_target_uniform: MTLBuffer!
17 | private let textureInputSemaphore = DispatchSemaphore(value:1)
18 |
19 | private var lookupTexture: MTLTexture?
20 | private var textureCoordinate: MTLBuffer?
21 |
22 | public var intensity: Float = 0.5
23 |
24 | public init(_ lookupImage: CGImage) {
25 | setup()
26 | loadLookupTexture(lookupImage)
27 | }
28 |
29 | private func setup() {
30 | setupPipelineState()
31 |
32 | textureCoordinate = sharedMetalRenderingDevice.device.makeBuffer(bytes: standardTextureCoordinate,
33 | length: standardTextureCoordinate.count * MemoryLayout<Float>.size,
34 | options: [])!
35 | }
36 |
37 | private func loadLookupTexture(_ lookupImage: CGImage) {
38 | let loader = MTKTextureLoader(device: sharedMetalRenderingDevice.device)
39 | loader.newTexture(cgImage: lookupImage, options: [MTKTextureLoader.Option.SRGB: false]) { (texture, error) in
40 | if let error = error {
41 | debugPrint(error)
42 | } else {
43 | self.lookupTexture = texture
44 | }
45 | }
46 | }
47 |
48 | private func loadRenderTargetVertex(_ baseTextureSize: CGSize) {
49 | render_target_vertex = sharedMetalRenderingDevice.makeRenderVertexBuffer(size: baseTextureSize)
50 | render_target_uniform = sharedMetalRenderingDevice.makeRenderUniformBuffer(baseTextureSize)
51 | }
52 |
53 | private func setupPipelineState(_ colorPixelFormat: MTLPixelFormat = .bgra8Unorm) {
54 | do {
55 | let rpd = try sharedMetalRenderingDevice.generateRenderPipelineDescriptor("two_vertex_render_target", "lookupFragment", colorPixelFormat)
56 | pipelineState = try sharedMetalRenderingDevice.device.makeRenderPipelineState(descriptor: rpd)
57 | } catch {
58 | debugPrint(error)
59 | }
60 | }
61 |
62 | public func newTextureAvailable(_ texture: Texture) {
63 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
64 | defer {
65 | textureInputSemaphore.signal()
66 | }
67 |
68 | if render_target_vertex == nil {
69 | let baseTextureSize = CGSize(width: texture.texture.width, height: texture.texture.height)
70 | loadRenderTargetVertex(baseTextureSize)
71 | }
72 |
73 | let outputTexture = Texture(Int(texture.texture.width), Int(texture.texture.height), timestamp: texture.timestamp, textureKey: texture.textureKey)
74 |
75 | let renderPassDescriptor = MTLRenderPassDescriptor()
76 | let attachment = renderPassDescriptor.colorAttachments[0]
77 | attachment?.clearColor = MTLClearColorMake(1, 0, 0, 1)
78 | attachment?.texture = outputTexture.texture
79 | attachment?.loadAction = .clear
80 | attachment?.storeAction = .store
81 |
82 | let commandBuffer = sharedMetalRenderingDevice.commandQueue.makeCommandBuffer()
83 | let commandEncoder = commandBuffer?.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
84 |
85 | commandEncoder?.setRenderPipelineState(pipelineState)
86 |
87 | let vertexBuffer = sharedMetalRenderingDevice.device.makeBuffer(bytes: standardImageVertices,
88 | length: standardImageVertices.count * MemoryLayout<Float>.size,
89 | options: [])!
90 | vertexBuffer.label = "Vertices"
91 | commandEncoder?.setVertexBuffer(vertexBuffer, offset: 0, index: 0)
92 | commandEncoder?.setVertexBuffer(textureCoordinate, offset: 0, index: 1)
93 | commandEncoder?.setVertexBuffer(textureCoordinate, offset: 0, index: 2)
94 |
95 | commandEncoder?.setFragmentTexture(texture.texture, index: 0)
96 | commandEncoder?.setFragmentTexture(lookupTexture, index: 1)
97 |
98 |
99 | let uniformBuffer = sharedMetalRenderingDevice.device.makeBuffer(bytes: [intensity],
100 | length: 1 * MemoryLayout<Float>.size,
101 | options: [])!
102 | commandEncoder?.setFragmentBuffer(uniformBuffer, offset: 0, index: 1)
103 |
104 | commandEncoder?.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
105 |
106 | commandEncoder?.endEncoding()
107 | commandBuffer?.commit()
108 |
109 | textureInputSemaphore.signal()
110 | operationFinished(outputTexture)
111 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
112 | }
113 | }
114 |
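
Usage sketch applying a lookup-table image, such as the lookup_amatorka LUT bundled with the example app:

import UIKit

if let lut = UIImage(named: "lookup_amatorka")?.cgImage {
    let lookup = Lookup(lut)
    lookup.intensity = 0.8   // LUT blend strength (defaults to 0.5)
    // camera --> lookup --> videoView   (hypothetical chain)
}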
--------------------------------------------------------------------------------
/Sources/MetalCamera/operations/Mask.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Mask.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/29.
6 | //
7 |
8 | import UIKit
9 |
10 | public class Mask: TwoTextureOperationChain {
11 | public var targets = TargetContainer<OperationChain>()
12 |
13 | private var pipelineState: MTLRenderPipelineState!
14 | private let textureInputSemaphore = DispatchSemaphore(value:1)
15 |
16 | private var textureBuffer1: MTLBuffer?
17 | private var textureBuffer2: MTLBuffer?
18 |
19 | public init() {
20 | setup()
21 | }
22 |
23 | private func setup() {
24 | setupPipelineState()
25 | }
26 |
27 | private func setupPipelineState(_ colorPixelFormat: MTLPixelFormat = .bgra8Unorm) {
28 | do {
29 | let rpd = try sharedMetalRenderingDevice.generateRenderPipelineDescriptor("two_vertex_render_target", "maskFragment", colorPixelFormat)
30 | pipelineState = try sharedMetalRenderingDevice.device.makeRenderPipelineState(descriptor: rpd)
31 | } catch {
32 | debugPrint(error)
33 | }
34 | }
35 |
36 | private func generateTextureBuffer(_ width: Int, _ height: Int, _ targetWidth: Int, _ targetHeight: Int) -> MTLBuffer? {
37 | let targetRatio = Float(targetWidth)/Float(targetHeight)
38 | let curRatio = Float(width)/Float(height)
39 |
40 | let coordinates: [Float]
41 |
42 | if targetRatio > curRatio {
43 | let remainHeight = (Float(height) - Float(width) * targetRatio)/2.0
44 | let remainRatio = remainHeight/Float(height)
45 | coordinates = [0.0, remainRatio, 1.0, remainRatio, 0.0, 1.0 - remainRatio, 1.0, 1.0 - remainRatio]
46 | } else {
47 | let remainWidth = (Float(width) - Float(height) * targetRatio)/2.0
48 | let remainRatio = remainWidth/Float(width)
49 | coordinates = [remainRatio, 0.0, 1.0 - remainRatio, 0.0, remainRatio, 1.0, 1.0 - remainRatio, 1.0]
50 | }
51 |
52 | let textureBuffer = sharedMetalRenderingDevice.device.makeBuffer(bytes: coordinates,
53 | length: coordinates.count * MemoryLayout<Float>.size,
54 | options: [])!
55 | return textureBuffer
56 | }
57 |
58 | public func newTextureAvailable(_ source1: Texture, _ source2: Texture, completion: @escaping ((Texture) -> Void)) {
59 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
60 | defer {
61 | textureInputSemaphore.signal()
62 | }
63 |
64 | let minX = min(source1.texture.width, source2.texture.width)
65 | let minY = min(source1.texture.height, source2.texture.height)
66 |
67 | if textureBuffer1 == nil {
68 | textureBuffer1 = generateTextureBuffer(source1.texture.width, source1.texture.height, minX, minY)
69 | }
70 | if textureBuffer2 == nil {
71 | textureBuffer2 = generateTextureBuffer(source2.texture.width, source2.texture.height, minX, minY)
72 | }
73 |
74 | let outputTexture = Texture(minX, minY, timestamp: source1.timestamp, textureKey: source1.textureKey)
75 |
76 | let renderPassDescriptor = MTLRenderPassDescriptor()
77 | let attachment = renderPassDescriptor.colorAttachments[0]
78 | attachment?.clearColor = MTLClearColorMake(1, 0, 0, 1)
79 | attachment?.texture = outputTexture.texture
80 | attachment?.loadAction = .clear
81 | attachment?.storeAction = .store
82 |
83 | let commandBuffer = sharedMetalRenderingDevice.commandQueue.makeCommandBuffer()
84 | let commandEncoder = commandBuffer?.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
85 |
86 | commandEncoder?.setFrontFacing(.counterClockwise)
87 | commandEncoder?.setRenderPipelineState(pipelineState)
88 |
89 | let vertexBuffer = sharedMetalRenderingDevice.device.makeBuffer(bytes: standardImageVertices,
90 | length: standardImageVertices.count * MemoryLayout<Float>.size,
91 | options: [])!
92 | vertexBuffer.label = "Vertices"
93 | commandEncoder?.setVertexBuffer(vertexBuffer, offset: 0, index: 0)
94 | commandEncoder?.setVertexBuffer(textureBuffer1, offset: 0, index: 1)
95 | commandEncoder?.setVertexBuffer(textureBuffer2, offset: 0, index: 2)
96 |
97 | commandEncoder?.setFragmentTexture(source1.texture, index: 0)
98 | commandEncoder?.setFragmentTexture(source2.texture, index: 1)
99 |
100 | commandEncoder?.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
101 | commandEncoder?.endEncoding()
102 | commandBuffer?.commit()
103 |
104 | textureInputSemaphore.signal()
105 | completion(outputTexture)
106 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
107 | }
108 |
109 | public func newTextureAvailable(_ texture: Texture) {
110 | fatalError("Should be use newTextureAvailable(_ base: Texture, overlay: Texture, completion: @escaping (() -> Void)) func")
111 | }
112 | }
113 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/operations/MetalKernel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MetalKernel.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/07/21.
6 | //
7 |
8 | import UIKit
9 | import MetalPerformanceShaders
10 |
11 | public class MetalKernel: OperationChain {
12 | public let targets = TargetContainer<OperationChain>()
13 | private let textureInputSemaphore = DispatchSemaphore(value:1)
14 | private let kernel: MPSUnaryImageKernel
15 |
16 | public init(_ kernel: MPSUnaryImageKernel) {
17 | self.kernel = kernel
18 | }
19 |
20 | public func newTextureAvailable(_ texture: Texture) {
21 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
22 | defer {
23 | textureInputSemaphore.signal()
24 | }
25 |
26 | let outputTexture = Texture(Int(texture.texture.width), Int(texture.texture.height), timestamp: texture.timestamp, textureKey: texture.textureKey)
27 |
28 | if let commandBuffer = sharedMetalRenderingDevice.commandQueue.makeCommandBuffer() {
29 | kernel.encode(commandBuffer: commandBuffer, sourceTexture: texture.texture, destinationTexture: outputTexture.texture)
30 | commandBuffer.commit()
31 | commandBuffer.waitUntilCompleted()
32 | }
33 |
34 | textureInputSemaphore.signal()
35 | operationFinished(outputTexture)
36 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
37 | }
38 | }
39 |
40 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/operations/Rotation.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Rotation.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/06.
6 | //
7 |
8 | import UIKit
9 |
10 | public enum Rotation {
11 | case degree90
12 | case degree90_flip
13 | case degree180
14 | case degree270
15 |
16 | func generateVertices(_ size: CGSize) -> [Vertex] {
17 | let vertices: [Vertex]
18 | let w = size.width
19 | let h = size.height
20 |
21 | switch self {
22 | case .degree90:
23 | vertices = [
24 | Vertex(position: CGPoint(x: 0 , y: 0), textCoord: CGPoint(x: 0, y: 1)),
25 | Vertex(position: CGPoint(x: w , y: 0), textCoord: CGPoint(x: 0, y: 0)),
26 | Vertex(position: CGPoint(x: 0 , y: h), textCoord: CGPoint(x: 1, y: 1)),
27 | Vertex(position: CGPoint(x: w , y: h), textCoord: CGPoint(x: 1, y: 0)),
28 | ]
29 | case .degree90_flip:
30 | vertices = [
31 | Vertex(position: CGPoint(x: 0 , y: 0), textCoord: CGPoint(x: 0, y: 0)),
32 | Vertex(position: CGPoint(x: w , y: 0), textCoord: CGPoint(x: 0, y: 1)),
33 | Vertex(position: CGPoint(x: 0 , y: h), textCoord: CGPoint(x: 1, y: 0)),
34 | Vertex(position: CGPoint(x: w , y: h), textCoord: CGPoint(x: 1, y: 1)),
35 | ]
36 | case .degree180:
37 | vertices = [
38 | Vertex(position: CGPoint(x: 0 , y: 0), textCoord: CGPoint(x: 1, y: 1)),
39 | Vertex(position: CGPoint(x: w , y: 0), textCoord: CGPoint(x: 0, y: 1)),
40 | Vertex(position: CGPoint(x: 0 , y: h), textCoord: CGPoint(x: 1, y: 0)),
41 | Vertex(position: CGPoint(x: w , y: h), textCoord: CGPoint(x: 0, y: 0)),
42 | ]
43 | case .degree270:
44 | vertices = [
45 | Vertex(position: CGPoint(x: 0 , y: 0), textCoord: CGPoint(x: 1, y: 0)),
46 | Vertex(position: CGPoint(x: w , y: 0), textCoord: CGPoint(x: 1, y: 1)),
47 | Vertex(position: CGPoint(x: 0 , y: h), textCoord: CGPoint(x: 0, y: 0)),
48 | Vertex(position: CGPoint(x: w , y: h), textCoord: CGPoint(x: 0, y: 1)),
49 | ]
50 | }
51 |
52 | return vertices
53 | }
54 | }
55 |
56 | public class RotationOperation: OperationChain {
57 | public let targets = TargetContainer()
58 |
59 | private let rotation: Rotation
60 | private let size: CGSize
61 |
62 | private var pipelineState: MTLRenderPipelineState!
63 | private var render_target_vertex: MTLBuffer!
64 | private var render_target_uniform: MTLBuffer!
65 |
66 | private let textureInputSemaphore = DispatchSemaphore(value:1)
67 |
68 | public init(_ rotation: Rotation, _ size: CGSize = CGSize(width: 720, height: 1280)) {
69 | self.rotation = rotation
70 | self.size = size
71 | setup()
72 | }
73 |
74 | private func setup() {
75 | setupTargetUniforms()
76 | setupPipelineState()
77 | }
78 |
79 | private func setupTargetUniforms() {
80 | render_target_vertex = sharedMetalRenderingDevice.makeRenderVertexBuffer(rotation.generateVertices(size))
81 | render_target_uniform = sharedMetalRenderingDevice.makeRenderUniformBuffer(size)
82 | }
83 |
84 | // FIXME: Needs refactoring. Several operations in this library duplicate this pipeline setup.
85 | private func setupPipelineState(_ colorPixelFormat: MTLPixelFormat = .bgra8Unorm) {
86 | do {
87 | let rpd = try sharedMetalRenderingDevice.generateRenderPipelineDescriptor("vertex_render_target", "fragment_render_target", colorPixelFormat)
88 | pipelineState = try sharedMetalRenderingDevice.device.makeRenderPipelineState(descriptor: rpd)
89 | } catch {
90 | debugPrint(error)
91 | }
92 | }
93 |
94 | public func newTextureAvailable(_ texture: Texture) {
95 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
96 | defer {
97 | textureInputSemaphore.signal()
98 | }
99 |
100 | let outputTexture = Texture(Int(size.width), Int(size.height), timestamp: texture.timestamp, textureKey: texture.textureKey)
101 |
102 | let renderPassDescriptor = MTLRenderPassDescriptor()
103 | let attachment = renderPassDescriptor.colorAttachments[0]
104 | attachment?.clearColor = MTLClearColorMake(1, 0, 0, 1)
105 | attachment?.texture = outputTexture.texture
106 | attachment?.loadAction = .clear
107 | attachment?.storeAction = .store
108 |
109 | let commandBuffer = sharedMetalRenderingDevice.commandQueue.makeCommandBuffer()
110 | let commandEncoder = commandBuffer?.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
111 |
112 | commandEncoder?.setRenderPipelineState(pipelineState)
113 |
114 | commandEncoder?.setVertexBuffer(render_target_vertex, offset: 0, index: 0)
115 | commandEncoder?.setVertexBuffer(render_target_uniform, offset: 0, index: 1)
116 | commandEncoder?.setFragmentTexture(texture.texture, index: 0)
117 | commandEncoder?.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
118 |
119 | commandEncoder?.endEncoding()
120 | commandBuffer?.commit()
121 |
122 | textureInputSemaphore.signal()
123 | operationFinished(outputTexture)
124 | let _ = textureInputSemaphore.wait(timeout:DispatchTime.distantFuture)
125 | }
126 | }
127 |
128 |
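A short sketch of RotationOperation in isolation; frameTexture is again a placeholder, and in the example app the operation would normally sit between the camera and a preview:

    // Rotate incoming frames by 90 degrees into a 720x1280 render target.
    let rotation = RotationOperation(.degree90, CGSize(width: 720, height: 1280))

    // Targets registered in rotation.targets receive the rotated texture
    // after operationFinished(_:) runs.
    rotation.newTextureAvailable(frameTexture)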
--------------------------------------------------------------------------------
/Sources/MetalCamera/shader/Shaders.metal:
--------------------------------------------------------------------------------
1 | #include <metal_stdlib>
2 | using namespace metal;
3 |
4 | struct Vertex {
5 | float4 position [[position]];
6 | float2 text_coord;
7 | };
8 |
9 | struct TwoInputVertex
10 | {
11 | float4 position [[position]];
12 | float2 textureCoordinate [[user(texturecoord)]];
13 | float2 textureCoordinate2 [[user(texturecoord2)]];
14 | };
15 |
16 | struct Uniforms {
17 | float4x4 scaleMatrix;
18 | };
19 |
20 | vertex Vertex vertex_render_target(constant Vertex *vertexes [[ buffer(0) ]],
21 | constant Uniforms &uniforms [[ buffer(1) ]],
22 | uint vid [[vertex_id]])
23 | {
24 | Vertex out = vertexes[vid];
25 | out.position = uniforms.scaleMatrix * out.position;
26 | return out;
27 | };
28 |
29 | vertex TwoInputVertex two_vertex_render_target(const device packed_float2 *position [[buffer(0)]],
30 | const device packed_float2 *texturecoord [[buffer(1)]],
31 | const device packed_float2 *texturecoord2 [[buffer(2)]],
32 | uint vid [[vertex_id]]) {
33 | TwoInputVertex outputVertices;
34 | outputVertices.position = float4(position[vid], 0, 1.0);
35 | outputVertices.textureCoordinate = texturecoord[vid];
36 | outputVertices.textureCoordinate2 = texturecoord2[vid];
37 | return outputVertices;
38 | };
39 |
40 |
41 | fragment float4 fragment_render_target(Vertex vertex_data [[ stage_in ]],
42 | texture2d<float> tex2d [[ texture(0) ]])
43 | {
44 | constexpr sampler textureSampler(mag_filter::linear, min_filter::linear);
45 | float4 color = float4(tex2d.sample(textureSampler, vertex_data.text_coord));
46 | return color;
47 | };
48 |
49 | fragment float4 gray_fragment_render_target(Vertex vertex_data [[ stage_in ]],
50 | texture2d<float> tex2d [[ texture(0) ]])
51 | {
52 | constexpr sampler textureSampler(mag_filter::linear, min_filter::linear);
53 | float4 color = float4(tex2d.sample(textureSampler, vertex_data.text_coord));
54 | float gray = (color[0] + color[1] + color[2])/3;
55 | return float4(gray, gray, gray, 1.0);
56 | };
57 |
58 | typedef struct
59 | {
60 | float mixturePercent;
61 | } AlphaBlendUniform;
62 |
63 | fragment half4 alphaBlendFragment(TwoInputVertex fragmentInput [[stage_in]],
64 | texture2d<half> inputTexture [[texture(0)]],
65 | texture2d<half> inputTexture2 [[texture(1)]],
66 | constant AlphaBlendUniform& uniform [[ buffer(1) ]])
67 | {
68 | constexpr sampler quadSampler;
69 | half4 textureColor = inputTexture.sample(quadSampler, fragmentInput.textureCoordinate);
70 | constexpr sampler quadSampler2;
71 | half4 textureColor2 = inputTexture2.sample(quadSampler2, fragmentInput.textureCoordinate2);
72 |
73 | return half4(mix(textureColor.rgb, textureColor2.rgb, textureColor2.a * half(uniform.mixturePercent)), textureColor.a);
74 | }
75 |
76 | fragment half4 maskFragment(TwoInputVertex fragmentInput [[stage_in]],
77 | texture2d<half> inputTexture [[texture(0)]],
78 | texture2d<half> inputTexture2 [[texture(1)]])
79 | {
80 | constexpr sampler quadSampler;
81 | half4 textureColor = inputTexture.sample(quadSampler, fragmentInput.textureCoordinate);
82 | constexpr sampler quadSampler2;
83 | half4 textureColor2 = inputTexture2.sample(quadSampler2, fragmentInput.textureCoordinate2);
84 |
85 | if(textureColor2.r + textureColor2.g + textureColor2.b > 0) {
86 | return textureColor;
87 | } else {
88 | return half4(0, 0, 0, 0);
89 | }
90 | }
91 |
92 | typedef struct
93 | {
94 | int32_t classNum;
95 | } SegmentationValue;
96 |
97 | typedef struct
98 | {
99 | int32_t targetClass;
100 | int32_t width;
101 | int32_t height;
102 | } SegmentationUniform;
103 |
104 | fragment float4 segmentation_render_target(Vertex vertex_data [[ stage_in ]],
105 | constant SegmentationValue *segmentation [[ buffer(0) ]],
106 | constant SegmentationUniform& uniform [[ buffer(1) ]])
107 |
108 | {
109 | int index = int(vertex_data.position.x) + int(vertex_data.position.y) * uniform.width;
110 | if(segmentation[index].classNum == uniform.targetClass) {
111 | return float4(1.0, 0, 0, 1.0);
112 | }
113 |
114 | return float4(0,0,0,1.0);
115 | };
116 |
117 | typedef struct
118 | {
119 | float value;
120 | } PoseValue;
121 |
122 | typedef struct
123 | {
124 | int32_t classNum;
125 | int32_t width;
126 | int32_t height;
127 | } PoseUniform;
128 |
129 | fragment half4 posenet_render_target(Vertex vertex_data [[ stage_in ]],
130 | texture2d<half> inputTexture [[texture(0)]],
131 | constant PoseValue *pose [[ buffer(0) ]],
132 | constant PoseUniform& uniform [[ buffer(1) ]])
133 |
134 | {
135 | // int index = int(vertex_data.position.x) + int(vertex_data.position.y) * uniform.width;
136 | // if(segmentation[index].classNum == uniform.targetClass) {
137 | // return float4(1.0, 0, 0, 1.0);
138 | // }
139 |
140 | constexpr sampler quadSampler;
141 | half4 base = inputTexture.sample(quadSampler, vertex_data.text_coord);
142 |
143 | if(vertex_data.position.x < uniform.width * 4 &&
144 | vertex_data.position.y < uniform.height * 4) {
145 |
146 | if(int(vertex_data.position.x) % 4 == 0 &&
147 | int(vertex_data.position.y) % 4 == 0) {
148 |
149 | int col = int(vertex_data.position.x) / 4;
150 | int row = int(vertex_data.position.y) / 4;
151 |
152 | for(int i = 0 ; i < uniform.classNum; i++) {
153 | int index = uniform.width * uniform.height * i + row * uniform.width + col;
154 | if(pose[index].value > 0.9) {
155 | return half4(1.0, 0.0, 0.0, 1.0);
156 | }
157 | }
158 |
159 | return half4(0.0, 0.0, 1.0, 1.0);
160 | }
161 | }
162 |
163 | return base;
164 | };
165 |
166 |
167 | fragment half4 lookupFragment(TwoInputVertex fragmentInput [[stage_in]],
168 | texture2d<half> inputTexture [[texture(0)]],
169 | texture2d<half> inputTexture2 [[texture(1)]],
170 | constant float& intensity [[ buffer(1) ]])
171 | {
172 | constexpr sampler quadSampler;
173 | half4 base = inputTexture.sample(quadSampler, fragmentInput.textureCoordinate);
174 |
175 | half blueColor = base.b * 63.0h;
176 |
177 | half2 quad1;
178 | quad1.y = floor(floor(blueColor) / 8.0h);
179 | quad1.x = floor(blueColor) - (quad1.y * 8.0h);
180 |
181 | half2 quad2;
182 | quad2.y = floor(ceil(blueColor) / 8.0h);
183 | quad2.x = ceil(blueColor) - (quad2.y * 8.0h);
184 |
185 | float2 texPos1;
186 | texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * base.r);
187 | texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * base.g);
188 |
189 | float2 texPos2;
190 | texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * base.r);
191 | texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * base.g);
192 |
193 | constexpr sampler quadSampler3;
194 | half4 newColor1 = inputTexture2.sample(quadSampler3, texPos1);
195 | constexpr sampler quadSampler4;
196 | half4 newColor2 = inputTexture2.sample(quadSampler4, texPos2);
197 |
198 | half4 newColor = mix(newColor1, newColor2, fract(blueColor));
199 |
200 | return half4(mix(base, half4(newColor.rgb, base.w), half(intensity)));
201 | }
202 |
203 | kernel void add_arrays(device const float* inA,
204 | device const float* inB,
205 | device float* result,
206 | uint index [[thread_position_in_grid]])
207 | {
208 | // the for-loop is replaced with a collection of threads, each of which
209 | // calls this function.
210 | result[index] = inA[index] + inB[index];
211 | }
212 |
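Two notes on the shaders above. lookupFragment implements the classic 512x512 lookup-table scheme: the LUT image packs the 64 blue slices of a 64x64x64 RGB cube into an 8x8 grid, blueColor (0..63) selects the two neighboring slices, the quad/texPos math (with the half-texel 0.5/512 inset) locates the red/green coordinate inside each 64-pixel slice, and fract(blueColor) interpolates between the two samples before intensity blends against the original pixel.

add_arrays is a plain data-parallel compute kernel: one thread sums one element pair. A host-side dispatch sketch in Swift, assuming the shader is compiled into the device's default library and the inputs have equal length padded to a multiple of the threadgroup width (otherwise the last threadgroup would index past the buffers):

    import Metal

    func addArrays(_ a: [Float], _ b: [Float]) -> [Float]? {
        guard let device = MTLCreateSystemDefaultDevice(),
              let library = device.makeDefaultLibrary(),
              let function = library.makeFunction(name: "add_arrays"),
              let pipeline = try? device.makeComputePipelineState(function: function),
              let queue = device.makeCommandQueue(),
              let bufA = device.makeBuffer(bytes: a, length: a.count * MemoryLayout<Float>.stride, options: []),
              let bufB = device.makeBuffer(bytes: b, length: b.count * MemoryLayout<Float>.stride, options: []),
              let bufOut = device.makeBuffer(length: a.count * MemoryLayout<Float>.stride, options: []),
              let commandBuffer = queue.makeCommandBuffer(),
              let encoder = commandBuffer.makeeComputeCommandEncoder() ?? commandBuffer.makeComputeCommandEncoder() else { return nil }

        encoder.setComputePipelineState(pipeline)
        encoder.setBuffer(bufA, offset: 0, index: 0)
        encoder.setBuffer(bufB, offset: 0, index: 1)
        encoder.setBuffer(bufOut, offset: 0, index: 2)

        // One thread per element in a 1-D grid.
        let width = min(pipeline.maxTotalThreadsPerThreadgroup, a.count)
        let groups = MTLSize(width: (a.count + width - 1) / width, height: 1, depth: 1)
        encoder.dispatchThreadgroups(groups, threadsPerThreadgroup: MTLSize(width: width, height: 1, depth: 1))
        encoder.endEncoding()

        commandBuffer.commit()
        commandBuffer.waitUntilCompleted()

        let out = bufOut.contents().bindMemory(to: Float.self, capacity: a.count)
        return Array(UnsafeBufferPointer(start: out, count: a.count))
    }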
--------------------------------------------------------------------------------
/Sources/MetalCamera/utils/Colors.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Colors.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/14.
6 | //
7 |
8 | import UIKit
9 |
10 | extension UIColor {
11 | static func randomColor() -> UIColor {
12 | return UIColor(red: CGFloat(drand48()), green: CGFloat(drand48()), blue: CGFloat(drand48()), alpha: 1.0)
13 | }
14 | }
15 |
16 | func generateRandomColors(_ count: Int) -> [[UInt8]] {
17 | var colors = [[UInt8]]()
18 | for _ in 0..<count {
19 | colors.append([UInt8(drand48() * 255), UInt8(drand48() * 255), UInt8(drand48() * 255), 255])
20 | }
21 | return colors
22 | }
23 |
--------------------------------------------------------------------------------
/Sources/MetalCamera/utils/Maths.swift:
--------------------------------------------------------------------------------
44 | @discardableResult
45 | func translation(x: Float, y: Float, z: Float) -> Matrix {
46 | m[12] = x
47 | m[13] = y
48 | m[14] = z
49 | return self
50 | }
51 |
52 | @discardableResult
53 | func scaling(x: Float, y: Float, z: Float) -> Matrix {
54 | m[0] = x
55 | m[5] = y
56 | m[10] = z
57 | return self
58 | }
59 | }
60 |
61 | // MARK: - Point Utils
62 | extension CGPoint {
63 |
64 | static func middle(p1: CGPoint, p2: CGPoint) -> CGPoint {
65 | return CGPoint(x: (p1.x + p2.x) * 0.5, y: (p1.y + p2.y) * 0.5)
66 | }
67 |
68 | func distance(to other: CGPoint) -> CGFloat {
69 | let p = pow(x - other.x, 2) + pow(y - other.y, 2)
70 | return sqrt(p)
71 | }
72 |
73 | func angle(to other: CGPoint = .zero) -> CGFloat {
74 | let point = self - other
75 | if point.y == 0 {
76 | return point.x >= 0 ? 0 : CGFloat.pi
77 | }
78 | return -CGFloat(atan2f(Float(point.y), Float(point.x)))
79 | }
80 |
81 | func toFloat4(z: CGFloat = 0, w: CGFloat = 1) -> vector_float4 {
82 | return [Float(x), Float(y), Float(z), Float(w)]
83 | }
84 |
85 | func toFloat2() -> vector_float2 {
86 | return [Float(x), Float(y)]
87 | }
88 |
89 | func offsetedBy(x: CGFloat = 0, y: CGFloat = 0) -> CGPoint {
90 | var point = self
91 | point.x += x
92 | point.y += y
93 | return point
94 | }
95 |
96 | func rotatedBy(_ angle: CGFloat, anchor: CGPoint) -> CGPoint {
97 | let point = self - anchor
98 | let a = Double(-angle)
99 | let x = Double(point.x)
100 | let y = Double(point.y)
101 | let x_ = x * cos(a) - y * sin(a)
102 | let y_ = x * sin(a) + y * cos(a)
103 | return CGPoint(x: CGFloat(x_), y: CGFloat(y_)) + anchor
104 | }
105 | }
106 |
107 | func +(lhs: CGPoint, rhs: CGPoint) -> CGPoint {
108 | return CGPoint(x: lhs.x + rhs.x, y: lhs.y + rhs.y)
109 | }
110 |
111 | func +=(lhs: inout CGPoint, rhs: CGPoint) {
112 | lhs = lhs + rhs
113 | }
114 |
115 | func -(lhs: CGPoint, rhs: CGPoint) -> CGPoint {
116 | return CGPoint(x: lhs.x - rhs.x, y: lhs.y - rhs.y)
117 | }
118 |
119 | func *(lhs: CGPoint, rhs: CGFloat) -> CGPoint {
120 | return CGPoint(x: lhs.x * rhs, y: lhs.y * rhs)
121 | }
122 |
123 | func /(lhs: CGPoint, rhs: CGFloat) -> CGPoint {
124 | return CGPoint(x: lhs.x / rhs, y: lhs.y / rhs)
125 | }
126 |
127 | func +(lhs: CGSize, rhs: CGSize) -> CGSize {
128 | return CGSize(width: lhs.width + rhs.width, height: lhs.height + rhs.height)
129 | }
130 |
131 | func *(lhs: CGSize, rhs: CGFloat) -> CGSize {
132 | return CGSize(width: lhs.width * rhs, height: lhs.height * rhs)
133 | }
134 |
135 | func /(lhs: CGSize, rhs: CGFloat) -> CGSize {
136 | return CGSize(width: lhs.width / rhs, height: lhs.height / rhs)
137 | }
138 |
139 | func +(lhs: CGPoint, rhs: CGSize) -> CGPoint {
140 | return CGPoint(x: lhs.x + rhs.width, y: lhs.y + rhs.height)
141 | }
142 |
143 | func -(lhs: CGPoint, rhs: CGSize) -> CGPoint {
144 | return CGPoint(x: lhs.x - rhs.width, y: lhs.y - rhs.height)
145 | }
146 |
147 | func *(lhs: CGPoint, rhs: CGSize) -> CGPoint {
148 | return CGPoint(x: lhs.x * rhs.width, y: lhs.y * rhs.height)
149 | }
150 |
151 | func /(lhs: CGPoint, rhs: CGSize) -> CGPoint {
152 | return CGPoint(x: lhs.x / rhs.width, y: lhs.y / rhs.height)
153 | }
154 |
155 |
156 | extension Comparable {
157 | func valueBetween(min: Self, max: Self) -> Self {
158 | if self > max {
159 | return max
160 | } else if self < min {
161 | return min
162 | }
163 | return self
164 | }
165 | }
166 |
167 |
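These CGPoint helpers mostly back the pose/joint geometry elsewhere in the library. A quick illustration with arbitrary values:

    // rotatedBy negates the angle internally, so +.pi/2 turns (1, 0) into
    // (0, -1) in standard math coordinates (counter-clockwise in UIKit's
    // y-down space).
    let rotated = CGPoint(x: 1, y: 0).rotatedBy(.pi / 2, anchor: .zero)

    // valueBetween clamps any Comparable into a closed range.
    let clamped = (1.7).valueBetween(min: 0.0, max: 1.0)   // 1.0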
--------------------------------------------------------------------------------
/Sources/MetalCamera/utils/extensions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // extensions.swift
3 | // MetalCamera
4 | //
5 | // Created by Eric on 2020/06/06.
6 | //
7 |
8 | import Foundation
9 | import AVFoundation
10 | import MetalKit
11 |
12 | extension AVCaptureDevice.Position {
13 | func device() -> AVCaptureDevice? {
14 | let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera],
15 | mediaType: .video,
16 | position: self)
17 |
18 | for device in deviceDiscoverySession.devices where device.position == self {
19 | return device
20 | }
21 |
22 | return nil
23 | }
24 | }
25 |
26 | extension UIImage {
27 | func loadTexture(device: MTLDevice) -> MTLTexture {
28 | guard let cgImage = self.cgImage else {
29 | fatalError("Couldn't load CGImage")
30 | }
31 |
32 | do {
33 | let textureLoader = MTKTextureLoader(device: device)
34 | return try textureLoader.newTexture(cgImage: cgImage, options: [MTKTextureLoader.Option.SRGB: false])
35 | } catch {
36 | fatalError("Couldn't convert CGImage to MTLTexture")
37 | }
38 | }
39 | }
40 |
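A sketch of loadTexture in use, presumably how the example app turns its bundled LUT images into textures. The image name matches the Example app's Supporting Files; error handling stays minimal because loadTexture itself traps on failure:

    import UIKit

    if let image = UIImage(named: "lookup_amatorka") {
        // Produces a non-sRGB MTLTexture suitable as the lookup shader's
        // second input.
        let lutTexture = image.loadTexture(device: sharedMetalRenderingDevice.device)
        _ = lutTexture
    }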
--------------------------------------------------------------------------------
/Tests/MetalCameraTests/MetalCameraTests.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import MetalCamera
3 |
4 | final class MetalCameraTests: XCTestCase {
5 | func testExample() {
6 | // This is an example of a functional test case.
7 | // Use XCTAssert and related functions to verify your tests produce the correct
8 | // results.
9 | XCTAssertEqual(MetalCamera.libraryName, "Metal Camera")
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/build.sh:
--------------------------------------------------------------------------------
1 | swift build -v -Xswiftc "-sdk" -Xswiftc "`xcrun --sdk iphonesimulator --show-sdk-path`" -Xswiftc "-target" -Xswiftc "x86_64-apple-ios13.0-simulator"
2 |
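A note on build.sh: `swift build` targets the host macOS SDK by default, so the script forwards the iOS simulator SDK path and an explicit x86_64 simulator triple to the compiler via the paired -Xswiftc flags; on an Apple-silicon host the triple would presumably need to be arm64-apple-ios13.0-simulator instead.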
--------------------------------------------------------------------------------