├── timeline.png
├── Example
├── iOSExample
│ ├── CameraCoreExample
│ │ ├── Assets.xcassets
│ │ │ ├── Contents.json
│ │ │ └── AppIcon.appiconset
│ │ │ │ ├── Icon-76.png
│ │ │ │ ├── Icon-60@2x.png
│ │ │ │ ├── Icon-60@3x.png
│ │ │ │ ├── Icon-76@2x.png
│ │ │ │ ├── Icon-Small.png
│ │ │ │ ├── Icon-60@2x-1.png
│ │ │ │ ├── Icon-83.5@2x.png
│ │ │ │ ├── Icon-Small-40.png
│ │ │ │ ├── Icon-Small-41.png
│ │ │ │ ├── Icon-Small-42.png
│ │ │ │ ├── Icon-Small@2x.png
│ │ │ │ ├── Icon-Small@3x.png
│ │ │ │ ├── Icon-Small-40@2x.png
│ │ │ │ ├── Icon-Small@2x-1.png
│ │ │ │ ├── iTunesArtwork@2x.png
│ │ │ │ ├── Icon-Notification.png
│ │ │ │ ├── Icon-Small-40@2x-1.png
│ │ │ │ ├── Icon-Notification@3x.png
│ │ │ │ └── Contents.json
│ │ ├── ProgressViewVC.swift
│ │ ├── ExampleVC
│ │ │ ├── Others
│ │ │ │ ├── Binarization.metal
│ │ │ │ ├── Mask.metal
│ │ │ │ └── QRCodeCaptureExampleVC.swift
│ │ │ ├── FaceLayer.swift
│ │ │ ├── VideoCaptureView001ExampleVC.swift
│ │ │ └── VideoCaptureView003ExampleVC.swift
│ │ ├── Debugger
│ │ │ ├── DebugView.swift
│ │ │ └── DebugView.xib
│ │ ├── Base.lproj
│ │ │ └── LaunchScreen.storyboard
│ │ ├── UseCoreMLExample
│ │ │ ├── UseCoreMLExampleVC.swift
│ │ │ ├── UseCoreMLDeeplabV3ExampleVC.swift
│ │ │ ├── CoreMLYOLOv3TinyLayer.swift
│ │ │ ├── CoreMLMobileNetV2Layer.swift
│ │ │ ├── CoreMLDeeplabV3Layer.swift
│ │ │ └── UseCoreMLYOLOv3TinyExampleVC.swift
│ │ ├── Info.plist
│ │ ├── AppDelegate.swift
│ │ ├── AudioExampleVC.swift
│ │ └── PlayerExample
│ │ │ └── PlayerExample001VC.swift
│ └── CameraCoreExample.xcodeproj
│ │ ├── project.xcworkspace
│ │ ├── contents.xcworkspacedata
│ │ └── xcshareddata
│ │ │ ├── IDEWorkspaceChecks.plist
│ │ │ └── swiftpm
│ │ │ └── Package.resolved
│ │ └── xcshareddata
│ │ └── xcschemes
│ │ └── CameraCoreExample.xcscheme
└── CameraCoreExamples.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ ├── IDEWorkspaceChecks.plist
│ └── swiftpm
│ └── Package.resolved
├── Tests
├── LinuxMain.swift
└── CameraCoreTests
│ ├── XCTestManifests.swift
│ └── CameraCoreTests.swift
├── .swiftpm
└── xcode
│ └── package.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ └── IDEWorkspaceChecks.plist
├── Sources
└── CameraCore
│ ├── CCDebug
│ ├── CCDebug.swift
│ └── ComponentDebugger.swift
│ ├── CCARCapture
│ ├── CCARCapture.swift
│ ├── -CaptureData.swift
│ └── ARCamera.swift
│ ├── CCVision
│ ├── CCVision.swift
│ ├── CCVisionInferenceProtocol.swift
│ └── Inference.swift
│ ├── CCImageProcess
│ ├── Protocol
│ │ ├── RenderLayerUserInfoProtocol.swift
│ │ ├── RenderLayerCompositionInfoProtocol.swift
│ │ └── RenderLayerProtocol.swift
│ ├── CCImageProcess.swift
│ ├── Id
│ │ └── RenderLayerId.swift
│ └── Layer
│ │ ├── BlankLayer.swift
│ │ ├── LutLayer.swift
│ │ ├── MaskLayer.swift
│ │ ├── ColorOverlayLayer.swift
│ │ ├── TransformLayer.swift
│ │ ├── SequenceImageLayer.swift
│ │ └── ImageLayer.swift
│ ├── CCAudio
│ ├── CCAudio.swift
│ ├── Mic.swift
│ ├── Microphone.swift
│ ├── AudioEngine.swift
│ └── AudioPlayer.swift
│ ├── CCRecorder
│ ├── CCRecorder.swift
│ └── AudioRecorder.swift
│ ├── CCCapture
│ ├── VideoCapture
│ │ └── VideoCapture.swift
│ ├── CCCapture.swift
│ ├── CaptureData.swift
│ ├── CaptureInfo.swift
│ └── Camera.swift
│ ├── AssetManager.swift
│ ├── Utils
│ ├── SoundUtils.swift
│ ├── TransformUtils.swift
│ ├── ComponentDebug.swift
│ ├── GLTextureConvert.swift
│ └── CodableURL.swift
│ ├── CCTexture.swift
│ ├── CCVariable.swift
│ ├── Configuration.swift
│ ├── CCComponentProtocol.swift
│ ├── Settings.swift
│ ├── CustomOperator.swift
│ └── CCPlayer.swift
├── Package.resolved
├── Package.swift
├── .gitignore
└── README.md
/timeline.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/timeline.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/Tests/LinuxMain.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 |
3 | import CameraCoreTests
4 |
5 | var tests = [XCTestCaseEntry]()
6 | tests += CameraCoreTests.allTests()
7 | XCTMain(tests)
8 |
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-76.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-76.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-60@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-60@2x.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-60@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-60@3x.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-76@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-76@2x.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-60@2x-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-60@2x-1.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-83.5@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-83.5@2x.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small-40.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small-40.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small-41.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small-41.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small-42.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small-42.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small@2x.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small@3x.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small-40@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small-40@2x.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small@2x-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small@2x-1.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/iTunesArtwork@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/iTunesArtwork@2x.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Notification.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Notification.png
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small-40@2x-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Small-40@2x-1.png
--------------------------------------------------------------------------------
/Tests/CameraCoreTests/XCTestManifests.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 |
3 | #if !canImport(ObjectiveC)
4 | public func allTests() -> [XCTestCaseEntry] {
5 | return [
6 | testCase(CameraCoreTests.allTests),
7 | ]
8 | }
9 | #endif
10 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Notification@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hideyuki-Machida/CameraCore/HEAD/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Icon-Notification@3x.png
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>IDEDidComputeMac32BitWarning</key>
6 | <true/>
7 | </dict>
8 | </plist>
--------------------------------------------------------------------------------
/Example/CameraCoreExamples.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/Example/CameraCoreExamples.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>IDEDidComputeMac32BitWarning</key>
6 | <true/>
7 | </dict>
8 | </plist>
--------------------------------------------------------------------------------
/Sources/CameraCore/CCDebug/CCDebug.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CCDebug.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/03/24.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | public struct CCDebug {
12 | private init() {} // This struct is used as a namespace, so prevent instantiation
13 | }
14 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>IDEDidComputeMac32BitWarning</key>
6 | <true/>
7 | </dict>
8 | </plist>
--------------------------------------------------------------------------------
/Sources/CameraCore/CCARCapture/CCARCapture.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CCARCapture.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/04/04.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | public struct CCARCapture {
12 | private init() {} // This struct is used as a namespace, so prevent instantiation
13 | }
14 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCVision/CCVision.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CCVision.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/01/07.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import MetalCanvas
11 |
12 | public struct CCVision {
13 | private init() {} // This struct is used as a namespace, so prevent instantiation
14 | }
15 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCImageProcess/Protocol/RenderLayerUserInfoProtocol.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RenderLayerUserInfoProtocol.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2018/12/30.
6 | // Copyright © 2018 町田 秀行. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | public protocol RenderLayerUserInfoProtocol {
12 |
13 | }
14 |
15 | public struct RenderLayerUserInfo: RenderLayerUserInfoProtocol {}
16 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCAudio/CCAudio.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CCAudio.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/02/15.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | public struct CCAudio {
12 | private init() {} // This struct is used as a namespace, so prevent instantiation
13 |
14 | public enum ErrorType: Error {
15 | case setup
16 | case render
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCRecorder/CCRecorder.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CCRecorder.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/01/03.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | public struct CCRecorder {
12 | private init() {} // This struct is used as a namespace, so prevent instantiation
13 |
14 | internal enum ErrorType: Error {
15 | case setup
16 | case render
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCImageProcess/CCImageProcess.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CCImageProcessing.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/01/01.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | public struct CCImageProcess {
12 | private init() {} // This struct is used as a namespace, so prevent instantiation
13 |
14 | public enum ErrorType: Error {
15 | case setup
16 | case process
17 | case render
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "object": {
3 | "pins": [
4 | {
5 | "package": "iOS_DummyAVAssets",
6 | "repositoryURL": "https://github.com/Hideyuki-Machida/iOS_DummyAVAssets",
7 | "state": {
8 | "branch": "spm",
9 | "revision": "262bd80a4714065491074284b1102fd2594486f2",
10 | "version": null
11 | }
12 | }
13 | ]
14 | },
15 | "version": 1
16 | }
17 |
--------------------------------------------------------------------------------
/Tests/CameraCoreTests/CameraCoreTests.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import CameraCore
3 |
4 | final class CameraCoreTests: XCTestCase {
5 | func testExample() {
6 | // This is an example of a functional test case.
7 | // Use XCTAssert and related functions to verify your tests produce the correct
8 | // results.
9 | //XCTAssertEqual(CameraCore().text, "Hello, World!")
10 | }
11 |
12 | static var allTests = [
13 | ("testExample", testExample),
14 | ]
15 | }
16 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/ProgressViewVC.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ProgressViewVC.swift
3 | // CameraCore_Example
4 | //
5 | // Created by hideyuki machida on 2018/08/27.
6 | // Copyright © 2018 町田 秀行. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import CameraCore
11 | import MetalCanvas
12 | import ProcessLogger_Swift
13 |
14 | class ProgressViewVC: UIViewController {
15 | @IBOutlet weak var progressLabel: UILabel!
16 |
17 | deinit {
18 | ProcessLogger.deinitLog(self)
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCCapture/VideoCapture/VideoCapture.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VideoCapture.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2019/12/31.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | extension CCCapture {
12 | public struct VideoCapture {
13 | private init() {} /* This struct is used as a namespace, so prevent instantiation */
14 |
15 | public enum ErrorType: Error {
16 | case setupError
17 | case render
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCVision/CCVisionInferenceProtocol.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CCVisionInferenceProtocol.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/04/20.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 |
12 | // MARK: Inference processing protocol
13 |
14 | public protocol CCVisionInferenceProtocol {
15 | mutating func dispose()
16 | mutating func process(pixelBuffer: CVPixelBuffer, timeStamp: CMTime, metadataObjects: [AVMetadataObject], userInfo: inout [String : Any]) throws
17 | }
18 |
--------------------------------------------------------------------------------
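For orientation, here is a minimal sketch of a type conforming to CCVisionInferenceProtocol. It is not repository code: the type name, the counter, and the userInfo key are illustrative.

import AVFoundation
import CameraCore

// Hypothetical conformer: counts processed frames and passes the count back through userInfo.
struct FrameCountInference: CCVisionInferenceProtocol {
    private var frameCount: Int = 0

    mutating func dispose() {
        self.frameCount = 0
    }

    mutating func process(pixelBuffer: CVPixelBuffer, timeStamp: CMTime, metadataObjects: [AVMetadataObject], userInfo: inout [String: Any]) throws {
        self.frameCount += 1
        userInfo["FrameCountInference.count"] = self.frameCount
    }
}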
/Sources/CameraCore/CCCapture/CCCapture.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CCCapture.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2019/12/31.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | public struct CCCapture {
12 | static let videoOutputQueue: DispatchQueue = DispatchQueue(label: "CCCapture.videoOutputQueue")
13 | static let audioOutputQueue: DispatchQueue = DispatchQueue(label: "CCCapture.audioOutputQueue")
14 | static let depthOutputQueue: DispatchQueue = DispatchQueue(label: "CCCapture.depthOutputQueue")
15 | static let metaDataOutputQueue: DispatchQueue = DispatchQueue(label: "CCCapture.metaDataOutputQueue")
16 |
17 | private init() {} // This struct is used as a namespace, so prevent instantiation
18 |
19 | internal enum ErrorType: Error {
20 | case setup
21 | case render
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCImageProcess/Id/RenderLayerId.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RenderLayerId.swift
3 | // CameraCore
4 | //
5 | // Created by 町田 秀行 on 2018/08/25.
6 | // Copyright © 2018年 町田 秀行. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | public extension CCImageProcess {
12 | struct RenderLayerId: Codable {
13 | public let key: String
14 | public init() {
15 | self.key = NSUUID().uuidString
16 | }
17 | }
18 | }
19 |
20 | extension CCImageProcess.RenderLayerId: Equatable {
21 | public static func ==(lhs: CCImageProcess.RenderLayerId, rhs: CCImageProcess.RenderLayerId) -> Bool{
22 | return lhs.key == rhs.key
23 | }
24 | }
25 |
26 | extension CCImageProcess.RenderLayerId {
27 | public static func !=(lhs: CCImageProcess.RenderLayerId, rhs: CCImageProcess.RenderLayerId) -> Bool{
28 | return lhs.key != rhs.key
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/ExampleVC/Others/Binarization.metal:
--------------------------------------------------------------------------------
1 | //
2 | // Binarization.metal
3 | // CameraCoreExample
4 | //
5 | // Created by hideyuki machida on 2020/08/02.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | #include <metal_stdlib>
10 | #include
11 | #include
12 |
13 | using namespace metal;
14 |
15 | kernel void kernel_Binarization(texture2d<float, access::read> disparityTexture [[texture(0)]],
16 | texture2d<float, access::write> outputTexture [[texture(1)]],
17 | constant float &intensity [[ buffer(0) ]],
18 | uint2 gid [[thread_position_in_grid]])
19 | {
20 | float4 disparityTextureColor = disparityTexture.read(gid);
21 | float r = disparityTextureColor.r > intensity ? 1.0 : 0.0;
22 | outputTexture.write(float4(r, r, r, 1.0), gid);
23 | }
24 |
--------------------------------------------------------------------------------
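For context, a compute kernel with this signature is normally driven from Swift roughly as in the sketch below. This is not repository code: it assumes the shader is compiled into the app's default Metal library and that the caller already owns a command buffer plus a readable source texture and a writable output texture of matching size; all names are illustrative.

import Metal

func encodeBinarization(device: MTLDevice,
                        commandBuffer: MTLCommandBuffer,
                        sourceTexture: MTLTexture,
                        outputTexture: MTLTexture,
                        intensity: Float) throws {
    let library = try device.makeDefaultLibrary(bundle: .main)
    guard let function = library.makeFunction(name: "kernel_Binarization") else { return }
    let pipeline = try device.makeComputePipelineState(function: function)
    guard let encoder = commandBuffer.makeComputeCommandEncoder() else { return }

    encoder.setComputePipelineState(pipeline)
    encoder.setTexture(sourceTexture, index: 0)                              // [[texture(0)]]
    encoder.setTexture(outputTexture, index: 1)                              // [[texture(1)]]
    var threshold = intensity
    encoder.setBytes(&threshold, length: MemoryLayout<Float>.size, index: 0) // [[buffer(0)]]

    // Launch one thread per pixel in 16x16 threadgroups.
    let threadsPerGroup = MTLSize(width: 16, height: 16, depth: 1)
    let groups = MTLSize(width: (sourceTexture.width + 15) / 16,
                         height: (sourceTexture.height + 15) / 16,
                         depth: 1)
    encoder.dispatchThreadgroups(groups, threadsPerThreadgroup: threadsPerGroup)
    encoder.endEncoding()
}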
/Sources/CameraCore/AssetManager.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AssetManager.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2018/09/19.
6 | // Copyright © 2018 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import UIKit
11 |
12 | public final class AssetManager {
13 | static let shard = AssetManager()
14 | private let bundle: Bundle
15 |
16 | private init() {
17 | self.bundle = Bundle(for: type(of: self))
18 | }
19 |
20 | public enum Shader {
21 | case mask
22 | case colorOverlay
23 | public var url: URL {
24 | switch self {
25 | case .mask: return AssetManager.shard.bundle.url(forResource: "Shader/Mask", withExtension: "cikernel")!
26 | case .colorOverlay: return AssetManager.shard.bundle.url(forResource: "Shader/ColorOverlay", withExtension: "cikernel")!
27 | }
28 | }
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCImageProcess/Layer/BlankLayer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // BlankLayer.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2018/08/22.
6 | // Copyright © 2018 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import MetalCanvas
11 |
12 | public extension CCImageProcess {
13 | final class BlankLayer: RenderLayerProtocol {
14 | public let type: RenderLayerType = RenderLayerType.blank
15 | public let id: RenderLayerId
16 | public let customIndex: Int = 0
17 |
18 | public convenience init() {
19 | self.init(id: RenderLayerId())
20 | }
21 |
22 | public init(id: RenderLayerId) {
23 | self.id = id
24 | }
25 |
26 | public func dispose() {}
27 | }
28 | }
29 |
30 | public extension CCImageProcess.BlankLayer {
31 | func process(commandBuffer: MTLCommandBuffer, source: CCTexture, destination: inout CCTexture, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws {}
32 | }
33 |
--------------------------------------------------------------------------------
/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "object": {
3 | "pins": [
4 | {
5 | "package": "GraphicsLibs.Swift",
6 | "repositoryURL": "https://github.com/Hideyuki-Machida/GraphicsLibs.Swift",
7 | "state": {
8 | "branch": "master",
9 | "revision": "89c002dae842675d13cfe752e36bc771f282b88e",
10 | "version": null
11 | }
12 | },
13 | {
14 | "package": "MetalCanvas",
15 | "repositoryURL": "https://github.com/Hideyuki-Machida/MetalCanvas",
16 | "state": {
17 | "branch": "master",
18 | "revision": "09d40467b6c8164b3778b6a9fee3957554310bf8",
19 | "version": null
20 | }
21 | },
22 | {
23 | "package": "ProcessLogger.Swift",
24 | "repositoryURL": "https://github.com/Hideyuki-Machida/ProcessLogger.Swift",
25 | "state": {
26 | "branch": "master",
27 | "revision": "fa8e1ce7e599f8e4b3cb54f5e7119153e6c9e32e",
28 | "version": null
29 | }
30 | }
31 | ]
32 | },
33 | "version": 1
34 | }
35 |
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version:5.3
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 |
4 | import PackageDescription
5 |
6 | let package = Package(
7 | name: "CameraCore",
8 | platforms: [
9 | .iOS(.v13),
10 | ],
11 | products: [
12 | .library(
13 | name: "CameraCore",
14 | targets: ["CameraCore"]),
15 | ],
16 | dependencies: [
17 | .package(url: "https://github.com/Hideyuki-Machida/MetalCanvas", .branch("master")),
18 | .package(url: "https://github.com/Hideyuki-Machida/ProcessLogger.Swift", .branch("master")),
19 | .package(url: "https://github.com/Hideyuki-Machida/GraphicsLibs.Swift", .branch("master"))
20 |
21 | ],
22 | targets: [
23 | .target(
24 | name: "CameraCore",
25 | dependencies: ["MetalCanvas", "ProcessLogger.Swift", "GraphicsLibs.Swift"]
26 | ),
27 | .testTarget(
28 | name: "CameraCoreTests",
29 | dependencies: ["CameraCore"]),
30 | ],
31 | swiftLanguageVersions: [.v5]
32 | )
33 |
--------------------------------------------------------------------------------
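For reference, a downstream project could depend on CameraCore with a manifest like the sketch below. The client package name is made up, and pinning to the master branch is an assumption that simply mirrors the branch-based style this manifest uses for its own dependencies.

// swift-tools-version:5.3
import PackageDescription

let package = Package(
    name: "MyCameraApp",
    platforms: [
        .iOS(.v13),
    ],
    dependencies: [
        .package(url: "https://github.com/Hideyuki-Machida/CameraCore", .branch("master")),
    ],
    targets: [
        .target(
            name: "MyCameraApp",
            dependencies: ["CameraCore"]),
    ]
)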
/Example/iOSExample/CameraCoreExample/Debugger/DebugView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DebugView.swift
3 | // CameraCoreExample
4 | //
5 | // Created by machida.hideyuki on 2020/03/06.
6 | // Copyright © 2020 Donuts. All rights reserved.
7 | //
8 |
9 | import CameraCore
10 | import MetalCanvas
11 | import UIKit
12 | import ProcessLogger_Swift
13 |
14 | class DebugView: UIView {
15 | @IBOutlet weak var label: UILabel!
16 |
17 | deinit {
18 | ProcessLogger.deinitLog(self)
19 | }
20 |
21 | func set(debugData: CCDebug.ComponentDebugger.Output.Data) {
22 | var str: String = ""
23 | str += "Time: \(debugData.time)\n"
24 | str += "\n-- Device --\n"
25 | str += "usedCPU: \(debugData.usedCPU)\n"
26 | str += "usedMemory: \(debugData.usedMemory)\n"
27 | str += "thermalState: \(debugData.thermalState)\n\n"
28 | str += "\n-- FPS --\n"
29 | str += "mainthredFPS: \(debugData.mainthredFPS)\n"
30 | for i: CCDebug.ComponentDebugger.Output.Data.CompornetFPS in debugData.compornetFPSList {
31 | str += "\(i.name)FPS: \(i.fps)\n"
32 | }
33 | self.label.text = str
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/ExampleVC/Others/Mask.metal:
--------------------------------------------------------------------------------
1 | //
2 | // Mask.metal
3 | // CameraCoreExample
4 | //
5 | // Created by hideyuki machida on 2020/08/02.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | #include <metal_stdlib>
10 | #include
11 | #include
12 |
13 | using namespace metal;
14 |
15 | kernel void kernel_Mask(texture2d<float, access::read> sorceTexture [[texture(0)]],
16 | texture2d<float, access::read> maskTexture [[texture(1)]],
17 | texture2d<float, access::write> outputTexture [[texture(2)]],
18 | uint2 gid [[thread_position_in_grid]])
19 | {
20 | float4 sorceTextureColor = sorceTexture.read(gid);
21 | float4 maskTextureColor = maskTexture.read(gid);
22 | float4 maskColor = float4(
23 | sorceTextureColor.r * maskTextureColor.r,
24 | sorceTextureColor.g * maskTextureColor.g,
25 | sorceTextureColor.b * maskTextureColor.b,
26 | 1.0);
27 | outputTexture.write(maskColor, gid);
28 | }
29 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/ExampleVC/FaceLayer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FaceLayer.swift
3 | // CameraCoreExample
4 | //
5 | // Created by hideyuki machida on 2019/10/15.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import MetalCanvas
11 | import CameraCore
12 |
13 | final public class FaceLayer: RenderLayerProtocol {
14 | public let type: RenderLayerType = RenderLayerType.custom
15 | public let id: RenderLayerId
16 | public var customIndex: Int = 0
17 |
18 | public init() {
19 | self.id = RenderLayerId()
20 | }
21 |
22 | /// Clears the cache
23 | public func dispose() {
24 | }
25 | }
26 |
27 | extension FaceLayer: MetalRenderLayerProtocol {
28 | public func process(commandBuffer: inout MTLCommandBuffer, source: MTLTexture, destination: inout MTLTexture, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws {
29 | guard let metadataObjects = renderLayerCompositionInfo.metadataObjects else { return }
30 | for metadataObject in metadataObjects {
31 | if let faceObject: AVMetadataFaceObject = metadataObject as? AVMetadataFaceObject {
32 | print(faceObject)
33 | }
34 | }
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/Sources/CameraCore/Utils/SoundUtils.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SoundUtils.swift
3 | // MystaVideoModule
4 | //
5 | // Created by machidahideyuki on 2018/04/09.
6 | // Copyright © 2018年 tv.mysta. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import UIKit
11 | import AVFoundation
12 | import MetalCanvas
13 | import ProcessLogger_Swift
14 |
15 | public final class SoundUtils {
16 | public static let shared = SoundUtils()
17 |
18 | public func brank(url: URL) -> Double {
19 | do {
20 | let audioFile = try AVAudioFile(forReading: url)
21 | let rate: Int = Int(audioFile.fileFormat.sampleRate)
22 | let frameCount = UInt32(audioFile.length)
23 | //let duration = Double(audioFile.length) / audioFile.fileFormat.sampleRate
24 | let PCMBuffer: AVAudioPCMBuffer = AVAudioPCMBuffer(pcmFormat: audioFile.processingFormat, frameCapacity: frameCount)!
25 | try audioFile.read(into: PCMBuffer, frameCount: frameCount)
26 | for i in 0..<Int(PCMBuffer.format.channelCount) {
27 | guard let channelData = PCMBuffer.floatChannelData?[i] else { continue }
28 | for index in 0..<Int(frameCount) {
29 | if channelData[index] > 0 {
30 | return Double(index / rate)
31 | }
32 | }
33 | }
34 |
35 | //self.ffmpeg(url: url)
36 | } catch {
37 | ProcessLogger.log(url)
38 | }
39 | return 0
40 | }
41 |
42 | }
43 |
--------------------------------------------------------------------------------
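A hedged usage sketch of brank(url:): as written above it decodes the file, scans the PCM samples, and returns the offset of the first positive sample in whole seconds, falling back to 0 when the file cannot be read. The file URL below is illustrative.

import Foundation
import CameraCore

let audioURL = URL(fileURLWithPath: "/path/to/recording.m4a")
let firstSoundSecond = SoundUtils.shared.brank(url: audioURL)
print("First non-silent second: \(firstSoundSecond)")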
/Sources/CameraCore/CCAudio/Mic.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Mic.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/02/15.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import Foundation
11 |
12 | extension CCAudio {
13 | public class Mic {
14 | var audioEngine: AVAudioEngine = AVAudioEngine()
15 | public var volume: Float {
16 | get {
17 | return self.audioEngine.inputNode.volume
18 | }
19 | set {
20 | self.audioEngine.inputNode.volume = newValue
21 | }
22 | }
23 | public init() throws {
24 | /*
25 | // Allow Bluetooth (A2DP) connections
26 | try AVAudioSession.sharedInstance()
27 | .setCategory(.playAndRecord,
28 | mode: .voiceChat,
29 | options: .allowBluetoothA2DP)
30 | */
31 | }
32 | }
33 | }
34 |
35 | extension CCAudio.Mic {
36 | func pipe(audioEngine: inout AVAudioEngine) throws -> CCAudio.Mic {
37 |
38 | audioEngine.inputNode.volume = self.audioEngine.inputNode.volume
39 | self.audioEngine = audioEngine
40 | self.audioEngine.connect(self.audioEngine.inputNode, to: self.audioEngine.mainMixerNode, format: self.audioEngine.inputNode.inputFormat(forBus: 0))
41 | return self
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCARCapture/-CaptureData.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CaptureData.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/04/04.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import ARKit
11 | import MetalCanvas
12 |
13 | extension CCARCapture {
14 | public struct CaptureData {
15 | public let arFrame: ARFrame
16 | public let captureInfo: CCCapture.VideoCapture.CaptureInfo
17 | public let mtlPixelFormat: MTLPixelFormat
18 | public let outPutPixelFormatType: MCPixelFormatType
19 | public let presentationTimeStamp: CMTime
20 | public let captureVideoOrientation: AVCaptureVideoOrientation
21 |
22 | internal init(arFrame: ARFrame, captureInfo: CCCapture.VideoCapture.CaptureInfo, mtlPixelFormat: MTLPixelFormat, outPutPixelFormatType: MCPixelFormatType, captureVideoOrientation: AVCaptureVideoOrientation) {
23 | self.arFrame = arFrame
24 | self.captureInfo = captureInfo
25 | self.mtlPixelFormat = mtlPixelFormat
26 | self.outPutPixelFormatType = outPutPixelFormatType
27 | let scale = 60
28 | self.presentationTimeStamp = CMTime.init(value: CMTimeValue(arFrame.timestamp * Double(scale)), timescale: CMTimeScale(scale))
29 | self.captureVideoOrientation = captureVideoOrientation
30 | }
31 |
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/Example/CameraCoreExamples.xcworkspace/xcshareddata/swiftpm/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "object": {
3 | "pins": [
4 | {
5 | "package": "GraphicsLibs.Swift",
6 | "repositoryURL": "https://github.com/Hideyuki-Machida/GraphicsLibs.Swift",
7 | "state": {
8 | "branch": "master",
9 | "revision": "f8217e8bb3af77add4ad59ace8e52c71f9708a94",
10 | "version": null
11 | }
12 | },
13 | {
14 | "package": "iOS_DummyAVAssets",
15 | "repositoryURL": "https://github.com/Hideyuki-Machida/iOS_DummyAVAssets",
16 | "state": {
17 | "branch": "spm",
18 | "revision": "6fca9fee8b58d4456c122ef08d5e3e169d7d9abc",
19 | "version": null
20 | }
21 | },
22 | {
23 | "package": "MetalCanvas",
24 | "repositoryURL": "https://github.com/Hideyuki-Machida/MetalCanvas",
25 | "state": {
26 | "branch": "master",
27 | "revision": "c619468c5e75abcd53a1a9e9e58286b0030d094e",
28 | "version": null
29 | }
30 | },
31 | {
32 | "package": "ProcessLogger.Swift",
33 | "repositoryURL": "https://github.com/Hideyuki-Machida/ProcessLogger.Swift",
34 | "state": {
35 | "branch": "master",
36 | "revision": "624f37efdfca9f91a16f1e827f677ecf163153d0",
37 | "version": null
38 | }
39 | }
40 | ]
41 | },
42 | "version": 1
43 | }
44 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCTexture.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CCTexture.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/01/06.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import Foundation
11 | import MetalCanvas
12 |
13 | public typealias CCTexture = MCTexture
14 |
15 | extension CCTexture {
16 | enum CCTextureOptionKey: String {
17 | case presentationTimeStamp = "CCTexture.presentationTimeStamp"
18 | case captureVideoOrientation = "CCTexture.captureVideoOrientation"
19 | case presetSize = "CCTexture.captureSize"
20 | }
21 |
22 | public var presentationTimeStamp: CMTime {
23 | get {
24 | return (self.userInfo[CCTextureOptionKey.presentationTimeStamp.rawValue] as? CMTime) ?? CMTime()
25 | }
26 | set {
27 | self.userInfo[CCTextureOptionKey.presentationTimeStamp.rawValue] = newValue
28 | }
29 | }
30 |
31 | public var captureVideoOrientation: AVCaptureVideoOrientation? {
32 | get {
33 | return self.userInfo[CCTextureOptionKey.captureVideoOrientation.rawValue] as? AVCaptureVideoOrientation
34 | }
35 | set {
36 | self.userInfo[CCTextureOptionKey.captureVideoOrientation.rawValue] = newValue
37 | }
38 | }
39 |
40 | public var presetSize: Settings.PresetSize {
41 | get {
42 | return (self.userInfo[CCTextureOptionKey.presetSize.rawValue] as? Settings.PresetSize) ?? Settings.PresetSize.p1280x720
43 | }
44 | set {
45 | self.userInfo[CCTextureOptionKey.presetSize.rawValue] = newValue
46 | }
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
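The userInfo-backed properties above let capture metadata ride along with a texture. Below is a hedged sketch of stamping them onto an existing texture; the helper, the texture, and the sample buffer are illustrative and assumed to come from a capture pipeline elsewhere.

import AVFoundation
import CameraCore

// Hypothetical helper: copy timing and capture metadata onto a CCTexture.
func tag(texture: inout CCTexture, sampleBuffer: CMSampleBuffer) {
    texture.presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    texture.captureVideoOrientation = .portrait   // example value
    texture.presetSize = Settings.PresetSize.p1280x720
}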
/Sources/CameraCore/CCVariable.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CCVariable.swift
3 | //
4 | //
5 | // Created by hideyuki machida on 2020/07/21.
6 | //
7 |
8 | import Foundation
9 |
10 | public struct CCBindble<T> {
11 | fileprivate var id: String
12 | fileprivate var callback: (T) -> Void
13 | init(_ callback: @escaping (T) -> Void) {
14 | self.id = NSUUID().uuidString
15 | self.callback = callback
16 | }
17 | }
18 |
19 | public class CCVariable<T> {
20 | private var _value: T
21 | public var value: T {
22 | get {
23 | objc_sync_enter(self)
24 | defer { objc_sync_exit(self) }
25 | return self._value
26 | }
27 | set {
28 | objc_sync_enter(self)
29 | self._value = newValue
30 | objc_sync_exit(self)
31 | }
32 | }
33 |
34 | private var callbacks: [CCBindble<T>] = []
35 |
36 | public init(_ value: T) {
37 | self._value = value
38 | }
39 |
40 | @discardableResult
41 | public func bind(dataDidChange: @escaping (T) -> Void) -> UnBindKey {
42 | let item: CCBindble<T> = CCBindble(dataDidChange)
43 | self.callbacks.append(item)
44 | return UnBindKey.init(id: item.id)
45 | }
46 |
47 | public func unBind(key: UnBindKey ) {
48 | self.callbacks = self.callbacks.filter { $0.id != key.id }
49 | }
50 |
51 | public func notice() {
52 | self.callbacks.forEach { $0.callback(self.value) }
53 | }
54 |
55 | public func dispose() {
56 | self.callbacks.removeAll()
57 | }
58 | }
59 |
60 | public extension CCVariable {
61 | struct UnBindKey {
62 | let id: String
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
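A usage sketch based only on the API above; the variable and its observer are illustrative. Note that setting value is synchronized but does not fire the callbacks; notice() has to be called explicitly.

import CameraCore

let fps = CCVariable<Int>(0)

// bind(dataDidChange:) registers a callback and returns a key for unbinding.
let key = fps.bind { newValue in
    print("fps is now \(newValue)")
}

// The setter is thread-safe but silent; notice() pushes the current value
// to every bound callback.
fps.value = 60
fps.notice()

// Unregister a single observer, or drop them all with dispose().
fps.unBind(key: key)
fps.dispose()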
/Sources/CameraCore/CCCapture/CaptureData.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CaptureData.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2019/12/28.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import CoreVideo
11 | import Foundation
12 | import MetalCanvas
13 | import MetalPerformanceShaders
14 |
15 | extension CCCapture.VideoCapture {
16 | public struct CaptureData {
17 | public let sampleBuffer: CMSampleBuffer
18 | public let captureInfo: CCCapture.VideoCapture.CaptureInfo
19 | public let depthData: AVDepthData?
20 | public let metadataObjects: [AVMetadataObject]
21 | public let mtlPixelFormat: MTLPixelFormat
22 | public let outPutPixelFormatType: MCPixelFormatType
23 | public let presentationTimeStamp: CMTime
24 | public let captureVideoOrientation: AVCaptureVideoOrientation
25 |
26 | internal init(sampleBuffer: CMSampleBuffer, captureInfo: CCCapture.VideoCapture.CaptureInfo, depthData: AVDepthData?, metadataObjects: [AVMetadataObject], mtlPixelFormat: MTLPixelFormat, outPutPixelFormatType: MCPixelFormatType, captureVideoOrientation: AVCaptureVideoOrientation) {
27 | self.sampleBuffer = sampleBuffer
28 | self.captureInfo = captureInfo
29 | self.depthData = depthData
30 | self.metadataObjects = metadataObjects
31 | self.mtlPixelFormat = mtlPixelFormat
32 | self.outPutPixelFormatType = outPutPixelFormatType
33 | self.presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
34 | self.captureVideoOrientation = captureVideoOrientation
35 | }
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/Sources/CameraCore/Utils/TransformUtils.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TransformUtils.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2018/09/13.
6 | // Copyright © 2018 町田 秀行. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 |
11 | public class TransformUtils {
12 | public static func convertTransformSKToCI (userTransform: CGAffineTransform, videoSize: CGSize, renderSize: CGSize, preferredTransform: CGAffineTransform) -> CGAffineTransform {
13 | var conversionUserTransform: CGAffineTransform = userTransform // user-specified transform
14 | let originalSize: CGSize
15 | switch preferredTransform.isRotate {
16 | case .portrait, .portraitUpsideDown:
17 | originalSize = CGSize(width: videoSize.height, height: videoSize.width)
18 | default:
19 | originalSize = CGSize(width: videoSize.width, height: videoSize.height)
20 | }
21 |
22 | ////////////////////////////////////////////
23 | // Difference between the video file size and the render size
24 | let diffX: CGFloat = renderSize.width / originalSize.width
25 | let diffY: CGFloat = renderSize.height / originalSize.height
26 | let diff: CGFloat = max(diffX, diffY)
27 |
28 | let x: CGFloat = (renderSize.width / 2) - ((((originalSize.width * diffX) * conversionUserTransform.a) - ((originalSize.height * diffY) * conversionUserTransform.b)) / 2)
29 | conversionUserTransform.tx = x + (conversionUserTransform.tx * diff)
30 |
31 | let y: CGFloat = (renderSize.height / 2) - ((((originalSize.height * diffY) * conversionUserTransform.d) - ((originalSize.width * diffX) * conversionUserTransform.c)) / 2)
32 | conversionUserTransform.ty = y + (conversionUserTransform.ty * diff)
33 | return conversionUserTransform
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/UseCoreMLExample/UseCoreMLExampleVC.swift:
--------------------------------------------------------------------------------
1 | //
2 | // UseCoreMLExampleVC.swift
3 | // CameraCoreExample
4 | //
5 | // Created by hideyuki machida on 2019/09/23.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import AVFoundation
11 | import CameraCore
12 | /*
13 | class UseCoreMLExampleVC: UIViewController {
14 |
15 | @IBOutlet weak var videoCaptureView: CameraCore.VideoCaptureView!
16 | @IBOutlet weak var classificationLabel: UILabel!
17 |
18 | var videoCaputurePropertys = CCRenderer.VideoCapture.Propertys.init(
19 | devicePosition: AVCaptureDevice.Position.back,
20 | isAudioDataOutput: true,
21 | required: [
22 | .captureSize(Settings.PresetSize.p1280x720),
23 | .frameRate(Settings.PresetFrameRate.fr30)
24 | ],
25 | option: []
26 | )
27 |
28 |
29 | deinit {
30 | self.videoCaptureView.pause()
31 | self.videoCaptureView.dispose()
32 | }
33 |
34 | override func viewDidLoad() {
35 | super.viewDidLoad()
36 |
37 | do {
38 | try self.videoCaptureView.setup(self.videoCaputurePropertys)
39 | let coreMLLayer = try CoreMLMobileNetV2Layer()
40 | coreMLLayer.onProcessClassifications = { [weak self] (descriptions: [String]) in
41 | DispatchQueue.main.async { [weak self] in
42 | self?.classificationLabel.text = descriptions.joined(separator: "\n")
43 | }
44 | }
45 | self.videoCaptureView.renderLayers = [ coreMLLayer ]
46 | } catch {
47 | }
48 | }
49 |
50 | override func viewWillAppear(_ animated: Bool) {
51 | super.viewWillAppear(animated)
52 | self.videoCaptureView.play()
53 | }
54 |
55 | override func viewWillDisappear(_ animated: Bool) {
56 | super.viewWillDisappear(animated)
57 | self.videoCaptureView.pause()
58 | self.videoCaptureView.dispose()
59 | }
60 | }
61 | */
62 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/UseCoreMLExample/UseCoreMLDeeplabV3ExampleVC.swift:
--------------------------------------------------------------------------------
1 | //
2 | // UseCoreMLDeeplabV3ExampleVC.swift
3 | // CameraCoreExample
4 | //
5 | // Created by hideyuki machida on 2019/10/06.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import AVFoundation
11 | import CameraCore
12 | import Vision
13 | /*
14 | @available(iOS 12.0, *)
15 | class UseCoreMLDeeplabV3ExampleVC: UIViewController {
16 |
17 | @IBOutlet weak var videoCaptureView: CameraCore.VideoCaptureView!
18 |
19 | private var detectionOverlay: CALayer! = nil
20 | private var videoCaputureParamator = CCRenderer.VideoCapture.VideoCaputureParamator.init(
21 | presetiFrame: Settings.PresetiFrame.p960x540,
22 | frameRate: 30,
23 | devicePosition: AVCaptureDevice.Position.back,
24 | isDepth: false
25 | )
26 |
27 |
28 | deinit {
29 | self.videoCaptureView.pause()
30 | self.videoCaptureView.dispose()
31 | }
32 |
33 | var rootLayer: CALayer! = nil
34 |
35 | override func viewDidLoad() {
36 | super.viewDidLoad()
37 |
38 | self.rootLayer = self.videoCaptureView.layer
39 | do {
40 | try self.videoCaptureView.setup(self.videoCaputureParamator)
41 | //let coreMLLayer = try CoreMLDeeplabV3Layer()
42 | //self.videoCaptureView.renderLayers = [ coreMLLayer ]
43 | } catch {
44 | }
45 | }
46 |
47 | override func viewWillAppear(_ animated: Bool) {
48 | super.viewWillAppear(animated)
49 | self.videoCaptureView.play()
50 | }
51 |
52 | override func viewWillDisappear(_ animated: Bool) {
53 | super.viewWillDisappear(animated)
54 | self.videoCaptureView.pause()
55 | self.videoCaptureView.dispose()
56 | }
57 | }
58 | */
59 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCRecorder/AudioRecorder.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioRecorder.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/02/17.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 |
12 | extension CCRecorder {
13 | public class AudioRecorder {
14 | var audioFile: AVAudioFile?
15 | public var isRecording: Bool = false
16 |
17 | public init() throws {
18 | }
19 | }
20 | }
21 |
22 | public extension CCRecorder.AudioRecorder {
23 | func setup(parameter: CCRecorder.CaptureWriter.Parameter) {
24 | CCRecorder.CaptureWriter.setup(parameter)
25 | }
26 |
27 | func start() throws {
28 | self.isRecording = true
29 |
30 |
31 | let format: AVAudioFormat = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatInt16,
32 | sampleRate: 44100.0,
33 | channels: 1,
34 | interleaved: true)!
35 |
36 | let documentDir = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first!
37 | let filePath = URL(fileURLWithPath: documentDir + "/sample.caf")
38 |
39 | // Audio file
40 | self.audioFile = try AVAudioFile(forWriting: filePath, settings: format.settings)
41 | }
42 |
43 | func stop() {
44 | self.isRecording = false
45 | }
46 | }
47 |
48 | extension CCRecorder.AudioRecorder {
49 | func pipe(audioEngine: CCAudio.AudioEngine) throws {
50 | /*
51 | audioEngine.onUpdatePCMBuffer = { [weak self] (pcmBuffer: AVAudioPCMBuffer) in
52 | guard self?.isRecording == true else { return }
53 | do {
54 | try self?.audioFile?.write(from: pcmBuffer)
55 | } catch {
56 |
57 | }
58 | }
59 | */
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 |
3 | # SPM
4 | /.build
5 | /Packages
6 |
7 | # Xcode
8 | #
9 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
10 |
11 | ## Build generated
12 | build/
13 | DerivedData/
14 |
15 | ## Various settings
16 | *.pbxuser
17 | !default.pbxuser
18 | *.mode1v3
19 | !default.mode1v3
20 | *.mode2v3
21 | !default.mode2v3
22 | *.perspectivev3
23 | !default.perspectivev3
24 | xcuserdata/
25 |
26 | ## Other
27 | *.moved-aside
28 | *.xccheckout
29 | *.xcscmblueprint
30 |
31 | ## Obj-C/Swift specific
32 | *.hmap
33 | *.ipa
34 | *.dSYM.zip
35 | *.dSYM
36 |
37 | ## Playgrounds
38 | timeline.xctimeline
39 | playground.xcworkspace
40 |
41 | # Swift Package Manager
42 | #
43 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
44 | # Packages/
45 | # Package.pins
46 | .build/
47 |
48 | # CocoaPods
49 | #
50 | # We recommend against adding the Pods directory to your .gitignore. However
51 | # you should judge for yourself, the pros and cons are mentioned at:
52 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
53 | #
54 | # Pods/
55 | Pods/*
56 | Podfile.lock
57 |
58 |
59 | # Carthage
60 | #
61 | # Add this line if you want to avoid checking in source code from Carthage dependencies.
62 | # Carthage/Checkouts
63 | Carthage/Checkouts
64 | Carthage/Build
65 |
66 | # bundle
67 | vendor/bundle
68 | # fastlane
69 | #
70 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
71 | # screenshots whenever they are needed.
72 | # For more information about the recommended setup visit:
73 | # https://docs.fastlane.tools/best-practices/source-control/#source-control
74 |
75 | fastlane/report.xml
76 | fastlane/Preview.html
77 | fastlane/screenshots
78 | fastlane/test_output
79 |
80 |
--------------------------------------------------------------------------------
/Sources/CameraCore/Utils/ComponentDebug.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ComponentDebugger.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/03/19.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import MetalCanvas
11 |
12 | /*
13 | public class ComponentDebug {
14 | private var deviceDebugger: MCDebug.Device = MCDebug.Device()
15 | private var framerateDebugger: MCDebug.Framerate = MCDebug.Framerate()
16 |
17 | private var d: [thread_basic_info] = []
18 | private var dooo: (thredIndex: Int, threadInfo: thread_basic_info) = (thredIndex: 0, threadInfo: thread_basic_info())
19 |
20 | init() {}
21 |
22 | func update() {
23 | self.framerateDebugger.update()
24 | }
25 |
26 | func update(thred: Thread, queue: DispatchQueue) {
27 | guard let queueLabel: String = String(validatingUTF8: __dispatch_queue_get_label(queue)) else { return }
28 | let machTID: mach_port_t = pthread_mach_thread_np(pthread_self())
29 | guard let thredBasicInfo: (thredIndex: Int, threadInfo: thread_basic_info) = self.deviceDebugger.thredBasicInfo(machTID: machTID) else { return }
30 | self.dooo = thredBasicInfo
31 | }
32 |
33 | public func fps() -> Int {
34 | return self.framerateDebugger.fps()
35 | }
36 |
37 | public func cpu() {
38 | let count: Float = Float(self.d.count)
39 | let cpu_usage: Float = self.d.map { Float($0.cpu_usage) }.reduce(0, +)
40 | let microseconds: Float = self.d.map { Float($0.user_time.microseconds) }.reduce(0, +)
41 | //print(self.d)
42 | print((cpu_usage / Float(TH_USAGE_SCALE) * 100) / count, microseconds / count / 1000)
43 | print("thredIndex", self.dooo.thredIndex, Float(self.dooo.threadInfo.cpu_usage) / Float(TH_USAGE_SCALE) * 100, Float(self.dooo.threadInfo.user_time.microseconds) / Float(1000.0))
44 | self.d.removeAll()
45 | }
46 |
47 | }
48 | */
49 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCImageProcess/Layer/LutLayer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LutLayer.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2018/08/22.
6 | // Copyright © 2018 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import MetalCanvas
11 |
12 | public extension CCImageProcess {
13 | final class LutLayer: RenderLayerProtocol {
14 | public enum Dimension: Int, Codable {
15 | case dim3 = 64
16 | }
17 |
18 | public let type: RenderLayerType = RenderLayerType.lut
19 | public let id: RenderLayerId
20 | public let customIndex: Int = 0
21 | private let lutImageURL: URL
22 | private var lutFilter: MCFilter.ColorProcessing.Lut3DFilter
23 | private let dimension: Dimension
24 |
25 | public var intensity: Float = 1.0 {
26 | willSet {
27 | self.lutFilter.intensity = newValue
28 | }
29 | }
30 |
31 | public convenience init(lutImageURL: URL, dimension: Dimension) throws {
32 | try self.init(id: RenderLayerId(), lutImageURL: lutImageURL, dimension: dimension)
33 | }
34 |
35 | public init(id: RenderLayerId, lutImageURL: URL, dimension: Dimension) throws {
36 | self.id = id
37 | self.dimension = dimension
38 | self.lutImageURL = lutImageURL
39 | self.lutFilter = try MCFilter.ColorProcessing.Lut3DFilter(lutImageTexture: try CCTexture(URL: lutImageURL, isSRGB: false))
40 | self.lutFilter.intensity = self.intensity
41 | }
42 |
43 | public func dispose() {}
44 | }
45 | }
46 |
47 | public extension CCImageProcess.LutLayer {
48 | func process(commandBuffer: MTLCommandBuffer, source: CCTexture, destination: inout CCTexture, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws {
49 | try self.lutFilter.process(commandBuffer: commandBuffer, imageTexture: source, destinationTexture: &destination)
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
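A hedged construction sketch for LutLayer: the LUT image name is illustrative, and .dim3 (a 64-point 3D LUT) is the only dimension the enum above defines.

import Foundation
import CameraCore

do {
    let lutURL = Bundle.main.url(forResource: "SampleLut", withExtension: "png")!
    let lutLayer = try CCImageProcess.LutLayer(lutImageURL: lutURL, dimension: .dim3)
    lutLayer.intensity = 0.5   // 0.0 keeps the source image, 1.0 applies the LUT fully
} catch {
    print("Failed to create LutLayer: \(error)")
}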
/Example/iOSExample/CameraCoreExample/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |     <key>CFBundleDevelopmentRegion</key>
6 |     <string>$(DEVELOPMENT_LANGUAGE)</string>
7 |     <key>CFBundleExecutable</key>
8 |     <string>$(EXECUTABLE_NAME)</string>
9 |     <key>CFBundleIdentifier</key>
10 |     <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 |     <key>CFBundleInfoDictionaryVersion</key>
12 |     <string>6.0</string>
13 |     <key>CFBundleName</key>
14 |     <string>$(PRODUCT_NAME)</string>
15 |     <key>CFBundlePackageType</key>
16 |     <string>APPL</string>
17 |     <key>CFBundleShortVersionString</key>
18 |     <string>1.0</string>
19 |     <key>CFBundleVersion</key>
20 |     <string>1</string>
21 |     <key>LSRequiresIPhoneOS</key>
22 |     <true/>
23 |     <key>LSSupportsOpeningDocumentsInPlace</key>
24 |     <true/>
25 |     <key>NSCameraUsageDescription</key>
26 |     <string>カメラを使用する理由</string>
27 |     <key>NSMicrophoneUsageDescription</key>
28 |     <string>マイクを使用する理由</string>
29 |     <key>NSPhotoLibraryUsageDescription</key>
30 |     <string>動画保存のために許可して下さい</string>
31 |     <key>UIFileSharingEnabled</key>
32 |     <true/>
33 |     <key>UILaunchStoryboardName</key>
34 |     <string>LaunchScreen</string>
35 |     <key>UIMainStoryboardFile</key>
36 |     <string>Main</string>
37 |     <key>UIRequiredDeviceCapabilities</key>
38 |     <array>
39 |         <string>armv7</string>
40 |     </array>
41 |     <key>UISupportedInterfaceOrientations</key>
42 |     <array>
43 |         <string>UIInterfaceOrientationPortrait</string>
44 |         <string>UIInterfaceOrientationLandscapeLeft</string>
45 |         <string>UIInterfaceOrientationLandscapeRight</string>
46 |         <string>UIInterfaceOrientationPortraitUpsideDown</string>
47 |     </array>
48 |     <key>UISupportedInterfaceOrientations~ipad</key>
49 |     <array>
50 |         <string>UIInterfaceOrientationPortrait</string>
51 |         <string>UIInterfaceOrientationPortraitUpsideDown</string>
52 |         <string>UIInterfaceOrientationLandscapeLeft</string>
53 |         <string>UIInterfaceOrientationLandscapeRight</string>
54 |     </array>
55 | </dict>
56 | </plist>
57 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/UseCoreMLExample/CoreMLYOLOv3TinyLayer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CoreMLYOLOv3TinyLayer.swift
3 | // CameraCoreExample
4 | //
5 | // Created by hideyuki machida on 2019/09/23.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import CameraCore
10 | import Vision
11 | /*
12 | @available(iOS 12.0, *)
13 | final public class CoreMLYOLOv3TinyLayer: RenderLayerProtocol {
14 | public let type: RenderLayerType = RenderLayerType.custom
15 | public let id: RenderLayerId
16 | public let customIndex: Int = 0
17 | public var request: VNCoreMLRequest?
18 | public var items: [ VNRecognizedObjectObservation ] = []
19 | public var onUpdate: ((_ items: [ VNRecognizedObjectObservation ])->Void)?
20 |
21 | public init() throws {
22 | self.id = RenderLayerId()
23 | let model = try VNCoreMLModel(for: YOLOv3Tiny().model)
24 | self.request = VNCoreMLRequest(model: model, completionHandler: { [weak self] (request, error) in
25 | self?.processClassifications(for: request, error: error)
26 | })
27 | self.request?.imageCropAndScaleOption = .centerCrop
28 | }
29 |
30 | /// Clears the cache
31 | public func dispose() {
32 | self.request = nil
33 | }
34 | }
35 |
36 | @available(iOS 12.0, *)
37 | extension CoreMLYOLOv3TinyLayer: CVPixelBufferRenderLayerProtocol {
38 | public func processing(commandBuffer: inout MTLCommandBuffer, pixelBuffer: inout CVPixelBuffer, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws {
39 | let pixelBuffer = pixelBuffer
40 | DispatchQueue.global(qos: .userInitiated).async {
41 | let handler = VNImageRequestHandler.init(cvPixelBuffer: pixelBuffer, options: [:])
42 | do {
43 | try handler.perform([self.request!])
44 | } catch {
45 | print("Failed to perform classification.\n\(error.localizedDescription)")
46 | }
47 | }
48 |
49 | }
50 | }
51 |
52 | @available(iOS 12.0, *)
53 | extension CoreMLYOLOv3TinyLayer {
54 | func processClassifications(for request: VNRequest, error: Error?) {
55 | guard let results = request.results else {
56 | return
57 | }
58 | self.items = []
59 | for observation in results where observation is VNRecognizedObjectObservation {
60 | guard let objectObservation = observation as? VNRecognizedObjectObservation else {
61 | continue
62 | }
63 | self.items.append(objectObservation)
64 | }
65 | self.onUpdate?(self.items)
66 | }
67 | }
68 | */
69 |
--------------------------------------------------------------------------------
/Sources/CameraCore/Utils/GLTextureConvert.swift:
--------------------------------------------------------------------------------
1 | //
2 | // GLTextureConvert.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2018/12/19.
6 | // Copyright © 2018 町田 秀行. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import GLKit
11 | import OpenGLES
12 |
13 | public class GLTextureConvert {
14 | var unmanagedVideoTexture: Unmanaged<CVOpenGLESTexture>?
15 | var videoTexture: CVOpenGLESTexture?
16 | var videoTextureID: GLuint?
17 | var coreVideoTextureCache: CVOpenGLESTextureCache?
18 | var context: EAGLContext?
19 |
20 | public init(context: EAGLContext){
21 | self.context = context
22 | CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, nil, self.context!, nil, &coreVideoTextureCache)
23 | //print("coreVideoTextureCache : \(self.coreVideoTextureCache)")
24 | }
25 |
26 | public func getTextureFromSampleBuffer(pixelBuffer: inout CVPixelBuffer, textureID: inout GLuint?) -> Bool {
27 |
28 | let textureWidth: Int = CVPixelBufferGetWidth(pixelBuffer)
29 | let textureHeight: Int = CVPixelBufferGetHeight(pixelBuffer)
30 |
31 | let cvRet: CVReturn = CVOpenGLESTextureCacheCreateTextureFromImage(
32 | kCFAllocatorDefault,
33 | coreVideoTextureCache!,
34 | pixelBuffer,
35 | nil,
36 | GLenum(GL_TEXTURE_2D),
37 | GL_RGBA,
38 | GLsizei(textureWidth),
39 | GLsizei(textureHeight),
40 | GLenum(GL_BGRA),
41 | UInt32(GL_UNSIGNED_BYTE),
42 | 0,
43 | &videoTexture
44 | )
45 |
46 | guard kCVReturnSuccess == cvRet else { return false }
47 | guard videoTexture != nil else { return false }
48 | textureID = CVOpenGLESTextureGetName(videoTexture!);
49 | guard textureID != nil else { return false }
50 | glBindTexture(GLenum(GL_TEXTURE_2D), textureID!)
51 | glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR)
52 | glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MAG_FILTER), GL_LINEAR)
53 | glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE)
54 | glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE)
55 | return true
56 | }
57 |
58 | public func clear() {
59 | self.videoTexture = nil
60 | guard self.coreVideoTextureCache != nil else { return }
61 | CVOpenGLESTextureCacheFlush(self.coreVideoTextureCache!, 0)
62 | }
63 |
64 | }
65 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCImageProcess/Layer/MaskLayer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MaskLayer.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2018/09/19.
6 | // Copyright © 2018 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import MetalCanvas
11 | import CoreImage
12 |
13 | public extension CCImageProcess {
14 | final class MaskLayer: RenderLayerProtocol {
15 | public let type: RenderLayerType = RenderLayerType.mask
16 | public let id: RenderLayerId
17 | public let mask: CIImage
18 | public let customIndex: Int = 0
19 | public let maskShader: CIColorKernel
20 |
21 | public convenience init(mask: CIImage) throws {
22 | try self.init(id: RenderLayerId(), mask: mask)
23 | }
24 |
25 | public init(id: RenderLayerId, mask: CIImage) throws {
26 | self.id = id
27 |
28 | let maskShaderPath: URL = AssetManager.Shader.mask.url
29 | let maskShaderString: String = try String(contentsOf: maskShaderPath, encoding: .utf8)
30 | guard let maskShader: CIColorKernel = CIColorKernel(source: maskShaderString) else { throw RenderLayerErrorType.setupError }
31 | self.maskShader = maskShader
32 | self.mask = mask
33 | }
34 |
35 | public func dispose() {}
36 | }
37 | }
38 |
39 | public extension CCImageProcess.MaskLayer {
40 | func process(commandBuffer: MTLCommandBuffer, source: CCTexture, destination: inout CCTexture, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws {
41 | guard let image: CIImage = CIImage(mtlTexture: source.texture, options: nil) else { throw RenderLayerErrorType.renderingError }
42 |
43 | let colorSpace: CGColorSpace = image.colorSpace ?? CGColorSpaceCreateDeviceRGB()
44 | let outImage = try self.process(image: image, renderLayerCompositionInfo: &renderLayerCompositionInfo)
45 | MCCore.ciContext.render(outImage, to: destination.texture, commandBuffer: commandBuffer, bounds: outImage.extent, colorSpace: colorSpace)
46 | }
47 | }
48 |
49 | fileprivate extension CCImageProcess.MaskLayer {
50 | func process(image: CIImage, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws -> CIImage {
51 | guard
52 | let image: CIImage = self.maskShader.apply(extent: image.extent, arguments: [
53 | image,
54 | self.mask,
55 | CIVector(x: image.extent.width, y: image.extent.height),
56 | ])
57 | else { throw RenderLayerErrorType.renderingError }
58 |
59 | return image
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
/Sources/CameraCore/Configuration.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Configuration.swift
3 | // CameraCore
4 | //
5 | // Created by machidahideyuki on 2018/01/08.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import Foundation
11 | import UIKit
14 | import MetalCanvas
15 | import ProcessLogger_Swift
16 |
17 | public class Configuration {
18 | public static let shared: CameraCore.Configuration = CameraCore.Configuration()
19 |
20 | public let isMetalCanvas: Bool = MCCore.isMetalCanvas
21 | public private(set) var currentUIInterfaceOrientation: UIInterfaceOrientation = .portrait
22 |
23 | @objc func orientationChange() {
24 | DispatchQueue.main.async { [weak self] in
25 | // UIApplication.shared.statusBarOrientation.toAVCaptureVideoOrientation can only be called from the main thread
26 | self?.currentUIInterfaceOrientation = UIApplication.shared.statusBarOrientation
27 | }
28 | }
29 |
30 | // About kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
31 | // http://kentaroid.com/kcvpixelformattype%E3%81%AB%E3%81%A4%E3%81%84%E3%81%A6%E3%81%AE%E8%80%83%E5%AF%9F/
32 | public let sourcePixelBufferPixelFormatTypeKey: OSType = kCVPixelFormatType_32BGRA
33 | public let outputPixelBufferPixelFormatTypeKey: OSType = kCVPixelFormatType_32BGRA
34 |
35 | public let colorSpace: CGColorSpace = CGColorSpaceCreateDeviceRGB()
36 |
37 | public let defaultUIInterfaceOrientation: UIInterfaceOrientation = .portrait
38 | public let defaultAVCaptureVideoOrientation: AVCaptureVideoOrientation = .portrait
39 | public let defaultDeviceFormatVideoOrientation: AVCaptureVideoOrientation = .landscapeRight
40 |
41 | public init() {
42 | self.orientationChange()
43 | NotificationCenter.default.removeObserver(self, name: UIApplication.didChangeStatusBarOrientationNotification, object: nil)
44 | NotificationCenter.default.addObserver(self, selector: #selector(orientationChange), name: UIApplication.didChangeStatusBarOrientationNotification, object: nil)
45 | }
46 |
47 | deinit {
48 | NotificationCenter.default.removeObserver(self)
49 | ProcessLogger.deinitLog(self)
50 | }
51 | }
52 |
53 | public func configure() throws {
54 | Configuration.shared.orientationChange()
55 | let colorSpace: CGColorSpace = CGColorSpaceCreateDeviceRGB()
56 | try MCCore.setup(contextOptions: [
57 | CIContextOption.workingColorSpace: colorSpace,
58 | CIContextOption.useSoftwareRenderer: NSNumber(value: false),
59 | ])
60 | }
61 |
62 | public func flush() {
63 | MCCore.flush()
64 | }
65 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // CameraCore_Example
4 | //
5 | // Created by 町田 秀行 on 2018/08/07.
6 | // Copyright © 2018年 町田 秀行. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import MetalCanvas
11 | import CameraCore
12 | import ProcessLogger_Swift
13 |
14 | @UIApplicationMain
15 | class AppDelegate: UIResponder, UIApplicationDelegate {
16 |
17 | var window: UIWindow?
18 |
19 |
20 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
21 | // Override point for customization after application launch.
22 | do {
23 | try CameraCore.configure() // CameraCore setup (must always be called first when using CameraCore)
24 | } catch {
25 | ProcessLogger.errorLog("CameraCore: 初期化エラー")
26 | }
27 | return true
28 | }
29 |
30 | func applicationWillResignActive(_ application: UIApplication) {
31 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
32 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
33 | }
34 |
35 | func applicationDidEnterBackground(_ application: UIApplication) {
36 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
37 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
38 | }
39 |
40 | func applicationWillEnterForeground(_ application: UIApplication) {
41 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
42 | }
43 |
44 | func applicationDidBecomeActive(_ application: UIApplication) {
45 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
46 | }
47 |
48 | func applicationWillTerminate(_ application: UIApplication) {
49 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
50 | }
51 |
52 |
53 | }
54 |
55 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCComponentProtocol.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CCComponentProtocol.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/03/15.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import MetalCanvas
11 | import ProcessLogger_Swift
12 |
13 | public protocol CCComponentSetupProtocol {
14 | }
15 |
16 | public protocol CCComponentTriggerProtocol {
17 | }
18 |
19 | public protocol CCComponentPipeProtocol: NSObjectProtocol {
20 | }
21 |
22 | public protocol CCComponentEventProtocol: NSObjectProtocol {
23 | }
24 |
25 |
26 | public protocol CCComponentProtocol: NSObjectProtocol {
27 | //var setup: CCComponentSetupProtocol { get }
28 | //var trigger: CCComponentTriggerProtocol { get }
29 | //var pipe: CCComponentPipeProtocol { get }
30 |
31 | var debug: CCComponentDebug? { get set }
32 | var isDebugMode: Bool { get set }
33 | }
34 |
35 | extension CCComponentProtocol {
36 | public var isDebugMode: Bool {
37 | get {
38 | return self.debug != nil
39 | }
40 | set {
41 | self.debug = newValue ? CCComponentDebug() : nil
42 | }
43 | }
44 | }
45 |
46 | public class CCComponentDebug {
47 | private var deviceDebugger: ProcessLogger.Device = ProcessLogger.Device()
48 | private var framerateDebugger: ProcessLogger.Framerate = ProcessLogger.Framerate()
49 |
50 | private var d: [thread_basic_info] = []
51 | private var dooo: thread_basic_info = thread_basic_info()
52 |
53 | init() {}
54 |
55 | func update() {
56 | self.framerateDebugger.update()
57 | }
58 |
59 | func update(thred: Thread, queue: DispatchQueue) {
60 | guard let queueLabel: String = String(validatingUTF8: __dispatch_queue_get_label(queue)) else { return }
61 | let machTID: mach_port_t = pthread_mach_thread_np(pthread_self())
62 | guard let thredBasicInfo: thread_basic_info = self.deviceDebugger.thredBasicInfo(machTID: machTID) else { return }
63 | //self.dooo = thredBasicInfo
64 | }
65 |
66 | public func fps() -> Int {
67 | return self.framerateDebugger.fps()
68 | }
69 |
70 | public func cpu() {
71 | /*
72 | let count: Float = Float(self.d.count)
73 | let cpu_usage: Float = self.d.map { Float($0.cpu_usage) }.reduce(0, +)
74 | let microseconds: Float = self.d.map { Float($0.user_time.microseconds) }.reduce(0, +)
75 | //print(self.d)
76 | print((cpu_usage / Float(TH_USAGE_SCALE) * 100) / count, microseconds / count / 1000)
77 | print("thredIndex", self.dooo.thredIndex, Float(self.dooo.threadInfo.cpu_usage) / Float(TH_USAGE_SCALE) * 100, Float(self.dooo.threadInfo.user_time.microseconds) / Float(1000.0))
78 | self.d.removeAll()
79 | */
80 | }
81 |
82 | }
83 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/UseCoreMLExample/CoreMLMobileNetV2Layer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CoreMLMobileNetV2Layer.swift
3 | // CameraCoreExample
4 | //
5 | // Created by hideyuki machida on 2019/09/23.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import CameraCore
10 | import Vision
11 | /*
12 | final public class CoreMLMobileNetV2Layer: RenderLayerProtocol {
13 | public let type: RenderLayerType = RenderLayerType.custom
14 | public let id: RenderLayerId
15 | public let customIndex: Int = 0
16 | public var request: VNCoreMLRequest?
17 | public var onProcessClassifications: ((_ descriptions: [String])->Void)?
18 |
19 | fileprivate var isDetecting: Bool = false
20 |
21 | public init() throws {
22 | self.id = RenderLayerId()
23 | let model = try VNCoreMLModel(for: MobileNetV2().model)
24 | self.request = VNCoreMLRequest(model: model, completionHandler: { [weak self](request, error) in
25 | self?.processClassifications(for: request, error: error)
26 | })
27 | self.request?.imageCropAndScaleOption = .centerCrop
28 | }
29 |
30 | /// Clears the cache
31 | public func dispose() {
32 | self.request = nil
33 | }
34 | }
35 |
36 | extension CoreMLMobileNetV2Layer: CVPixelBufferRenderLayerProtocol {
37 | public func processing(commandBuffer: inout MTLCommandBuffer, pixelBuffer: inout CVPixelBuffer, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws {
38 | guard !self.isDetecting else { return }
39 | self.isDetecting = true
40 | let pixelBuffer = pixelBuffer
41 | DispatchQueue.global(qos: .userInitiated).async { [weak self] in
42 | guard
43 | let self = self,
44 | let request = self.request
45 | else { return }
46 |
47 | let handler = VNImageRequestHandler.init(cvPixelBuffer: pixelBuffer, options: [:])
48 | do {
49 | try handler.perform([request])
50 | } catch {
51 |
52 | }
53 |
54 | }
55 |
56 | }
57 | }
58 |
59 | extension CoreMLMobileNetV2Layer {
60 | func processClassifications(for request: VNRequest, error: Error?) {
61 | self.isDetecting = false
62 | guard let results = request.results else {
63 | self.onProcessClassifications?(["Unable to classify image."])
64 | return
65 | }
66 | let classifications = results as! [VNClassificationObservation]
67 | if classifications.isEmpty {
68 | self.onProcessClassifications?(["Nothing recognized."])
69 | } else {
70 | let topClassifications: ArraySlice = classifications.prefix(2)
71 | let descriptions: [String] = topClassifications.map { (classification: VNClassificationObservation) in
72 | return String(format: " (%.2f) %@", classification.confidence, classification.identifier)
73 | }
74 | self.onProcessClassifications?(descriptions)
75 | }
76 | }
77 | }
78 | */
79 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "size" : "20x20",
5 | "idiom" : "iphone",
6 | "filename" : "Icon-Small-40.png",
7 | "scale" : "2x"
8 | },
9 | {
10 | "size" : "20x20",
11 | "idiom" : "iphone",
12 | "filename" : "Icon-Notification@3x.png",
13 | "scale" : "3x"
14 | },
15 | {
16 | "size" : "29x29",
17 | "idiom" : "iphone",
18 | "filename" : "Icon-Small@2x.png",
19 | "scale" : "2x"
20 | },
21 | {
22 | "size" : "29x29",
23 | "idiom" : "iphone",
24 | "filename" : "Icon-Small@3x.png",
25 | "scale" : "3x"
26 | },
27 | {
28 | "size" : "40x40",
29 | "idiom" : "iphone",
30 | "filename" : "Icon-Small-40@2x-1.png",
31 | "scale" : "2x"
32 | },
33 | {
34 | "size" : "40x40",
35 | "idiom" : "iphone",
36 | "filename" : "Icon-60@2x-1.png",
37 | "scale" : "3x"
38 | },
39 | {
40 | "size" : "60x60",
41 | "idiom" : "iphone",
42 | "filename" : "Icon-60@2x.png",
43 | "scale" : "2x"
44 | },
45 | {
46 | "size" : "60x60",
47 | "idiom" : "iphone",
48 | "filename" : "Icon-60@3x.png",
49 | "scale" : "3x"
50 | },
51 | {
52 | "size" : "20x20",
53 | "idiom" : "ipad",
54 | "filename" : "Icon-Notification.png",
55 | "scale" : "1x"
56 | },
57 | {
58 | "size" : "20x20",
59 | "idiom" : "ipad",
60 | "filename" : "Icon-Small-42.png",
61 | "scale" : "2x"
62 | },
63 | {
64 | "size" : "29x29",
65 | "idiom" : "ipad",
66 | "filename" : "Icon-Small.png",
67 | "scale" : "1x"
68 | },
69 | {
70 | "size" : "29x29",
71 | "idiom" : "ipad",
72 | "filename" : "Icon-Small@2x-1.png",
73 | "scale" : "2x"
74 | },
75 | {
76 | "size" : "40x40",
77 | "idiom" : "ipad",
78 | "filename" : "Icon-Small-41.png",
79 | "scale" : "1x"
80 | },
81 | {
82 | "size" : "40x40",
83 | "idiom" : "ipad",
84 | "filename" : "Icon-Small-40@2x.png",
85 | "scale" : "2x"
86 | },
87 | {
88 | "size" : "76x76",
89 | "idiom" : "ipad",
90 | "filename" : "Icon-76.png",
91 | "scale" : "1x"
92 | },
93 | {
94 | "size" : "76x76",
95 | "idiom" : "ipad",
96 | "filename" : "Icon-76@2x.png",
97 | "scale" : "2x"
98 | },
99 | {
100 | "size" : "83.5x83.5",
101 | "idiom" : "ipad",
102 | "filename" : "Icon-83.5@2x.png",
103 | "scale" : "2x"
104 | },
105 | {
106 | "size" : "1024x1024",
107 | "idiom" : "ios-marketing",
108 | "filename" : "iTunesArtwork@2x.png",
109 | "scale" : "1x"
110 | }
111 | ],
112 | "info" : {
113 | "version" : 1,
114 | "author" : "xcode"
115 | }
116 | }
--------------------------------------------------------------------------------
/Sources/CameraCore/CCImageProcess/Layer/ColorOverlayLayer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ColorOverlayLayer.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2018/09/19.
6 | // Copyright © 2018 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import MetalCanvas
11 | import CoreImage
12 |
13 | public extension CCImageProcess {
14 | final class ColorOverlayLayer: RenderLayerProtocol {
15 | public let type: RenderLayerType = RenderLayerType.colorOverlay
16 | public let id: RenderLayerId
17 | public let customIndex: Int = 0
18 | private let color: CIVector
19 | private var offset: Float = 0
20 | private let fragmentShader: CIColorKernel
21 |
22 | public convenience init(color: CGColor, offset: Float) throws {
23 | try self.init(id: RenderLayerId(), color: color, offset: offset)
24 | }
25 |
26 | public init(id: RenderLayerId, color: CGColor, offset: Float) throws {
27 | self.id = id
28 | let color: CIColor = CIColor(cgColor: color)
29 | self.color = CIVector(x: color.red, y: color.green, z: color.blue)
30 |
31 | let fragmentShaderPath: URL = AssetManager.Shader.colorOverlay.url
32 | let fragmentShaderString: String = try String(contentsOf: fragmentShaderPath, encoding: .utf8)
33 | guard let fragmentShader: CIColorKernel = CIColorKernel(source: fragmentShaderString) else { throw RenderLayerErrorType.setupError }
34 | self.fragmentShader = fragmentShader
35 | self.offset = offset
36 | }
37 |
38 | public func update(offset: Float) {
39 | self.offset = offset
40 | }
41 |
42 | public func dispose() {}
43 | }
44 | }
45 |
46 | public extension CCImageProcess.ColorOverlayLayer {
47 | func process(commandBuffer: MTLCommandBuffer, source: CCTexture, destination: inout CCTexture, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws {
48 | guard let image: CIImage = CIImage(mtlTexture: source.texture, options: nil) else { throw RenderLayerErrorType.renderingError }
49 | let colorSpace: CGColorSpace = image.colorSpace ?? CGColorSpaceCreateDeviceRGB()
50 | let outImage: CIImage = try self.processing(image: image, renderLayerCompositionInfo: &renderLayerCompositionInfo)
51 | MCCore.ciContext.render(outImage, to: destination.texture, commandBuffer: commandBuffer, bounds: outImage.extent, colorSpace: colorSpace)
52 | }
53 | }
54 |
55 | fileprivate extension CCImageProcess.ColorOverlayLayer {
56 | func processing(image: CIImage, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws -> CIImage {
57 | let arguments: [Any] = [
58 | image,
59 | CIVector(x: image.extent.width, y: image.extent.height),
60 | self.color,
61 | NSNumber(value: self.offset),
62 | ]
63 |
64 | guard let img: CIImage = self.fragmentShader.apply(extent: image.extent, arguments: arguments) else {
65 | throw RenderLayerErrorType.renderingError
66 | }
67 |
68 | return img
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCImageProcess/Layer/TransformLayer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TransformLayer.swift
3 | // CameraCore
4 | //
5 | // Created by 町田 秀行 on 2018/01/21.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import MetalCanvas
11 | import CoreImage
12 |
13 | public extension CCImageProcess {
14 | final class TransformLayer: RenderLayerProtocol {
15 | public let type: RenderLayerType = RenderLayerType.transformLayer
16 | public let id: RenderLayerId
17 | public let customIndex: Int = 0
18 | public let transform: CGAffineTransform
19 | public let backgroundColor: CGColor
20 |
21 | public convenience init(transform: CGAffineTransform, backgroundColor: CGColor) {
22 | self.init(id: RenderLayerId(), transform: transform, backgroundColor: backgroundColor)
23 | }
24 |
25 | public init(id: RenderLayerId, transform: CGAffineTransform, backgroundColor: CGColor) {
26 | self.id = id
27 | self.transform = transform
28 | self.backgroundColor = backgroundColor
29 | }
30 |
31 | public func dispose() {}
32 | }
33 | }
34 |
35 | public extension CCImageProcess.TransformLayer {
36 | func process(commandBuffer: MTLCommandBuffer, source: CCTexture, destination: inout CCTexture, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws {
37 | guard let image: CIImage = CIImage(mtlTexture: source.texture, options: nil) else { throw RenderLayerErrorType.renderingError }
38 | let colorSpace: CGColorSpace = image.colorSpace ?? CGColorSpaceCreateDeviceRGB()
39 | let outImage = try self.process(image: image,
40 | compositionTime: renderLayerCompositionInfo.compositionTime,
41 | timeRange: renderLayerCompositionInfo.timeRange,
42 | percentComplete: Float(renderLayerCompositionInfo.percentComplete),
43 | renderSize: renderLayerCompositionInfo.renderSize)
44 | MCCore.ciContext.render(outImage, to: destination.texture, commandBuffer: commandBuffer, bounds: outImage.extent, colorSpace: colorSpace)
45 | }
46 | }
47 |
48 | private extension CCImageProcess.TransformLayer {
49 | func process(image: CIImage, compositionTime: CMTime, timeRange: CMTimeRange, percentComplete: Float, renderSize: MCSize) throws -> CIImage {
50 | let transformImage: CIImage = image.transformed(by: self.transform)
51 | let croppingImage: CIImage = transformImage.cropped(to: CGRect(origin: CGPoint.zero, size: renderSize.toCGSize()))
52 | guard let result: CIFilter = CIFilter(name: Blendmode.alpha.CIFilterName) else { throw RenderLayerErrorType.renderingError }
53 | result.setValue(CIImage(color: CIColor(cgColor: self.backgroundColor)), forKey: kCIInputBackgroundImageKey)
54 | result.setValue(croppingImage, forKey: kCIInputImageKey)
55 | guard let croppingImage002: CIImage = result.outputImage?.cropped(to: CGRect(origin: CGPoint.zero, size: renderSize.toCGSize())) else { throw RenderLayerErrorType.renderingError }
56 | return croppingImage002
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCImageProcess/Protocol/RenderLayerCompositionInfoProtocol.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RenderLayerCompositionInfoProtocol.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2018/12/30.
6 | // Copyright © 2018 町田 秀行. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import MetalCanvas
11 | import Foundation
12 | import ARKit
13 |
14 | public struct RenderLayerCompositionInfoProperty {
15 | internal var compositionTime: CMTime
16 | internal var presentationTimeStamp: CMTime
17 | internal var timeRange: CMTimeRange
18 | internal var percentComplete: Double
19 | internal var renderSize: MCSize
20 | internal var metadataObjects: [AVMetadataObject]
21 | internal var pixelFormat: MTLPixelFormat
22 | internal var userInfo: [ String : Any]
23 | }
24 |
25 | public protocol RenderLayerCompositionInfoProtocol {
26 | var __property: RenderLayerCompositionInfoProperty { get set }
27 | var compositionTime: CMTime { get }
28 | var presentationTimeStamp: CMTime { get }
29 | var timeRange: CMTimeRange { get }
30 | var percentComplete: Double { get }
31 | var renderSize: MCSize { get }
32 | var metadataObjects: [AVMetadataObject] { get }
33 | var pixelFormat: MTLPixelFormat { get }
34 | var userInfo: [ String : Any] { get set }
35 | }
36 |
37 | extension RenderLayerCompositionInfoProtocol {
38 | public var compositionTime: CMTime { return self.__property.compositionTime }
39 | public var presentationTimeStamp: CMTime { return self.__property.presentationTimeStamp }
40 | public var timeRange: CMTimeRange { return self.__property.timeRange }
41 | public var percentComplete: Double { return self.__property.percentComplete }
42 | public var renderSize: MCSize { return self.__property.renderSize }
43 | public var metadataObjects: [AVMetadataObject] { return self.__property.metadataObjects }
44 | public var pixelFormat: MTLPixelFormat { return self.__property.pixelFormat }
45 | public var userInfo: [ String : Any] { get { return self.__property.userInfo } set { self.__property.userInfo = newValue } }
46 | }
47 |
48 | public class RenderLayerCompositionInfo: RenderLayerCompositionInfoProtocol {
49 | public enum Key: String {
50 | case depthData = "depthData"
51 | case videoCaptureData = "videoCaptureData"
52 | case arFrame = "arFrame"
53 | }
54 |
55 | public var __property: RenderLayerCompositionInfoProperty
56 | public init(
57 | compositionTime: CMTime,
58 | presentationTimeStamp: CMTime,
59 | timeRange: CMTimeRange,
60 | percentComplete: Double,
61 | renderSize: MCSize,
62 | metadataObjects: [AVMetadataObject],
63 | pixelFormat: MTLPixelFormat = .bgra8Unorm,
64 | userInfo: [ String : Any ]
65 | ) {
66 | self.__property = RenderLayerCompositionInfoProperty(
67 | compositionTime: compositionTime,
68 | presentationTimeStamp: presentationTimeStamp,
69 | timeRange: timeRange,
70 | percentComplete: percentComplete,
71 | renderSize: renderSize,
72 | metadataObjects: metadataObjects,
73 | pixelFormat: pixelFormat,
74 | userInfo: userInfo
75 | )
76 | }
77 | }
78 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCAudio/Microphone.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Microphone.swift
3 | //
4 | //
5 | // Created by hideyuki machida on 2022/02/08.
6 | //
7 |
8 | import Foundation
9 | import AVFoundation
10 |
11 | extension CCAudio {
12 | public class Microphone {
13 |
14 | // MARK: - CCComponentProtocol
15 | public let setup: CCAudio.Microphone.Setup = CCAudio.Microphone.Setup()
16 | public let trigger: CCAudio.Microphone.Trigger = CCAudio.Microphone.Trigger()
17 | public let pipe: CCAudio.Microphone.Pipe = CCAudio.Microphone.Pipe()
18 | public var debug: CCComponentDebug?
19 |
20 | public let engine: AVAudioEngine = AVAudioEngine()
21 |
22 | public init() {
23 | self.engine.inputNode.installTap(onBus: 0, bufferSize: 4096, format: nil) { [weak self] (audioPCMBuffer: AVAudioPCMBuffer, audioTime: AVAudioTime) in
24 | //self.engine.mainMixerNode.installTap(onBus: 0, bufferSize: 4096, format: nil) { [weak self] (audioPCMBuffer: AVAudioPCMBuffer, audioTime: AVAudioTime) in
25 | guard let buffer: CMSampleBuffer = CMSampleBuffer.create(audioPCMBuffer: audioPCMBuffer, audioTime: audioTime) else { return }
26 | self?.pipe.audioCaptureSampleBuffer = buffer
27 | }
28 |
29 | self.setup.audioEngine = self
30 | self.trigger.audioEngine = self
31 | self.pipe.audioEngine = self
32 | }
33 | }
34 | }
35 |
36 | fileprivate extension CCAudio.Microphone {
37 | func start() throws {
38 | try self.engine.start()
39 | }
40 |
41 | func stop() {
42 | self.engine.stop()
43 | }
44 |
45 | func dispose() {
46 | self.engine.stop()
47 | self.setup._dispose()
48 | self.trigger._dispose()
49 | self.pipe._dispose()
50 | }
51 | }
52 |
53 | public extension CCAudio.Microphone {
54 |
55 | // MARK: - Setup
56 | class Setup: CCComponentSetupProtocol {
57 | fileprivate var audioEngine: CCAudio.Microphone?
58 |
59 | fileprivate func _dispose() {
60 | self.audioEngine = nil
61 | }
62 | }
63 |
64 | // MARK: - Trigger
65 | class Trigger: CCComponentTriggerProtocol {
66 | fileprivate var audioEngine: CCAudio.Microphone?
67 |
68 | public func start() throws {
69 | try self.audioEngine?.start()
70 | }
71 |
72 | public func stop() {
73 | self.audioEngine?.stop()
74 | }
75 |
76 | public func dispose() {
77 | self.audioEngine?.dispose()
78 | }
79 |
80 | fileprivate func _dispose() {
81 | self.audioEngine = nil
82 | }
83 | }
84 |
85 | // MARK: - Pipe
86 | class Pipe: NSObject, CCComponentPipeProtocol {
87 |
88 | // MARK: - Queue
89 | fileprivate let completeQueue: DispatchQueue = DispatchQueue(label: "CameraCore.CCAudio.AudioEngine.completeQueue")
90 |
91 | fileprivate var audioEngine: CCAudio.Microphone?
92 |
93 | @Published public var audioCaptureSampleBuffer: CMSampleBuffer?
94 |
95 | fileprivate func _dispose() {
96 | self.audioEngine = nil
97 | }
98 | }
99 | }
100 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCAudio/AudioEngine.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioEngine.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/02/16.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import Foundation
11 |
12 | extension CCAudio {
13 | public class AudioEngine {
14 |
15 | // MARK: - CCComponentProtocol
16 | public let setup: CCAudio.AudioEngine.Setup = CCAudio.AudioEngine.Setup()
17 | public let trigger: CCAudio.AudioEngine.Trigger = CCAudio.AudioEngine.Trigger()
18 | public let pipe: CCAudio.AudioEngine.Pipe = CCAudio.AudioEngine.Pipe()
19 | public var debug: CCComponentDebug?
20 |
21 | public let engine: AVAudioEngine = AVAudioEngine()
22 |
23 | public init() {
24 | self.engine.inputNode.installTap(onBus: 0, bufferSize: 4096, format: nil) { [weak self] (audioPCMBuffer: AVAudioPCMBuffer, audioTime: AVAudioTime) in
25 | //self.engine.mainMixerNode.installTap(onBus: 0, bufferSize: 4096, format: nil) { [weak self] (audioPCMBuffer: AVAudioPCMBuffer, audioTime: AVAudioTime) in
26 | guard let buffer: CMSampleBuffer = CMSampleBuffer.create(audioPCMBuffer: audioPCMBuffer, audioTime: audioTime) else { return }
27 | self?.pipe.audioCaptureSampleBuffer = buffer
28 | }
29 |
30 | self.setup.audioEngine = self
31 | self.trigger.audioEngine = self
32 | self.pipe.audioEngine = self
33 | }
34 | }
35 | }
36 |
37 | fileprivate extension CCAudio.AudioEngine {
38 | func start() throws {
39 | try self.engine.start()
40 | }
41 |
42 | func stop() {
43 | self.engine.stop()
44 | }
45 |
46 | func dispose() {
47 | self.engine.stop()
48 | self.setup._dispose()
49 | self.trigger._dispose()
50 | self.pipe._dispose()
51 | }
52 | }
53 |
54 | public extension CCAudio.AudioEngine {
55 |
56 | // MARK: - Setup
57 | class Setup: CCComponentSetupProtocol {
58 | fileprivate var audioEngine: CCAudio.AudioEngine?
59 |
60 | fileprivate func _dispose() {
61 | self.audioEngine = nil
62 | }
63 | }
64 |
65 | // MARK: - Trigger
66 | class Trigger: CCComponentTriggerProtocol {
67 | fileprivate var audioEngine: CCAudio.AudioEngine?
68 |
69 | public func start() throws {
70 | try self.audioEngine?.start()
71 | }
72 |
73 | public func stop() {
74 | self.audioEngine?.stop()
75 | }
76 |
77 | public func dispose() {
78 | self.audioEngine?.dispose()
79 | }
80 |
81 | fileprivate func _dispose() {
82 | self.audioEngine = nil
83 | }
84 | }
85 |
86 | // MARK: - Pipe
87 | class Pipe: NSObject, CCComponentPipeProtocol {
88 |
89 | // MARK: - Queue
90 | fileprivate let completeQueue: DispatchQueue = DispatchQueue(label: "CameraCore.CCAudio.AudioEngine.completeQueue")
91 |
92 | fileprivate var audioEngine: CCAudio.AudioEngine?
93 |
94 | @Published public var audioCaptureSampleBuffer: CMSampleBuffer?
95 |
96 | fileprivate func _dispose() {
97 | self.audioEngine = nil
98 | }
99 | }
100 | }
101 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/ExampleVC/VideoCaptureView001ExampleVC.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VideoCaptureView001ExampleVC.swift
3 | // CameraCoreExample
4 | //
5 | // Created by hideyuki machida on 2019/12/31.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import CoreVideo
10 | import AVFoundation
11 | import CameraCore
12 | import iOS_DummyAVAssets
13 | import MetalCanvas
14 | import UIKit
15 | import ProcessLogger_Swift
16 |
17 | class VideoCaptureView001ExampleVC: UIViewController {
18 | @IBOutlet weak var recordingButton: UIButton!
19 |
20 | var videoCaptureProperty = CCCapture.VideoCapture.Property(
21 | devicePosition: AVCaptureDevice.Position.back,
22 | isAudioDataOutput: true,
23 | required: [
24 | .captureSize(Settings.PresetSize.p1280x720),
25 | .frameRate(Settings.PresetFrameRate.fps60),
26 | ],
27 | option: [
28 | .colorSpace(AVCaptureColorSpace.P3_D65),
29 | ]
30 | )
31 |
32 | var camera: CCCapture.Camera?
33 | var videoRecorder: CCRecorder.VideoRecorder?
34 |
35 | @IBOutlet weak var drawView: CCView!
36 |
37 | deinit {
38 | self.camera?.trigger.dispose()
39 | self.drawView.trigger.dispose()
40 | self.videoRecorder?.trigger.dispose()
41 | CameraCore.flush()
42 | ProcessLogger.deinitLog(self)
43 | }
44 |
45 | override func viewDidLoad() {
46 | super.viewDidLoad()
47 |
48 | do {
49 | let camera: CCCapture.Camera = try CCCapture.Camera(property: self.videoCaptureProperty)
50 | try camera --> self.drawView
51 | let videoRecorder: CCRecorder.VideoRecorder = try CCRecorder.VideoRecorder()
52 | try camera --> videoRecorder
53 | camera.trigger.start()
54 | self.camera = camera
55 | self.videoRecorder = videoRecorder
56 | } catch {
57 |
58 | }
59 |
60 | }
61 |
62 | @IBAction func recordingTapAction(_ sender: Any) {
63 |
64 | if self.videoRecorder?.isRecording == true {
65 | self.videoRecorder?.trigger.stop()
66 | self.recordingButton.setTitle("撮影開始", for: UIControl.State.normal)
67 | } else {
68 | let filePath: String = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] + "/" + "recordingVideo" + NSUUID().uuidString + ".mp4"
69 | let size: MCSize = Settings.PresetSize.p1280x720.size(orientation: AVCaptureVideoOrientation.portrait)
70 | do {
71 | let parameter: CCRecorder.CaptureWriter.Parameter = CCRecorder.CaptureWriter.Parameter(
72 | outputFilePath: URL(fileURLWithPath: filePath),
73 | presetFrame: Settings.PresetSize.p1280x720,
74 | frameRate: 30,
75 | devicePosition: AVCaptureDevice.Position.back,
76 | croppingRect: CGRect(origin: CGPoint(), size: size.toCGSize()),
77 | fileType: AVFileType.mp4,
78 | videoCodecType: Settings.VideoCodec.hevc
79 | )
80 | try self.videoRecorder?.setup.setup(parameter: parameter)
81 | self.videoRecorder?.trigger.start()
82 | self.recordingButton.setTitle("撮影ストップ", for: UIControl.State.normal)
83 | } catch {}
84 | }
85 |
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
/Sources/CameraCore/Utils/CodableURL.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CodableURL.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2018/08/27.
6 | // Copyright © 2018 町田 秀行. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 |
11 | public enum CodableURLType: Int {
12 | case userFile = 0
13 | case bundleFile = 1
14 | case other = 2
15 |
16 | public func path(url: URL) -> String {
17 | switch self {
18 | case .userFile:
19 | let documentsPath: String = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
20 | let range: NSRange = (documentsPath as NSString).range(of: "Documents")
21 | let str: String = (url.relativePath as NSString).replacingCharacters(in: NSRange.init(location: 0, length: range.location - 1), with: "")
22 | return str
23 | case .bundleFile:
24 | let bundlePath: String = Bundle.main.bundlePath
25 | let range: NSRange = (bundlePath as NSString).range(of: Bundle.main.bundlePath)
26 | return (url.relativePath as NSString).replacingCharacters(in: range, with: "")
27 | case .other:
28 | return url.absoluteString
29 | }
30 | }
31 | public func url(path: String) -> URL {
32 | switch self {
33 | case .userFile:
34 | var documentsPath: String = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
35 | documentsPath = (documentsPath as NSString).replacingOccurrences(of: "Documents", with: "") as String
36 | return URL.init(fileURLWithPath: documentsPath + path)
37 | case .bundleFile:
38 | let bundlePath: String = Bundle.main.bundlePath
39 | return URL.init(fileURLWithPath: bundlePath + path)
40 | case .other:
41 | return URL.init(string: path)!
42 | }
43 | }
44 | }
45 |
46 | public struct CodableURL {
47 | public var type: CodableURLType {
48 | guard self.url.absoluteString.hasPrefix("file://") else { return .other}
49 | if self.url.relativePath.hasPrefix(Bundle.main.bundlePath) {
50 | return .bundleFile
51 | } else if self.url.relativePath.hasPrefix("/var/mobile/Media/") {
52 | return .other
53 | } else {
54 | return .userFile
55 | }
56 | }
57 |
58 | public var url: URL
59 | public init(url: URL) {
60 | self.url = url
61 | }
62 | }
63 |
64 |
65 | extension CodableURL {
66 | enum CodingKeys: String, CodingKey {
67 | case type
68 | case url
69 | }
70 | }
71 |
72 | extension CodableURL: Encodable {
73 | public func encode(to encoder: Encoder) throws {
74 | var container = encoder.container(keyedBy: CodingKeys.self)
75 | let type: CodableURLType = self.type
76 | try container.encode(type.rawValue, forKey: .type)
77 | switch type {
78 | case .userFile:
79 | try container.encode(type.path(url: self.url), forKey: .url)
80 | case .bundleFile:
81 | try container.encode(type.path(url: self.url), forKey: .url)
82 | case .other:
83 | try container.encode(self.url.absoluteString, forKey: .url)
84 | }
85 | }
86 | }
87 |
88 | extension CodableURL: Decodable {
89 | public init(from decoder: Decoder) throws {
90 | let values = try decoder.container(keyedBy: CodingKeys.self)
91 | let type: CodableURLType = CodableURLType.init(rawValue: try values.decode(Int.self, forKey: .type))!
92 | let path: String = try values.decode(String.self, forKey: .url)
93 | self.url = type.url(path: path)
94 | }
95 | }
96 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/UseCoreMLExample/CoreMLDeeplabV3Layer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CoreMLDeeplabV3Layer.swift
3 | // CameraCoreExample
4 | //
5 | // Created by hideyuki machida on 2019/10/06.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 | /*
9 | import UIKit
10 | import CoreMLHelpers
11 | import MetalCanvas
12 | import CameraCore
13 | import Vision
14 |
15 | @available(iOS 12.0, *)
16 | final public class CoreMLDeeplabV3Layer: RenderLayerProtocol {
17 | public let type: RenderLayerType = RenderLayerType.custom
18 | public let id: RenderLayerId
19 | public let customIndex: Int = 0
20 | public var request: VNCoreMLRequest?
21 | public var items: [ VNRecognizedObjectObservation ] = []
22 | public var renderLayerCompositionInfo: RenderLayerCompositionInfo?
23 | public var depthImage: CIImage?
24 |
25 | fileprivate var isDetecting: Bool = false
26 |
27 | public init() throws {
28 | self.id = RenderLayerId()
29 | let model = try VNCoreMLModel(for: DeepLabV3().model)
30 | self.request = VNCoreMLRequest(model: model, completionHandler: { [weak self] (request, error) in
31 | self?.processClassifications(for: request, error: error)
32 | })
33 | self.request?.preferBackgroundProcessing = true
34 | self.request?.imageCropAndScaleOption = .centerCrop
35 | }
36 |
37 | /// Clears the cache
38 | public func dispose() {
39 | self.request = nil
40 | }
41 | }
42 |
43 | @available(iOS 12.0, *)
44 | extension CoreMLDeeplabV3Layer: CVPixelBufferRenderLayerProtocol {
45 | public func processing(commandBuffer: inout MTLCommandBuffer, pixelBuffer: inout CVPixelBuffer, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws {
46 |
47 | self.drawDepthImage(pixelBuffer: pixelBuffer)
48 |
49 | guard !self.isDetecting else { return }
50 | self.isDetecting = true
51 | let pixelBuffer: CVPixelBuffer = pixelBuffer
52 | self.renderLayerCompositionInfo = renderLayerCompositionInfo
53 | DispatchQueue.global(qos: .userInitiated).async { [weak self] in
54 | guard
55 | let self = self,
56 | let request = self.request
57 | else { return }
58 |
59 | let handler = VNImageRequestHandler.init(cvPixelBuffer: pixelBuffer, options: [:])
60 | do {
61 | try handler.perform([request])
62 | } catch {
63 | print("Failed to perform classification.\n\(error.localizedDescription)")
64 | }
65 | }
66 |
67 | }
68 | }
69 |
70 | @available(iOS 12.0, *)
71 | extension CoreMLDeeplabV3Layer {
72 | func processClassifications(for request: VNRequest, error: Error?) {
73 | self.isDetecting = false
74 | guard let result: VNCoreMLFeatureValueObservation = request.results?.first as? VNCoreMLFeatureValueObservation else { return }
75 | guard result.featureValue.type == .multiArray else { return }
76 | guard let multiArray: MLMultiArray = result.featureValue.multiArrayValue else { return }
77 | let cgImage: CGImage = multiArray.cgImage(min: 0.83, max: 2.5, channel: 4, axes: nil)!
78 | let depthImage: CIImage = CIImage.init(cgImage: cgImage).transformed(by: CGAffineTransform.init(scaleX: 1.0, y: 1.0))
79 | self.depthImage = depthImage
80 | }
81 | }
82 |
83 | @available(iOS 12.0, *)
84 | extension CoreMLDeeplabV3Layer {
85 | func drawDepthImage(pixelBuffer: CVPixelBuffer) {
86 | guard let depthImage: CIImage = self.depthImage else { return }
87 | MCCore.ciContext.render(depthImage, to: pixelBuffer, bounds: depthImage.extent, colorSpace: depthImage.colorSpace)
88 | }
89 | }
90 | */
91 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample.xcodeproj/xcshareddata/xcschemes/CameraCoreExample.xcscheme:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/Debugger/DebugView.xib:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/Sources/CameraCore/CCImageProcess/Protocol/RenderLayerProtocol.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RenderLayerProtocol.swift
3 | // CameraCore
4 | //
5 | // Created by machidahideyuki on 2018/01/07.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import CoreImage
11 | import Metal
12 | import MetalCanvas
13 |
14 | ///////////////////////////////////////////////////////////////////////////////////////////////////
15 |
16 | // MARK: - RenderLayerErrorType
17 |
18 | public enum RenderLayerErrorType: Error {
19 | case decodeError
20 | case setupError
21 | case renderingError
22 | }
23 |
24 | ///////////////////////////////////////////////////////////////////////////////////////////////////
25 |
26 | // MARK: - ImageProcessing
27 |
28 | // MARK: Render layer types
29 |
30 | public enum RenderLayerType: Int, Codable {
31 | case blank = 0
32 | case transformLayer = 1
33 | case image = 2
34 | case lut = 3
35 | case sequenceImage = 4
36 | case mask = 5
37 | case colorOverlay = 6
38 |
39 | //case imageBlend = 100
40 |
41 | case custom = 9999
42 |
43 | public var type: RenderLayerProtocol.Type? {
44 | switch self {
45 | case .blank: return CCImageProcess.BlankLayer.self
46 | case .transformLayer: return CCImageProcess.TransformLayer.self
47 | case .image: return CCImageProcess.ImageLayer.self
48 | case .lut: return CCImageProcess.LutLayer.self
49 | case .sequenceImage: return CCImageProcess.SequenceImageLayer.self
50 | case .mask: return CCImageProcess.MaskLayer.self
51 | case .colorOverlay: return CCImageProcess.ColorOverlayLayer.self
52 | //case .imageBlend: return CCImageProcessing.ImageBlendLayer.self
53 | case .custom: return nil
54 | }
55 | }
56 | }
57 |
58 | // MARK: Render blend modes
59 |
60 | public enum Blendmode: String, Codable {
61 | case alpha
62 | case addition // additive
63 | case multiplication // multiply
64 | case screen // screen
65 | case softLight // soft light
66 | case hardLight // hard light
67 | case overlay // overlay
68 |
69 | public var CIFilterName: String {
70 | switch self {
71 | case .alpha: return "CISourceAtopCompositing"
72 | case .addition: return "CIAdditionCompositing"
73 | case .multiplication: return "CIMultiplyCompositing"
74 | case .screen: return "CIScreenBlendMode"
75 | case .softLight: return "CISoftLightBlendMode"
76 | case .hardLight: return "CIHardLightBlendMode"
77 | case .overlay: return "CIOverlayBlendMode"
78 | }
79 | }
80 | }
81 |
82 | // MARK: Render layer protocol
83 |
84 | public protocol RenderLayerProtocol {
85 | var id: CCImageProcess.RenderLayerId { get }
86 | var type: RenderLayerType { get }
87 | var customIndex: Int { get }
88 | mutating func dispose()
89 | mutating func process(commandBuffer: MTLCommandBuffer, source: CCTexture, destination: inout CCTexture, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws
90 | }
91 |
92 | public extension RenderLayerProtocol {
93 | func blitEncoder(commandBuffer: MTLCommandBuffer, source: CCTexture, destination: inout CCTexture) throws {
94 | guard source.size == destination.size else { throw RenderLayerErrorType.renderingError }
95 | let blitEncoder: MTLBlitCommandEncoder? = commandBuffer.makeBlitCommandEncoder()
96 | blitEncoder?.copy(from: source.texture,
97 | sourceSlice: 0,
98 | sourceLevel: 0,
99 | sourceOrigin: MTLOrigin(x: 0, y: 0, z: 0),
100 | sourceSize: MTLSizeMake(source.texture.width, source.texture.height, source.texture.depth),
101 | to: destination.texture,
102 | destinationSlice: 0,
103 | destinationLevel: 0,
104 | destinationOrigin: MTLOrigin(x: 0, y: 0, z: 0))
105 | blitEncoder?.endEncoding()
106 | }
107 | }
108 |
--------------------------------------------------------------------------------
/Sources/CameraCore/Settings.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Settings.swift
3 | // CameraCore
4 | //
5 | // Created by machidahideyuki on 2018/01/08.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import MetalCanvas
11 | import UIKit
12 |
13 | public class Settings {
14 | public enum PresetSize: Int, Codable {
15 | // case p640x480 = 0 (not currently used)
16 | case p960x540 = 1
17 | case p1280x720 = 2
18 | case p1920x1080 = 3
19 |
20 | public var aVCaptureSessionPreset: AVCaptureSession.Preset {
21 | switch self {
22 | // case .p640x480: return AVCaptureSession.Preset.vga640x480
23 | case .p960x540: return AVCaptureSession.Preset.iFrame960x540
24 | case .p1280x720: return AVCaptureSession.Preset.iFrame1280x720
25 | case .p1920x1080: return AVCaptureSession.Preset.hd1920x1080
26 | }
27 | }
28 |
29 | public var aVAssetExportSessionPreset: String {
30 | switch self {
31 | // case .p640x480: return AVAssetExportPreset640x480
32 | case .p960x540: return AVAssetExportPreset960x540
33 | case .p1280x720: return AVAssetExportPreset1280x720
34 | case .p1920x1080: return AVAssetExportPreset1920x1080
35 | }
36 | }
37 |
38 | public var aVAssetExportSessionHEVCPreset: String {
39 | switch self {
40 | // case .p640x480: return AVAssetExportPreset640x480
41 | case .p960x540: return AVAssetExportPreset960x540
42 | case .p1280x720: return AVAssetExportPreset1280x720
43 | case .p1920x1080: return AVAssetExportPresetHEVC1920x1080
44 | }
45 | }
46 |
47 | public func size(orientation: AVCaptureVideoOrientation) -> MCSize {
48 | switch orientation {
49 | case .portrait, .portraitUpsideDown: return self.portraitSize
50 | case .landscapeLeft, .landscapeRight: return self.landscapeSize
51 | @unknown default: return self.portraitSize
52 | }
53 | }
54 |
55 | public func size(orientation: UIInterfaceOrientation) -> MCSize {
56 | let currentOrientation: AVCaptureVideoOrientation = orientation.toAVCaptureVideoOrientation ?? Configuration.shared.defaultAVCaptureVideoOrientation
57 | return size(orientation: currentOrientation)
58 | }
59 |
60 | fileprivate var portraitSize: MCSize {
61 | switch self {
62 | // case .p640x480: return CGSize(width: 480, height: 640)
63 | case .p960x540: return MCSize(w: 540, h: 960)
64 | case .p1280x720: return MCSize(w: 720, h: 1280)
65 | case .p1920x1080: return MCSize(w: 1080, h: 1920)
66 | }
67 | }
68 |
69 | fileprivate var landscapeSize: MCSize {
70 | switch self {
71 | // case .p640x480: return CGSize(width: 640, height: 480)
72 | case .p960x540: return MCSize(w: 960, h: 540)
73 | case .p1280x720: return MCSize(w: 1280, h: 720)
74 | case .p1920x1080: return MCSize(w: 1920, h: 1080)
75 | }
76 | }
77 | }
78 |
79 | public enum PresetFrameRate: Int32 {
80 | case fps15 = 15
81 | case fps24 = 24
82 | case fps30 = 30
83 | case fps60 = 60
84 | case fps90 = 90
85 | case fps120 = 120
86 | case fps240 = 240
87 | }
88 |
89 | public enum VideoCodec {
90 | case h264
91 | case hevc
92 | /* Not currently used:
93 | case proRes422
94 | case proRes4444
95 | case jpg
96 | */
97 | public var val: AVVideoCodecType {
98 | switch self {
99 | case .h264: return AVVideoCodecType.h264
100 | case .hevc: return MetalCanvas.MCTools.shared.hasHEVCHardwareEncoder ? AVVideoCodecType.hevc : AVVideoCodecType.h264
101 | }
102 | }
103 | }
104 |
105 | public enum RenderType: Int, Codable {
106 | case openGL = 0
107 | case metal = 1
108 | }
109 | }
110 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CustomOperator.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CustomOperator.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/01/01.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import Foundation
11 | import MetalCanvas
12 |
13 |
14 | // MARK: - infix operator
15 |
16 | infix operator -->: AdditionPrecedence
17 |
18 |
19 | // MARK: - CCImageProcess.ImageProcess
20 |
21 | @discardableResult
22 | public func --> (camera: CCCapture.Camera, imageProcess: CCImageProcess.ImageProcess) throws -> CCImageProcess.ImageProcess {
23 | return try imageProcess.pipe.input(camera: camera)
24 | }
25 |
26 | @discardableResult
27 | public func --> (player: CCPlayer, imageProcess: CCImageProcess.ImageProcess) throws -> CCImageProcess.ImageProcess {
28 | return try imageProcess.pipe.input(player: player)
29 | }
30 |
31 | @discardableResult
32 | @available(iOS 13.0, *)
33 | public func --> (camera: CCARCapture.cARCamera, imageProcess: CCImageProcess.ImageProcess) throws -> CCImageProcess.ImageProcess {
34 | return try imageProcess.pipe.input(camera: camera)
35 | }
36 |
37 | @discardableResult
38 | public func --> (inference: CCVision.Inference, imageProcess: CCImageProcess.ImageProcess) throws -> CCImageProcess.ImageProcess {
39 | return try imageProcess.pipe.input(inference: inference)
40 | }
41 |
42 |
43 | // MARK: - CCVision.Inference
44 |
45 | @discardableResult
46 | public func --> (camera: CCCapture.Camera, imageRecognition: CCVision.Inference) throws -> CCVision.Inference {
47 | return try imageRecognition.pipe.input(camera: camera)
48 | }
49 |
50 | @discardableResult
51 | public func --> (player: CCPlayer, inference: CCVision.Inference) throws -> CCVision.Inference {
52 | return try inference.pipe.input(player: player)
53 | }
54 |
55 |
56 | // MARK: - CCView
57 |
58 | public func --> (camera: CCCapture.Camera, view: CCView) throws {
59 | try view.pipe.input(camera: camera)
60 | }
61 |
62 | public func --> (camera: CCARCapture.cARCamera, view: CCView) throws {
63 | try view.pipe.input(camera: camera)
64 | }
65 |
66 | public func --> (imageProcess: CCImageProcess.ImageProcess, view: CCView) throws {
67 | try view.pipe.input(imageProcess: imageProcess)
68 | }
69 |
70 | public func --> (player: CCPlayer, view: CCView) throws {
71 | try view.pipe.input(player: player)
72 | }
73 |
74 |
75 | // MARK: - CCRecorder.VideoRecorder
76 |
77 | public func --> (camera: CCCapture.Camera, videoRecorder: CCRecorder.VideoRecorder) throws {
78 | try videoRecorder.pipe.input(camera: camera)
79 | }
80 |
81 | public func --> (imageProcess: CCImageProcess.ImageProcess, videoRecorder: CCRecorder.VideoRecorder) throws {
82 | try videoRecorder.pipe.input(imageProcess: imageProcess)
83 | }
84 |
85 | public func --> (audioEngine: CCAudio.AudioEngine, videoRecorder: CCRecorder.VideoRecorder) throws {
86 | try videoRecorder.pipe.input(audioEngine: audioEngine)
87 | }
88 |
89 | public func --> (microphone: CCAudio.Microphone, videoRecorder: CCRecorder.VideoRecorder) throws {
90 | try videoRecorder.pipe.input(microphone: microphone)
91 | }
92 |
93 |
94 |
95 |
96 | public func --> (audioEngine: CCAudio.AudioEngine, audioRecorder: CCRecorder.AudioRecorder) throws {
97 | try audioRecorder.pipe(audioEngine: audioEngine)
98 | }
99 |
100 |
101 |
102 |
103 |
104 | /*
105 | @discardableResult
106 | public func --> (imageRecognition: CCVision.ImageRecognition, view: CCView) throws -> CCView {
107 | return try view.pipe(imageRecognition: imageRecognition)
108 | }
109 | */
110 |
111 |
112 |
113 |
114 | @discardableResult
115 | public func --> (audioEngine: CCAudio.AudioEngine, audioPlayer: CCAudio.AudioPlayer) throws -> CCAudio.AudioPlayer {
116 | var audioEngine: CCAudio.AudioEngine = audioEngine
117 | return try audioPlayer.pipe.input(audioEngine: &audioEngine)
118 | }
119 |
120 | @discardableResult
121 | public func --> (audioEngine: CCAudio.AudioEngine, audioMic: CCAudio.Mic) throws -> CCAudio.Mic {
122 | var audioEngine: AVAudioEngine = audioEngine.engine
123 | return try audioMic.pipe(audioEngine: &audioEngine)
124 | }
125 |
126 |
127 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # CameraCore
2 |
3 | [](https://developer.apple.com/swift/)
4 | [](https://developer.apple.com/swift/)
5 | [](https://developer.apple.com/swift/)
6 |
7 |
8 | ## Overview
9 | 
10 | This framework aims to make video and audio usable through a minimal number of steps and interfaces.
11 | 
12 | With a small amount of code you can capture, play back, edit, and encode video.
13 |
14 |
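The components in `Sources/CameraCore` are wired together with the `-->` operator (see `CustomOperator.swift`). A minimal capture sketch, adapted from `QRCodeCaptureExampleVC.swift` in the Example project; `drawView` is assumed to be a `CCView` outlet, and the calls are assumed to run inside a `do`/`catch`:

```swift
import AVFoundation
import CameraCore

// Describe the capture session.
let property = CCCapture.VideoCapture.Property(
    devicePosition: AVCaptureDevice.Position.back,
    isAudioDataOutput: false,
    required: [.captureSize(Settings.PresetSize.p1280x720)],
    option: []
)

// Build the components and wire them with the `-->` operator.
let camera = try CCCapture.Camera(property: property)
let imageProcess = try CCImageProcess.ImageProcess()
try camera --> imageProcess --> drawView   // camera → image processing → on-screen CCView

camera.trigger.start()
```
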
15 | #### Reference
16 | 
17 | * [AVFoundation Programming Guide](https://developer.apple.com/jp/documentation/AVFoundationPG.pdf)
18 | 
19 | 
20 | #### Dependencies
21 | CameraCore depends on [MetalCanvas](https://github.com/Hideyuki-Machida/MetalCanvas).
22 |
23 |
24 |
25 |
26 | ## Camera startup & video capture
27 | ### MetalVideoCaptureView(MTKView)
28 |
29 | Class: [MetalVideoCaptureView.swift](https://github.com/Hideyuki-Machida/CameraCore/blob/master/CameraCore/Renderer/VideoCapture/MetalVideoCaptureView.swift)
30 |
31 | Protocol: [VideoCaptureViewProtocol.swift](https://github.com/Hideyuki-Machida/CameraCore/blob/master/CameraCore/Renderer/VideoCapture/VideoCaptureViewProtocol.swift)
32 |
33 | Example: [MetalVideoCaptureViewExampleVC.swift](https://github.com/Hideyuki-Machida/CameraCore/blob/master/Example/CameraCoreExample/MetalVideoCaptureViewExampleVC.swift)
34 |
35 |
36 | ## Set CompositionData and play video
37 | ### MetalVideoPlaybackView(MTKView)
38 |
39 | Class: [MetalVideoPlaybackView.swift](https://github.com/Hideyuki-Machida/CameraCore/blob/master/CameraCore/Renderer/CompositionAVPlayer/MetalVideoPlaybackView.swift)
40 |
41 | Protocol: [CompositionAVPlayerProtocol.swift](https://github.com/Hideyuki-Machida/CameraCore/blob/master/CameraCore/Renderer/CompositionAVPlayer/CompositionAVPlayerProtocol.swift)
42 |
43 | Example: [MetalVideoPlaybackViewExampleVC.swift](https://github.com/Hideyuki-Machida/CameraCore/blob/master/Example/CameraCoreExample/MetalVideoPlaybackViewExampleVC.swift)
44 |
45 |
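The current sources also provide a `CCPlayer` component that pipes straight into a `CCView`. A minimal sketch, adapted from `PlayerExample001VC.swift` in the Example project (assumes a `CCView` outlet named `drawView`, a playable `url`, and a throwing context):

```swift
import Foundation
import CameraCore

let player = CCPlayer()
try player --> drawView              // route decoded frames into the CCView
player.setup.update(url: url)        // set or swap the playback source (file or HLS URL)
player.trigger.play()

// Progress updates arrive through the event pipe.
player.event.outProgress.bind { (progress: TimeInterval) in
    print(progress)
}
```
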
46 | ## Set CompositionData and play video & audio
47 | ### CompositionAVPlayer
48 |
49 | Class: [MetalCompositionAVPlayer.swift](https://github.com/Hideyuki-Machida/CameraCore/blob/master/CameraCore/Renderer/CompositionAVPlayer/MetalCompositionAVPlayer.swift)
50 |
51 | Protocol: [CompositionAVPlayerProtocol.swift](https://github.com/Hideyuki-Machida/CameraCore/blob/master/CameraCore/Renderer/CompositionAVPlayer/CompositionAVPlayerProtocol.swift)
52 |
53 | Example: [CompositionAVPlayerExampleVC.swift](https://github.com/Hideyuki-Machida/CameraCore/blob/master/Example/CameraCoreExample/CompositionAVPlayerExampleVC.swift)
54 |
55 |
56 | ## ImageProcessing
57 | ### RenderLayer
58 |
59 | A layer used when you want to apply image processing to each video frame (think of a Photoshop adjustment layer).
60 |
61 | Protocol: [RenderLayerProtocol.swift](https://github.com/Hideyuki-Machida/CameraCore/blob/master/CameraCore/ImageProcessing/RenderLayerProtocol.swift)
62 |
63 | Example: [RenderLayerExampleVC.swift](https://github.com/Hideyuki-Machida/CameraCore/blob/master/Example/CameraCoreExample/RenderLayerExampleVC.swift)
64 |
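A custom layer conforms to `RenderLayerProtocol` and draws into the destination texture each frame. Below is a sketch modeled on `QRCodeLayer` in the Example sources; the layer name is hypothetical, and the full requirements are in `Sources/CameraCore/CCImageProcess/Protocol/RenderLayerProtocol.swift`:

```swift
import Metal
import CameraCore

final public class PassThroughLayer: RenderLayerProtocol {
    public let type: RenderLayerType = RenderLayerType.custom
    public let id: CCImageProcess.RenderLayerId = CCImageProcess.RenderLayerId()
    public var customIndex: Int = 0

    public init() {}

    /// Clear any cached resources.
    public func dispose() {}

    public func process(commandBuffer: MTLCommandBuffer, source: CCTexture, destination: inout CCTexture, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws {
        // Per-frame drawing goes here. QRCodeLayer, for example, builds an MCCanvas
        // (MetalCanvas) over the destination pixel buffer and draws primitives into it.
    }
}

// Attach the layer to an image-process component:
// imageProcess.renderLayers.value = [PassThroughLayer()]
```
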
65 |
66 | ## Encode & save a composited video
67 | ### VideoEncoder
68 |
69 | Protocol: [VideoBitRateEncoder.swift](https://github.com/Hideyuki-Machida/CameraCore/blob/master/CameraCore/Encoder/VideoBitRateEncoder.swift)
70 |
71 | Example: [VideoBitRateEncoderExampleVC.swift](https://github.com/Hideyuki-Machida/CameraCore/blob/master/Example/CameraCoreExample/VideoBitRateEncoderExampleVC.swift)
72 |
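For recording a live pipeline, the current sources expose `CCRecorder.VideoRecorder`. A sketch adapted from `AudioExampleVC.swift` in the Example project (`camera` is assumed to be a running `CCCapture.Camera`, and the calls run in a throwing context):

```swift
import AVFoundation
import UIKit
import CameraCore

let videoRecorder = try CCRecorder.VideoRecorder()
try camera --> videoRecorder   // feed captured frames into the recorder

let filePath: String = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] + "/recordingVideo" + NSUUID().uuidString + ".mp4"
let parameter = CCRecorder.CaptureWriter.Parameter(
    outputFilePath: URL(fileURLWithPath: filePath),
    presetFrame: Settings.PresetSize.p1280x720,
    frameRate: 30,
    devicePosition: AVCaptureDevice.Position.back,
    croppingRect: CGRect(origin: CGPoint(), size: Settings.PresetSize.p1280x720.size(orientation: UIInterfaceOrientation.portrait).toCGSize()),
    fileType: AVFileType.mp4,
    videoCodecType: Settings.VideoCodec.hevc
)

try videoRecorder.setup.setup(parameter: parameter)
videoRecorder.trigger.start()    // begin writing
// ...
videoRecorder.trigger.stop()     // finish the file
```
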
73 |
74 | ## CompositionData
75 |
76 | The basic data model of CameraCore.
77 |
78 | ```
79 | CompositionData
80 | CompositionTrackProtocol (CompositionVideoTrack, CompositionAudioTrack)
81 | CompositionAssetProtocol (CompositionVideoAsset, CompositionAudioAsset)
82 | ```
83 |
84 | |Data|Track|Asset|
85 | |:---|:---|:---|
86 | |CompositionData|CompositionVideoTrack {n items}|CompositionVideoAsset {n items}|
87 | ||CompositionAudioTrack {n items}|CompositionAudioAsset {n items}|
88 |
89 | CompositionData, which holds the parameters of an edit,
90 | is made up of multiple Tracks, and each Track of multiple Assets.
91 | 
92 | * As shown in the figure below, it is handled like a timeline in video-editing software.
93 | * A single Track can hold multiple Assets.
94 | * To place an Asset on a Track, specify the following:
95 | * atTime: the Asset's start time within the Track
96 | * TrimTimeRange: the playback range of the Asset
97 |
98 | 
99 | Example: [CompositionAVPlayerExampleVC.swift](https://github.com/Hideyuki-Machida/CameraCore/blob/master/Example/CameraCoreExample/CompositionAVPlayerExampleVC.swift)
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/ExampleVC/VideoCaptureView003ExampleVC.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VideoCaptureView003ExampleVC.swift
3 | // CameraCoreExample
4 | //
5 | // Created by hideyuki machida on 2019/11/04.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import AVFoundation
11 | import MetalCanvas
12 | import CameraCore
13 | import iOS_DummyAVAssets
14 |
15 | class VideoCaptureView003ExampleVC: UIViewController {
16 |
17 | @IBOutlet weak var videoCaptureView: CameraCore.VideoCaptureView!
18 | @IBOutlet weak var recordingButton: UIButton!
19 |
20 | var lutLayer: LutLayer!
21 |
22 | var videoCaputurePropertys = CCRenderer.VideoCapture.Propertys.init(
23 | devicePosition: AVCaptureDevice.Position.back,
24 | isAudioDataOutput: true,
25 | required: [
26 | .captureSize(Settings.PresetSize.p960x540),
27 | .frameRate(Settings.PresetFrameRate.fps60),
28 | .isDepthDataOut(false)
29 | ],
30 | option: [
31 | .colorSpace(AVCaptureColorSpace.P3_D65)
32 | ]
33 | )
34 |
35 |
36 | deinit {
37 | self.videoCaptureView.pause()
38 | self.videoCaptureView.dispose()
39 | MCDebug.deinitLog(self)
40 | }
41 |
42 | override func viewDidLoad() {
43 | super.viewDidLoad()
44 |
45 | do {
46 | let faceDetactionLayer: FaceDetactionLayer = try FaceDetactionLayer.init(renderSize: Settings.PresetSize.p1280x720.size())
47 | self.videoCaptureView.renderLayers = [faceDetactionLayer]
48 | try self.videoCaptureView.setup(self.videoCaputurePropertys)
49 | } catch {
50 | MCDebug.errorLog("videoCaptureView: Setting Error")
51 | }
52 | }
53 |
54 | override func viewWillAppear(_ animated: Bool) {
55 | super.viewWillAppear(animated)
56 | self.videoCaptureView.play()
57 | }
58 |
59 | override func viewWillDisappear(_ animated: Bool) {
60 | super.viewWillDisappear(animated)
61 | self.videoCaptureView.pause()
62 | self.videoCaptureView.dispose()
63 | }
64 |
65 | override func didReceiveMemoryWarning() {
66 | super.didReceiveMemoryWarning()
67 | }
68 | }
69 |
70 |
71 | final public class FaceDetactionLayer: RenderLayerProtocol {
72 | public let type: RenderLayerType = RenderLayerType.custom
73 | public let id: RenderLayerId
74 | public var customIndex: Int = 0
75 | fileprivate let faceDetector: MCVision.Detection.Face = MCVision.Detection.Face()
76 | fileprivate var faces: [MCVision.Detection.Face.Item] = []
77 | fileprivate var destinationTexture: MCTexture
78 |
79 | public init(renderSize: CGSize) throws {
80 | self.id = RenderLayerId()
81 | self.destinationTexture = try MCTexture.init(renderSize: renderSize)
82 | }
83 |
84 |     /// Clear the cache
85 | public func dispose() {
86 | }
87 | }
88 |
89 | extension FaceDetactionLayer: CVPixelBufferRenderLayerProtocol {
90 | public func process(commandBuffer: inout MTLCommandBuffer, pixelBuffer: inout CVPixelBuffer, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws {
91 | self.faces = try self.faceDetector.detection(pixelBuffer: &pixelBuffer, renderSize: renderLayerCompositionInfo.renderSize) { [weak self] ( faces: [MCVision.Detection.Face.Item] ) in
92 | //print(faces)
93 | //self?.faces = faces
94 | }
95 |
96 | var drawItems: [MCPrimitiveTypeProtocol] = []
97 | for face in self.faces {
98 | for point in face.allPoints {
99 | let p: MCPoint = MCPoint.init(
100 | position: SIMD3.init(x: Float(point.x), y: Float(point.y), z: 0),
101 | color: MCColor.init(hex: "0x00FF00"), size: 5.0
102 | )
103 | drawItems.append(p)
104 | }
105 | }
106 |
107 | guard drawItems.count >= 1 else { return }
108 | var t: MCTexture = try MCTexture.init(pixelBuffer: &pixelBuffer, planeIndex: 0)
109 | // Create the canvas
110 | let canvas: MCCanvas = try MCCanvas.init(destination: &t, orthoType: .topLeft)
111 | try canvas.draw(commandBuffer: &commandBuffer, objects: drawItems)
112 | }
113 | }
114 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCARCapture/ARCamera.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ARCamera.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/04/04.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import MetalCanvas
11 | import ARKit
12 | import ProcessLogger_Swift
13 |
14 | @available(iOS 13.0, *)
15 | extension CCARCapture {
16 | @objc public class cARCamera: NSObject, CCComponentProtocol {
17 | public enum Mode {
18 | case worldTracking
19 | case orientationTracking
20 | case faceTracking
21 |
22 | var configuration: ARConfiguration {
23 | switch self {
24 | case .worldTracking:
25 | return ARWorldTrackingConfiguration()
26 | case .orientationTracking:
27 | return AROrientationTrackingConfiguration()
28 | case .faceTracking:
29 | return ARFaceTrackingConfiguration()
30 | }
31 | }
32 | }
33 |
34 | // MARK: - CCComponentProtocol
35 | public let setup: CCARCapture.cARCamera.Setup = CCARCapture.cARCamera.Setup()
36 | public let trigger: CCARCapture.cARCamera.Trigger = CCARCapture.cARCamera.Trigger()
37 | public let pipe: CCARCapture.cARCamera.Pipe = CCARCapture.cARCamera.Pipe()
38 | public var debug: CCComponentDebug?
39 |
40 | var configuration: ARConfiguration
41 | let session: ARSession = ARSession.init()
42 |
43 | public init(mode: CCARCapture.cARCamera.Mode) {
44 |
45 | self.configuration = mode.configuration
46 | self.configuration.frameSemantics = .personSegmentation
47 | super.init()
48 |
49 | self.setup.camera = self
50 | self.trigger.camera = self
51 | self.pipe.camera = self
52 | }
53 |
54 | fileprivate func start() {
55 | self.session.delegate = self
56 | self.session.run(self.configuration)
57 | }
58 |
59 | deinit {
60 | self.dispose()
61 | ProcessLogger.deinitLog(self)
62 | }
63 |
64 | func dispose() {
65 | self.setup._dispose()
66 | self.trigger._dispose()
67 | self.pipe._dispose()
68 | }
69 | }
70 |
71 | }
72 |
73 | @available(iOS 13.0, *)
74 | extension CCARCapture.cARCamera: ARSessionDelegate {
75 | public func session(_ session: ARSession, didUpdate: [ARAnchor]) {
76 |
77 | }
78 | public func session(_ session: ARSession, didUpdate: ARFrame) {
79 |
80 | let captureInfo: CCCapture.VideoCapture.CaptureInfo = CCCapture.VideoCapture.CaptureInfo()
81 | captureInfo.updateAr()
82 | let captureData: CCARCapture.CaptureData = CCARCapture.CaptureData.init(
83 | arFrame: didUpdate,
84 | captureInfo: captureInfo,
85 | mtlPixelFormat: MTLPixelFormat.bgra8Unorm,
86 | outPutPixelFormatType: MCPixelFormatType.kCV420YpCbCr8BiPlanarFullRange,
87 | captureVideoOrientation: .portrait
88 | )
89 |
90 | self.pipe.captureData = captureData
91 | self.pipe.arFrame = didUpdate
92 | self.pipe.ouTimeStamp = captureData.presentationTimeStamp
93 | }
94 | }
95 |
96 |
97 | @available(iOS 13.0, *)
98 | extension CCARCapture.cARCamera {
99 | // MARK: - Setup
100 | public class Setup: CCComponentSetupProtocol {
101 | fileprivate var camera: CCARCapture.cARCamera?
102 |
103 | fileprivate func _dispose() {
104 | self.camera = nil
105 | }
106 | }
107 |
108 | // MARK: - Trigger
109 | public class Trigger: CCComponentTriggerProtocol {
110 | fileprivate var camera: CCARCapture.cARCamera?
111 |
112 | public func start() {
113 | self.camera?.start()
114 | }
115 |
116 | public func dispose() {
117 | self.camera?.dispose()
118 | }
119 |
120 | fileprivate func _dispose() {
121 | self.camera = nil
122 | }
123 | }
124 |
125 | // MARK: - Pipe
126 | public class Pipe: NSObject, CCComponentPipeProtocol {
127 | fileprivate var camera: CCARCapture.cARCamera?
128 |
129 | public var captureData: CCARCapture.CaptureData?
130 | public var arFrame: ARFrame?
131 | @objc dynamic public var ouTimeStamp: CMTime = CMTime.zero
132 |
133 | fileprivate func _dispose() {
134 | self.camera = nil
135 | }
136 | }
137 | }
138 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/AudioExampleVC.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioExampleVC.swift
3 | // CameraCoreExample
4 | //
5 | // Created by hideyuki machida on 2020/02/15.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import CameraCore
11 | import iOS_DummyAVAssets
12 | import UIKit
13 |
14 | class AudioExampleVC: UIViewController {
15 |
16 | @IBOutlet weak var recordingButton: UIButton!
17 |
18 | var videoCaptureProperty = CCCapture.VideoCapture.Property(
19 | devicePosition: AVCaptureDevice.Position.back,
20 | isAudioDataOutput: false,
21 | required: [
22 | .captureSize(Settings.PresetSize.p1280x720),
23 | .frameRate(Settings.PresetFrameRate.fps30),
24 | ],
25 | option: [
26 | .colorSpace(AVCaptureColorSpace.P3_D65),
27 | ]
28 | )
29 |
30 | var camera: CCCapture.Camera!
31 | var audioEngine: CCAudio.AudioEngine!
32 | var audioPlayer: CCAudio.AudioPlayer!
33 | var audioMic: CCAudio.Mic!
34 | var videoRecorder: CCRecorder.VideoRecorder!
35 | var audioRecorder: CCRecorder.AudioRecorder!
36 |
37 | override func viewDidLoad() {
38 | do {
39 | self.videoRecorder = try CCRecorder.VideoRecorder()
40 | //self.audioRecorder = try CCRecorder.AudioRecorder()
41 |
42 | let camera: CCCapture.Camera = try CCCapture.Camera(property: self.videoCaptureProperty)
43 | self.camera = camera
44 | try self.camera --> self.videoRecorder
45 |
46 | let audioUrl: URL = iOS_DummyAVAssets.DummyAssetManager.AudioAsset.svg_girl_theme_01.url
47 | self.audioEngine = CCAudio.AudioEngine()
48 | self.audioPlayer = try CCAudio.AudioPlayer(url: audioUrl)
49 | self.audioPlayer.volume = 0.05
50 | //self.audioMic = try CCAudio.Mic()
51 | //self.audioMic.volume = 1.0
52 |
53 | try self.audioEngine --> self.audioPlayer
54 | //try self.audioEngine --> self.audioMic
55 | //try self.audioEngine --> self.videoRecorder
56 | //try self.audioEngine --> self.audioRecorder
57 | try self.audioEngine.trigger.start()
58 |
59 | } catch {
60 | print(error)
61 | }
62 |
63 | }
64 |
65 | override func viewDidAppear(_ animated: Bool) {
66 | super.viewDidAppear(animated)
67 | do {
68 | self.camera?.trigger.start()
69 | try self.audioPlayer?.trigger.play()
70 | } catch {
71 | print(error)
72 | }
73 | }
74 |
75 |
76 | @IBAction func micSlider(slider: UISlider){
77 | self.audioMic.volume = slider.value
78 | }
79 |
80 | @IBAction func playerSlider(slider: UISlider){
81 | self.audioPlayer.volume = slider.value
82 | }
83 |
84 |
85 | @IBAction func recordingTapAction(_ sender: Any) {
86 | self.videoRecording()
87 | //self.audioRecording()
88 | }
89 |
90 | }
91 |
92 | extension AudioExampleVC {
93 | func videoRecording() {
94 | if self.videoRecorder.isRecording {
95 | self.recordingButton.setTitle("撮影開始", for: UIControl.State.normal)
96 | self.videoRecorder.trigger.stop()
97 | } else {
98 | let filePath: String = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] + "/" + "recordingVideo" + NSUUID().uuidString + ".mp4"
99 |
100 | do {
101 | let param = CCRecorder.CaptureWriter.Parameter(
102 | outputFilePath: URL(fileURLWithPath: filePath),
103 | presetFrame: Settings.PresetSize.p1280x720,
104 | frameRate: 30,
105 | devicePosition: AVCaptureDevice.Position.back,
106 | croppingRect: CGRect(origin: CGPoint(), size: Settings.PresetSize.p1280x720.size(orientation: UIInterfaceOrientation.portrait).toCGSize()),
107 | fileType: AVFileType.mp4,
108 | videoCodecType: Settings.VideoCodec.hevc
109 | )
110 |
111 | try self.videoRecorder.setup.setup(parameter: param)
112 | self.videoRecorder.trigger.start()
113 | self.recordingButton.setTitle("撮影ストップ", for: UIControl.State.normal)
114 | } catch {}
115 | }
116 | }
117 | }
118 |
119 | extension AudioExampleVC {
120 | func audioRecording() {
121 | if self.audioRecorder.isRecording {
122 | self.recordingButton.setTitle("撮影開始", for: UIControl.State.normal)
123 | self.audioRecorder.stop()
124 | } else {
125 | do {
126 | try self.audioRecorder.start()
127 | self.recordingButton.setTitle("撮影ストップ", for: UIControl.State.normal)
128 | } catch {
129 |
130 | }
131 | }
132 | }
133 | }
134 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/PlayerExample/PlayerExample001VC.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PlayerExample001VC.swift
3 | // CameraCoreExample
4 | //
5 | // Created by hideyuki machida on 2020/03/31.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import CameraCore
11 | import iOS_DummyAVAssets
12 | import MetalCanvas
13 | import UIKit
14 | import ProcessLogger_Swift
15 |
16 | class PlayerExample001VC: UIViewController {
17 | var videoCaptureProperty = CCCapture.VideoCapture.Property(
18 | devicePosition: AVCaptureDevice.Position.back,
19 | isAudioDataOutput: true,
20 | required: [
21 | .captureSize(Settings.PresetSize.p1280x720),
22 | .frameRate(Settings.PresetFrameRate.fps30),
23 | ],
24 | option: [
25 | .colorSpace(AVCaptureColorSpace.P3_D65),
26 | ]
27 | )
28 |
29 | private var player: CCPlayer = CCPlayer()
30 | private var imageProcess: CCImageProcess.ImageProcess?
31 | private var debugger: CCDebug.ComponentDebugger = CCDebug.ComponentDebugger()
32 | @IBOutlet weak var drawView: CCView!
33 | @IBOutlet weak var seekBar: UISlider!
34 |
35 | deinit {
36 | self.player.trigger.dispose()
37 | self.drawView.trigger.dispose()
38 | self.debugger.trigger.dispose()
39 |
40 | CameraCore.flush()
41 | ProcessLogger.deinitLog(self)
42 | }
43 |
44 | override func viewDidLoad() {
45 | super.viewDidLoad()
46 |
47 | //let url: URL = iOS_DummyAVAssets.AssetManager.VideoAsset.portrait002.url
48 | //let url: URL = URL(fileURLWithPath: "https://devimages.apple.com.edgekey.net/samplecode/avfoundationMedia/AVFoundationQueuePlayer_HLS2/master.m3u8")
49 | //let url: URL = URL(string: "https://devimages.apple.com.edgekey.net/samplecode/avfoundationMedia/AVFoundationQueuePlayer_HLS2/master.m3u8")!
50 | //let url: URL = URL(string: "https://video-dev.github.io/streams/x36xhzz/x36xhzz.m3u8")!
51 | let url: URL = URL(string: "https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_4x3/bipbop_4x3_variant.m3u8")!
52 | //let url: URL = URL(string: "https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_ts/master.m3u8")!
53 | //let url: URL = URL(string: "http://infinite-set.heteml.jp/hls/h264_256/playlist.m3u8")!
54 | //let url: URL = URL(string: "http://infinite-set.heteml.jp/hls/h265_256/output.m3u8")!
55 | //let url: URL = URL(string: "http://infinite-set.heteml.jp/hls/h265_256_ts/output.m3u8")!
56 | //let url: URL = URL(string: "http://infinite-set.heteml.jp/hls/h264_256_fmp4/output.m3u8")!
57 | //let url: URL = URL(string: "http://infinite-set.heteml.jp/hls/h265_256.mp4")!
58 | //let url: URL = URL(string: "http://infinite-set.heteml.jp/hls/h265_256_fmp4/stream.m3u8")!
59 |
60 | //let imageProcess: CCImageProcess.ImageProcess = CCImageProcess.ImageProcess(isDisplayLink: true)
61 | do {
62 | //try self.player --> imageProcess --> self.drawView
63 | try self.player --> self.drawView
64 | self.player.setup.update(url: url)
65 |
66 | /*
67 | let playerStatusObservation: NSKeyValueObservation = self.player.event.observe(\.statuss, options: [.new]) { [weak self] (object: CCPlayer.Event, change) in
68 | guard
69 | let self = self,
70 | let statusId: Int = change.newValue,
71 | let status: CCPlayer.Status = CCPlayer.Status.init(rawValue: statusId)
72 | else { return }
73 | }
74 | self.observations.append(playerStatusObservation)
75 | */
76 | self.player.event.outProgress.bind() { [weak self] (progress: TimeInterval) in
77 | DispatchQueue.main.async { [weak self] in
78 | self?.seekBar.value = Float(progress)
79 | }
80 | }
81 |
82 | //self.imageProcess = imageProcess
83 |
84 | try self.debugger.setup.set(component: self.player)
85 | try self.debugger.setup.set(component: self.drawView)
86 |
87 | self.debugger.trigger.start()
88 | self.setDebuggerView()
89 | } catch {
90 |
91 | }
92 | }
93 |
94 | override func viewWillAppear(_ animated: Bool) {
95 | super.viewWillAppear(animated)
96 |
97 | self.player.trigger.play()
98 | }
99 |
100 | override func viewWillDisappear(_ animated: Bool) {
101 | super.viewWillDisappear(animated)
102 |
103 | self.player.trigger.pause()
104 | }
105 |
106 |
107 | @IBAction func seek(_ sender: UISlider) {
108 | self.player.trigger.seek(progress: sender.value)
109 | }
110 | @IBAction func seekOut(_ sender: UISlider) {
111 | self.player.trigger.play()
112 | }
113 | }
114 |
115 | extension PlayerExample001VC {
116 | public func setDebuggerView() {
117 | DispatchQueue.main.async { [weak self] in
118 | guard let self = self else { return }
119 | let debugView: DebugView = Bundle.main.loadNibNamed("DebugView", owner: self, options: nil)?.first as! DebugView
120 | self.view.addSubview(debugView)
121 |
122 | self.debugger.outPut.data.bind() { (data: CCDebug.ComponentDebugger.Output.Data) in
123 | DispatchQueue.main.async {
124 | debugView.set(debugData: data)
125 | }
126 | }
127 |
128 | }
129 | }
130 | }
131 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCAudio/AudioPlayer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioPlayer.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/02/15.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import Foundation
11 |
12 | extension CCAudio {
13 | public class AudioPlayer {
14 | // MARK: - CCComponentProtocol
15 | public let setup: CCAudio.AudioPlayer.Setup = CCAudio.AudioPlayer.Setup()
16 | public let trigger: CCAudio.AudioPlayer.Trigger = CCAudio.AudioPlayer.Trigger()
17 | public let pipe: CCAudio.AudioPlayer.Pipe = CCAudio.AudioPlayer.Pipe()
18 | public var debug: CCComponentDebug?
19 |
20 | let audioFile: AVAudioFile
21 | let player: AVAudioPlayerNode = AVAudioPlayerNode()
22 |
23 | public var volume: Float {
24 | get {
25 | return self.player.volume
26 | }
27 | set {
28 | self.player.volume = newValue
29 | }
30 | }
31 |
32 | public init(url: URL) throws {
33 | self.audioFile = try AVAudioFile(forReading: url)
34 |
35 | self.setup.audioPlayer = self
36 | self.trigger.audioPlayer = self
37 | self.pipe.audioPlayer = self
38 | }
39 | }
40 | }
41 |
42 |
43 | fileprivate extension CCAudio.AudioPlayer {
44 | func play() throws {
45 | guard self.pipe.audioEngine?.engine.isRunning == true else { return }
46 |
47 | let sampleRate: Double = self.audioFile.fileFormat.sampleRate
48 | let length: AVAudioFramePosition = self.audioFile.length
49 | let duration = Double(length) / sampleRate
50 | //var output = self.audioEngine.outputNode
51 |
52 | /*
53 | var reverb = AVAudioUnitReverb()
54 | // Reverb settings
55 | reverb.loadFactoryPreset(.largeRoom2)
56 | reverb.wetDryMix = 100
57 | self.audioEngine.attach(reverb)
58 | self.audioEngine.connect(self.player, to: reverb, format: self.audioFile.processingFormat)
59 | self.audioEngine.connect(reverb, to: output, format: self.audioFile.processingFormat)
60 | */
61 | //self.player.scheduleFile(self.audioFile, at: nil, completionHandler: nil)
62 |
63 | self.player.scheduleFile(self.audioFile, at: nil) {
64 | //self.audioEngine.mainMixerNode.removeTap(onBus: 0)
65 | //let nodeTime: AVAudioTime = self.player.lastRenderTime!
66 | //let playerTime: AVAudioTime = self.player.playerTime(forNodeTime: nodeTime)!
67 | //let currentTime = (Double(playerTime.sampleTime) / sampleRate)
68 | //print(currentTime)
69 | //self.audioEngine.stop()
70 | }
71 |
72 | self.player.installTap(onBus: 0, bufferSize: 4096, format: nil) { [weak self] (_: AVAudioPCMBuffer, _: AVAudioTime) in
73 | // Track the playback position on each tap callback and stop once the file has played through.
74 | guard
75 | let self = self,
76 | let nodeTime: AVAudioTime = self.player.lastRenderTime,
77 | let playerTime: AVAudioTime = self.player.playerTime(forNodeTime: nodeTime)
78 | else { return }
79 |
80 | let currentTime = (Double(playerTime.sampleTime) / sampleRate)
81 | if currentTime >= duration {
82 | self.player.stop()
83 | }
84 | //print(currentTime)
85 | }
86 |
87 | self.player.play()
88 | }
89 |
90 | func pause() {
91 | self.player.pause()
92 | }
93 |
94 | func dispose() {
95 | self.player.pause()
96 | self.setup._dispose()
97 | self.trigger._dispose()
98 | self.pipe._dispose()
99 | }
100 | }
101 |
102 | public extension CCAudio.AudioPlayer {
103 |
104 | // MARK: - Setup
105 | class Setup: CCComponentSetupProtocol {
106 | fileprivate var audioPlayer: CCAudio.AudioPlayer?
107 |
108 | fileprivate func _dispose() {
109 | self.audioPlayer = nil
110 | }
111 | }
112 |
113 | // MARK: - Trigger
114 | class Trigger: CCComponentTriggerProtocol {
115 | fileprivate var audioPlayer: CCAudio.AudioPlayer?
116 |
117 | public func play() throws {
118 | try self.audioPlayer?.play()
119 | }
120 |
121 | public func pause() {
122 | self.audioPlayer?.pause()
123 | }
124 |
125 | public func dispose() {
126 | self.audioPlayer?.dispose()
127 | }
128 |
129 | fileprivate func _dispose() {
130 | self.audioPlayer = nil
131 | }
132 | }
133 |
134 | // MARK: - Pipe
135 | class Pipe: NSObject, CCComponentPipeProtocol {
136 |
137 | // MARK: - Queue
138 | fileprivate let completeQueue: DispatchQueue = DispatchQueue(label: "CameraCore.CCAudio.AudioPlayer.completeQueue")
139 |
140 | fileprivate var audioEngine: CCAudio.AudioEngine?
141 | fileprivate var audioPlayer: CCAudio.AudioPlayer?
142 |
143 | fileprivate func _dispose() {
144 | self.audioPlayer = nil
145 | }
146 |
147 | func input(audioEngine: inout CCAudio.AudioEngine) throws -> CCAudio.AudioPlayer {
148 | audioEngine.engine.attach(self.audioPlayer!.player)
149 | let mainMixer: AVAudioMixerNode = audioEngine.engine.mainMixerNode
150 | audioEngine.engine.connect(self.audioPlayer!.player, to: mainMixer, format: self.audioPlayer!.audioFile.processingFormat)
151 | self.audioEngine = audioEngine
152 |
153 | return self.audioPlayer!
154 | }
155 | }
156 | }
157 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/ExampleVC/Others/QRCodeCaptureExampleVC.swift:
--------------------------------------------------------------------------------
1 | //
2 | // QRCodeCaptureExampleVC.swift
3 | // CameraCoreExample
4 | //
5 | // Created by hideyuki machida on 2020/05/03.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import CameraCore
11 | import iOS_DummyAVAssets
12 | import MetalCanvas
13 | import UIKit
14 | import ProcessLogger_Swift
15 |
16 |
17 | //MARK: - ViewController
18 |
19 | @available(iOS 11.1, *)
20 | class QRCodeCaptureExampleVC: UIViewController {
21 |
22 | var videoCaptureProperty: CCCapture.VideoCapture.Property = CCCapture.VideoCapture.Property(
23 | devicePosition: AVCaptureDevice.Position.back,
24 | deviceType: .builtInDualCamera,
25 | isAudioDataOutput: false,
26 | metadata: [.qr],
27 | required: [
28 | .captureSize(Settings.PresetSize.p1280x720)
29 | ],
30 | option: []
31 | )
32 |
33 | private var camera: CCCapture.Camera?
34 | private var imageProcess: CCImageProcess.ImageProcess?
35 | private var debugger: CCDebug.ComponentDebugger = CCDebug.ComponentDebugger()
36 |
37 | @IBOutlet weak var drawView: CCView!
38 |
39 | deinit {
40 | self.camera?.trigger.dispose()
41 | self.imageProcess?.trigger.dispose()
42 | self.drawView.trigger.dispose()
43 | self.debugger.trigger.stop()
44 | self.debugger.trigger.dispose()
45 | CameraCore.flush()
46 | ProcessLogger.deinitLog(self)
47 | }
48 |
49 | override func viewDidLoad() {
50 | super.viewDidLoad()
51 |
52 | do {
53 | let camera: CCCapture.Camera = try CCCapture.Camera(property: self.videoCaptureProperty)
54 | let imageProcess: CCImageProcess.ImageProcess = try CCImageProcess.ImageProcess()
55 | imageProcess.renderLayers.value = [try QRCodeLayer()]
56 |
57 | try camera --> imageProcess --> self.drawView
58 |
59 | camera.trigger.start()
60 | self.camera = camera
61 | self.imageProcess = imageProcess
62 |
63 | try self.debugger.setup.set(component: camera)
64 | try self.debugger.setup.set(component: imageProcess)
65 | try self.debugger.setup.set(component: self.drawView)
66 |
67 | } catch {
68 |
69 | }
70 |
71 | self.setDebuggerView()
72 | self.debugger.trigger.start()
73 | }
74 | }
75 |
76 |
77 | //MARK: - DebuggerView
78 |
79 | @available(iOS 11.1, *)
80 | extension QRCodeCaptureExampleVC {
81 | public func setDebuggerView() {
82 | DispatchQueue.main.async { [weak self] in
83 | guard let self = self else { return }
84 | let debugView: DebugView = Bundle.main.loadNibNamed("DebugView", owner: self, options: nil)?.first as! DebugView
85 | self.view.addSubview(debugView)
86 |
87 | self.debugger.outPut.data.bind() { (data: CCDebug.ComponentDebugger.Output.Data) in
88 | DispatchQueue.main.async {
89 | debugView.set(debugData: data)
90 | }
91 | }
92 |
93 | }
94 | }
95 | }
96 |
97 |
98 | //MARK: - RenderLayer
99 |
100 | final public class QRCodeLayer: RenderLayerProtocol {
101 | public let type: RenderLayerType = RenderLayerType.custom
102 | public let id: CCImageProcess.RenderLayerId = CCImageProcess.RenderLayerId()
103 | public var customIndex: Int = 0
104 |
105 | public init() throws {
106 | }
107 |
108 | deinit {
109 | ProcessLogger.deinitLog(self)
110 | }
111 |
112 |     /// Clear the cache
113 | public func dispose() {
114 | }
115 |
116 | public func process(commandBuffer: MTLCommandBuffer, source: CCTexture, destination: inout CCTexture, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws {
117 |
118 | guard
119 | var destinationPixelBuffer: CVPixelBuffer = destination.pixelBuffer
120 | else { throw CCImageProcess.ErrorType.process }
121 |
122 | var objects: [MCPrimitiveTypeProtocol] = []
123 |
124 | for metadataObject in renderLayerCompositionInfo.metadataObjects {
125 | guard let metadataObject: AVMetadataMachineReadableCodeObject = metadataObject as? AVMetadataMachineReadableCodeObject else { continue }
126 |
127 | let color: MCColor = MCColor(hex: "#FF0000")
128 | let pointSize: Float = 10.0
129 |
130 | let p: CGPoint = metadataObject.bounds.origin
131 | let size: CGSize = metadataObject.bounds.size
132 | let tl: MCPrimitive.Point = try MCPrimitive.Point.init(position: SIMD3.init(Float(p.x), Float(p.y), 0.0), color: color, size: pointSize)
133 | objects.append(tl)
134 | let tr: MCPrimitive.Point = try MCPrimitive.Point.init(position: SIMD3.init(Float(p.x + size.width), Float(p.y), 0.0), color: color, size: pointSize)
135 | objects.append(tr)
136 | let bl: MCPrimitive.Point = try MCPrimitive.Point.init(position: SIMD3.init(Float(p.x), Float(p.y + size.height), 0.0), color: color, size: pointSize)
137 | objects.append(bl)
138 | let br: MCPrimitive.Point = try MCPrimitive.Point.init(position: SIMD3.init(Float(p.x + size.width), Float(p.y + size.height), 0.0), color: color, size: pointSize)
139 | objects.append(br)
140 | }
141 |
142 | let canvas: MCCanvas = try MCCanvas.init(pixelBuffer: &destinationPixelBuffer, orthoType: MCCanvas.OrthoType.topLeft, renderSize: renderLayerCompositionInfo.renderSize.toCGSize())
143 | try canvas.draw(commandBuffer: commandBuffer, objects: objects)
144 | }
145 | }
146 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCImageProcess/Layer/SequenceImageLayer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SequenceImageLayer.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2018/08/22.
6 | // Copyright © 2018 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import MetalCanvas
11 | import CoreImage
12 |
13 | public extension CCImageProcess {
14 | final class SequenceImageLayer: RenderLayerProtocol {
15 | public let type: RenderLayerType = RenderLayerType.sequenceImage
16 | public let id: RenderLayerId
17 | public let customIndex: Int = 0
18 | private let imagePaths: [URL]
19 | private let blendMode: Blendmode
20 | private let alpha: CGFloat
21 | private let updateFrameRate: TimeInterval
22 | private let resize: Bool
23 | private var filterCacheImageList: [Int: CIImage] = [:] // Effect filter cache
24 |
25 | public init(imagePaths: [URL], blendMode: Blendmode, alpha: CGFloat = 1.0, updateFrameRate: Int32 = 30, resize: Bool = true) {
26 | self.id = RenderLayerId()
27 | self.imagePaths = imagePaths.sorted(by: { $0.lastPathComponent < $1.lastPathComponent })
28 | self.blendMode = blendMode
29 | self.alpha = alpha
30 | self.updateFrameRate = TimeInterval(updateFrameRate)
31 | self.resize = resize
32 | }
33 |
34 | fileprivate init(id: RenderLayerId, imagePaths: [URL], blendMode: Blendmode, alpha: CGFloat = 1.0, updateFrameRate: TimeInterval = 30, resize: Bool = true) {
35 | self.id = id
36 | self.imagePaths = imagePaths.sorted(by: { $0.lastPathComponent < $1.lastPathComponent })
37 | self.blendMode = blendMode
38 | self.alpha = alpha
39 | self.updateFrameRate = updateFrameRate
40 | self.resize = resize
41 | }
42 |
43 |         /// Clear the cache
44 | public func dispose() {
45 | self.filterCacheImageList.removeAll()
46 | }
47 | }
48 | }
49 |
50 | public extension CCImageProcess.SequenceImageLayer {
51 | func process(commandBuffer: MTLCommandBuffer, source: CCTexture, destination: inout CCTexture, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws {
52 | guard
53 | !self.imagePaths.isEmpty,
54 | var inputImage: CIImage = CIImage(mtlTexture: source.texture, options: nil)
55 | else { throw RenderLayerErrorType.renderingError }
56 |
57 | inputImage = try process(image: inputImage, renderLayerCompositionInfo: &renderLayerCompositionInfo)
58 | let colorSpace: CGColorSpace = inputImage.colorSpace ?? CGColorSpaceCreateDeviceRGB()
59 | MCCore.ciContext.render(inputImage, to: destination.texture, commandBuffer: commandBuffer, bounds: inputImage.extent, colorSpace: colorSpace)
60 | }
61 | }
62 |
63 | fileprivate extension CCImageProcess.SequenceImageLayer {
64 | func process(image: CIImage, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws -> CIImage {
65 | let imageCounter: Float = Float(renderLayerCompositionInfo.compositionTime.value) * Float(self.updateFrameRate) / Float(renderLayerCompositionInfo.compositionTime.timescale)
66 |
67 | // Generate the filter image
68 | let counter: Int = Int(floorf(imageCounter)) % self.imagePaths.count
69 | var filterImage: CIImage = try self.filterImage(count: counter, renderSize: renderLayerCompositionInfo.renderSize)
70 |
71 | // Flip vertically
72 | filterImage = filterImage.transformed(by: CGAffineTransform(scaleX: 1, y: -1.0).translatedBy(x: 0, y: -CGFloat(filterImage.extent.height)))
73 |
74 | guard let colorMatrixFilter: CIFilter = CIFilter(name: "CIColorMatrix") else { throw RenderLayerErrorType.renderingError }
75 | colorMatrixFilter.setValue(filterImage, forKey: kCIInputImageKey)
76 | colorMatrixFilter.setValue(CIVector(x: 0.0, y: 0.0, z: 0.0, w: self.alpha), forKey: "inputAVector")
77 |
78 | // Composite the filters
79 | guard
80 | let result: CIFilter = CIFilter(name: self.blendMode.CIFilterName),
81 | let colorMatrixFilterOutputImage: CIImage = colorMatrixFilter.outputImage
82 | else { throw RenderLayerErrorType.renderingError }
83 |
84 | result.setValue(image, forKey: kCIInputBackgroundImageKey)
85 | result.setValue(colorMatrixFilterOutputImage, forKey: kCIInputImageKey)
86 |
87 | guard let outImage: CIImage = result.outputImage else { throw RenderLayerErrorType.renderingError }
88 | return outImage
89 | }
90 |
91 | // MARK: - Private -
92 |
93 | func filterImage(count: Int, renderSize: MCSize) throws -> CIImage {
94 | // Create the filter image
95 | if let filter: CIImage = self.filterCacheImageList[count] {
96 | return filter
97 | }
98 | let filter: CIImage = try self.loadFilterImage(count: count, renderSize: renderSize)
99 | self.filterCacheImageList[count] = filter
100 | return filter
101 | }
102 |
103 | /// Generate / fetch the filter image
104 | func loadFilterImage(count: Int, renderSize: MCSize) throws -> CIImage {
105 | let renderSize: CGSize = renderSize.toCGSize()
106 | // Create the filter image
107 | guard self.imagePaths.indices.contains(count) else { throw RenderLayerErrorType.renderingError }
108 | let imagePath: URL = self.imagePaths[count]
109 | guard var effect: CIImage = CIImage(contentsOf: imagePath) else { throw RenderLayerErrorType.renderingError }
110 | if self.resize {
111 | // Resize the filter image
112 | let effectExtent: CGRect = effect.extent
113 | let scale: CGFloat = max(renderSize.width / effectExtent.width, renderSize.height / effectExtent.height)
114 | effect = effect.transformed(by: CGAffineTransform(scaleX: scale, y: scale))
115 | let y: CGFloat = effect.extent.size.height - renderSize.height
116 | effect = effect.transformed(by: CGAffineTransform(translationX: 0, y: -y))
117 | effect = effect.cropped(to: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: renderSize))
118 | return effect
119 | }
120 | return effect
121 | }
122 | }
123 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCCapture/CaptureInfo.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CaptureInfo.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2019/12/28.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import Foundation
11 | import MetalCanvas
12 | import ProcessLogger_Swift
13 |
14 | extension CCCapture.VideoCapture {
15 | public class CaptureInfo {
16 | private let traceQueue: DispatchQueue = DispatchQueue(label: "CCCapture.VideoCapture.CaptureInfo.Queue")
17 |
18 | public private(set) var device: AVCaptureDevice?
19 | public private(set) var deviceFormat: AVCaptureDevice.Format?
20 | public private(set) var depthDataFormat: AVCaptureDevice.Format?
21 | public private(set) var deviceType: AVCaptureDevice.DeviceType?
22 | public private(set) var presetSize: Settings.PresetSize = Settings.PresetSize.p1280x720
23 | public private(set) var captureSize: MCSize = Settings.PresetSize.p1280x720.size(orientation: Configuration.shared.currentUIInterfaceOrientation)
24 | public private(set) var devicePosition: AVCaptureDevice.Position = .back
25 | public private(set) var frameRate: Int32 = 30
26 | public private(set) var colorSpace: AVCaptureColorSpace = .sRGB
27 | public private(set) var outPutPixelFormatType: MCPixelFormatType = MCPixelFormatType.kCV32BGRA
28 | public private(set) var videoHDR: Bool?
29 | public private(set) var isSmoothAutoFocusEnabled: Bool = true
30 | public private(set) var depthDataOut: Bool = false
31 | public private(set) var metadata: [AVMetadataObject.ObjectType] = []
32 |
33 | func updateAr() {
34 | self.captureSize = MCSize.init(1920, 1440)
35 | self.frameRate = 60
36 | self.outPutPixelFormatType = MCPixelFormatType.kCV420YpCbCr8BiPlanarFullRange
37 | }
38 |
39 | func update(
40 | device: AVCaptureDevice,
41 | deviceFormat: AVCaptureDevice.Format,
42 | isDepthDataOutput: Bool,
43 | metadata: [AVMetadataObject.ObjectType],
44 | outPutPixelFormatType: MCPixelFormatType,
45 | itemList: [CCCapture.VideoCapture.Property.Item]
46 | ) {
47 |
48 | self.device = device
49 | self.devicePosition = device.position
50 | self.deviceFormat = deviceFormat
51 | self.outPutPixelFormatType = outPutPixelFormatType
52 | self.metadata = metadata
53 |
54 | //////////////////////////////////////////////////////////
55 | // When isDepthDataOutput is true, fetch the supported depth formats
56 | if isDepthDataOutput {
57 | var depthFormats: [AVCaptureDevice.Format] = []
58 | deviceFormat.supportedDepthDataFormats.forEach { depthFormats.append($0) }
59 | depthFormats = depthFormats.filter { CMFormatDescriptionGetMediaSubType($0.formatDescription) == kCVPixelFormatType_DepthFloat32 }
60 | self.depthDataFormat = depthFormats.max(by: { first, second in
61 | CMVideoFormatDescriptionGetDimensions(first.formatDescription).width < CMVideoFormatDescriptionGetDimensions(second.formatDescription).width
62 | })!
63 | self.depthDataOut = isDepthDataOutput
64 | }
65 | //////////////////////////////////////////////////////////
66 |
67 | for item in itemList {
68 | switch item {
69 | case let .captureSize(captureSize):
70 | self.presetSize = captureSize
71 | let w: Int32 = CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription).width
72 | let h: Int32 = CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription).height
73 | self.captureSize = MCSize(w: CGFloat(w), h: CGFloat(h))
74 | case let .frameRate(frameRate):
75 | self.frameRate = self.frameRate(frameRate: frameRate, deviceFormat: deviceFormat)
76 | case let .colorSpace(colorSpace):
77 | let colorSpaces: [AVCaptureColorSpace] = deviceFormat.supportedColorSpaces.filter { $0 == colorSpace }
78 | guard let colorSpace: AVCaptureColorSpace = colorSpaces.first else { break }
79 | self.colorSpace = colorSpace
80 | case .videoHDR:
81 | self.videoHDR = deviceFormat.isVideoHDRSupported
82 | case let .isSmoothAutoFocusEnabled(on):
83 | self.isSmoothAutoFocusEnabled = on && device.isSmoothAutoFocusSupported
84 | }
85 | }
86 |
87 | self.traceQueue.async { [weak self] () in
88 | self?.trace()
89 | }
90 | }
91 |
92 | private func frameRate(frameRate: Settings.PresetFrameRate, deviceFormat: AVCaptureDevice.Format) -> Int32 {
93 | var resultFrameRate: Int32 = 1
94 | for videoSupportedFrameRateRange: Any in deviceFormat.videoSupportedFrameRateRanges {
95 | guard let range: AVFrameRateRange = videoSupportedFrameRateRange as? AVFrameRateRange else { continue }
96 | if range.minFrameRate <= Float64(frameRate.rawValue), Float64(frameRate.rawValue) <= range.maxFrameRate {
97 | // Both MIN and MAX fall within the supported range
98 | resultFrameRate = frameRate.rawValue
99 | break
100 | } else if range.minFrameRate > Float64(frameRate.rawValue), Float64(frameRate.rawValue) <= range.maxFrameRate {
101 | // Within the maximum, but below the range's minimum
102 | resultFrameRate = Int32(range.minFrameRate)
103 | continue
104 | } else if range.minFrameRate <= Float64(frameRate.rawValue), Float64(frameRate.rawValue) > range.maxFrameRate {
105 | // Within the minimum, but above the range's maximum
106 | resultFrameRate = Int32(range.maxFrameRate)
107 | continue
108 | }
109 | }
110 | return resultFrameRate
111 | }
112 |
113 | func trace() {
114 | ProcessLogger.log("----------------------------------------------------")
115 | ProcessLogger.log("■ deviceInfo")
116 | if let device: AVCaptureDevice = self.device {
117 | ProcessLogger.log("device: \(device)")
118 | }
119 | if let deviceFormat: AVCaptureDevice.Format = self.deviceFormat {
120 | ProcessLogger.log("deviceFormat: \(deviceFormat)")
121 | ProcessLogger.log("videoHDR: \(deviceFormat.isVideoHDRSupported)")
122 | }
123 | ProcessLogger.log("deviceType: \(String(describing: self.deviceType))")
124 | ProcessLogger.log("captureSize: \(self.captureSize)")
125 | ProcessLogger.log("frameRate: \(self.frameRate)")
126 | ProcessLogger.log("devicePosition: \(self.devicePosition.toString)")
127 | ProcessLogger.log("colorSpace: \(self.colorSpace.toString)")
128 | ProcessLogger.log("isSmoothAutoFocusEnabled: \(self.isSmoothAutoFocusEnabled)")
129 | ProcessLogger.log("----------------------------------------------------")
130 | }
131 | }
132 | }
133 |
134 |
--------------------------------------------------------------------------------
/Example/iOSExample/CameraCoreExample/UseCoreMLExample/UseCoreMLYOLOv3TinyExampleVC.swift:
--------------------------------------------------------------------------------
1 | //
2 | // UseCoreMLYOLOv3TinyExampleVC.swift
3 | // CameraCoreExample
4 | //
5 | // Created by hideyuki machida on 2019/09/23.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import AVFoundation
11 | import CameraCore
12 | import Vision
13 | /*
14 | @available(iOS 12.0, *)
15 | class UseCoreMLYOLOv3TinyExampleVC: UIViewController {
16 |
17 | @IBOutlet weak var videoCaptureView: CameraCore.VideoCaptureView!
18 |
19 | private var detectionOverlay: CALayer! = nil
20 | var videoCaputurePropertys = CCRenderer.VideoCapture.Propertys.init(
21 | devicePosition: AVCaptureDevice.Position.back,
22 | isAudioDataOutput: true,
23 | required: [
24 | .captureSize(Settings.PresetSize.p960x540),
25 | .frameRate(Settings.PresetFrameRate.fr30)
26 | ],
27 | option: []
28 | )
29 |
30 |
31 | deinit {
32 | self.videoCaptureView.pause()
33 | self.videoCaptureView.dispose()
34 | }
35 |
36 | var rootLayer: CALayer! = nil
37 |
38 | override func viewDidLoad() {
39 | super.viewDidLoad()
40 |
41 | let renderSize: CGSize = Settings.PresetSize.p960x540.size()
42 |
43 | self.rootLayer = self.videoCaptureView.layer
44 | self.setupLayers()
45 | self.updateLayerGeometry()
46 |
47 | do {
48 | try self.videoCaptureView.setup(self.videoCaputurePropertys)
49 | let coreMLLayer = try CoreMLYOLOv3TinyLayer()
50 | coreMLLayer.onUpdate = { [weak self] (items: [ VNRecognizedObjectObservation ]) in
51 | guard let self = self else { return }
52 | for objectObservation: VNRecognizedObjectObservation in items {
53 | self.detectionOverlay.sublayers = nil
54 |
55 | let topLabelObservation = objectObservation.labels[0]
56 | let objectBounds = VNImageRectForNormalizedRect(objectObservation.boundingBox, Int(renderSize.width), Int(renderSize.height))
57 |
58 | let shapeLayer = self.createRoundedRectLayerWithBounds(objectBounds)
59 | let textLayer = self.createTextSubLayerInBounds(objectBounds, identifier: topLabelObservation.identifier, confidence: topLabelObservation.confidence)
60 |
61 | shapeLayer.addSublayer(textLayer)
62 | self.detectionOverlay.addSublayer(shapeLayer)
63 | }
64 | }
65 | self.videoCaptureView.renderLayers = [ coreMLLayer ]
66 | } catch {
67 | }
68 |
69 |
70 | }
71 |
72 | override func viewWillAppear(_ animated: Bool) {
73 | super.viewWillAppear(animated)
74 | self.videoCaptureView.play()
75 | }
76 |
77 | override func viewWillDisappear(_ animated: Bool) {
78 | super.viewWillDisappear(animated)
79 | self.videoCaptureView.pause()
80 | self.videoCaptureView.dispose()
81 | }
82 | }
83 |
84 | @available(iOS 12.0, *)
85 | extension UseCoreMLYOLOv3TinyExampleVC {
86 |
87 | func drawVisionRequestResults(_ results: [Any]) {
88 | CATransaction.begin()
89 | CATransaction.setValue(kCFBooleanTrue, forKey: kCATransactionDisableActions)
90 | detectionOverlay.sublayers = nil // remove all the old recognized objects
91 |
92 | /*
93 | for observation in results where observation is VNRecognizedObjectObservation {
94 | guard let objectObservation = observation as? VNRecognizedObjectObservation else {
95 | continue
96 | }
97 | // Select only the label with the highest confidence.
98 | let topLabelObservation = objectObservation.labels[0]
99 | let objectBounds = VNImageRectForNormalizedRect(objectObservation.boundingBox, Int(bufferSize.width), Int(bufferSize.height))
100 |
101 | let shapeLayer = self.createRoundedRectLayerWithBounds(objectBounds)
102 |
103 | let textLayer = self.createTextSubLayerInBounds(objectBounds,
104 | identifier: topLabelObservation.identifier,
105 | confidence: topLabelObservation.confidence)
106 | shapeLayer.addSublayer(textLayer)
107 | detectionOverlay.addSublayer(shapeLayer)
108 | }
109 | */
110 | //self.updateLayerGeometry()
111 | CATransaction.commit()
112 | }
113 |
114 |
115 | func setupLayers() {
116 | let renderSize: CGSize = Settings.PresetSize.p960x540.size()
117 |
118 | detectionOverlay = CALayer() // container layer that has all the renderings of the observations
119 | detectionOverlay.name = "DetectionOverlay"
120 | detectionOverlay.bounds = CGRect(x: 0.0,
121 | y: 0.0,
122 | width: renderSize.width,
123 | height: renderSize.height)
124 | detectionOverlay.position = CGPoint(x: rootLayer.bounds.midX, y: rootLayer.bounds.midY)
125 | rootLayer.addSublayer(detectionOverlay)
126 | }
127 |
128 | func updateLayerGeometry() {
129 | let renderSize: CGSize = Settings.PresetSize.p960x540.size()
130 |
131 | let bounds = rootLayer.bounds
132 | var scale: CGFloat
133 |
134 | let xScale: CGFloat = bounds.size.width / renderSize.height
135 | let yScale: CGFloat = bounds.size.height / renderSize.width
136 |
137 | scale = fmax(xScale, yScale)
138 | if scale.isInfinite {
139 | scale = 1.0
140 | }
141 | CATransaction.begin()
142 | CATransaction.setValue(kCFBooleanTrue, forKey: kCATransactionDisableActions)
143 |
144 | // rotate the layer into screen orientation and scale and mirror
145 | detectionOverlay.setAffineTransform(CGAffineTransform(rotationAngle: CGFloat(.pi / 2.0)).scaledBy(x: scale, y: -scale))
146 | // center the layer
147 | detectionOverlay.position = CGPoint (x: bounds.midX, y: bounds.midY)
148 |
149 | CATransaction.commit()
150 |
151 | }
152 |
153 | func createTextSubLayerInBounds(_ bounds: CGRect, identifier: String, confidence: VNConfidence) -> CATextLayer {
154 | let textLayer = CATextLayer()
155 | textLayer.name = "Object Label"
156 | let formattedString = NSMutableAttributedString(string: String(format: "\(identifier)\nConfidence: %.2f", confidence))
157 | let largeFont = UIFont(name: "Helvetica", size: 24.0)!
158 | formattedString.addAttributes([NSAttributedString.Key.font: largeFont], range: NSRange(location: 0, length: identifier.count))
159 | textLayer.string = formattedString
160 | textLayer.bounds = CGRect(x: 0, y: 0, width: bounds.size.height - 10, height: bounds.size.width - 10)
161 | textLayer.position = CGPoint(x: bounds.midX, y: bounds.midY)
162 | textLayer.shadowOpacity = 0.7
163 | textLayer.shadowOffset = CGSize(width: 2, height: 2)
164 | textLayer.foregroundColor = CGColor(colorSpace: CGColorSpaceCreateDeviceRGB(), components: [0.0, 0.0, 0.0, 1.0])
165 | textLayer.contentsScale = 2.0 // retina rendering
166 | // rotate the layer into screen orientation and scale and mirror
167 | textLayer.setAffineTransform(CGAffineTransform(rotationAngle: CGFloat(.pi / 2.0)).scaledBy(x: 1.0, y: -1.0))
168 | return textLayer
169 | }
170 |
171 | func createRoundedRectLayerWithBounds(_ bounds: CGRect) -> CALayer {
172 | let shapeLayer = CALayer()
173 | shapeLayer.bounds = bounds
174 | shapeLayer.position = CGPoint(x: bounds.midX, y: bounds.midY)
175 | shapeLayer.name = "Found Object"
176 | shapeLayer.backgroundColor = CGColor(colorSpace: CGColorSpaceCreateDeviceRGB(), components: [1.0, 1.0, 0.2, 0.4])
177 | shapeLayer.cornerRadius = 7
178 | return shapeLayer
179 | }
180 | }
181 | */
182 |
--------------------------------------------------------------------------------
/Sources/CameraCore/CCVision/Inference.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Inference.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/01/07.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 | import MetalCanvas
12 | import ProcessLogger_Swift
13 |
14 | public extension CCVision {
15 | class Inference: NSObject, CCComponentProtocol {
16 | public let setup: CCVision.Inference.Setup = CCVision.Inference.Setup()
17 | public let trigger: CCVision.Inference.Trigger = CCVision.Inference.Trigger()
18 | public let pipe: CCVision.Inference.Pipe = CCVision.Inference.Pipe()
19 | public var debug: CCComponentDebug?
20 |
21 | private let inferenceProcessQueue: DispatchQueue = DispatchQueue(label: "CameraCore.CCVision.Inference")
22 | private let inferenceProcessCompleteQueue: DispatchQueue = DispatchQueue(label: "CameraCore.CCVision.Inference.Complete")
23 |
24 | fileprivate var currentItem: CCVariable = CCVariable(nil)
25 | fileprivate(set) var isProcess: CCVariable = CCVariable(false)
26 | fileprivate(set) var processTimeStamp: CCVariable = CCVariable(CMTime.zero)
27 |
28 | public override init() {
29 | super.init()
30 | self.isLoop = false
31 | self.setup.inference = self
32 | self.trigger.inference = self
33 | self.pipe.inference = self
34 | }
35 |
36 | fileprivate func process(item: Item) {
37 | guard
38 | self.isProcess.value != true,
39 | self.processTimeStamp.value != item.timeStamp
40 | else { /* Not an image-data buffer */ return }
41 |
42 | self.processTimeStamp.value = item.timeStamp
43 | self.isProcess.value = true
44 |
45 | var userInfo: [String : Any] = [:]
46 |
47 | for index in self.setup.process.indices {
48 | guard self.setup.process.indices.contains(index) else { continue }
49 | do {
50 | try self.setup.process[index].process(
51 | pixelBuffer: item.pixelBuffer,
52 | timeStamp: item.timeStamp,
53 | metadataObjects: item.metadataObjects,
54 | userInfo: &userInfo
55 | )
56 | } catch {
57 | }
58 | }
59 |
60 | self.pipe.outUpdate(userInfo: userInfo)
61 | self.debug?.update()
62 | self.isProcess.value = false
63 | }
64 |
65 |
66 | fileprivate var isLoop: Bool = false
67 |
68 | fileprivate func runLoop() {
69 | self.inferenceProcessQueue.async { [weak self] in
70 | guard let self = self else { return }
71 | let interval: TimeInterval = 1.0 / (60 * 2)
72 | let timer: Timer = Timer.scheduledTimer(timeInterval: interval, target: self, selector: #selector(self.updateDisplay), userInfo: nil, repeats: true)
73 | RunLoop.current.add(timer, forMode: RunLoop.Mode.tracking)
74 | while self.isLoop {
75 | RunLoop.current.run(until: Date(timeIntervalSinceNow: interval))
76 | }
77 | }
78 | }
79 |
80 | @objc private func updateDisplay() {
81 | guard
82 | let item: CCVision.Inference.Item = self.currentItem.value
83 | else { return }
84 |
85 | self.process(item: item)
86 | }
87 |
88 |
89 | deinit {
90 | self.dispose()
91 | ProcessLogger.deinitLog(self)
92 | }
93 |
94 | }
95 | }
96 |
97 | fileprivate extension CCVision.Inference {
98 | func dispose() {
99 | self.setup._dispose()
100 | self.trigger._dispose()
101 | self.pipe._dispose()
102 | }
103 | }
104 |
105 | extension CCVision.Inference {
106 | // MARK: - Setup
107 | public class Setup: CCComponentSetupProtocol {
108 | fileprivate var inference: CCVision.Inference?
109 |
110 | private var _process: [CCVisionInferenceProtocol] = []
111 | public var process: [CCVisionInferenceProtocol] {
112 | get {
113 | objc_sync_enter(self)
114 | defer { objc_sync_exit(self) }
115 | return self._process
116 | }
117 | set {
118 | objc_sync_enter(self)
119 | self._process = newValue
120 | objc_sync_exit(self)
121 | }
122 | }
123 |
124 | fileprivate func _dispose() {
125 | self.inference = nil
126 | }
127 | }
128 |
129 | // MARK: - Trigger
130 | public class Trigger: CCComponentTriggerProtocol {
131 | fileprivate var inference: CCVision.Inference?
132 |
133 | public func start() {
134 | self.inference?.isLoop = true
135 | self.inference?.runLoop()
136 | }
137 |
138 | public func stop() {
139 | self.inference?.isLoop = false
140 | }
141 |
142 | public func dispose() {
143 | self.inference?.dispose()
144 | }
145 |
146 | fileprivate func _dispose() {
147 | self.inference = nil
148 | }
149 | }
150 |
151 | }
152 |
153 | extension CCVision.Inference {
154 | // MARK: - Pipe
155 | public class Pipe: NSObject, CCComponentPipeProtocol {
156 | private let completeQueue: DispatchQueue = DispatchQueue(label: "CameraCore.CCVision.Inference.Complete")
157 |
158 | fileprivate var observations: [NSKeyValueObservation] = []
159 |
160 | //fileprivate var currentItem: CCVariable = CCVariable(nil)
161 | fileprivate var inference: CCVision.Inference?
162 |
163 | public var userInfo: CCVariable<[String : Any]> = CCVariable([:])
164 |
165 | // MARK: - Pipe - input
166 |
167 | // MARK: input - CCCapture.Camera
168 | func input(camera: CCCapture.Camera) throws -> CCVision.Inference {
169 | /*
170 | camera.pipe.videoCaptureItem.bind() { [weak self] (captureData: CCCapture.VideoCapture.CaptureData?) in
171 | guard
172 | let self = self,
173 | let captureData: CCCapture.VideoCapture.CaptureData = captureData,
174 | let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(captureData.sampleBuffer)
175 | else { return }
176 |
177 | self.inference?.currentItem.value = CCVision.Inference.Item.init(
178 | pixelBuffer: pixelBuffer,
179 | timeStamp: captureData.presentationTimeStamp,
180 | metadataObjects: captureData.metadataObjects
181 | )
182 | }
183 | */
184 | return self.inference!
185 | }
186 |
187 | // MARK: input - CCPlayer
188 | func input(player: CCPlayer) throws -> CCVision.Inference {
189 | player.pipe.outTexture.bind() { [weak self] (outTexture: CCTexture?) in
190 | guard
191 | let self = self,
192 | let outTexture: CCTexture = outTexture,
193 | let pixelBuffer: CVPixelBuffer = outTexture.pixelBuffer
194 | else { return }
195 |
196 | self.inference?.currentItem.value = CCVision.Inference.Item.init(
197 | pixelBuffer: pixelBuffer,
198 | timeStamp: outTexture.presentationTimeStamp,
199 | metadataObjects: []
200 | )
201 | }
202 |
203 | return self.inference!
204 | }
205 |
206 | fileprivate func outUpdate(userInfo: [String : Any]) {
207 | self.userInfo.value = userInfo
208 | self.completeQueue.async { [weak self] in
209 | self?.userInfo.notice()
210 | self?.userInfo.value.removeAll()
211 | }
212 | }
213 |
214 | fileprivate func _dispose() {
215 | self.inference = nil
216 | self.userInfo.dispose()
217 | self.observations.forEach { $0.invalidate() }
218 | self.observations.removeAll()
219 | }
220 | }
221 |
222 | }
223 |
224 | extension CCVision.Inference {
225 | struct Item {
226 | let pixelBuffer: CVPixelBuffer
227 | let timeStamp: CMTime
228 | let metadataObjects: [AVMetadataObject]
229 | }
230 | }
231 |
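Usage sketch: a minimal, hypothetical wiring of CCVision.Inference to a CCPlayer based only on the APIs visible above. It assumes CCVariable's bind() delivers the new value (as it does for outTexture in this file), that qrDetector is some CCVisionInferenceProtocol implementation, and notes that input(player:) is internal, so this wiring would live inside the CameraCore module.

let movieURL: URL = URL(fileURLWithPath: "/path/to/movie.mp4")
let player: CCPlayer = CCPlayer()
let inference: CCVision.Inference = CCVision.Inference()

// Register one or more processors (qrDetector is a hypothetical CCVisionInferenceProtocol).
inference.setup.process = [qrDetector]

do {
    // Route decoded player frames into the inference loop.
    _ = try inference.pipe.input(player: player)
} catch {
    print("failed to connect player:", error)
}

// Each completed pass publishes its results as a userInfo dictionary.
inference.pipe.userInfo.bind { (userInfo: [String: Any]) in
    print("inference result:", userInfo)
}

player.setup.update(url: movieURL)
inference.trigger.start() // starts the internal 120 Hz run loop
player.trigger.play()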
--------------------------------------------------------------------------------
/Sources/CameraCore/CCDebug/ComponentDebugger.swift:
--------------------------------------------------------------------------------
1 | //
2 | //  ComponentDebugger.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/03/24.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import MetalCanvas
11 | import UIKit
12 | import ProcessLogger_Swift
13 |
14 | public extension CCDebug {
15 | class ComponentDebugger: NSObject {
16 | public let outPut: CCDebug.ComponentDebugger.Output = CCDebug.ComponentDebugger.Output()
17 | public let fileWriter: CCDebug.ComponentDebugger.FileWriter = CCDebug.ComponentDebugger.FileWriter()
18 |
19 | fileprivate let debugLoopQueue: DispatchQueue = DispatchQueue(label: "CameraCore.CCDebug.ComponentDebugger.debugLoopQueue")
20 | fileprivate let writeQueue: DispatchQueue = DispatchQueue(label: "CameraCore.CCDebug.ComponentDebugger.writeQueue")
21 |
22 | public let setup: CCDebug.ComponentDebugger.Setup = CCDebug.ComponentDebugger.Setup()
23 | public let trigger: CCDebug.ComponentDebugger.Trigger = CCDebug.ComponentDebugger.Trigger()
24 |
25 | fileprivate var list: [CCComponentProtocol] = []
26 |
27 | private var displayLink: CADisplayLink?
28 | private var isDebugLoop: Bool = false
29 | private var startTime: TimeInterval = Date().timeIntervalSince1970
30 | private var mainthredFPSDebugger: ProcessLogger.Framerate = ProcessLogger.Framerate()
31 |
32 | public override init() {
33 | super.init()
34 | self.setup.debugger = self
35 | self.trigger.debugger = self
36 | }
37 |
38 | deinit {
39 | self.dispose()
40 | ProcessLogger.deinitLog(self)
41 | }
42 |
43 | fileprivate func start() {
44 | self.displayLink = CADisplayLink(target: self, selector: #selector(updateDisplay))
45 | self.displayLink?.add(to: RunLoop.main, forMode: RunLoop.Mode.common)
46 |
47 | self.startTime = Date().timeIntervalSince1970
48 |
49 | self.isDebugLoop = true
50 | self.debugLoopQueue.async { [weak self] in
51 | guard let self = self else { return }
52 | // Timer.scheduledTimer returns a non-optional Timer, so no unwrapping is needed
53 | let timer: Timer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(self.debugLoop), userInfo: nil, repeats: true)
54 | RunLoop.current.add(timer, forMode: RunLoop.Mode.common)
55 | while self.isDebugLoop {
56 | RunLoop.current.run(until: Date(timeIntervalSinceNow: 1.0))
57 | }
58 | }
59 | }
60 |
61 | fileprivate func stop() {
62 | self.displayLink?.invalidate()
63 | self.startTime = Date().timeIntervalSince1970
64 | self.isDebugLoop = false
65 | }
66 |
67 | @objc private func debugLoop() {
68 | let currentTime: TimeInterval = Date().timeIntervalSince1970
69 | let mainthredFPS: Int = self.mainthredFPSDebugger.fps()
70 |
71 | let usedCPU: Int = Int(ProcessLogger.Device.usedCPU())
72 | let usedMemory: Int = Int(ProcessLogger.Device.usedMemory() ?? 0)
73 | let thermalState: Int = ProcessInfo.processInfo.thermalState.rawValue
74 |
75 | var compornetFPSList: [CCDebug.ComponentDebugger.Output.Data.CompornetFPS] = []
76 | for i in self.list {
77 | let name: String = String(describing: type(of: i))
78 | let fps: Int = i.debug?.fps() ?? 0
79 | //i.debug?.cpu()
80 | compornetFPSList.append(CCDebug.ComponentDebugger.Output.Data.CompornetFPS(name: name, fps: fps))
81 | }
82 |
83 | self.outPut.data.value = CCDebug.ComponentDebugger.Output.Data(
84 | time: Int(currentTime - self.startTime),
85 | mainthredFPS: mainthredFPS,
86 | compornetFPSList: compornetFPSList,
87 | usedCPU: usedCPU,
88 | usedMemory: usedMemory,
89 | thermalState: thermalState
90 | )
91 |
92 | /*
93 | self.writeQueue.async { [weak self] in
94 | guard let self = self else { return }
95 | }
96 | */
97 | self.outPut.data.notice()
98 | }
99 |
100 | @objc private func updateDisplay() {
101 | self.mainthredFPSDebugger.update()
102 | }
103 |
104 | }
105 | }
106 |
107 | fileprivate extension CCDebug.ComponentDebugger {
108 | func dispose() {
109 | self.stop()
110 | self.list = []
111 | self.outPut._dispose()
112 | self.displayLink?.invalidate()
113 | self.setup._dispose()
114 | self.trigger._dispose()
115 | }
116 | }
117 |
118 |
119 | extension CCDebug.ComponentDebugger {
120 | // MARK: - Setup
121 | public class Setup: CCComponentSetupProtocol {
122 | fileprivate var debugger: CCDebug.ComponentDebugger?
123 |
124 | public func set(component: CCComponentProtocol) throws {
125 | component.isDebugMode = true
126 | self.debugger?.list.append(component)
127 | }
128 |
129 | fileprivate func _dispose() {
130 | self.debugger = nil
131 | }
132 | }
133 |
134 | // MARK: - Trigger
135 | public class Trigger: CCComponentTriggerProtocol {
136 | fileprivate var debugger: CCDebug.ComponentDebugger?
137 |
138 | public func start() {
139 | self.debugger?.start()
140 | }
141 |
142 | public func stop() {
143 | self.debugger?.stop()
144 | }
145 |
146 | public func dispose() {
147 | self.debugger?.dispose()
148 | }
149 |
150 | fileprivate func _dispose() {
151 | self.debugger = nil
152 | }
153 | }
154 | }
155 |
156 | public extension CCDebug.ComponentDebugger {
157 | class Output: NSObject {
158 | public struct Data {
159 | public struct CompornetFPS {
160 | public let name: String
161 | public let fps: Int
162 | }
163 |
164 | public var time: Int = 0
165 | public var mainthredFPS: Int = 0
166 | public var compornetFPSList: [CCDebug.ComponentDebugger.Output.Data.CompornetFPS] = []
167 | public var usedCPU: Int = 0
168 | public var usedMemory: Int = 0
169 | public var thermalState: Int = 0
170 |
171 | fileprivate func toArray() -> [String] {
172 | return [
173 | String(self.time),
174 | String(self.mainthredFPS),
175 | String(self.usedCPU),
176 | String(self.usedMemory),
177 | String(self.thermalState),
178 | ]
179 | }
180 | }
181 |
182 | public var data: CCVariable = CCVariable(Data())
183 |
184 | fileprivate func _dispose() {
185 | self.data.dispose()
186 | }
187 | }
188 | }
189 |
190 | public extension CCDebug.ComponentDebugger {
191 | class FileWriter: NSObject {
192 | let labels: [String] = ["time", "mainthredFPS", "cameraFPS", "imageProcessFPS", "liveViewFPS", "usedCPU", "usedMemory", "thermalState"]
193 | let documentsPath: String = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
194 | private var dirName: String = "test"
195 | let fileName: String = "data.csv"
196 |
197 | func createDir() throws {
198 | self.dirName = "Debug/\(Date().timeIntervalSince1970)"
199 | let dirPath = self.documentsPath + "/" + self.dirName
200 | try FileManager.default.createDirectory(atPath: dirPath, withIntermediateDirectories: true, attributes: nil)
201 | let filePath = self.documentsPath + "/" + self.dirName + "/" + self.fileName
202 | if FileManager.default.createFile(atPath: filePath, contents: nil, attributes: nil) {
203 | // Write the CSV header row; write(list:) opens its own FileHandle,
204 | // so no separate handle is needed here
205 | self.write(list: self.labels)
206 | }
207 | }
208 |
209 | func write(list: [String]) {
210 | let filePath = self.documentsPath + "/" + self.dirName + "/" + self.fileName
211 | let listString: String = list.joined(separator: ",") + "\n"
212 | let contentData: Data = listString.data(using: String.Encoding.utf8)!
213 | if let file: FileHandle = FileHandle(forWritingAtPath: filePath) {
214 | file.seekToEndOfFile()
215 | file.write(contentData)
216 | file.closeFile()
217 | }
218 | }
219 | }
220 | }
221 |
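Usage sketch: a minimal, hypothetical way to attach the debugger to already-configured components; camera and player stand in for any CCComponentProtocol instances, and it assumes CCVariable's bind() delivers the published Output.Data snapshot.

let debugger: CCDebug.ComponentDebugger = CCDebug.ComponentDebugger()

// Register any CCComponentProtocol instances; set(component:) flips their isDebugMode flag.
try? debugger.setup.set(component: camera)
try? debugger.setup.set(component: player)

// A fresh Output.Data snapshot is published roughly once per second by debugLoop().
debugger.outPut.data.bind { (data: CCDebug.ComponentDebugger.Output.Data) in
    print("elapsed:", data.time,
          "main-thread fps:", data.mainthredFPS,
          "cpu:", data.usedCPU,
          "memory:", data.usedMemory)
    data.compornetFPSList.forEach { print($0.name, $0.fps) }
}

debugger.trigger.start()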
--------------------------------------------------------------------------------
/Sources/CameraCore/CCPlayer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CCPlayer.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2020/03/31.
6 | // Copyright © 2020 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import Foundation
11 | import MetalCanvas
12 | import MetalKit
13 | import ProcessLogger_Swift
14 |
15 | public class CCPlayer: NSObject, CCComponentProtocol {
16 | fileprivate let queue: DispatchQueue = DispatchQueue(label: "CameraCore.CCPlayer.Queue")
17 |
18 | public let setup: CCPlayer.Setup = CCPlayer.Setup()
19 | public let trigger: CCPlayer.Trigger = CCPlayer.Trigger()
20 | public let pipe: CCPlayer.Pipe = CCPlayer.Pipe()
21 | public let event: CCPlayer.Event = CCPlayer.Event()
22 | public var debug: CCComponentDebug?
23 |
24 | private var timeObserverToken: Any?
25 | private var playerItemStatusObservation: NSKeyValueObservation?
26 | private var displayLink: CADisplayLink?
27 | private var isLoop: Bool = false
28 | private var player: AVPlayer = AVPlayer()
29 | private let output: AVPlayerItemVideoOutput = {
30 | let attributes = [kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_32BGRA]
31 | return AVPlayerItemVideoOutput(pixelBufferAttributes: attributes)
32 | }()
33 |
34 | public override init() {
35 | super.init()
36 |
37 | // The periodic time observer and AVPlayerItem status observation are created in
38 | // update(url:), which detaches the previous time observer before replacing the player;
39 | // dispose() removes the last one.
40 |
41 | self.setup.player = self
42 | self.trigger.player = self
43 | self.pipe.player = self
44 | self.event.player = self
45 | }
46 |
47 | deinit {
48 | self.dispose()
49 | ProcessLogger.deinitLog(self)
50 | }
51 |
52 | func update(url: URL) {
53 | if let timeObserverToken = self.timeObserverToken { self.player.removeTimeObserver(timeObserverToken) } // detach from the previous player
54 | let avAsset: AVURLAsset = AVURLAsset(url: url)
55 | let playerItem: AVPlayerItem = AVPlayerItem(asset: avAsset)
56 | self.player = AVPlayer(playerItem: playerItem)
57 | let time: CMTime = CMTime(seconds: 1.0 / 120.0, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
58 | self.timeObserverToken = self.player.addPeriodicTimeObserver(forInterval: time, queue: self.queue) { [weak self] (time: CMTime) in
59 | self?.updatePlayerTime(time: time)
60 | }
61 | self.playerItemStatusObservation = playerItem.observe(\.status, options: [.initial, .new], changeHandler: { [weak self] (item: AVPlayerItem, status: NSKeyValueObservedChange<AVPlayerItem.Status>) in
62 | print("value", status)
63 | switch status.newValue {
64 | case .readyToPlay: print("readyToPlay")
65 | case .failed: print("failed")
66 | case .unknown: print("unknown")
67 | case .some(_): print("some")
68 | case .none: print("none")
69 | }
70 | })
71 |
72 |
73 | //self.player.actionAtItemEnd = .none
74 | //playerItem.add(self.output)
75 | self.player.actionAtItemEnd = AVPlayer.ActionAtItemEnd.none
76 | self.player.currentItem?.add(self.output)
77 |
78 |
79 | NotificationCenter.default.addObserver(
80 | self,
81 | selector: #selector(avPlayerItemDidPlayToEndTime),
82 | name: .AVPlayerItemDidPlayToEndTime,
83 | object: playerItem
84 | )
85 | self.event.status.value = CCPlayer.Status.ready
86 | self.event.status.notice()
87 | }
88 |
89 | fileprivate func play() {
90 | self.isLoop = true
91 | self.player.play()
92 | self.event.status.value = CCPlayer.Status.play
93 | self.event.status.notice()
94 | }
95 |
96 | fileprivate func pause() {
97 | self.player.pause()
98 | self.event.status.value = CCPlayer.Status.pause
99 | self.event.status.notice()
100 | }
101 |
102 | public func seek(progress: Float) {
103 | guard let totalDuration: CMTime = self.player.currentItem?.duration else { return }
104 | //self.event.status = CCPlayer.Status.seek.rawValue
105 | let s: Double = Double(totalDuration.value) / Double(totalDuration.timescale)
106 | let time: Double = s * Double(progress)
107 | let cmtime: CMTime = CMTime(seconds: time, preferredTimescale: totalDuration.timescale).convertScale(30, method: .roundHalfAwayFromZero)
108 |
109 | self.player.pause()
110 | self.player.seek(to: cmtime, toleranceBefore: CMTime.zero, toleranceAfter: CMTime.zero)
111 | self.event.status.value = CCPlayer.Status.seek
112 | self.event.status.notice()
113 | }
114 |
115 | private func updatePlayerTime(time currentTime: CMTime) {
116 | guard
117 | let duration: CMTime = self.player.currentItem?.duration,
118 | self.output.hasNewPixelBuffer(forItemTime: currentTime),
119 | let pixelBuffer: CVPixelBuffer = self.output.copyPixelBuffer(forItemTime: currentTime, itemTimeForDisplay: nil)
120 | else { return }
121 | do {
122 | var texture: CCTexture = try CCTexture.init(pixelBuffer: pixelBuffer, planeIndex: 0)
123 | texture.presentationTimeStamp = currentTime
124 | self.pipe.outUpdate(texture: texture)
125 | self.event.outPresentationTimeStamp.value = currentTime
126 | self.event.outProgress.value = currentTime.seconds / duration.seconds
127 | self.event.outPresentationTimeStamp.notice()
128 | self.event.outProgress.notice()
129 |
130 | // Debug
131 | self.debug?.update(thred: Thread.current, queue: CCCapture.videoOutputQueue)
132 | self.debug?.update()
133 | } catch {
134 | // texture creation failures are ignored for this frame
135 | }
136 | }
137 |
138 | /// Called when playback reaches the end of the item
139 | @objc private func avPlayerItemDidPlayToEndTime(_ notification: Notification) {
140 | guard let item: AVPlayerItem = notification.object as? AVPlayerItem else { return }
141 |
142 | self.event.outProgress.value = 1.0
143 | self.event.outProgress.notice()
144 | self.event.status.value = CCPlayer.Status.endTime
145 | self.event.status.notice()
146 |
147 | // Loop: seek back to the start
148 | item.seek(to: CMTime.zero, completionHandler: nil)
149 | }
150 | }
151 |
152 | fileprivate extension CCPlayer {
153 | func dispose() {
154 | if let timeObserverToken = self.timeObserverToken { self.player.removeTimeObserver(timeObserverToken); self.timeObserverToken = nil }
155 | self.displayLink?.invalidate()
156 | self.player.pause()
157 | self.isLoop = false
158 | self.setup._dispose()
159 | self.trigger._dispose()
160 | self.pipe._dispose()
161 | self.event._dispose()
162 | NotificationCenter.default.removeObserver(self)
163 | }
164 | }
165 |
166 | extension CCPlayer {
167 | public enum Status: Int {
168 | case setup = 0
169 | case update
170 | case ready
171 | case play
172 | case pause
173 | case seek
174 | case dispose
175 | case endTime
176 | }
177 | }
178 |
179 | extension CCPlayer {
180 | // MARK: - Setup
181 | public class Setup: CCComponentSetupProtocol {
182 | fileprivate var player: CCPlayer?
183 |
184 | public func update(url: URL) {
185 | self.player?.update(url: url)
186 | }
187 |
188 | public func seek(url: URL) {
189 | self.player?.update(url: url)
190 | }
191 |
192 | fileprivate func _dispose() {
193 | self.player = nil
194 | }
195 | }
196 |
197 | // MARK: - Trigger
198 | public class Trigger: CCComponentTriggerProtocol {
199 | fileprivate var player: CCPlayer?
200 |
201 | public func play() {
202 | self.player?.play()
203 | }
204 |
205 | public func pause() {
206 | self.player?.pause()
207 | }
208 |
209 | public func seek(progress: Float) {
210 | self.player?.seek(progress: progress)
211 | }
212 |
213 | public func dispose() {
214 | self.player?.dispose()
215 | }
216 |
217 | fileprivate func _dispose() {
218 | self.player = nil
219 | }
220 | }
221 |
222 | // MARK: - Pipe
223 | public class Pipe: NSObject, CCComponentPipeProtocol {
224 | private let completeQueue: DispatchQueue = DispatchQueue(label: "CameraCore.CCPlayer.Complete")
225 |
226 | fileprivate var player: CCPlayer?
227 |
228 | public var outTexture: CCVariable<CCTexture?> = CCVariable(nil)
229 |
230 | fileprivate func outUpdate(texture: CCTexture) {
231 | self.outTexture.value = texture
232 | self.completeQueue.async { [weak self] in
233 | self?.outTexture.notice()
234 | self?.outTexture.value = nil
235 | }
236 | }
237 |
238 | fileprivate func _dispose() {
239 | self.player = nil
240 | self.outTexture.dispose()
241 | }
242 | }
243 |
244 | // MARK: - Event
245 | public class Event: NSObject, CCComponentEventProtocol {
246 | fileprivate var player: CCPlayer?
247 | public var outPresentationTimeStamp: CCVariable = CCVariable(CMTime.zero)
248 | public var outProgress: CCVariable = CCVariable(TimeInterval.zero)
249 | public var status: CCVariable = CCVariable(CCPlayer.Status.setup)
250 |
251 | fileprivate func _dispose() {
252 | self.player = nil
253 | self.outPresentationTimeStamp.dispose()
254 | self.outProgress.dispose()
255 | self.status.dispose()
256 | }
257 | }
258 | }
259 |
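Usage sketch: minimal playback wiring based on the APIs above; movieURL is a placeholder, and the outTexture binding mirrors the one used by CCVision.Inference.

let movieURL: URL = URL(fileURLWithPath: "/path/to/movie.mp4")
let player: CCPlayer = CCPlayer()

player.setup.update(url: movieURL)

// Every decoded frame arrives as a CCTexture with its presentationTimeStamp set.
player.pipe.outTexture.bind { (texture: CCTexture?) in
    guard let texture: CCTexture = texture else { return }
    // hand the texture to an image-process layer or a drawing view here
    _ = texture
}

// Status changes and progress are published separately through event.
player.event.status.bind { (status: CCPlayer.Status) in
    print("player status:", status)
}
player.event.outProgress.bind { (progress: TimeInterval) in
    print("progress:", progress)
}

player.trigger.play()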
--------------------------------------------------------------------------------
/Sources/CameraCore/CCCapture/Camera.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Camera.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2019/12/31.
6 | // Copyright © 2019 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import Foundation
11 | import MetalCanvas
12 | import MetalKit
13 | import ProcessLogger_Swift
14 | import Combine
15 |
16 | extension CCCapture {
17 | @objc public class Camera: NSObject, CCComponentProtocol {
18 |
19 | // MARK: - CCComponentProtocol
20 | public let setup: CCCapture.Camera.Setup = CCCapture.Camera.Setup()
21 | public let trigger: CCCapture.Camera.Trigger = CCCapture.Camera.Trigger()
22 | public let pipe: CCCapture.Camera.Pipe = CCCapture.Camera.Pipe()
23 | public var debug: CCComponentDebug?
24 |
25 |
26 | public fileprivate(set) var property: CCCapture.VideoCapture.Property
27 |
28 | public var event: Event?
29 | public var status: Camera.Status = .setup {
30 | willSet {
31 | self.event?.onStatusChange.value = newValue
32 | }
33 | }
34 |
35 | public var capture: CCCapture.VideoCapture.VideoCaptureManager?
36 | public var depthData: CCVariable<AVDepthData?> = CCVariable(nil)
37 | public var metadataObjects: CCVariable<[AVMetadataObject]> = CCVariable([])
38 | fileprivate var cancellableBag: [AnyCancellable] = []
39 |
40 | public init(property: CCCapture.VideoCapture.Property) throws {
41 | self.property = property
42 |
43 | super.init()
44 | try self.setupProperty(property: property)
45 |
46 | self.setup.camera = self
47 | self.trigger.camera = self
48 | self.pipe.camera = self
49 | }
50 |
51 | deinit {
52 | self.dispose()
53 | ProcessLogger.deinitLog(self)
54 | }
55 |
56 | }
57 | }
58 |
59 |
60 | fileprivate extension CCCapture.Camera {
61 | func start() {
62 | guard self.status != .play else { return }
63 | ProcessLogger.log("CameraCore.Camera.play")
64 | self.depthData.value = nil
65 | self.capture?.play()
66 | self.status = .play
67 | }
68 |
69 | func stop() {
70 | ProcessLogger.log("CameraCore.Camera.pause")
71 | self.capture?.stop()
72 | self.status = .pause
73 | }
74 |
75 | func dispose() {
76 | self.capture?.stop()
77 | self.status = .setup
78 | self.capture = nil
79 | self.setup._dispose()
80 | self.trigger._dispose()
81 | self.pipe._dispose()
82 | }
83 | }
84 |
85 | fileprivate extension CCCapture.Camera {
86 | func setupProperty(property: CCCapture.VideoCapture.Property) throws {
87 | self.property = property
88 |
89 | ///////////////////////////////////////////////////////////////////////////////////////////////////
90 | do {
91 | let capture: CCCapture.VideoCapture.VideoCaptureManager = try CCCapture.VideoCapture.VideoCaptureManager(property: property)
92 | self.capture = capture
93 | ///////////////////////////////////////////////////////////////////////////////////////////////////
94 | capture.sampleBuffer.sink(receiveValue: { [weak self] (item: CCCapture.VideoCapture.VideoCaptureOutput.Item) in
95 | guard
96 | let self = self,
97 | let captureInfo: CCCapture.VideoCapture.CaptureInfo = self.capture?.property.captureInfo
98 | else { return }
99 |
100 | if CMSampleBufferGetImageBuffer(item.sampleBuffer) != nil {
101 | // Pixel (video) data
102 | let currentCaptureItem: CCCapture.VideoCapture.CaptureData = CCCapture.VideoCapture.CaptureData(
103 | sampleBuffer: item.sampleBuffer,
104 | captureInfo: captureInfo,
105 | depthData: self.depthData.value,
106 | metadataObjects: self.metadataObjects.value,
107 | mtlPixelFormat: MTLPixelFormat.bgra8Unorm,
108 | outPutPixelFormatType: captureInfo.outPutPixelFormatType,
109 | captureVideoOrientation: item.devicePosition
110 | )
111 |
112 | self.pipe.videoCaptureItem.send(currentCaptureItem)
113 |
114 | // Debug
115 | self.debug?.update(thred: Thread.current, queue: CCCapture.videoOutputQueue)
116 | self.debug?.update()
117 | } else { // non-image (audio) sample
118 | let currentCaptureItem: CCCapture.VideoCapture.CaptureData = CCCapture.VideoCapture.CaptureData(
119 | sampleBuffer: item.sampleBuffer,
120 | captureInfo: captureInfo,
121 | depthData: nil,
122 | metadataObjects: self.metadataObjects.value,
123 | mtlPixelFormat: MTLPixelFormat.bgra8Unorm,
124 | outPutPixelFormatType: captureInfo.outPutPixelFormatType,
125 | captureVideoOrientation: item.devicePosition
126 | )
127 |
128 | self.pipe.audioCaptureItem.send(currentCaptureItem)
129 | }
130 | }).store(in: &self.cancellableBag)
131 | ///////////////////////////////////////////////////////////////////////////////////////////////////
132 |
133 | ///////////////////////////////////////////////////////////////////////////////////////////////////
134 | // Subscribe to AVDepthData & AVMetadataObject updates.
135 | // Assumption (not shown in this file): capture.depthData and capture.metadataObjects are
136 | // Combine PassthroughSubjects delivering AVDepthData and [AVMetadataObject] values.
137 | capture.depthData.sink(receiveValue: { [weak self] (depthData: AVDepthData) in
138 | self?.depthData.value = depthData
139 | }).store(in: &self.cancellableBag)
140 | 
141 | capture.metadataObjects.sink(receiveValue: { [weak self] (metadataObjects: [AVMetadataObject]) in
142 | self?.metadataObjects.value = metadataObjects
143 | }).store(in: &self.cancellableBag)
144 | 
145 |
146 | } catch {
147 | self.capture = nil
148 | throw CCCapture.ErrorType.setup
149 | }
150 | ///////////////////////////////////////////////////////////////////////////////////////////////////
151 |
152 |
153 | /*
154 | self.capture?.onUpdateDepthData = { [weak self] (depthData: AVDepthData) in
155 | self?.depthData.value = depthData
156 | }
157 |
158 | self.capture?.onUpdateMetadataObjects = { [weak self] (metadataObjects: [AVMetadataObject]) in
159 | self?.metadataObjects.value = metadataObjects
160 | }
161 | */
162 | ///////////////////////////////////////////////////////////////////////////////////////////////////
163 | }
164 |
165 | func updateProperty(property: CCCapture.VideoCapture.Property) throws {
166 | try self.capture?.update(property: property)
167 | }
168 | }
169 |
170 | extension CCCapture.Camera {
171 | public enum Status {
172 | case setup
173 | case update
174 | case ready
175 | case play
176 | case pause
177 | case seek
178 | case dispose
179 | }
180 |
181 | public class Event: NSObject {
182 | public var onStatusChange: CCVariable<CCCapture.Camera.Status?> = CCVariable(nil)
183 | public var onUpdate: CCVariable = CCVariable(nil)
184 | }
185 | }
186 |
187 | extension CCCapture.Camera {
188 | // MARK: - Setup
189 | public class Setup: CCComponentSetupProtocol {
190 | fileprivate var camera: CCCapture.Camera?
191 |
192 | public func setup(property: CCCapture.VideoCapture.Property) throws {
193 | try self.camera?.setupProperty(property: property)
194 | }
195 | public func update(property: CCCapture.VideoCapture.Property) throws {
196 | try self.camera?.updateProperty(property: property)
197 | }
198 |
199 | fileprivate func _dispose() {
200 | self.camera = nil
201 | }
202 | }
203 |
204 | // MARK: - Trigger
205 | public class Trigger: CCComponentTriggerProtocol {
206 | fileprivate var camera: CCCapture.Camera?
207 |
208 | public func start() {
209 | self.camera?.start()
210 | }
211 |
212 | public func stop() {
213 | self.camera?.stop()
214 | }
215 |
216 | public func dispose() {
217 | self.camera?.dispose()
218 | }
219 |
220 | fileprivate func _dispose() {
221 | self.camera = nil
222 | }
223 | }
224 |
225 | // MARK: - Pipe
226 | public class Pipe: NSObject, CCComponentPipeProtocol {
227 |
228 | // MARK: - Queue
229 | fileprivate let completeQueue: DispatchQueue = DispatchQueue(label: "CameraCore.CCCapture.Camera.completeQueue")
230 |
231 | fileprivate var camera: CCCapture.Camera?
232 |
233 | //public var videoCaptureItem: PassthroughSubject
234 | //public var audioCaptureItem: CCCapture.VideoCapture.CaptureData?
235 | public let videoCaptureItem: PassthroughSubject<CCCapture.VideoCapture.CaptureData, Never> = PassthroughSubject()
236 | public let audioCaptureItem: PassthroughSubject<CCCapture.VideoCapture.CaptureData, Never> = PassthroughSubject()
237 |
238 | override init() {
239 | super.init()
240 | }
241 |
242 | fileprivate func _dispose() {
243 | //self.videoCaptureItem.dispose()
244 | self.camera = nil
245 | }
246 | }
247 | }
248 |
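Usage sketch: minimal capture wiring; the CCCapture.VideoCapture.Property initializer is not shown in this file, so makeCaptureProperty() stands in as a hypothetical helper that builds one, and frames are consumed from the Combine subject on the pipe.

import Combine

var cancellables: [AnyCancellable] = []

// makeCaptureProperty() is a hypothetical helper returning a configured Property.
let property: CCCapture.VideoCapture.Property = makeCaptureProperty()

// The initializer throws CCCapture.ErrorType.setup on failure; error handling is abbreviated here.
let camera: CCCapture.Camera = try! CCCapture.Camera(property: property)

// Video frames arrive as CaptureData values on videoCaptureItem; non-image buffers go to audioCaptureItem.
camera.pipe.videoCaptureItem.sink { (captureData: CCCapture.VideoCapture.CaptureData) in
    // captureData.sampleBuffer carries the pixel data for this frame
    _ = captureData
}.store(in: &cancellables)

camera.trigger.start()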
--------------------------------------------------------------------------------
/Sources/CameraCore/CCImageProcess/Layer/ImageLayer.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageLayer.swift
3 | // CameraCore
4 | //
5 | // Created by hideyuki machida on 2018/08/22.
6 | // Copyright © 2018 hideyuki machida. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import MetalCanvas
11 | import CoreImage
12 |
13 | public extension CCImageProcess {
14 | /// ImageLayer: composites an image over the incoming texture (image overlay effect)
15 | final class ImageLayer: RenderLayerProtocol {
16 | public let type: RenderLayerType = RenderLayerType.image
17 | public let id: RenderLayerId
18 | public let customIndex: Int = 0
19 | public let transform: CGAffineTransform
20 | private let imagePath: URL
21 | private let blendMode: Blendmode
22 | private let alpha: CGFloat
23 | private let renderSize: CGSize
24 | private let contentMode: CompositionImageLayerContentMode
25 | private var contentModeTransform: CGAffineTransform?
26 | private var _image: CIImage?
27 | private var overTexture: CCTexture?
28 |
29 | public convenience init(imagePath: URL, blendMode: Blendmode, alpha: CGFloat = 1.0, renderSize: CGSize, contentMode: CompositionImageLayerContentMode = .none, transform: CGAffineTransform? = nil) {
30 | self.init(id: RenderLayerId(), imagePath: imagePath, blendMode: blendMode, alpha: alpha, renderSize: renderSize, contentMode: contentMode, transform: transform)
31 | }
32 |
33 | public init(id: RenderLayerId, imagePath: URL, blendMode: Blendmode, alpha: CGFloat = 1.0, renderSize: CGSize, contentMode: CompositionImageLayerContentMode = .none, transform: CGAffineTransform? = nil) {
34 | self.id = id
35 | self.imagePath = imagePath
36 | self.blendMode = blendMode
37 | self.alpha = alpha
38 | self.renderSize = renderSize
39 | self.contentMode = contentMode
40 | self.transform = transform ?? CGAffineTransform.identity
41 | }
42 |
43 | public func dispose() {
44 | self._image = nil
45 | }
46 | }
47 | }
48 |
49 | public extension CCImageProcess.ImageLayer {
50 | func process(commandBuffer: MTLCommandBuffer, source: CCTexture, destination: inout CCTexture, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws {
51 | guard var inputImage: CIImage = CIImage(mtlTexture: source.texture, options: nil) else { throw RenderLayerErrorType.renderingError }
52 | inputImage = try self.process(image: inputImage, renderLayerCompositionInfo: &renderLayerCompositionInfo)
53 | let colorSpace: CGColorSpace = inputImage.colorSpace ?? CGColorSpaceCreateDeviceRGB()
54 | guard let blendCommandBuffer: MTLCommandBuffer = MCCore.commandQueue.makeCommandBuffer() else { return } // renders on its own command buffer; the passed-in commandBuffer parameter is not used here
55 | MCCore.ciContext.render(inputImage, to: destination.texture, commandBuffer: blendCommandBuffer, bounds: inputImage.extent, colorSpace: colorSpace)
56 | blendCommandBuffer.commit()
57 | }
58 | }
59 |
60 |
61 | fileprivate extension CCImageProcess.ImageLayer {
62 | func process(image: CIImage, renderLayerCompositionInfo: inout RenderLayerCompositionInfo) throws -> CIImage {
63 | // Generate the filter image
64 | if let selfImage = self._image {
65 | // Composite with the blend filter
66 | guard let result: CIFilter = CIFilter(name: self.blendMode.CIFilterName) else { throw RenderLayerErrorType.renderingError }
67 | result.setValue(image, forKey: kCIInputBackgroundImageKey)
68 | result.setValue(selfImage, forKey: kCIInputImageKey)
69 |
70 | return result.outputImage ?? image
71 | } else {
72 | // Create the filter image (first call only; the result is cached in _image)
73 | guard var effect: CIImage = CIImage(contentsOf: self.imagePath) else { throw RenderLayerErrorType.renderingError }
74 |
75 | // Flip vertically (CIImage uses a bottom-left origin)
76 | effect = effect.transformed(by: CGAffineTransform(scaleX: 1, y: -1.0).translatedBy(x: 0, y: -CGFloat(effect.extent.height)))
77 |
78 | if self.contentModeTransform == nil {
79 | let imageSize: CGSize = effect.extent.size
80 | self.contentModeTransform = self.contentMode.transform(imageSize: imageSize, renderSize: self.renderSize)
81 | }
82 |
83 | guard let colorMatrixFilter: CIFilter = CIFilter(name: "CIColorMatrix") else { throw RenderLayerErrorType.renderingError }
84 | colorMatrixFilter.setValue(effect, forKey: kCIInputImageKey)
85 | colorMatrixFilter.setValue(CIVector(x: 0.0, y: 0.0, z: 0.0, w: self.alpha), forKey: "inputAVector")
86 |
87 | ///////////////////////////////////////////////////////////////////////////////////////////////////
88 | // First apply the contentMode transform
89 | guard
90 | let alphaImage: CIImage = colorMatrixFilter.outputImage,
91 | let contentModeTransform: CGAffineTransform = self.contentModeTransform,
92 | let contentModeTransformFilter: CIFilter = CIFilter(name: "CIAffineTransform")
93 | else { throw RenderLayerErrorType.renderingError }
94 |
95 | contentModeTransformFilter.setValue(alphaImage, forKey: kCIInputImageKey)
96 | contentModeTransformFilter.setValue(NSValue(cgAffineTransform: contentModeTransform), forKey: "inputTransform")
97 | guard let contentModeTransformImage: CIImage = contentModeTransformFilter.outputImage else { throw RenderLayerErrorType.renderingError }
98 | ///////////////////////////////////////////////////////////////////////////////////////////////////
99 |
100 | ///////////////////////////////////////////////////////////////////////////////////////////////////
101 | // Then apply the user-supplied transform
102 | let transform: CGAffineTransform = TransformUtils.convertTransformSKToCI(
103 | userTransform: self.transform,
104 | videoSize: image.extent.size,
105 | renderSize: renderSize,
106 | preferredTransform: CGAffineTransform.identity
107 | )
108 |
109 | guard let transformFilter: CIFilter = CIFilter(name: "CIAffineTransform") else { throw RenderLayerErrorType.renderingError }
110 | transformFilter.setValue(contentModeTransformImage, forKey: kCIInputImageKey)
111 | transformFilter.setValue(NSValue(cgAffineTransform: transform), forKey: "inputTransform")
112 | guard var transformImage: CIImage = transformFilter.outputImage else { throw RenderLayerErrorType.renderingError }
113 | transformImage = transformImage.cropped(to: CGRect(origin: CGPoint(x: 0, y: 0), size: self.renderSize))
114 | ///////////////////////////////////////////////////////////////////////////////////////////////////
115 |
116 | self._image = transformImage
117 |
118 | // Composite with the blend filter
119 | guard let result: CIFilter = CIFilter(name: self.blendMode.CIFilterName) else { throw RenderLayerErrorType.renderingError }
120 |
121 | result.setValue(image, forKey: kCIInputBackgroundImageKey)
122 | result.setValue(transformImage, forKey: kCIInputImageKey)
123 | return result.outputImage ?? image
124 | }
125 | }
126 | }
127 |
128 | extension CCImageProcess.ImageLayer {
129 | // Enum cases that are planned but not yet implemented are commented out
130 | public enum CompositionImageLayerContentMode: Int, Codable {
131 | // case scaleToFill = 0
132 | // case scaleAspectFit
133 | case scaleAspectFill = 2
134 | // case redraw
135 | // case center
136 | // case top
137 | // case bottom
138 | // case left
139 | // case right
140 | // case topLeft
141 | // case topRight
142 | // case bottomLeft
143 | // case bottomRight
144 | case none
145 |
146 | public func transform(imageSize: CGSize, renderSize: CGSize) -> CGAffineTransform {
147 | switch self {
148 | // case .scaleToFill: return CGAffineTransform.identity
149 | // case .scaleAspectFit: return CGAffineTransform.identity
150 | case .scaleAspectFill: return CompositionImageLayerContentModeTransform.scaleAspectFill(imageSize: imageSize, renderSize: renderSize)
151 | // case .scaleAspectFill: return CGAffineTransform.identity
152 | // case .redraw: return CGAffineTransform.identity
153 | // case .center: return CGAffineTransform.identity
154 | // case .top: return CGAffineTransform.identity
155 | // case .bottom: return CGAffineTransform.identity
156 | // case .left: return CGAffineTransform.identity
157 | // case .right: return CGAffineTransform.identity
158 | // case .topLeft: return CGAffineTransform.identity
159 | // case .topRight: return CGAffineTransform.identity
160 | // case .bottomLeft: return CGAffineTransform.identity
161 | // case .bottomRight: return CGAffineTransform.identity
162 | case .none: return CGAffineTransform.identity
163 | }
164 | }
165 | }
166 |
167 | // swiftlint:disable:next type_name
168 | class CompositionImageLayerContentModeTransform {
169 | // swiftlint:disable:previous type_name
170 | public static func scaleAspectFill(imageSize: CGSize, renderSize: CGSize) -> CGAffineTransform {
171 | guard renderSize != imageSize else { return CGAffineTransform(scaleX: 1.0, y: 1.0) }
172 |
173 | let originalSize: CGSize = CGSize(width: imageSize.width, height: imageSize.height)
174 |
175 | // Compute the scale
176 | let scaleW: CGFloat = renderSize.width / originalSize.width
177 | let scaleH: CGFloat = renderSize.height / originalSize.height
178 |
179 | let scale: CGFloat = scaleW > scaleH ? scaleW : scaleH
180 | let resizeSize: CGSize = CGSize(width: originalSize.width * scale, height: originalSize.height * scale)
181 |
182 | let resultTransform: CGAffineTransform = CGAffineTransform(scaleX: scale, y: scale)
183 | .translatedBy(x: ((renderSize.width / 2) - (resizeSize.width / 2)) * (1 / scale),
184 | y: ((renderSize.height / 2) - (resizeSize.height / 2)) * (1 / scale))
185 | return resultTransform
186 | }
187 | }
188 |
189 | }
190 |
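Usage sketch: constructing the layer; overlayImageURL points at any local image, and the Blendmode value (defined in MetalCanvas) is shown as a hypothetical .alpha case since that enum is not part of this file.

import CoreGraphics
import Foundation

let overlayImageURL: URL = URL(fileURLWithPath: "/path/to/overlay.png")

let overlay: CCImageProcess.ImageLayer = CCImageProcess.ImageLayer(
    imagePath: overlayImageURL,                    // local image to composite
    blendMode: .alpha,                             // hypothetical Blendmode case from MetalCanvas
    alpha: 0.8,
    renderSize: CGSize(width: 1920, height: 1080),
    contentMode: .scaleAspectFill                  // .scaleAspectFill and .none are the implemented modes
)

// The layer is handed to the image-process pipeline, which calls
// process(commandBuffer:source:destination:renderLayerCompositionInfo:) once per frame.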
--------------------------------------------------------------------------------