├── .gitignore
├── Images
├── concept1.png
└── concept2.png
├── Tests
├── LinuxMain.swift
└── BrazilNutTests
│ ├── XCTestManifests.swift
│ └── BrazilNutTests.swift
├── .swiftpm
└── xcode
│ └── package.xcworkspace
│ └── contents.xcworkspacedata
├── Sources
└── BrazilNut
│ ├── Transitions
│ ├── ModTransition.swift
│ ├── DissolveTransition.swift
│ ├── AccordingFoldTransition.swift
│ ├── BarSwipeTranstion.swift
│ ├── FlashTransition.swift
│ ├── SwipeTransition.swift
│ ├── CopyMachineTransition.swift
│ └── TransitionOperator.swift
│ ├── Base
│ ├── CIImage+Extension.swift
│ ├── Image.swift
│ ├── MetalDevice.swift
│ ├── Pipeline.swift
│ └── ImageRelay.swift
│ ├── Filters
│ ├── FilterB.swift
│ ├── FilterC.swift
│ ├── FilterA.swift
│ ├── FilterD.swift
│ └── BaseCollageFilter.swift
│ ├── Outputs
│ ├── RenderView.swift
│ └── VideoWriter.swift
│ └── Inputs
│ ├── VideoSource.swift
│ └── Camera.swift
├── LICENSE
├── Package.swift
└── README.md
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | /.build
3 | /Packages
4 | /*.xcodeproj
5 | xcuserdata/
6 |
--------------------------------------------------------------------------------
/Images/concept1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hxperl/BrazilNut/HEAD/Images/concept1.png
--------------------------------------------------------------------------------
/Images/concept2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hxperl/BrazilNut/HEAD/Images/concept2.png
--------------------------------------------------------------------------------
/Tests/LinuxMain.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | 
3 | import BrazilNutTests
4 | // Manual test entry point for Linux, where XCTest has no automatic test discovery.
5 | var tests = [XCTestCaseEntry]()
6 | tests += BrazilNutTests.allTests()
7 | XCTMain(tests)
8 |
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 | 
--------------------------------------------------------------------------------
/Tests/BrazilNutTests/XCTestManifests.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | // Aggregates every test case for platforms (e.g. Linux) that lack Objective-C runtime test discovery; consumed by LinuxMain.swift.
3 | #if !canImport(ObjectiveC)
4 | public func allTests() -> [XCTestCaseEntry] {
5 | return [
6 | testCase(BrazilNutTests.allTests),
7 | ]
8 | }
9 | #endif
10 |
--------------------------------------------------------------------------------
/Tests/BrazilNutTests/BrazilNutTests.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import BrazilNut
3 | 
4 | final class BrazilNutTests: XCTestCase {
5 | func testExample() {
6 | // This is an example of a functional test case.
7 | // Use XCTAssert and related functions to verify your tests produce the correct
8 | // results.
9 | }
10 | // Registry used by XCTMain on Linux; new tests must be added here as well as above.
11 | static var allTests = [
12 | ("testExample", testExample),
13 | ]
14 | }
15 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Transitions/ModTransition.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModTransition.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2020/02/07.
6 | // Copyright © 2020 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import CoreImage
10 |
11 | /// Transition operator backed by Core Image's built-in "CIModTransition" filter.
12 | public class ModTransition: TransitionOperator {
13 |     /// Creates the operator with the filter's default parameter values.
14 |     public init() {
15 |         let modFilter = CIFilter(name: "CIModTransition")
16 |         modFilter?.setDefaults()
17 |         super.init(filter: modFilter)
18 |     }
19 | }
20 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Transitions/DissolveTransition.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DissolveTransition.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2020/02/07.
6 | // Copyright © 2020 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import CoreImage
10 |
11 | /// Transition operator backed by Core Image's built-in "CIDissolveTransition" filter.
12 | public class DissolveTransition: TransitionOperator {
13 |     public init() {
14 |         let dissolveFilter = CIFilter(name: "CIDissolveTransition")
15 |         dissolveFilter?.setDefaults()
16 |         super.init(filter: dissolveFilter)
17 |     }
18 | }
19 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Transitions/AccordingFoldTransition.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AccordingFoldTransition.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2020/02/07.
6 | // Copyright © 2020 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import CoreImage
10 |
11 | /// Transition operator backed by Core Image's built-in "CIAccordionFoldTransition" filter.
12 | public class AccordingFoldTransition: TransitionOperator {
13 |     public init() {
14 |         let filter = CIFilter(name: "CIAccordionFoldTransition")
15 |         filter?.setDefaults()
16 |         super.init(filter: filter)
17 |     }
18 | }
19 | 
20 | /// Correctly spelled alias: the class name misspells Apple's "accordion fold"
21 | /// ("According"). Kept for source compatibility; prefer this name in new code.
22 | public typealias AccordionFoldTransition = AccordingFoldTransition
19 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Transitions/BarSwipeTranstion.swift:
--------------------------------------------------------------------------------
1 | //
2 | // BarSwipeTranstion.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2020/02/05.
6 | // Copyright © 2020 Geonseok Lee. All rights reserved.
7 | //
8 | import CoreImage
9 |
10 | /// Transition operator backed by Core Image's built-in "CIBarsSwipeTransition"
11 | /// filter, configured with a bar width of 60 and an angle of 3 radians.
12 | public class BarSwipeTransition: TransitionOperator {
13 |     public init() {
14 |         let filter = CIFilter(name: "CIBarsSwipeTransition")
15 |         // Match every sibling transition: reset all parameters to their
16 |         // documented defaults first, then override the selected ones.
17 |         // (The original never called setDefaults(), leaving the remaining
18 |         // parameters unset.)
19 |         filter?.setDefaults()
20 |         filter?.setValue(60, forKey: kCIInputWidthKey)
21 |         filter?.setValue(3, forKey: kCIInputAngleKey)
22 |         super.init(filter: filter)
23 |     }
24 | }
19 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Transitions/FlashTransition.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FlashTransition.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2020/02/07.
6 | // Copyright © 2020 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import CoreImage
10 |
11 | /// Transition operator using "CIFlashTransition" with a white flash color.
12 | public class FlashTransition: TransitionOperator {
13 |     public init() {
14 |         let flashFilter = CIFilter(name: "CIFlashTransition")
15 |         flashFilter?.setDefaults()
16 |         flashFilter?.setValue(CIColor(red: 1, green: 1, blue: 1), forKey: kCIInputColorKey)
17 |         super.init(filter: flashFilter)
18 |     }
19 | }
20 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Transitions/SwipeTransition.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SwipeTransition.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2020/02/07.
6 | // Copyright © 2020 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import CoreImage
10 |
11 | /// Transition operator using "CISwipeTransition" with a white swipe color.
12 | public class SwipeTransition: TransitionOperator {
13 |     public init() {
14 |         let swipeFilter = CIFilter(name: "CISwipeTransition")
15 |         swipeFilter?.setDefaults()
16 |         swipeFilter?.setValue(CIColor(red: 1, green: 1, blue: 1), forKey: kCIInputColorKey)
17 |         super.init(filter: swipeFilter)
18 |     }
19 | }
20 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Transitions/CopyMachineTransition.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CopyMachineTransition.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2020/02/07.
6 | // Copyright © 2020 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import CoreImage
10 |
11 | /// Transition operator using "CICopyMachineTransition" with a blue scan color.
12 | public class CopyMachineTransition: TransitionOperator {
13 |     public init() {
14 |         let copyFilter = CIFilter(name: "CICopyMachineTransition")
15 |         copyFilter?.setDefaults()
16 |         copyFilter?.setValue(CIColor(red: 0, green: 0, blue: 1), forKey: kCIInputColorKey)
17 |         super.init(filter: copyFilter)
18 |     }
19 | }
20 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Base/CIImage+Extension.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CIImage+Extension.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2019/11/13.
6 | // Copyright © 2019 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import CoreImage
10 |
11 | extension CIImage {
12 |     /// Uniformly scales the receiver so that it fills `size`: the larger of
13 |     /// the horizontal/vertical scale factors is applied to both axes, so one
14 |     /// dimension may extend beyond the target (aspect-fill behavior).
15 |     ///
16 |     /// NOTE(review): despite the name, the extent's origin is NOT translated
17 |     /// to (0, 0) — only a scale is applied. Confirm callers expect that.
18 |     func transformToOrigin(withSize size: CGSize) -> CIImage {
19 |         let fillScale = max(size.width / extent.width, size.height / extent.height)
20 |         return transformed(by: CGAffineTransform(scaleX: fillScale, y: fillScale))
21 |     }
22 | }
25 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Base/Image.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Image.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2019/11/13.
6 | // Copyright © 2019 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import CoreImage
10 | import AVFoundation
11 |
12 | /// Distinguishes still photos from timestamped video frames in the pipeline.
13 | public enum BNImageType {
14 |     case photo
15 |     case videoFrame(timestamp: CMTime)
16 | 
17 |     /// Presentation timestamp for video frames; `nil` for photos.
18 |     var timestamp: CMTime? {
19 |         if case .videoFrame(let ts) = self { return ts }
20 |         return nil
21 |     }
22 | }
25 |
26 | /// A Core Image payload plus its type tag, passed between pipeline nodes.
27 | public struct BNImage {
28 |     public let image: CIImage
29 |     public let type: BNImageType
30 | 
31 |     public init(image: CIImage, type: BNImageType) {
32 |         self.image = image
33 |         self.type = type
34 |     }
35 | }
34 |
35 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Base/MetalDevice.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MetalDevice.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2019/11/13.
6 | // Copyright © 2019 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import Metal
10 | import CoreGraphics
11 |
12 | /// Library-wide Metal context: the system default device, its default shader
13 | /// library, a command queue, and a device-RGB color space.
14 | ///
15 | /// NOTE(review): the stored properties are implicitly unwrapped; if
16 | /// `MTLCreateSystemDefaultDevice()` returns nil (no Metal support) they stay
17 | /// nil and any later access will crash — confirm that is acceptable.
18 | public class MetalDevice {
19 | 
20 |     public static var shared = MetalDevice()
21 | 
22 |     public var device: MTLDevice!
23 |     var library: MTLLibrary!
24 |     public var colorSpace: CGColorSpace!
25 | 
26 |     var commandQueue: MTLCommandQueue!
27 | 
28 |     private var renderPipelineState: MTLRenderPipelineState?
29 | 
30 |     init() {
31 |         guard let metalDevice = MTLCreateSystemDefaultDevice(),
32 |               let queue = metalDevice.makeCommandQueue() else { return }
33 |         device = metalDevice
34 |         library = metalDevice.makeDefaultLibrary()
35 |         commandQueue = queue
36 |         colorSpace = CGColorSpaceCreateDeviceRGB()
37 |     }
38 | }
36 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 hxperl
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version:5.1
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 | 
4 | import PackageDescription
5 | // BrazilNut: a Core Image–based image/video processing library (iOS 13+, no external dependencies).
6 | let package = Package(
7 | name: "BrazilNut",
8 | platforms: [
9 | .iOS(.v13)
10 | ],
11 | products: [
12 | // Products define the executables and libraries produced by a package, and make them visible to other packages.
13 | .library(
14 | name: "BrazilNut",
15 | targets: ["BrazilNut"]),
16 | ],
17 | dependencies: [
18 | // Dependencies declare other packages that this package depends on.
19 | // .package(url: /* package url */, from: "1.0.0"),
20 | ],
21 | targets: [
22 | // Targets are the basic building blocks of a package. A target can define a module or a test suite.
23 | // Targets can depend on other targets in this package, and on products in packages which this package depends on.
24 | .target(
25 | name: "BrazilNut",
26 | dependencies: []),
27 | .testTarget(
28 | name: "BrazilNutTests",
29 | dependencies: ["BrazilNut"]),
30 | ]
31 | )
32 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Filters/FilterB.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FilterB.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2020/01/07.
6 | // Copyright © 2020 Geonseok Lee. All rights reserved.
7 | //
8 | import CoreImage
9 |
10 | /// Color-grading filter: CIColorControls → CIVibrance → CIHueAdjust.
11 | public class FilterB: ImageRelay {
12 | 
13 |     override public func newImageAvailable(_ bnImage: BNImage, from source: ImageSource) {
14 |         let inputImage = bnImage.image
15 |         let inputType = bnImage.type
16 | 
17 |         // Reset parameters to their defaults BEFORE assigning inputs.
18 |         // The original called setDefaults() after setValue(...); per Apple's
19 |         // docs setDefaults resets every parameter that has a default, so the
20 |         // late call could discard explicitly set values.
21 |         let colorControls = CIFilter(name: "CIColorControls")
22 |         colorControls?.setDefaults()
23 |         colorControls?.setValue(inputImage, forKey: kCIInputImageKey)
24 | 
25 |         let vibrance = CIFilter(name: "CIVibrance")
26 |         vibrance?.setDefaults()
27 |         vibrance?.setValue(colorControls?.outputImage, forKey: kCIInputImageKey)
28 | 
29 |         let hueAdjust = CIFilter(name: "CIHueAdjust")
30 |         hueAdjust?.setValue(vibrance?.outputImage, forKey: kCIInputImageKey)
31 |         hueAdjust?.setValue(CGFloat(0.1619718372821808), forKey: "inputAngle")
32 | 
33 |         // Forward the graded image only when the chain produced output.
34 |         if let outputImage = hueAdjust?.outputImage {
35 |             let newImage = BNImage(image: outputImage, type: inputType)
36 |             for consumer in consumers {
37 |                 consumer.newImageAvailable(newImage, from: self)
38 |             }
39 |         }
40 |     }
41 | }
36 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Base/Pipeline.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Pipeline.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2019/11/13.
6 | // Copyright © 2019 Geonseok Lee. All rights reserved.
7 | //
8 | import CoreImage
9 |
10 | infix operator --> : AdditionPrecedence
11 | 
12 | /// Adds `destination` as a consumer of `source` and returns `destination`,
13 | /// enabling chains like `camera --> filter --> renderView`.
14 | /// (The generic parameter list `<T: ImageConsumer>` was lost in the dump's
15 | /// angle-bracket stripping and is restored here; `T` was otherwise undeclared.)
16 | @discardableResult public func --> <T: ImageConsumer>(source: ImageSource, destination: T) -> T {
17 |     return source.add(consumer: destination)
18 | }
15 |
16 | /// An object that produces `BNImage`s and pushes them to registered consumers.
17 | public protocol ImageSource: AnyObject {
18 |     /// Adds an image consumer that will receive this source's output.
19 |     ///
20 |     /// - Parameter consumer: image consumer object to add
21 |     /// - Returns: the same consumer, so `-->` chains can continue from it
22 |     func add<T: ImageConsumer>(consumer: T) -> T
23 | 
24 |     /// Adds an image consumer at the specific index
25 |     ///
26 |     /// - Parameters:
27 |     ///   - consumer: image consumer object to add
28 |     ///   - index: index for the image consumer object
29 |     func add(consumer: ImageConsumer, at index: Int)
30 | 
31 |     /// Removes the given image consumer.
32 |     ///
33 |     /// - Parameter consumer: image consumer object to remove
34 |     func remove(consumer: ImageConsumer)
35 | 
36 |     /// Removes all image consumers.
37 |     func removeAllConsumers()
38 | }
38 |
39 | public protocol ImageConsumer: AnyObject {
40 |     /// Registers an image source that will feed this consumer with images.
41 |     ///
42 |     /// - Parameter source: image source object to add
43 |     func add(source: ImageSource)
44 | 
45 |     /// Unregisters an image source.
46 |     ///
47 |     /// - Parameter source: image source object to remove
48 |     func remove(source: ImageSource)
49 | 
50 |     /// Receives a newly produced image from an image source.
51 |     ///
52 |     /// - Parameters:
53 |     ///   - bnImage: the new image
54 |     ///   - source: the image source object that delivered it
55 |     func newImageAvailable(_ bnImage: BNImage, from source: ImageSource)
56 | }
57 |
58 | /// Weakly references an image source and caches its most recent `CIImage`,
59 | /// avoiding retain cycles between pipeline nodes.
60 | public struct WeakImageSource {
61 |     public weak var source: ImageSource?
62 |     public var ciImage: CIImage?
63 | 
64 |     public init(source: ImageSource) {
65 |         self.source = source
66 |     }
67 | }
64 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Transitions/TransitionOperator.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TransitionOperator.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2020/02/12.
6 | // Copyright © 2020 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import CoreImage
10 | import QuartzCore.CoreAnimation
11 |
12 | public class TransitionOperator: ImageRelay {
13 | // Plays a Core Image transition between two upstream sources by driving the filter's time input from 0 to 1 on a display link.
14 | private var filter: CIFilter?
15 | private var displayLink: CADisplayLink? = nil
16 | private var progress: Float = 0.0
17 | private var started = false
18 | // `filter` is expected to accept kCIInputImageKey / kCIInputTargetImageKey / kCIInputTimeKey (see newImageAvailable below).
19 | public init(filter: CIFilter?) {
20 | self.filter = filter
21 | super.init()
22 | }
23 | // 20 fps tick with a 0.05 progress step ⇒ the transition lasts about one second.
24 | private func startDisplayLink() {
25 | displayLink = CADisplayLink(target: self, selector: #selector(updateDisplayLink))
26 | displayLink?.preferredFramesPerSecond = 20
27 | displayLink?.add(to: .main, forMode: .common)
28 | }
29 | // Ends the animation and detaches the FIRST source; NOTE(review): presumably the outgoing image, so the second (target) source keeps feeding this node — confirm.
30 | private func stopDisplayLink() {
31 | displayLink?.invalidate()
32 | displayLink = nil
33 | progress = 0
34 | started = false
35 | if let source = sources[safe: 0]?.source {
36 | source.remove(consumer: self)
37 | remove(source: source)
38 | }
39 | }
40 | // Display-link callback: advance the transition and stop once it completes.
41 | @objc private func updateDisplayLink() {
42 | progress += 0.05
43 | if progress >= 1.0 {
44 | stopDisplayLink()
45 | }
46 | }
47 | // Until BOTH inputs have delivered an image, frames pass through unchanged.
48 | // NOTE(review): `progress` is written by the main-run-loop display link and read here — assumes frames also arrive on the main thread; confirm.
49 | public override func newImageAvailable(_ bnImage: BNImage, from source: ImageSource) {
50 | super.newImageAvailable(bnImage, from: source)
51 | let type = bnImage.type
52 | guard _sources[safe: 0]?.ciImage != nil && _sources[safe: 1]?.ciImage != nil else {
53 | for consumer in consumers { consumer.newImageAvailable(bnImage, from: self) }
54 | return
55 | }
56 | // Start the clock on the first frame where both inputs are present.
57 | if !started {
58 | startDisplayLink()
59 | started.toggle()
60 | }
61 | // Blend source → target at the current progress and forward the result.
62 | filter?.setValue(_sources[safe: 0]?.ciImage, forKey: kCIInputImageKey)
63 | filter?.setValue(_sources[safe: 1]?.ciImage, forKey: kCIInputTargetImageKey)
64 | filter?.setValue(progress, forKey: kCIInputTimeKey)
65 | if let output = filter?.outputImage {
66 | let newImage = BNImage(image: output, type: type)
67 | for consumer in consumers { consumer.newImageAvailable(newImage, from: self) }
68 | }
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Outputs/RenderView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RenderView.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2019/11/13.
6 | // Copyright © 2019 Geonseok Lee. All rights reserved.
7 | //
8 | import MetalKit
9 | import CoreImage
10 |
11 | public class RenderView: MTKView, ImageConsumer {
12 | public func add(source: ImageSource) {
13 | //
14 | }
15 | // NOTE(review): add(source:)/remove(source:) are intentionally empty — this view only stores frames pushed to it; confirm no source bookkeeping is required.
16 | public func remove(source: ImageSource) {
17 | //
18 | }
19 | 
20 | // Semaphore serialising draw(_:) against concurrent access to the drawable/state.
21 | private var lock: DispatchSemaphore!
22 | // Most recent video frame; rendered on the next draw pass.
23 | fileprivate var ciimage: CIImage?
24 | 
25 | /// Render only video frame type image
26 | ///
27 | /// - Parameter image: image object
28 | public func newImageAvailable(_ image: BNImage, from source: ImageSource) {
29 | // Render if image type is video frame
30 | guard case .videoFrame = image.type else { return }
31 | self.ciimage = image.image
32 | }
33 | 
34 | override init(frame frameRect: CGRect, device: MTLDevice?) {
35 | super.init(frame: frameRect, device: device)
36 | commonInit()
37 | }
38 | 
39 | required init(coder: NSCoder) {
40 | super.init(coder: coder)
41 | commonInit()
42 | 
43 | }
44 | // Shared setup: writable framebuffer (Core Image renders into it) plus the library-wide Metal device and a binary semaphore.
45 | private func commonInit() {
46 | framebufferOnly = false
47 | autoResizeDrawable = true
48 | self.device = MetalDevice.shared.device
49 | self.lock = DispatchSemaphore(value: 1)
50 | }
51 | // Core Image context bound to the same Metal device; created once on first use.
52 | private lazy var ciContext: CIContext = { [unowned self] in
53 | return CIContext(mtlDevice: self.device!)
54 | }()
55 | // Renders the latest frame: aspect-fill scale to the drawable size, then a CIRenderDestination task, then present + commit.
56 | public override func draw(_ rect: CGRect) {
57 | _ = lock.wait(wallTimeout: .distantFuture)
58 | guard let currentDrawable = self.currentDrawable,
59 | let image = self.ciimage else {
60 | lock.signal()
61 | return }
62 | let resized = image.transformToOrigin(withSize: drawableSize)
63 | let commandBuffer = MetalDevice.shared.commandQueue.makeCommandBuffer()
64 | let destination = CIRenderDestination(width: Int(drawableSize.width), height: Int(drawableSize.height), pixelFormat: colorPixelFormat, commandBuffer: commandBuffer, mtlTextureProvider: {
65 | () -> MTLTexture in
66 | currentDrawable.texture
67 | })
68 | do {
69 | try ciContext.startTask(toRender: resized, to: destination)
70 | } catch {
71 | print(error)
72 | }
73 | commandBuffer?.present(currentDrawable)
74 | commandBuffer?.commit()
75 | lock.signal()
76 | }
77 | }
78 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # BrazilNut
2 |
3 | Swift library for image/video processing based on Core Image.
4 |
5 | This library is highly inspired by GPUImage and BBMetalImage; the difference is that it is based on Core Image.
6 | So using this library makes it very easy to apply Apple's built-in CIFilters.
7 |
8 | # Requirements
9 |
10 | - iOS 13.0+
11 | - Swift 5
12 |
13 | # Installation
14 |
15 | Swift Package Manager
16 |
17 | ```
18 | https://github.com/hxperl/BrazilNut.git
19 | ```
20 |
21 | # Basic Concepts
22 |
23 | 1. Source -> Consumer
24 |
25 | 
26 |
27 | 2. Source -> Filter -> Consumer
28 |
29 | 
30 |
31 |
32 | # How to Create Filter & Apply
33 |
34 | ### 1. Open Camera
35 |
36 | ```swift
37 | var camera: Camera!
38 | var renderView: RenderView!
39 |
40 | override func viewDidLoad() {
41 | ...
42 | camera = try! Camera(sessionPreset: .hd1920x1080)
43 | camera.add(consumer: renderView)
44 | }
45 |
46 | override func viewWillAppear(_ animated: Bool) {
47 | ...
48 | camera.startCapture()
49 | }
50 | ```
51 |
52 | #### CURRENT Chain State
53 | camera --> renderView
54 |
55 | ### 2. Create a Filter Class
56 |
57 | Create a custom filter by inheriting the `ImageRelay` class;
58 | just override the `newImageAvailable` method.
59 | (This example shows how to use the CIFilters.)
60 |
61 | ```swift
62 | class MyFilter: ImageRelay {
63 | override func newImageAvailable(_ bnImage: BNImage, from source: ImageSource) {
64 | let inputImage = bnImage.image
65 | let inputType = bnImage.type
66 |
67 | let comic = CIFilter(name: "CIComicEffect")
68 | comic?.setValue(inputImage, forKey: kCIInputImageKey)
69 |
70 | if let outputImage = comic?.outputImage {
71 | /// Create a new BNImage
72 | let newImage = BNImage(image: outputImage, type: inputType)
73 | /// Deliver images to the next consumers
74 | for consumer in consumers {
75 | consumer.newImageAvailable(newImage, from: self)
76 | }
77 | }
78 | }
79 | }
80 | ```
81 |
82 | ### 3. Add chain to Camera
83 |
84 | Using `add(chain:)`, the chain state changes as follows.
85 |
86 | ```swift
87 | var myFilter = MyFilter()
88 | camera.add(chain: myFilter)
89 | ```
90 |
91 | #### CURRENT Chain State
92 | camera --> myFilter --> renderView
93 |
94 | ### 4. Remove chain from Camera
95 |
96 | Only the chain itself is removed and the rest of the chain is connected.
97 | ```swift
98 | myFilter.removeSelf()
99 | ```
100 |
101 | #### CURRENT Chain State
102 | camera --> renderView
--------------------------------------------------------------------------------
/Sources/BrazilNut/Filters/FilterC.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FilterC.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2020/01/07.
6 | // Copyright © 2020 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import CoreImage
10 |
11 | /// Color-grading filter: CIVibrance → CIHueAdjust → CIToneCurve →
12 | /// CIColorControls → CIColorPolynomial.
13 | public class FilterC: ImageRelay {
14 | 
15 |     override public func newImageAvailable(_ bnImage: BNImage, from source: ImageSource) {
16 |         let inputImage = bnImage.image
17 |         let inputType = bnImage.type
18 | 
19 |         // Reset parameters to their defaults BEFORE assigning the input; the
20 |         // original called setDefaults() afterwards, which per Apple's docs
21 |         // resets every defaulted parameter and can discard explicit settings.
22 |         let vibrance = CIFilter(name: "CIVibrance")
23 |         vibrance?.setDefaults()
24 |         vibrance?.setValue(inputImage, forKey: kCIInputImageKey)
25 | 
26 |         let hueAdjust = CIFilter(name: "CIHueAdjust")
27 |         hueAdjust?.setValue(vibrance?.outputImage, forKey: kCIInputImageKey)
28 |         hueAdjust?.setValue(CGFloat(0.1619718372821808), forKey: "inputAngle")
29 | 
30 |         let toneCurve = CIFilter(name: "CIToneCurve")
31 |         toneCurve?.setValue(hueAdjust?.outputImage, forKey: kCIInputImageKey)
32 |         toneCurve?.setValue(CIVector(x: 0.0241312, y: 0.124758), forKey: "inputPoint0")
33 |         toneCurve?.setValue(CIVector(x: 0.190154, y: 0.205996), forKey: "inputPoint1")
34 |         toneCurve?.setValue(CIVector(x: 0.427606, y: 0.383946), forKey: "inputPoint2")
35 |         toneCurve?.setValue(CIVector(x: 0.686293, y: 0.75), forKey: "inputPoint3")
36 |         toneCurve?.setValue(CIVector(x: 0.90444, y: 0.913926), forKey: "inputPoint4")
37 | 
38 |         let colorControls = CIFilter(name: "CIColorControls")
39 |         colorControls?.setValue(toneCurve?.outputImage, forKey: kCIInputImageKey)
40 |         colorControls?.setValue(CGFloat(0.7026603817939758), forKey: "inputSaturation")
41 |         colorControls?.setValue(CGFloat(1.080398917198181), forKey: "inputContrast")
42 |         colorControls?.setValue(CGFloat(0.04381836950778961), forKey: "inputBrightness")
43 | 
44 |         let colorPolynomial = CIFilter(name: "CIColorPolynomial")
45 |         colorPolynomial?.setValue(colorControls?.outputImage, forKey: kCIInputImageKey)
46 |         colorPolynomial?.setValue(CIVector(x: 0, y: 0.864929, z: 0.240476, w: 0.0592416), forKey: "inputAlphaCoefficients")
47 |         colorPolynomial?.setValue(CIVector(x: 0, y: 0.471564, z: 0.0547619, w: 0), forKey: "inputGreenCoefficients")
48 |         colorPolynomial?.setValue(CIVector(x: 0, y: 0.419431, z: 0.164286, w: 0), forKey: "inputRedCoefficients")
49 |         colorPolynomial?.setValue(CIVector(x: 0, y: 0.447867, z: 0.321428, w: 0), forKey: "inputBlueCoefficients")
50 | 
51 |         // Forward only when the chain produced an output image.
52 |         if let outputImage = colorPolynomial?.outputImage {
53 |             let newImage = BNImage(image: outputImage, type: inputType)
54 |             for consumer in consumers {
55 |                 consumer.newImageAvailable(newImage, from: self)
56 |             }
57 |         }
58 |     }
59 | }
54 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Filters/FilterA.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Filter1.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2020/01/07.
6 | // Copyright © 2020 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import CoreImage
10 |
11 | public class FilterA: ImageRelay {
12 | override public func newImageAvailable(_ bnImage: BNImage, from source: ImageSource) {
13 | let inputImage = bnImage.image
14 | let inputType = bnImage.type
15 | // Grading chain: tone curve → color controls → color polynomial → highlight/shadow → temperature & tint.
16 | let toneCurve = CIFilter(name: "CIToneCurve")
17 | toneCurve?.setValue(inputImage, forKey: kCIInputImageKey)
18 | toneCurve?.setValue(CIVector(x: 0.0395753, y: 0.124758), forKey: "inputPoint0")
19 | toneCurve?.setValue(CIVector(x: 0.17471, y: 0.205996), forKey: "inputPoint1")
20 | toneCurve?.setValue(CIVector(x: 0.340734, y: 0.443907), forKey: "inputPoint2")
21 | toneCurve?.setValue(CIVector(x: 0.593629, y: 0.75), forKey: "inputPoint3")
22 | toneCurve?.setValue(CIVector(x: 0.90444, y: 0.913926), forKey: "inputPoint4")
23 | // NOTE(review): named "colorClamp" but it is actually CIColorControls (saturation/brightness/contrast).
24 | let colorClamp = CIFilter(name: "CIColorControls")
25 | colorClamp?.setValue(toneCurve?.outputImage, forKey: kCIInputImageKey)
26 | colorClamp?.setValue(CGFloat(0.9937402009963989), forKey: "inputSaturation")
27 | colorClamp?.setValue(CGFloat(0.03755868598818779), forKey: "inputBrightness")
28 | colorClamp?.setValue(CGFloat(1.098004579544067), forKey: "inputContrast")
29 | // Per-channel polynomial remap of R, G, B and A.
30 | let colorPolynomial = CIFilter(name: "CIColorPolynomial")
31 | colorPolynomial?.setValue(colorClamp?.outputImage, forKey: kCIInputImageKey)
32 | colorPolynomial?.setValue(CIVector(x: 0, y: 0.727488, z: 0.25, w: 0.158768), forKey: "inputRedCoefficients")
33 | colorPolynomial?.setValue(CIVector(x: 0, y: 0.722749, z: 0.116667, w: 0.154028), forKey: "inputGreenCoefficients")
34 | colorPolynomial?.setValue(CIVector(x: 0, y: 0.874408, z: 0.435714, w: 0), forKey: "inputBlueCoefficients")
35 | colorPolynomial?.setValue(CIVector(x: 0, y: 0.912322, z: 0.0357141, w: 0), forKey: "inputAlphaCoefficients")
36 | // Shadow/highlight tweak before white-balance adjustment.
37 | let highlightShadowAdjust = CIFilter(name: "CIHighlightShadowAdjust")
38 | highlightShadowAdjust?.setValue(colorPolynomial?.outputImage, forKey: kCIInputImageKey)
39 | highlightShadowAdjust?.setValue(CGFloat(0.2723004817962646), forKey: "inputShadowAmount")
40 | highlightShadowAdjust?.setValue(CGFloat(0.8302034139633179), forKey: "inputHighlightAmount")
41 | highlightShadowAdjust?.setValue(CGFloat(0.5868544578552246), forKey: "inputRadius")
42 | // Shift white balance: neutral ~5776 K mapped toward target neutral ~4652 K.
43 | let temperatureAndTint = CIFilter(name: "CITemperatureAndTint")
44 | temperatureAndTint?.setValue(highlightShadowAdjust?.outputImage, forKey: kCIInputImageKey)
45 | temperatureAndTint?.setValue(CIVector(x: 4652.18, y: 8.99814), forKey: "inputTargetNeutral")
46 | temperatureAndTint?.setValue(CIVector(x: 5775.57, y: 0), forKey: "inputNeutral")
47 | // Forward only when the chain produced an output image.
48 | if let outputImage = temperatureAndTint?.outputImage {
49 | let newImage = BNImage(image: outputImage, type: inputType)
50 | for consumer in consumers {
51 | consumer.newImageAvailable(newImage, from: self)
52 | }
53 | }
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Filters/FilterD.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FilterD.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2020/01/07.
6 | // Copyright © 2020 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import CoreImage
10 |
11 | public class FilterD: ImageRelay {
12 | // Grading chain: highlight/shadow → color controls → tone curve → temperature & tint → color matrix.
13 | override public func newImageAvailable(_ bnImage: BNImage, from source: ImageSource) {
14 | let inputImage = bnImage.image
15 | let inputType = bnImage.type
16 | 
17 | let highlightShadowAdjust = CIFilter(name: "CIHighlightShadowAdjust")
18 | highlightShadowAdjust?.setValue(inputImage, forKey: kCIInputImageKey)
19 | highlightShadowAdjust?.setValue(CGFloat(0.5187793374061584), forKey: "inputHighlightAmount")
20 | highlightShadowAdjust?.setValue(CGFloat(3.638497591018677), forKey: "inputRadius")
21 | highlightShadowAdjust?.setValue(CGFloat(0.0594678707420826), forKey: "inputShadowAmount")
22 | 
23 | let colorControls = CIFilter(name: "CIColorControls")
24 | colorControls?.setValue(highlightShadowAdjust?.outputImage, forKey: kCIInputImageKey)
25 | colorControls?.setValue(CGFloat(0.9154929518699646), forKey: "inputSaturation")
26 | colorControls?.setValue(CGFloat(0.9982394576072693), forKey: "inputContrast")
27 | colorControls?.setValue(CGFloat(-0.02816901355981828), forKey: "inputBrightness")
28 | 
29 | let toneCurve = CIFilter(name: "CIToneCurve")
30 | toneCurve?.setValue(colorControls?.outputImage, forKey: kCIInputImageKey)
31 | toneCurve?.setValue(CIVector(x: 0.0723938, y: 0.0802708), forKey: "inputPoint0")
32 | toneCurve?.setValue(CIVector(x: 0.203668, y: 0.25), forKey: "inputPoint1")
33 | toneCurve?.setValue(CIVector(x: 0.464286, y: 0.509671), forKey: "inputPoint2")
34 | toneCurve?.setValue(CIVector(x: 0.722973, y: 0.766925), forKey: "inputPoint3")
35 | toneCurve?.setValue(CIVector(x: 0.929537, y: 1), forKey: "inputPoint4")
36 | // Shift white balance: neutral ~6312 K mapped toward target neutral ~5760 K.
37 | let temperatureAndTint = CIFilter(name: "CITemperatureAndTint")
38 | temperatureAndTint?.setValue(toneCurve?.outputImage, forKey: kCIInputImageKey)
39 | temperatureAndTint?.setValue(CIVector(x: 6311.775390625, y: 0), forKey: "inputNeutral")
40 | temperatureAndTint?.setValue(CIVector(x: 5759.65234375, y: 0), forKey: "inputTargetNeutral")
41 | 
42 | // Channel mix: each output channel is a weighted sum of R/G/B/A plus a zero bias.
43 | let colorMatrix = CIFilter(name: "CIColorMatrix")
44 | colorMatrix?.setValue(temperatureAndTint?.outputImage, forKey: kCIInputImageKey)
45 | colorMatrix?.setValue(CIVector(x: 0, y: 0, z: 0, w: 0), forKey: "inputBiasVector")
46 | colorMatrix?.setValue(CIVector(x: 0.0876776, y: 0.149289, z: 1, w: 0), forKey: "inputBVector")
47 | colorMatrix?.setValue(CIVector(x: 0.869668, y: 0.187204, z: 0, w: 0), forKey: "inputRVector")
48 | colorMatrix?.setValue(CIVector(x: 0, y: 0.902844, z: 0.159524, w: 0), forKey: "inputGVector")
49 | colorMatrix?.setValue(CIVector(x: 0, y: 0, z: 0, w: 0.93128), forKey: "inputAVector")
50 | // Forward only when the chain produced an output image.
51 | if let outputImage = colorMatrix?.outputImage {
52 | let newImage = BNImage(image: outputImage, type: inputType)
53 | for consumer in consumers {
54 | consumer.newImageAvailable(newImage, from: self)
55 | }
56 | }
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Filters/BaseCollageFilter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // BaseCollageFilter.swift
3 | // BrazilNut
4 | //
5 | // Created by 김지수 on 2020/04/10.
6 | //
7 |
8 | import CoreImage
9 |
/// Normalized layout rectangle: each component is a fraction (0...1) of the
/// output image's size, later scaled to pixels by the collage filter.
public typealias Rect = (x: CGFloat, y: CGFloat, w: CGFloat, h: CGFloat)
12 |
13 | open class BaseCollageFilter: ImageRelay{
14 | public let rects: [Rect]
15 |
16 | public init(rects: [Rect]) {
17 | self.rects = rects
18 | super.init()
19 | }
20 |
21 | override public func newImageAvailable(_ bnImage: BNImage, from source: ImageSource) {
22 | super.newImageAvailable(bnImage, from: source)
23 |
24 | let inputImage = bnImage.image
25 | let inputType = bnImage.type
26 | let originSize = inputImage.extent.size
27 |
28 | if _sources[0].source !== source { return }
29 |
30 | let outputRect = CGRect(x: 0, y: 0, width: originSize.width, height: originSize.height)
31 | let backgroundImage = CIFilter(name: "CIConstantColorGenerator", parameters: [kCIInputColorKey: CIColor(red:0, green: 0, blue: 0)])!.outputImage!.cropped(to: outputRect)
32 | var compositeImage = CIImage()
33 |
34 | for i in 0.. CIImage {
67 | var transformedImage: CIImage
68 | if rect.w == rect.h{
69 | let scaledImg = ciImage.transformed(by: CGAffineTransform(scaleX: rect.w, y: rect.h))
70 | transformedImage = scaledImg
71 | }else{
72 | let newRect = CGRect(x: ciImage.extent.minX,
73 | y: ciImage.extent.minY,
74 | width: ciImage.extent.width * rect.w,
75 | height: ciImage.extent.height * rect.h)
76 | let croppedImg = ciImage.cropped(to: newRect)
77 | transformedImage = croppedImg
78 | }
79 |
80 | return transformedImage
81 | }
82 |
83 | }
84 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Base/ImageRelay.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageRelay.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2020/02/15.
6 | // Copyright © 2020 Geonseok Lee. All rights reserved.
7 | //
8 | import Foundation
9 |
/// Node that is both an `ImageConsumer` and an `ImageSource`: it receives
/// frames from upstream sources and forwards them to downstream consumers.
/// Subclasses override `newImageAvailable(_:from:)` to transform the frame.
open class ImageRelay: ImageSource, ImageConsumer {

    /// Guards `_consumers` and `_sources`. Never call out to another
    /// object while holding it (their callbacks re-enter this class).
    let lock = DispatchSemaphore(value: 1)

    /// Thread-safe snapshot of the registered image consumers.
    public var consumers: [ImageConsumer] {
        lock.wait()
        let c = _consumers
        lock.signal()
        return c
    }
    private var _consumers: [ImageConsumer]

    /// Thread-safe snapshot of the upstream sources (held weakly).
    public var sources: [WeakImageSource] {
        lock.wait()
        let s = _sources
        lock.signal()
        return s
    }
    private(set) var _sources: [WeakImageSource]

    public init() {
        _consumers = []
        _sources = []
    }

    /// Records the latest image delivered by `source` on its matching
    /// `WeakImageSource` entry. Subclasses call through to keep the
    /// per-source image cache current, then emit to `consumers` themselves.
    open func newImageAvailable(_ bnImage: BNImage, from source: ImageSource) {
        lock.wait()
        for idx in 0..<_sources.count {
            if _sources[safe: idx]?.source === source {
                _sources[idx].ciImage = bnImage.image
            }
        }
        lock.signal()
    }

    // MARK: - ImageSource

    /// Appends `consumer` (re-adding moves it to the end) and registers
    /// `self` as one of its sources.
    @discardableResult
    public func add(consumer: T) -> T {
        remove(consumer: consumer)
        lock.wait()
        _consumers.append(consumer)
        lock.signal()
        consumer.add(source: self)
        return consumer
    }

    /// Inserts `consumer` at `index` (re-adding moves it) and registers
    /// `self` as one of its sources.
    public func add(consumer: ImageConsumer, at index: Int) {
        remove(consumer: consumer)
        lock.wait()
        _consumers.insert(consumer, at: index)
        lock.signal()
        consumer.add(source: self)
    }

    /// Splices `chain` between `self` and all current consumers:
    /// existing consumers are moved onto `chain`, then `chain` becomes
    /// this relay's only consumer.
    public func add(chain: ImageRelay) {
        for (idx, consumer) in consumers.enumerated() {
            chain.add(consumer: consumer, at: idx)
        }
        removeAllConsumers()
        add(consumer: chain, at: 0)
    }

    /// Detaches this relay from the graph, reconnecting its consumers
    /// directly to its sources.
    public func removeSelf() {
        // Fix: snapshot and clear `_sources` under the lock. The original
        // iterated the backing store and called `removeAll()` with no
        // synchronization, racing with add(source:)/remove(source:).
        // Callbacks are made only after releasing the lock to avoid
        // deadlocking on re-entrant remove(source:).
        lock.wait()
        let detachedSources = _sources
        _sources.removeAll()
        lock.signal()
        for wSource in detachedSources {
            wSource.source?.remove(consumer: self)
            for (idx, consumer) in consumers.enumerated() {
                wSource.source?.add(consumer: consumer, at: idx)
            }
        }
        removeAllConsumers()
    }

    /// Removes `consumer` if present and unregisters `self` from it.
    public func remove(consumer: ImageConsumer) {
        lock.wait()
        if let index = _consumers.firstIndex(where: { $0 === consumer }) {
            _consumers.remove(at: index)
            lock.signal()
            consumer.remove(source: self)
        } else {
            lock.signal()
        }
    }

    /// Removes every consumer, notifying each outside the lock.
    public func removeAllConsumers() {
        lock.wait()
        let consumers = _consumers
        _consumers.removeAll()
        lock.signal()
        for consumer in consumers {
            consumer.remove(source: self)
        }
    }

    // MARK: - ImageConsumer

    /// Registers `source` (deduplicated) as an upstream provider.
    public func add(source: ImageSource) {
        remove(source: source)
        lock.wait()
        _sources.append(WeakImageSource(source: source))
        lock.signal()
    }

    /// Unregisters `source` if present.
    public func remove(source: ImageSource) {
        lock.wait()
        if let index = _sources.firstIndex(where: { $0.source === source }) {
            _sources.remove(at: index)
        }
        lock.signal()
    }

}
123 |
public extension Collection {
    /// Bounds-checked subscript: the element at `index`, or nil when the
    /// index lies outside the collection.
    subscript (safe index: Index) -> Element? {
        guard indices.contains(index) else { return nil }
        return self[index]
    }
}
129 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Outputs/VideoWriter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MovieOutput.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2019/11/13.
6 | // Copyright © 2019 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import Photos
11 |
/* add Audio */
/// Sink for audio sample buffers produced alongside video frames.
/// Adopted by `VideoWriter`; driven by `Camera` / `VideoSource` when their
/// `audioEncodingTarget` property is set.
public protocol AudioEncodingTarget {
    /// Creates and attaches the audio writer input. Called once when a
    /// source's `audioEncodingTarget` is assigned.
    func activateAudioTrack()
    /// Appends one audio sample buffer to the output.
    func processAudioBuffer(_ sampleBuffer:CMSampleBuffer)
}

/// Progress callback from `VideoWriter` while recording.
/// NOTE(review): not class-bound (`: AnyObject`), so `VideoWriter.delegate`
/// cannot be declared `weak` — confirm conformers accept the strong reference.
public protocol VideoWriterDelegate {
    /// Reports the duration recorded so far (current frame time minus start).
    func currentDuration(duration: CMTime)
}
21 |
/// Writes incoming `BNImage` frames (and, optionally, audio sample buffers)
/// to a movie file via `AVAssetWriter`.
public class VideoWriter: ImageConsumer, AudioEncodingTarget {

    // MARK: - ImageConsumer plumbing (frames are pushed directly; nothing to track)

    public func add(source: ImageSource) {
        //
    }

    public func remove(source: ImageSource) {
        //
    }

    /// Core Image context on the shared Metal device; rasterizes each frame
    /// into the pixel buffer appended to the writer.
    private lazy var ciContext: CIContext = { [unowned self] in
        return CIContext(mtlDevice: MetalDevice.shared.device)
    }()

    /// Receives the running duration once per appended frame.
    /// NOTE(review): stored strongly because `VideoWriterDelegate` is not
    /// class-bound — watch for retain cycles at the call site.
    public var delegate: VideoWriterDelegate?

    let assetWriter:AVAssetWriter
    let assetWriterVideoInput:AVAssetWriterInput
    var assetWriterAudioInput:AVAssetWriterInput?

    let assetWriterPixelBufferInput:AVAssetWriterInputPixelBufferAdaptor
    /// Output video dimensions; every incoming frame is resized to this.
    let size:CGSize
    private var isRecording = false
    private var videoEncodingIsFinished = false
    private var audioEncodingIsFinished = false
    /// Presentation time of the first appended frame (session start time).
    private var startTime:CMTime?
    /// Presentation time of the last appended frame; used to drop duplicates.
    private var previousFrameTime = CMTime.negativeInfinity
    private var previousAudioTime = CMTime.negativeInfinity
    private var encodingLiveVideo:Bool
    // NOTE(review): appears unused — newImageAvailable creates a local buffer.
    var pixelBuffer:CVPixelBuffer? = nil

    /// Orientation transform written into the video track.
    var transform:CGAffineTransform {
        get {
            return assetWriterVideoInput.transform
        }
        set {
            assetWriterVideoInput.transform = newValue
        }
    }

    public var frameTime:CMTime? // add Current recording time

    /// Creates a writer targeting `URL` with H.264 video of the given size.
    /// - Throws: rethrows `AVAssetWriter` creation failures.
    public init(URL:Foundation.URL, size:CGSize, fileType:AVFileType = AVFileType.mov) throws {
        self.size = size
        assetWriter = try AVAssetWriter(url:URL, fileType:fileType)

        assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaType.video, outputSettings:[
            AVVideoCodecKey : AVVideoCodecType.h264,
            AVVideoWidthKey : size.width,
            AVVideoHeightKey : size.height,
            AVVideoCompressionPropertiesKey : [
                AVVideoAverageBitRateKey : 4166400,
            ],
        ])
        assetWriterVideoInput.expectsMediaDataInRealTime = true
        encodingLiveVideo = true

        let sourcePixelBufferSetting = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA, kCVPixelBufferWidthKey: size.width, kCVPixelBufferHeightKey: size.height ] as [String: Any]

        assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput:assetWriterVideoInput, sourcePixelBufferAttributes: sourcePixelBufferSetting)
        assetWriter.add(assetWriterVideoInput)

    }

    /// Starts the writer; frames arriving via `newImageAvailable` are
    /// recorded from the first frame's timestamp onward.
    public func startRecording(transform:CGAffineTransform? = nil) {
        if let transform = transform {
            assetWriterVideoInput.transform = transform
        }
        startTime = nil
        self.isRecording = self.assetWriter.startWriting()
    }

    /// Stops recording and finalizes the file.
    /// - Parameter completionCallback: receives the finished asset and the
    ///   session start time, or (nil, nil) when nothing was written.
    public func finishRecording(_ completionCallback:((AVAsset?, CMTime?) -> ())? = nil) {
        self.isRecording = false
        if (self.assetWriter.status == .completed || self.assetWriter.status == .cancelled || self.assetWriter.status == .unknown) {
            DispatchQueue.global().async{
                completionCallback?(nil, nil)
            }
            return
        }
        if ((self.assetWriter.status == .writing) && (!self.videoEncodingIsFinished)) {
            self.videoEncodingIsFinished = true
            self.assetWriterVideoInput.markAsFinished()
        }
        if ((self.assetWriter.status == .writing) && (!self.audioEncodingIsFinished)) {
            self.audioEncodingIsFinished = true
            self.assetWriterAudioInput?.markAsFinished()
        }

        if let callback = completionCallback {
            self.assetWriter.finishWriting {
                let url = self.assetWriter.outputURL
                let asset = AVAsset(url: url)
                callback(asset, self.startTime)
            }
        } else {
            self.assetWriter.finishWriting{}
        }
    }

    /// Renders the incoming frame into a pixel buffer and appends it to the
    /// video track, starting the writer session on the first frame.
    public func newImageAvailable(_ bnImage: BNImage, from source: ImageSource) {
        guard isRecording else { return }
        guard let frameTime = bnImage.type.timestamp,
            (frameTime != previousFrameTime) else { return }
        // Fix: record the timestamp so the duplicate-frame guard above can
        // ever fire; previously `previousFrameTime` was never updated and
        // stayed at .negativeInfinity for the whole recording.
        previousFrameTime = frameTime

        let resized = bnImage.image.transformToOrigin(withSize: self.size)

        if (startTime == nil) {
            if (assetWriter.status != .writing) {
                assetWriter.startWriting()
            }
            assetWriter.startSession(atSourceTime: frameTime)
            startTime = frameTime
        }
        self.frameTime = frameTime
        // TODO: Run the following on an internal movie recording dispatch queue, context
        guard (assetWriterVideoInput.isReadyForMoreMediaData || (!encodingLiveVideo)) else {
            debugPrint("Had to drop a frame at time \(frameTime)")
            return
        }

        let duration = CMTimeSubtract(frameTime, self.startTime!)
        delegate?.currentDuration(duration: duration)

        // NOTE(review): buffer is created as 32ARGB while the adaptor's pool
        // attributes request 32BGRA — confirm intended; kept as-is.
        let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] as CFDictionary
        var pixelBuffer : CVPixelBuffer?
        let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(resized.extent.width), Int(resized.extent.height), kCVPixelFormatType_32ARGB, attrs, &pixelBuffer)
        guard status == kCVReturnSuccess, let unwrappedPixelBuffer = pixelBuffer else {
            return
        }

        ciContext.render(resized, to: unwrappedPixelBuffer)

        CVPixelBufferLockBaseAddress(unwrappedPixelBuffer, [])

        if (!assetWriterPixelBufferInput.append(unwrappedPixelBuffer, withPresentationTime:frameTime)) {
            print("Problem appending pixel buffer at time: \(frameTime)")
        }

        CVPixelBufferUnlockBaseAddress(unwrappedPixelBuffer, [])
    }

    /* add audio */
    // MARK: -
    // MARK: Audio support

    /// Creates and attaches the AAC audio input. Called when a source's
    /// `audioEncodingTarget` is set to this writer.
    public func activateAudioTrack() {
        assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaType.audio, outputSettings:[
            AVFormatIDKey : kAudioFormatMPEG4AAC,
            AVNumberOfChannelsKey : 2,
            AVSampleRateKey : 44100.0,
            AVEncoderBitRateKey: 192000])
        assetWriter.add(assetWriterAudioInput!)
        assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo
    }

    /// Appends one audio sample buffer; silently drops it when the input is
    /// not ready while encoding live video.
    public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) {
        guard let assetWriterAudioInput = assetWriterAudioInput, (assetWriterAudioInput.isReadyForMoreMediaData || (!self.encodingLiveVideo)) else {
            return
        }

        if (!assetWriterAudioInput.append(sampleBuffer)) {
            print("Trouble appending audio sample buffer")
        }
    }

}
193 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Inputs/VideoSource.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VideoSource.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2020/02/03.
6 | // Copyright © 2020 Geonseok Lee. All rights reserved.
7 | //
8 | import AVFoundation
9 | import CoreImage
10 | import Combine
11 |
/// Called once per processed video frame with its presentation timestamp.
public typealias VideoSourceProgress = (CMTime) -> Void
/// Called when processing ends; true when all frames were read, false on
/// cancellation or error.
public typealias VideoSourceCompletion = (Bool) -> Void
14 |
/// Video source reading video frames from a file via `AVAssetReader` and
/// delivering them to consumers as Core Image-backed `BNImage`s; optionally
/// forwards matching audio sample buffers to `audioEncodingTarget`.
public class VideoSource: ImageSource {

    /// Thread-safe snapshot of the registered image consumers.
    public var consumers: [ImageConsumer] {
        lock.wait()
        let c = _consumers
        lock.signal()
        return c
    }
    private var _consumers: [ImageConsumer]
    private let url: URL
    /// Guards reader state and the consumer/rate fields.
    private let lock: DispatchSemaphore
    private var asset: AVAsset!
    private var assetReader: AVAssetReader!
    private var videoOutput: AVAssetReaderTrackOutput!
    private var audioOutput: AVAssetReaderTrackOutput!
    /// Audio buffer that arrived ahead of the video timeline; replayed once
    /// video catches up.
    private var lastAudioBuffer: CMSampleBuffer?

    let queue = DispatchQueue(label: "VideoProcessingQueue")
    /// Repeating-timer subscription driving the frame pump; cancelled when
    /// the reader is exhausted.
    var subscription: Cancellable?
    /// Audio consumer processing audio sample buffer.
    /// Set this property to nil (default value) if not processing audio.
    /// Set this property to a given audio consumer if processing audio.
    public var audioEncodingTarget: AudioEncodingTarget? {
        didSet {
            audioEncodingTarget?.activateAudioTrack()
        }
    }

    /// Whether to process video with the actual rate. False by default, meaning the
    /// processing speed is faster than the actual video rate.
    public var playWithVideoRate: Bool {
        get {
            lock.wait()
            let playRate = _playWithVideoRate
            lock.signal()
            return playRate
        }
        set {
            lock.wait()
            _playWithVideoRate = newValue
            lock.signal()
        }
    }
    private var _playWithVideoRate: Bool

    /// Timestamp of the last emitted frame (real-rate pacing only).
    private var lastSampleFrameTime: CMTime!
    /// Wall-clock time of the last emission (real-rate pacing only).
    private var lastActualPlayTime: Double!

    public init(url: URL) {
        self.url = url
        lock = DispatchSemaphore(value: 1)
        _consumers = []
        _playWithVideoRate = false
    }

    /// Starts reading and processing video frame
    ///
    /// - Parameter completion: a closure to call after processing;
    /// The parameter of closure is true if succeed processing all video frames,
    /// or false if fail to processing all the video frames (due to user cancel or error)
    public func start(progress: VideoSourceProgress? = nil, completion: VideoSourceCompletion? = nil) {
        lock.wait()
        let isReading = (assetReader != nil)
        lock.signal()
        if isReading {
            print("Should not call \(#function) while asset reader is reading")
            return
        }
        let asset = AVAsset(url: url)
        asset.loadValuesAsynchronously(forKeys: ["tracks"]) { [weak self] in
            guard let self = self else { return }
            if asset.statusOfValue(forKey: "tracks", error: nil) == .loaded,
                asset.tracks(withMediaType: .video).first != nil {
                DispatchQueue.global().async { [weak self] in
                    guard let self = self else { return }
                    self.lock.wait()
                    self.asset = asset
                    if self.prepareAssetReader() {
                        self.lock.signal()
                        self.processAsset(progress: progress, completion: completion)
                    } else {
                        self.reset()
                        self.lock.signal()
                    }
                }
            } else {
                self.safeReset()
            }
        }
    }

    /// Cancels reading and processing video frame
    public func cancel() {
        lock.wait()
        if let reader = assetReader,
            reader.status == .reading {
            reader.cancelReading()
            reset()
        }
        lock.signal()
    }

    /// `reset()` wrapped in the lock for external call sites.
    private func safeReset() {
        lock.wait()
        reset()
        lock.signal()
    }

    /// Clears all reader state. Caller must hold `lock`.
    private func reset() {
        asset = nil
        assetReader = nil
        videoOutput = nil
        audioOutput = nil
        lastAudioBuffer = nil
    }

    /// Builds the asset reader with a BGRA video output and, when an audio
    /// target is set, a linear-PCM audio output. Caller must hold `lock`.
    private func prepareAssetReader() -> Bool {
        guard let reader = try? AVAssetReader(asset: asset),
            let videoTrack = asset.tracks(withMediaType: .video).first else { return false }
        assetReader = reader
        videoOutput = AVAssetReaderTrackOutput(track: videoTrack,
                                               outputSettings: [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA])
        videoOutput.alwaysCopiesSampleData = false
        if !assetReader.canAdd(videoOutput) { return false }
        assetReader.add(videoOutput)

        if audioEncodingTarget != nil,
            let audioTrack = asset.tracks(withMediaType: .audio).first {
            audioOutput = AVAssetReaderTrackOutput(track: audioTrack,
                                                   outputSettings: [AVFormatIDKey: kAudioFormatLinearPCM])
            audioOutput.alwaysCopiesSampleData = false
            if !assetReader.canAdd(audioOutput) { return false }
            assetReader.add(audioOutput)
        }
        return true
    }

    /// Pumps frames on `queue` at ~1ms ticks until the reader is exhausted,
    /// pacing to real time when `playWithVideoRate` is set and interleaving
    /// audio buffers no further ahead than the video timeline.
    private func processAsset(progress: VideoSourceProgress?, completion: VideoSourceCompletion?) {
        lock.wait()
        guard let reader = assetReader,
            reader.status == .unknown,
            reader.startReading() else {
                reset()
                lock.signal()
                return
        }
        // Fix: snapshot the rate flag while still holding the lock; the
        // original read `_playWithVideoRate` after signal(), unsynchronized
        // with the `playWithVideoRate` setter.
        let useVideoRate = _playWithVideoRate
        lock.signal()
        // Read and process video buffer
        var sleepTime: Double = 0

        subscription = queue.schedule(after: queue.now, interval: .milliseconds(1)) { [weak self] in
            guard reader.status == .reading, let sampleBuffer = self?.videoOutput.copyNextSampleBuffer(),
                let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
                    self?.subscription?.cancel()
                    // Read and process the rest audio buffers
                    if let consumer = self?.audioEncodingTarget,
                        let audioBuffer = self?.lastAudioBuffer {
                        consumer.processAudioBuffer(audioBuffer)
                    }
                    while let consumer = self?.audioEncodingTarget,
                        reader.status == .reading,
                        self?.audioOutput != nil,
                        let audioBuffer = self?.audioOutput.copyNextSampleBuffer() {
                            consumer.processAudioBuffer(audioBuffer)
                    }
                    var finish = false
                    if self?.assetReader != nil {
                        self?.reset()
                        finish = true
                    }
                    completion?(finish)
                    return
            }
            let sampleFrameTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
            if useVideoRate {
                if let lastFrameTime = self?.lastSampleFrameTime,
                    let lastPlayTime = self?.lastActualPlayTime {
                    let detalFrameTime = CMTimeGetSeconds(CMTimeSubtract(sampleFrameTime, lastFrameTime))
                    let detalPlayTime = CACurrentMediaTime() - lastPlayTime
                    if detalFrameTime > detalPlayTime {
                        sleepTime = detalFrameTime - detalPlayTime
                        usleep(UInt32(1000000 * sleepTime))
                    } else {
                        sleepTime = 0
                    }
                }
                self?.lastSampleFrameTime = sampleFrameTime
                self?.lastActualPlayTime = CACurrentMediaTime()
            }

            // Read and process audio buffer
            // Let video buffer go faster than audio buffer
            // Make sure audio and video buffer have similar output presentation timestamp
            var currentAudioBuffer: CMSampleBuffer?
            if self?.audioEncodingTarget != nil {
                if let last = self?.lastAudioBuffer,
                    CMTimeCompare(CMSampleBufferGetOutputPresentationTimeStamp(last), sampleFrameTime) <= 0 {
                    // Process audio buffer
                    currentAudioBuffer = last
                    self?.lastAudioBuffer = nil

                } else if self?.lastAudioBuffer == nil,
                    self?.audioOutput != nil,
                    let audioBuffer = self?.audioOutput.copyNextSampleBuffer() {
                    if CMTimeCompare(CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer), sampleFrameTime) <= 0 {
                        // Process audio buffer
                        currentAudioBuffer = audioBuffer
                    } else {
                        // Audio buffer goes faster than video
                        // Process audio buffer later
                        self?.lastAudioBuffer = audioBuffer
                    }
                }
            }
            // Fix: the original force-unwrapped `self!` here, crashing if the
            // source was deallocated mid-tick; bind safely instead.
            guard let self = self else { return }
            let bnImage = BNImage(image: CIImage(cvPixelBuffer: imageBuffer), type: .videoFrame(timestamp: sampleFrameTime))
            for consumer in self.consumers {
                consumer.newImageAvailable(bnImage, from: self)
            }
            if let audioBuffer = currentAudioBuffer { self.audioEncodingTarget?.processAudioBuffer(audioBuffer) }
            progress?(sampleFrameTime)
        }
    }

    /// Appends `consumer` (re-adding moves it to the end).
    @discardableResult
    public func add(consumer: T) -> T {
        remove(consumer: consumer)
        lock.wait()
        _consumers.append(consumer)
        lock.signal()
        consumer.add(source: self)
        return consumer
    }

    /// Inserts `consumer` at `index` (re-adding moves it).
    public func add(consumer: ImageConsumer, at index: Int) {
        remove(consumer: consumer)
        lock.wait()
        _consumers.insert(consumer, at: index)
        lock.signal()
        consumer.add(source: self)
    }

    /// Splices `chain` between this source and all current consumers.
    public func add(chain: ImageRelay) {
        for (idx, consumer) in consumers.enumerated() {
            chain.add(consumer: consumer, at: idx)
        }
        removeAllConsumers()
        add(consumer: chain, at: 0)
    }

    /// Removes `consumer` if present and unregisters this source from it.
    public func remove(consumer: ImageConsumer) {
        lock.wait()
        if let index = _consumers.firstIndex(where: { $0 === consumer }) {
            _consumers.remove(at: index)
            lock.signal()
            consumer.remove(source: self)
        } else {
            lock.signal()
        }
    }

    /// Removes every consumer, notifying each outside the lock.
    public func removeAllConsumers() {
        lock.wait()
        let consumers = _consumers
        _consumers.removeAll()
        lock.signal()
        for consumer in consumers {
            consumer.remove(source: self)
        }
    }
}
288 |
--------------------------------------------------------------------------------
/Sources/BrazilNut/Inputs/Camera.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Camera.swift
3 | // BrazilNut
4 | //
5 | // Created by Geonseok Lee on 2019/11/13.
6 | // Copyright © 2019 Geonseok Lee. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 | import CoreImage
11 |
/// Live-camera image source built on `AVCaptureSession`: captures video
/// frames, photos, and (optionally) audio, and forwards frames to consumers.
public class Camera: NSObject, ImageSource {

    /// Guards `_consumers`.
    private let lock = DispatchSemaphore(value: 1)

    /// Thread-safe snapshot of the registered image consumers.
    public var consumers: [ImageConsumer] {
        lock.wait()
        let c = _consumers
        lock.signal()
        return c
    }
    private var _consumers: [ImageConsumer]

    // capture session
    var captureSession: AVCaptureSession?

    // Processing Queue
    let cameraProcessingQueue = DispatchQueue.global()
    let audioProcessingQueue = DispatchQueue.global()
    let cameraFrameProcessingQueue = DispatchQueue(label: "cameraFrameProcessingQueue")
    let cameraPhotoProcessingQueue = DispatchQueue(label: "cameraPhotoProcessingQueue")

    // Device handles and their session inputs. The "active" front/back
    // camera is tracked by `frontCamera`/`backCamera`; the wide and
    // ultra-wide variants are swapped in by setWideAngle()/setUltraWideAngle().
    var audioDevice: AVCaptureDevice?
    var audioDeviceInput: AVCaptureDeviceInput?

    var frontCamera: AVCaptureDevice?
    var frontCameraInput: AVCaptureDeviceInput?

    var frontWideCamera: AVCaptureDevice?
    var frontWideCameraInput: AVCaptureDeviceInput?

    var frontUltraWideCamera: AVCaptureDevice?
    var frontUltraWideCameraInput: AVCaptureDeviceInput?

    var backCamera: AVCaptureDevice?
    var backCameraInput: AVCaptureDeviceInput?

    var backWideCamera: AVCaptureDevice?
    var backWideCameraInput: AVCaptureDeviceInput?

    var backUltraWideCamera: AVCaptureDevice?
    var backUltraWideCameraInput: AVCaptureDeviceInput?

    /// True when the currently-facing side has an ultra-wide device.
    public var isUltraWideAngleSupported: Bool {
        if self.cameraPosition == .back {
            return self.backUltraWideCamera != nil
        }
        return self.frontUltraWideCamera != nil
    }

    // Output
    var videoDataOutput: AVCaptureVideoDataOutput?
    var photoOutput: AVCapturePhotoOutput?
    var audioOutput: AVCaptureAudioDataOutput?

    // Pinch-zoom clamping state (used by changeZoom(scale:)).
    let minimumZoom: CGFloat = 1
    let maximumZoom: CGFloat = 3
    var lastZoomFactor: CGFloat = 1

    /// Sink for captured audio; setting it activates the audio track.
    public var audioEncodingTarget: AudioEncodingTarget? {
        didSet {
            audioEncodingTarget?.activateAudioTrack()
        }
    }

    let sessionPreset: AVCaptureSession.Preset
    var cameraPosition: AVCaptureDevice.Position
    private var orientation: AVCaptureVideoOrientation

    /// Builds and configures the full capture pipeline.
    /// NOTE(review): the `createCaptureSession`/`configure*` helpers are
    /// defined elsewhere in this file; `commitConfiguration()` here implies
    /// one of them calls `beginConfiguration()` — confirm.
    /// - Throws: rethrows configuration errors from the helpers.
    public init(
        sessionPreset: AVCaptureSession.Preset,
        position: AVCaptureDevice.Position = .back,
        orientation: AVCaptureVideoOrientation = .portrait) throws {
        self.sessionPreset = sessionPreset
        self.cameraPosition = position
        self.orientation = orientation
        _consumers = []
        super.init()
        createCaptureSession()
        try configureCaptureDevices()
        try configureDeviceInputs()
        try configureFrameOutput()
        try configureAudioOutput()
        try configurePhotoOutput()
        self.captureSession?.commitConfiguration()
    }

    /// Starts the session if not already running.
    /// NOTE(review): `startRunning()` blocks; consider calling off the main thread.
    public func startCapture() {
        if let session = self.captureSession, !session.isRunning {
            session.startRunning()
        }
    }

    /// Stops the session if running.
    public func stopCapture() {
        if let session = self.captureSession, session.isRunning {
            session.stopRunning()
        }
    }
}
112 |
113 | // MARK: - Public Methods
114 | extension Camera {
115 |
116 | public func setUltraWideAngle() throws {
117 | guard let captureSession = self.captureSession,
118 | captureSession.isRunning else { throw CameraError.captureSessionIsMissing }
119 | captureSession.beginConfiguration()
120 | defer {
121 | captureSession.commitConfiguration()
122 | }
123 |
124 | func switchToUltraWideBack() throws {
125 | guard let backCameraInput = self.backCameraInput, captureSession.inputs.contains(backCameraInput),
126 | let backUltraWideCamera = self.backUltraWideCamera else { throw CameraError.inputsAreInvalid }
127 |
128 | self.backUltraWideCameraInput = try AVCaptureDeviceInput(device: backUltraWideCamera)
129 |
130 | captureSession.removeInput(backCameraInput)
131 |
132 | if captureSession.canAddInput(self.backUltraWideCameraInput!) {
133 | captureSession.addInput(self.backUltraWideCameraInput!)
134 |
135 | self.backCamera = self.backUltraWideCamera
136 | self.backCameraInput = self.backUltraWideCameraInput
137 | captureSession.outputs.first?.connections.first?.videoOrientation = self.orientation
138 | captureSession.outputs.first?.connections.first?.isVideoMirrored = false
139 | } else {
140 | throw CameraError.invalidOperation
141 | }
142 |
143 | }
144 |
145 | func switchToUltraWideFront() throws {
146 | guard let frontCameraInput = self.frontCameraInput, captureSession.inputs.contains(frontCameraInput),
147 | let frontUltraWideCamera = self.frontUltraWideCamera else { throw CameraError.inputsAreInvalid }
148 | self.frontUltraWideCameraInput = try AVCaptureDeviceInput(device: frontUltraWideCamera)
149 | captureSession.removeInput(frontCameraInput)
150 | if captureSession.canAddInput(self.frontUltraWideCameraInput!) {
151 | captureSession.addInput(self.frontUltraWideCameraInput!)
152 | self.frontCamera = self.frontUltraWideCamera
153 | self.frontCameraInput = self.frontUltraWideCameraInput
154 | } else {
155 | throw CameraError.invalidOperation
156 | }
157 | }
158 |
159 | switch cameraPosition {
160 | case .back:
161 | try switchToUltraWideBack()
162 | case .front:
163 | try switchToUltraWideFront()
164 | default:
165 | break
166 | }
167 | }
168 |
169 | public func setWideAngle() throws {
170 | guard let captureSession = self.captureSession,
171 | captureSession.isRunning else { throw CameraError.captureSessionIsMissing }
172 | captureSession.beginConfiguration()
173 | defer {
174 | captureSession.commitConfiguration()
175 | }
176 |
177 | func switchToWideBack() throws {
178 | guard let backCameraInput = self.backCameraInput, captureSession.inputs.contains(backCameraInput),
179 | let backWideCamera = self.backWideCamera else { throw CameraError.inputsAreInvalid }
180 |
181 | self.backWideCameraInput = try AVCaptureDeviceInput(device: backWideCamera)
182 |
183 | captureSession.removeInput(backCameraInput)
184 |
185 | if captureSession.canAddInput(self.backWideCameraInput!) {
186 | captureSession.addInput(self.backWideCameraInput!)
187 |
188 | self.backCamera = self.backWideCamera
189 | self.backCameraInput = self.backWideCameraInput
190 | captureSession.outputs.first?.connections.first?.videoOrientation = self.orientation
191 | captureSession.outputs.first?.connections.first?.isVideoMirrored = false
192 | } else {
193 | throw CameraError.invalidOperation
194 | }
195 |
196 | }
197 |
198 | func switchToWideFront() throws {
199 | guard let frontCameraInput = self.frontCameraInput, captureSession.inputs.contains(frontCameraInput),
200 | let frontWideCamera = self.frontWideCamera else { throw CameraError.inputsAreInvalid }
201 |
202 | self.frontWideCameraInput = try AVCaptureDeviceInput(device: frontWideCamera)
203 |
204 | captureSession.removeInput(frontCameraInput)
205 |
206 | if captureSession.canAddInput(self.frontWideCameraInput!) {
207 | captureSession.addInput(self.frontWideCameraInput!)
208 |
209 | self.frontCamera = self.frontWideCamera
210 | self.frontCameraInput = self.frontWideCameraInput
211 | captureSession.outputs.first?.connections.first?.videoOrientation = self.orientation
212 | captureSession.outputs.first?.connections.first?.isVideoMirrored = true
213 | } else {
214 | throw CameraError.invalidOperation
215 | }
216 |
217 | }
218 |
219 | switch cameraPosition {
220 | case .back:
221 | try switchToWideBack()
222 | case .front:
223 | try switchToWideFront()
224 | default:
225 | break
226 | }
227 | }
228 |
229 | public func takePhoto(with settings: AVCapturePhotoSettings? = nil) {
230 | let settings = settings ??
231 | AVCapturePhotoSettings(format: [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA])
232 | photoOutput?.capturePhoto(with: settings, delegate: self)
233 | }
234 |
235 | public func switchCameras() throws {
236 | guard let captureSession = self.captureSession,
237 | captureSession.isRunning else { throw CameraError.captureSessionIsMissing }
238 | captureSession.beginConfiguration()
239 | func switchToFrontCamera() throws {
240 | guard let rearCameraInput = self.backCameraInput, captureSession.inputs.contains(rearCameraInput),
241 | let frontCamera = self.frontCamera else { throw CameraError.invalidOperation }
242 | self.frontCameraInput = try AVCaptureDeviceInput(device: frontCamera)
243 | captureSession.removeInput(rearCameraInput)
244 | if captureSession.canAddInput(self.frontCameraInput!) {
245 | captureSession.addInput(self.frontCameraInput!)
246 | self.cameraPosition = .front
247 | captureSession.outputs.first?.connections.first?.videoOrientation = self.orientation
248 | captureSession.outputs.first?.connections.first?.isVideoMirrored = true
249 | } else {
250 | throw CameraError.invalidOperation
251 | }
252 | }
253 |
254 | func switchToRearCamera() throws {
255 | guard let frontCameraInput = self.frontCameraInput, captureSession.inputs.contains(frontCameraInput),
256 | let backCamera = self.backCamera else { throw CameraError.invalidOperation }
257 | self.backCameraInput = try AVCaptureDeviceInput(device: backCamera)
258 | captureSession.removeInput(frontCameraInput)
259 | if captureSession.canAddInput(self.backCameraInput!) {
260 | captureSession.addInput(self.backCameraInput!)
261 | self.cameraPosition = .back
262 | captureSession.outputs.first?.connections.first?.videoOrientation = self.orientation
263 | captureSession.outputs.first?.connections.first?.isVideoMirrored = false
264 | } else { throw CameraError.invalidOperation }
265 | }
266 |
267 | switch cameraPosition {
268 | case .front:
269 | try switchToRearCamera()
270 | case .back:
271 | try switchToFrontCamera()
272 | default:
273 | break
274 | }
275 | captureSession.commitConfiguration()
276 | }
277 |
278 | public func setFocusPoint(point: CGPoint) throws {
279 |
280 | guard let device = cameraPosition == .back ? self.backCamera : self.frontCamera else { return }
281 | try device.lockForConfiguration()
282 | defer {
283 | device.unlockForConfiguration()
284 | }
285 | if device.isFocusPointOfInterestSupported {
286 | device.focusPointOfInterest = point
287 | device.focusMode = .autoFocus
288 | }
289 |
290 | if device.isExposurePointOfInterestSupported {
291 | device.exposurePointOfInterest = point
292 | device.exposureMode = .continuousAutoExposure
293 | }
294 | }
295 |
296 | public func currentPosition() -> AVCaptureDevice.Position {
297 | return self.cameraPosition
298 | }
299 |
300 | public func changeZoom(scale: CGFloat) throws {
301 | guard let captureSession = self.captureSession, captureSession.isRunning else { throw CameraError.captureSessionIsMissing }
302 |
303 | func setZoomFactor(device: AVCaptureDevice) {
304 |
305 | func minMaxZoom(_ factor: CGFloat) -> CGFloat {
306 | return min(min(max(factor, minimumZoom), maximumZoom), device.activeFormat.videoMaxZoomFactor)
307 | }
308 |
309 | let newScaleFactor = minMaxZoom(scale * lastZoomFactor)
310 |
311 | do {
312 | try device.lockForConfiguration()
313 | defer { device.unlockForConfiguration() }
314 | device.videoZoomFactor = newScaleFactor
315 | } catch {
316 | print(error)
317 | }
318 |
319 | lastZoomFactor = minMaxZoom(newScaleFactor)
320 | }
321 |
322 | switch cameraPosition {
323 | case .front:
324 | if let device = self.frontCamera {
325 | setZoomFactor(device: device)
326 | }
327 | case .back:
328 | if let device = self.backCamera {
329 | setZoomFactor(device: device)
330 | }
331 | default:
332 | break
333 | }
334 | }
335 |
336 | public func changeBrightness(value: CGFloat) throws {
337 | guard let captureSession = self.captureSession, captureSession.isRunning else { throw CameraError.captureSessionIsMissing }
338 |
339 | func setBrightnessValue(device: AVCaptureDevice) {
340 | var newBias: Float = 0
341 | let minBias = device.minExposureTargetBias
342 | let maxBias = device.maxExposureTargetBias
343 | let range = maxBias - minBias
344 | let el = range / 100
345 |
346 | newBias = minBias + el * (Float(value) * 100)
347 |
348 | do {
349 | try device.lockForConfiguration()
350 | defer { device.unlockForConfiguration() }
351 | device.setExposureTargetBias(newBias, completionHandler: nil)
352 | }catch {
353 | print(error)
354 | }
355 | }
356 |
357 | switch cameraPosition {
358 | case .front:
359 | if let device = self.frontCamera {
360 | setBrightnessValue(device: device)
361 | }
362 | case .back:
363 | if let device = self.backCamera {
364 | setBrightnessValue(device: device)
365 | }
366 | default:
367 | break
368 | }
369 |
370 | }
371 |
372 |
373 | @discardableResult
374 | public func add(consumer: T) -> T {
375 | remove(consumer: consumer)
376 | lock.wait()
377 | _consumers.append(consumer)
378 | lock.signal()
379 | consumer.add(source: self)
380 | return consumer
381 | }
382 |
383 | public func add(consumer: ImageConsumer, at index: Int) {
384 | remove(consumer: consumer)
385 | lock.wait()
386 | _consumers.insert(consumer, at: index)
387 | lock.signal()
388 | consumer.add(source: self)
389 | }
390 |
391 | public func add(chain: ImageRelay) {
392 | for (idx, consumer) in consumers.enumerated() {
393 | chain.add(consumer: consumer, at: idx)
394 | }
395 | removeAllConsumers()
396 | add(consumer: chain, at: 0)
397 | }
398 |
399 | public func remove(consumer: ImageConsumer) {
400 | lock.wait()
401 | if let index = _consumers.firstIndex(where: { $0 === consumer }) {
402 | _consumers.remove(at: index)
403 | lock.signal()
404 | consumer.remove(source: self)
405 | } else {
406 | lock.signal()
407 | }
408 | }
409 |
410 | public func removeAllConsumers() {
411 | lock.wait()
412 | let consumers = _consumers
413 | _consumers.removeAll()
414 | lock.signal()
415 | for consumer in consumers {
416 | consumer.remove(source: self)
417 | }
418 | }
419 | }
420 |
421 |
// MARK: - Private Methods
extension Camera {
    /// Creates the capture session, applies the configured preset and opens a
    /// configuration transaction. The matching `commitConfiguration()` is
    /// expected later in the setup sequence (not visible in this extension) —
    /// `beginConfiguration()` must always be paired with a commit.
    private func createCaptureSession() {
        self.captureSession = AVCaptureSession()
        self.captureSession?.sessionPreset = self.sessionPreset
        self.captureSession?.beginConfiguration()
    }

    /// Discovers the built-in wide and ultra-wide cameras for both positions
    /// plus the built-in microphone, and selects the wide cameras as the
    /// initial front/back devices.
    /// - Throws: `CameraError.noCamerasAvailable` when no video device exists.
    private func configureCaptureDevices() throws {
        let cameraSession = AVCaptureDevice.DiscoverySession(
            deviceTypes: [.builtInWideAngleCamera, .builtInUltraWideCamera],
            mediaType: .video,
            position: .unspecified)

        // `devices` is a non-optional [AVCaptureDevice]; the previous
        // `compactMap { $0 }` was a no-op and has been removed.
        let cameras = cameraSession.devices
        guard !cameras.isEmpty else { throw CameraError.noCamerasAvailable }

        self.frontWideCamera = cameras.first { $0.position == .front && $0.deviceType == .builtInWideAngleCamera }
        self.frontUltraWideCamera = cameras.first { $0.position == .front && $0.deviceType == .builtInUltraWideCamera }
        self.backWideCamera = cameras.first { $0.position == .back && $0.deviceType == .builtInWideAngleCamera }
        self.backUltraWideCamera = cameras.first { $0.position == .back && $0.deviceType == .builtInUltraWideCamera }

        // The wide-angle lenses are the defaults; the ultra-wide variants are
        // swapped in by the switchToWide… helpers.
        self.frontCamera = frontWideCamera
        self.backCamera = backWideCamera

        let audioSession = AVCaptureDevice.DiscoverySession(
            deviceTypes: [.builtInMicrophone],
            mediaType: .audio,
            position: .unspecified)
        self.audioDevice = audioSession.devices.first
    }

    /// Attaches the device input matching `cameraPosition`, plus the
    /// microphone input when one is available.
    /// - Throws: `CameraError.captureSessionIsMissing`; `.inputsAreInvalid`
    ///   when an input cannot be added; `.noCamerasAvailable` when the
    ///   required camera is absent.
    private func configureDeviceInputs() throws {
        guard let captureSession = self.captureSession else { throw CameraError.captureSessionIsMissing }

        if self.cameraPosition == .back, let backCamera = self.backCamera {
            self.backCameraInput = try AVCaptureDeviceInput(device: backCamera)

            if captureSession.canAddInput(self.backCameraInput!) { captureSession.addInput(self.backCameraInput!) }
            else { throw CameraError.inputsAreInvalid }
        } else if self.cameraPosition == .front, let frontCamera = self.frontCamera {
            self.frontCameraInput = try AVCaptureDeviceInput(device: frontCamera)

            if captureSession.canAddInput(self.frontCameraInput!) { captureSession.addInput(self.frontCameraInput!) }
            else { throw CameraError.inputsAreInvalid }
        } else {
            throw CameraError.noCamerasAvailable
        }

        // Audio is best-effort: failure to add the microphone input is not fatal.
        if let audioDevice = self.audioDevice {
            self.audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)

            if captureSession.canAddInput(self.audioDeviceInput!) {
                captureSession.addInput(self.audioDeviceInput!)
            }
        }
    }

    /// Adds the video-frame output and configures its connection's
    /// orientation and mirroring. Returns silently (best effort, preserving
    /// the original behaviour) when the output or connection cannot be
    /// configured.
    private func configureFrameOutput() throws {
        guard let captureSession = self.captureSession else { throw CameraError.captureSessionIsMissing }

        videoDataOutput = AVCaptureVideoDataOutput()
        videoDataOutput?.alwaysDiscardsLateVideoFrames = true
        videoDataOutput?.setSampleBufferDelegate(self, queue: cameraProcessingQueue)
        guard captureSession.canAddOutput(videoDataOutput!) else { return }
        captureSession.addOutput(videoDataOutput!)
        guard let connection = videoDataOutput?.connection(with: .video),
              connection.isVideoOrientationSupported,
              connection.isVideoMirroringSupported else { return }
        connection.videoOrientation = self.orientation
        // Mirror the stream only for the front camera.
        connection.isVideoMirrored = cameraPosition == .front
    }

    /// Adds the audio sample-buffer output.
    private func configureAudioOutput() throws {
        guard let captureSession = self.captureSession else { throw CameraError.captureSessionIsMissing }

        audioOutput = AVCaptureAudioDataOutput()
        audioOutput?.setSampleBufferDelegate(self, queue: audioProcessingQueue)
        // FIX: the previous call to
        // recommendedAudioSettingsForAssetWriter(writingTo: .mov) discarded
        // its return value, making it a no-op; it has been removed. If writer
        // settings are needed they must be passed to the asset writer input.
        guard captureSession.canAddOutput(audioOutput!) else { return }
        captureSession.addOutput(audioOutput!)
    }

    /// Adds the still-photo output.
    private func configurePhotoOutput() throws {
        guard let captureSession = self.captureSession else { throw CameraError.captureSessionIsMissing }
        photoOutput = AVCapturePhotoOutput()
        guard captureSession.canAddOutput(photoOutput!) else { return }
        captureSession.addOutput(photoOutput!)
        // Photos are always delivered portrait-oriented, unlike the video
        // connection which follows `self.orientation` — presumably deliberate;
        // TODO confirm with the photo delegate's fixed `.right` rotation.
        photoOutput?.connection(with: .video)?.videoOrientation = .portrait
    }
}
514 |
extension Camera: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    /// Routes captured sample buffers: video frames are wrapped in `BNImage`
    /// values and fanned out to all consumers on the frame-processing queue;
    /// audio buffers are forwarded to the audio encoding target.
    public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if output == audioOutput {
            self.processAudioSampleBuffer(sampleBuffer)
            return
        }
        guard output == videoDataOutput,
              let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        cameraFrameProcessingQueue.async { [weak self] in
            guard let self = self else { return }
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            // Each consumer receives its own BNImage wrapper around the
            // shared CIImage, matching the original per-consumer construction.
            self.consumers.forEach {
                $0.newImageAvailable(BNImage(image: ciImage, type: .videoFrame(timestamp: timestamp)), from: self)
            }
        }
    }

    /// Forwards an audio sample buffer to the configured encoding target.
    public func processAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
        audioEncodingTarget?.processAudioBuffer(sampleBuffer)
    }
}
537 |
extension Camera: AVCapturePhotoCaptureDelegate {
    /// Receives a captured still photo, rotates it upright, wraps it in a
    /// `BNImage` of type `.photo` and fans it out to all consumers on the
    /// photo-processing queue. Errors and missing buffers are ignored.
    public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        guard error == nil,
              let sampleBuffer = photoSampleBuffer,
              let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        cameraPhotoProcessingQueue.async { [weak self] in
            guard let self = self else { return }
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            // Rotate right to compensate for the sensor's native orientation
            // — presumably paired with the photo connection's fixed
            // `.portrait` orientation; TODO confirm.
            let upright = ciImage.transformed(by: ciImage.orientationTransform(for: .right))
            for consumer in self.consumers {
                consumer.newImageAvailable(BNImage(image: upright, type: .photo), from: self)
            }
        }
    }
}
557 |
public extension Camera {
    /// Errors thrown by the camera setup and control methods.
    enum CameraError: Error {
        /// The session was already running when a start was attempted.
        case captureSessionAlreadyRunning
        /// No capture session exists, or it is not currently running.
        case captureSessionIsMissing
        /// A device input could not be created or added to the session.
        case inputsAreInvalid
        /// The requested operation is not valid for the current camera state
        /// (e.g. switching cameras when the expected input is absent).
        case invalidOperation
        /// Device discovery found no usable cameras.
        case noCamerasAvailable
        /// An unclassified failure.
        case unknown
    }
}
568 |
569 |
--------------------------------------------------------------------------------