├── .gitignore
├── .swiftpm
└── xcode
│ ├── package.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ │ └── IDEWorkspaceChecks.plist
│ └── xcshareddata
│ └── xcschemes
│ └── FlipBook.xcscheme
├── Sources
└── FlipBook
│ ├── Rect.swift
│ ├── Screen.swift
│ ├── Image.swift
│ ├── View.swift
│ ├── FlipBookGIFWriter.swift
│ ├── FlipBookCoreAnimationVideoEditor.swift
│ ├── RPScreenWriter.swift
│ ├── FlipBook.swift
│ ├── FlipBookLivePhotoWriter.swift
│ └── FlipBookAssetWriter.swift
├── Tests
├── LinuxMain.swift
└── FlipBookTests
│ ├── XCTestManifests.swift
│ ├── RPScreenWriterUnitTests.swift
│ ├── FlipBookCoreAnimationVideoEditorUnitTests.swift
│ ├── FlipBookGIFWriterUnitTests.swift
│ ├── FlipBookUnitTests.swift
│ ├── FlipBookLivePhotoWriterUnitTests.swift
│ └── FlipBookAssetWriterUnitTests.swift
├── LICENSE
├── Package.swift
└── README.md
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | /.build
3 | /Packages
4 | /*.xcodeproj
5 | xcuserdata/
6 |
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/Sources/FlipBook/Rect.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Rect.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 1/24/20.
6 | //
7 |
// Platform-neutral rectangle alias so shared FlipBook code can use a single
// `Rect` type on every platform.
#if os(OSX)
import AppKit
// AppKit's native rectangle type.
public typealias Rect = NSRect

#else
import UIKit
// UIKit code works directly in Core Graphics rectangles.
public typealias Rect = CGRect
#endif
16 |
--------------------------------------------------------------------------------
/Tests/LinuxMain.swift:
--------------------------------------------------------------------------------
import XCTest

import FlipBookTests

// Explicit test registry for `swift test` on platforms where XCTest cannot
// discover test cases through the Objective-C runtime.
// NOTE(review): FlipBookCoreAnimationVideoEditorUnitTests declares an
// `allTests` manifest (as a property, not an `allTests()` function like the
// suites below) but is not registered here — confirm whether the omission is
// intentional.
var tests = [XCTestCaseEntry]()
tests += FlipBookUnitTests.allTests()
tests += FlipBookAssetWriterUnitTests.allTests()
tests += FlipBookGIFWriterUnitTests.allTests()
XCTMain(tests)
10 |
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEDidComputeMac32BitWarning
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/Tests/FlipBookTests/XCTestManifests.swift:
--------------------------------------------------------------------------------
import XCTest

#if !canImport(ObjectiveC)
/// Aggregates every cross-platform test case for runners (e.g. Linux) that
/// cannot discover XCTest cases via the Objective-C runtime.
///
/// `RPScreenWriterUnitTests` is intentionally absent: its file is compiled
/// only under `#if os(iOS)`. `FlipBookCoreAnimationVideoEditorUnitTests`
/// has no platform guard and declares an `allTests` manifest, so it is
/// registered here (it was previously missing from this list).
public func allTests() -> [XCTestCaseEntry] {
    return [
        testCase(FlipBookUnitTests.allTests),
        testCase(FlipBookAssetWriterUnitTests.allTests),
        testCase(FlipBookGIFWriterUnitTests.allTests),
        testCase(FlipBookCoreAnimationVideoEditorUnitTests.allTests)
    ]
}
#endif
12 |
--------------------------------------------------------------------------------
/Sources/FlipBook/Screen.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Screen.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 1/24/20.
6 | //
7 |
// Platform-neutral screen alias plus a shared frame-rate ceiling used when
// capturing frames.
#if os(OSX)
import AppKit
public typealias Screen = NSScreen

extension Screen {
    // Assumed maximum display refresh rate on macOS.
    // NOTE(review): hard-coded to 60; ProMotion/high-refresh external
    // displays can exceed this — confirm whether that matters for capture.
    static var maxFramesPerSecond: Int {
        return 60
    }
}

#else
import UIKit
public typealias Screen = UIScreen

extension Screen {
    // Maximum refresh rate of the main screen.
    static var maxFramesPerSecond: Int {
        if #available(iOS 10.3, *) {
            // `maximumFramesPerSecond` was introduced in iOS 10.3.
            return UIScreen.main.maximumFramesPerSecond
        } else {
            // Fallback for deployment targets older than iOS 10.3.
            return 60
        }
    }
}
#endif
32 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2020 Brad Gayman
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/Sources/FlipBook/Image.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Image.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 1/24/20.
6 | //
7 |
#if os(OSX)
import AppKit
/// Platform-neutral image alias.
public typealias Image = NSImage

extension Image {

    /// Backing `CGImage`, if one can be produced from the receiver.
    var cgI: CGImage? {
        return cgImage(forProposedRect: nil, context: nil, hints: nil)
    }

    /// JPEG data for the image, or `nil` when no `CGImage` is available.
    /// An explicit compression factor of 1.0 is passed so the macOS output
    /// matches the iOS implementation below (`jpegData(compressionQuality: 1.0)`)
    /// instead of falling back to `NSBitmapImageRep`'s default compression.
    var jpegRep: Data? {
        guard let cgImage = self.cgI else {
            return nil
        }
        let bits = NSBitmapImageRep(cgImage: cgImage)
        return bits.representation(using: .jpeg, properties: [.compressionFactor: 1.0])
    }

    /// Wraps a `CGImage` in the platform image type at its pixel size.
    static func makeImage(cgImage: CGImage) -> Image {
        return NSImage(cgImage: cgImage, size: NSSize(width: cgImage.width, height: cgImage.height))
    }
}

#else
import UIKit
/// Platform-neutral image alias.
public typealias Image = UIImage

extension Image {

    /// Backing `CGImage`, if one exists.
    var cgI: CGImage? {
        return cgImage
    }

    /// JPEG data for the image at maximum quality.
    var jpegRep: Data? {
        jpegData(compressionQuality: 1.0)
    }

    /// Wraps a `CGImage` in the platform image type.
    static func makeImage(cgImage: CGImage) -> Image {
        return UIImage(cgImage: cgImage)
    }
}

#endif
48 |
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
// swift-tools-version:5.1
// The swift-tools-version declares the minimum version of Swift required to build this package.

import PackageDescription

let package = Package(
    name: "FlipBook",
    // Minimum deployment targets.
    // NOTE(review): the macOS floor (10.15) is far newer than iOS/tvOS (10);
    // presumably required by APIs used only on Mac — confirm against the
    // library's availability annotations.
    platforms: [
        .iOS(.v10), .macOS(.v10_15), .tvOS(.v10)
    ],
    products: [
        // Products define the executables and libraries produced by a package, and make them visible to other packages.
        .library(
            name: "FlipBook",
            targets: ["FlipBook"]),
    ],
    dependencies: [
        // Dependencies declare other packages that this package depends on.
        // .package(url: /* package url */, from: "1.0.0"),
    ],
    targets: [
        // Targets are the basic building blocks of a package. A target can define a module or a test suite.
        // Targets can depend on other targets in this package, and on products in packages which this package depends on.
        .target(
            name: "FlipBook",
            dependencies: []),
        .testTarget(
            name: "FlipBookTests",
            dependencies: ["FlipBook"]),
    ]
)
32 |
--------------------------------------------------------------------------------
/Sources/FlipBook/View.swift:
--------------------------------------------------------------------------------
1 | //
2 | // View.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 1/24/20.
6 | //
7 |
// Platform-neutral view alias plus a snapshot helper that rasterizes the
// view's *presentation* layer (so in-flight Core Animation state is captured).
#if os(OSX)
import AppKit
public typealias View = NSView

extension View {

    // Backing scale factor of the main screen (1.0 if no screen is available).
    var scale: CGFloat {
        Screen.main?.backingScaleFactor ?? 1.0
    }

    /// Renders the view's presentation layer into a bitmap-backed `NSImage`.
    /// Returns `nil` if a bitmap rep or graphics context cannot be created.
    func fb_makeViewSnapshot() -> Image? {
        // Snapshotting requires the view to be visible and layer-backed;
        // remember the current state so it can be restored before returning.
        let wasHidden = isHidden
        let wantedLayer = wantsLayer

        isHidden = false
        wantsLayer = true

        // Bitmap dimensions are in pixels: points × backing scale.
        let width = Int(bounds.width * scale)
        let height = Int(bounds.height * scale)
        let imageRepresentation = NSBitmapImageRep(bitmapDataPlanes: nil,
                                                   pixelsWide: width,
                                                   pixelsHigh: height,
                                                   bitsPerSample: 8,
                                                   samplesPerPixel: 4,
                                                   hasAlpha: true,
                                                   isPlanar: false,
                                                   colorSpaceName: NSColorSpaceName.deviceRGB,
                                                   bytesPerRow: 0,
                                                   bitsPerPixel: 0)
        // Point size, so the pixel buffer maps back onto the view's bounds.
        imageRepresentation?.size = bounds.size

        guard let imgRep = imageRepresentation,
            let context = NSGraphicsContext(bitmapImageRep: imgRep) else {
                return nil
        }

        // Presentation layer may be nil when no animation is in flight; the
        // image is then returned with empty content.
        layer?.presentation()?.render(in: context.cgContext)

        let image = NSImage(size: bounds.size)
        image.addRepresentation(imgRep)

        // Restore the caller-visible view state.
        wantsLayer = wantedLayer
        isHidden = wasHidden
        return image
    }
}

#else
import UIKit
public typealias View = UIView

extension View {

    // Scale of the main screen; used by callers to size pixel buffers.
    var scale: CGFloat {
        Screen.main.scale
    }

    /// Renders the view's presentation layer into a `UIImage`.
    /// Scale 0 means "native screen scale".
    /// NOTE(review): uses `frame.size` (not `bounds.size`) and an opaque
    /// context — confirm this is intended for transformed or transparent views.
    func fb_makeViewSnapshot() -> Image? {
        UIGraphicsBeginImageContextWithOptions(frame.size, true, 0)
        guard let context = UIGraphicsGetCurrentContext() else { return nil }
        layer.presentation()?.render(in: context)
        let rasterizedView = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return rasterizedView
    }
}
#endif
73 |
--------------------------------------------------------------------------------
/.swiftpm/xcode/xcshareddata/xcschemes/FlipBook.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
29 |
35 |
36 |
37 |
38 |
39 |
45 |
46 |
48 |
54 |
55 |
56 |
57 |
58 |
68 |
69 |
75 |
76 |
82 |
83 |
84 |
85 |
87 |
88 |
91 |
92 |
93 |
--------------------------------------------------------------------------------
/Tests/FlipBookTests/RPScreenWriterUnitTests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RPScreenWriterUnitTests.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 2/3/20.
6 | //
7 |
8 | #if os(iOS)
9 | import XCTest
10 | import AVFoundation
11 | @testable import FlipBook
12 | import ReplayKit
13 |
final class RPScreenWriterUnitTests: XCTestCase {

    /// A newly initialized writer has no asset writers or inputs and all
    /// state flags at their defaults.
    func testInit() {
        let writer = RPScreenWriter()
        XCTAssertEqual(writer.videoWriter, nil)
        XCTAssertEqual(writer.videoInput, nil)
        XCTAssertEqual(writer.audioWriter, nil)
        XCTAssertEqual(writer.micAudioInput, nil)
        XCTAssertEqual(writer.appAudioInput, nil)
        XCTAssertEqual(writer.isVideoWritingFinished, false)
        XCTAssertEqual(writer.isAudioWritingFinished, false)
        XCTAssertEqual(writer.isPaused, false)
        XCTAssertEqual(writer.sessionStartTime, .zero)
        XCTAssertEqual(writer.currentTime, .zero)
        XCTAssertEqual(writer.didUpdateSeconds == nil, true)
    }

    /// `setUpWriter()` builds a video writer holding a single screen-sized
    /// video input, and an audio writer holding app and mic audio inputs.
    func testSetupWriter() {
        let writer = RPScreenWriter()
        writer.setUpWriter()

        XCTAssertEqual(writer.videoWriter != nil, true)
        XCTAssertEqual(writer.videoInput != nil, true)
        XCTAssertEqual(writer.videoWriter?.inputs.count, 1)
        guard let videoInput = writer.videoInput else {
            XCTFail("No video input")
            return
        }
        XCTAssertEqual(writer.videoInput?.mediaType, .video)
        XCTAssertEqual(writer.videoWriter?.inputs.contains(videoInput), true)
        XCTAssertEqual(videoInput.outputSettings?[AVVideoWidthKey] as? CGFloat, UIScreen.main.bounds.width * UIScreen.main.scale)
        XCTAssertEqual(videoInput.outputSettings?[AVVideoHeightKey] as? CGFloat, UIScreen.main.bounds.height * UIScreen.main.scale)

        XCTAssertEqual(writer.audioWriter != nil, true)
        XCTAssertEqual(writer.appAudioInput != nil, true)
        XCTAssertEqual(writer.audioWriter?.inputs.count, 2)
        guard let appAudioInput = writer.appAudioInput else {
            XCTFail("No app audio input")
            return
        }
        XCTAssertEqual(appAudioInput.mediaType, .audio)
        XCTAssertEqual(writer.audioWriter?.inputs.contains(appAudioInput), true)

        XCTAssertEqual(writer.micAudioInput != nil, true)
        guard let micAudioInput = writer.micAudioInput else {
            XCTFail("No mic audio input")
            return
        }
        XCTAssertEqual(micAudioInput.mediaType, .audio)
        XCTAssertEqual(writer.audioWriter?.inputs.contains(micAudioInput), true)
    }

    /// Writing video sample buffers lazily creates the writers and leaves
    /// the paused/finished flags untouched.
    func testWriteBuffer() {
        let writer = RPScreenWriter()

        let images = makeImages()
        let buffers = images.enumerated().compactMap { $0.element.cgI?.makeCMSampleBuffer($0.offset) }
        buffers.forEach {
            writer.writeBuffer($0, rpSampleType: .video)
        }

        XCTAssertEqual(writer.videoWriter != nil, true)
        XCTAssertEqual(writer.videoInput != nil, true)
        XCTAssertEqual(writer.isPaused, false)
        XCTAssertEqual(writer.isAudioWritingFinished, false)
        XCTAssertEqual(writer.isVideoWritingFinished, false)
    }

    /// `finishWriting` yields a URL with exactly one video track whose frame
    /// count matches the written buffers, and resets the writer's state.
    func testFinishWriting() {
        let writer = RPScreenWriter()

        let images = makeImages()
        let buffers = images.enumerated().compactMap { $0.element.cgI?.makeCMSampleBuffer($0.offset) }
        buffers.forEach {
            writer.writeBuffer($0, rpSampleType: .video)
        }

        let expectation = self.expectation(description: "makeVideo")
        var videoURL: URL?
        var frames = [Image]()
        let flipBookAssetWriter = FlipBookAssetWriter()
        writer.finishWriting { (url, error) in
            guard error == nil else {
                XCTFail("Error \(error?.localizedDescription ?? "Some error")")
                return
            }
            guard let url = url else {
                XCTFail("No url")
                return
            }
            videoURL = url
            flipBookAssetWriter.makeFrames(from: url, progress: nil) { (imgs) in
                frames = imgs.map { Image(cgImage: $0) }
                expectation.fulfill()
            }
        }
        waitForExpectations(timeout: 10) { (error) in
            if let error = error {
                XCTFail(error.localizedDescription)
            }
        }
        guard let url = videoURL else {
            XCTFail("No video url")
            return
        }
        let asset = AVURLAsset(url: url)
        XCTAssertEqual(asset.tracks(withMediaType: .video).count, 1)
        XCTAssertEqual(writer.isVideoWritingFinished, false)
        XCTAssertEqual(writer.isAudioWritingFinished, false)
        XCTAssertEqual(writer.isPaused, false)
        XCTAssertEqual(writer.videoInput, nil)
        XCTAssertEqual(writer.videoWriter, nil)
        XCTAssertEqual(writer.audioWriter, nil)
        XCTAssertEqual(writer.appAudioInput, nil)
        XCTAssertEqual(writer.micAudioInput, nil)
        XCTAssertEqual(frames.count, images.count)
    }

    /// Manifest for test runners without Objective-C runtime discovery.
    /// `testFinishWriting` was previously missing from this list.
    static var allTests = [
        ("testInit", testInit),
        ("testSetupWriter", testSetupWriter),
        ("testWriteBuffer", testWriteBuffer),
        ("testFinishWriting", testFinishWriting)
    ]
}
138 |
// MARK: - RPScreenWriterUnitTests + MakeImages -

extension RPScreenWriterUnitTests {

    /// Renders a 100×100 view once per system color and returns one snapshot
    /// image per color. Fails the test and returns an empty array if any
    /// snapshot cannot be produced.
    func makeImages() -> [Image] {
        let view: View = UIView(frame: CGRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        var snapshots = [Image]()
        for color in [UIColor.systemGray, UIColor.systemBlue, UIColor.systemRed] {
            view.backgroundColor = color
            guard let snapshot = view.fb_makeViewSnapshot() else {
                XCTFail("Could not make image")
                return []
            }
            snapshots.append(snapshot)
        }
        return snapshots
    }
}
168 |
169 | #endif
170 |
--------------------------------------------------------------------------------
/Tests/FlipBookTests/FlipBookCoreAnimationVideoEditorUnitTests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FlipBookCoreAnimationVideoEditorUnitTests.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 1/31/20.
6 | //
7 |
8 | import XCTest
9 | @testable import FlipBook
10 | #if os(macOS)
11 | import AppKit
12 | #else
13 | import UIKit
14 | #endif
15 | import AVFoundation
16 |
final class FlipBookCoreAnimationVideoEditorUnitTests: XCTestCase {

    /// A freshly created editor defaults to 60 fps and has no source.
    func testInit() {
        let coreAnimationVideoEditor = FlipBookCoreAnimationVideoEditor()

        XCTAssertEqual(coreAnimationVideoEditor.preferredFramesPerSecond, 60)
        XCTAssertEqual(coreAnimationVideoEditor.source == nil, true)
    }

    /// The layer instruction built for a composition track starts from the
    /// asset track's preferred transform.
    func testCompositionLayerInstruction() {
        let coreAnimationVideoEditor = FlipBookCoreAnimationVideoEditor()
        let expectation = self.expectation(description: "makeVideo")
        var videoURL: URL?
        makeVideo { (url) in
            guard let url = url else {
                XCTFail("Could not make movie")
                return
            }
            videoURL = url
            expectation.fulfill()
        }

        waitForExpectations(timeout: 30) { (error) in
            if let error = error {
                XCTFail(error.localizedDescription)
            }
        }
        guard let url = videoURL else {
            XCTFail("No video url")
            return
        }
        let composition = AVMutableComposition()
        let asset = AVURLAsset(url: url)
        guard let videoTrack = asset.tracks(withMediaType: .video).first else {
            XCTFail("No video track")
            return
        }

        guard let compositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) else {
            XCTFail("Could not make composition track")
            return
        }

        let instruction = coreAnimationVideoEditor.compositionLayerInstruction(for: compositionTrack,
                                                                               assetTrack: videoTrack)
        // Seed with a non-identity transform so the equality check below
        // proves the ramp was actually read from the instruction.
        var transform = CGAffineTransform(scaleX: 0.5, y: 0.5)
        guard instruction.getTransformRamp(for: .zero, start: &transform, end: nil, timeRange: nil) else {
            XCTFail("Could not get transform")
            return
        }
        XCTAssertEqual(transform, videoTrack.preferredTransform)
    }

    /// `makeVideo` invokes the animation block exactly once, reports
    /// progress, and writes a composed movie into the Caches directory.
    func testMakeVideo() {
        let coreAnimationVideoEditor = FlipBookCoreAnimationVideoEditor()
        let expectation = self.expectation(description: "makeVideo")
        var videoURL: URL?
        var progress: CGFloat = 0.0
        var animationCallCount = 0
        makeVideo { (url) in
            guard let url = url else {
                XCTFail("Could not make movie")
                return
            }
            coreAnimationVideoEditor.makeVideo(fromVideoAt: url, animation: { (layer) in
                let textLayer = CATextLayer()
                textLayer.string = "Testing!!"
                layer.addSublayer(textLayer)
                animationCallCount += 1
            }, progress: { (prog) in
                progress = prog
            }, completion: { result in
                switch result {
                case .success(let url):
                    videoURL = url
                    expectation.fulfill()
                case .failure(let error):
                    XCTFail("\(error)")
                }
            })
        }
        waitForExpectations(timeout: 30) { (error) in
            if let error = error {
                XCTFail(error.localizedDescription)
            }
        }
        guard let url = videoURL else {
            XCTFail("No video URL")
            return
        }
        let asset = AVURLAsset(url: url)
        XCTAssertEqual(url.absoluteString.contains("Caches"), true)
        XCTAssertEqual(url.absoluteString.contains("FlipBookVideoComposition.mov"), true)
        XCTAssertEqual(animationCallCount, 1)
        XCTAssertEqual(progress > 0.0, true)
        XCTAssertEqual(asset.tracks(withMediaType: .video).first != nil, true)
    }

    // Manifest for test runners without Objective-C runtime discovery.
    static var allTests = [
        ("testInit", testInit),
        ("testCompositionLayerInstruction", testCompositionLayerInstruction),
        ("testMakeVideo", testMakeVideo)
    ]
}
121 |
122 | // MARK: - FlipBookCoreAnimationVideoEditorUnitTests + MakeVideo -
123 |
extension FlipBookCoreAnimationVideoEditorUnitTests {

    /// Builds a short test movie: snapshots a 100×100 view in three system
    /// colors, feeds the frames (each appearing twice) to a
    /// `FlipBookAssetWriter`, and calls back with the video URL — or `nil`
    /// when any snapshot or the asset creation fails.
    func makeVideo(completion: @escaping (URL?) -> Void) {
        let assetWriter = FlipBookAssetWriter()
        assetWriter.size = CGSize(width: 100.0 * View().scale, height: 100.0 * View().scale)

        var frames = [Image]()
        #if os(OSX)
        let view: View = NSView(frame: NSRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        view.wantsLayer = true
        for color in [NSColor.systemGray, NSColor.systemBlue, NSColor.systemRed] {
            view.layer?.backgroundColor = color.cgColor
            guard let snapshot = view.fb_makeViewSnapshot() else {
                completion(nil)
                return
            }
            frames.append(snapshot)
        }
        #else
        let view: View = UIView(frame: CGRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        for color in [UIColor.systemGray, UIColor.systemBlue, UIColor.systemRed] {
            view.backgroundColor = color
            guard let snapshot = view.fb_makeViewSnapshot() else {
                completion(nil)
                return
            }
            frames.append(snapshot)
        }
        #endif

        // The three frames are passed twice, matching the original
        // [gray, blue, red, gray, blue, red] ordering.
        assetWriter.createAsset(from: frames + frames, progress: { (_) in }, completion: { result in
            switch result {
            case .success(let asset):
                switch asset {
                case .video(let url):
                    completion(url)
                case .livePhoto, .gif:
                    completion(nil)
                }
            case .failure:
                completion(nil)
            }
        })
    }
}
193 |
--------------------------------------------------------------------------------
/Sources/FlipBook/FlipBookGIFWriter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FlipBookGIFWriter.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 1/25/20.
6 | //
7 |
8 | import AVFoundation
9 | import ImageIO
10 | #if !os(macOS)
11 | import MobileCoreServices
12 | #else
13 | import CoreServices
14 | #endif
15 |
16 | // MARK: - FlipBookGIFWriter -
17 |
18 | /// Class that converts an array of images to animated gif
public final class FlipBookGIFWriter: NSObject {

    // MARK: - Types -

    /// Errors that `FlipBookGIFWriter` can throw
    public enum FlipBookGIFWriterError: Error {

        /// Could not create gif destination for supplied file `URL`
        case couldNotCreateDestination

        /// Failed to finalize writing for gif
        case failedToFinalizeDestination
    }

    // MARK: - Public Properties -

    /// The file `URL` that the gif is written to
    public let fileOutputURL: URL

    // MARK: - Internal Properties -

    /// Queue on which gif writing takes place
    internal static let queue = DispatchQueue(label: "com.FlipBook.gif.writer.queue", attributes: .concurrent)

    // MARK: - Init / Deinit -

    /// Creates an instance of `FlipBookGIFWriter`
    /// - Parameter fileOutputURL: The file `URL` that the gif is written to; `nil` fails initialization
    public init?(fileOutputURL: URL?) {
        guard let fileOutputURL = fileOutputURL else {
            return nil
        }
        self.fileOutputURL = fileOutputURL
    }

    // MARK: - Public Methods -

    /// Function that takes an array of images and composes an animated gif with them
    /// - Parameters:
    ///   - images: images that comprise the gif
    ///   - delay: time in seconds gif should wait before moving to next frame. **Default** 0.02
    ///   - loop: number of times gif should animate. Value of 0 will cause gif to repeat indefinitely **Default** 0
    ///   - sizeRatio: scale that image should be resized to when making gif **Default** 1.0
    ///   - progress: closure called when progress is made while creating gif. Called from background thread.
    ///   - completion: closure called after gif has been composed. Called from background thread.
    public func makeGIF(_ images: [Image], delay: CGFloat = 0.02, loop: Int = 0, sizeRatio: Float = 1.0, progress: ((CGFloat) -> Void)?, completion: @escaping (Result) -> Void) {
        // Frames are nil-ed out as they are written so peak memory stays
        // bounded for long recordings.
        var images: [Image?] = images
        let count = images.count
        Self.queue.async { [weak self] in
            autoreleasepool {
                guard let self = self else { return }
                let gifSettings = [
                    kCGImagePropertyGIFDictionary as String: [kCGImagePropertyGIFLoopCount as String: loop,
                                                              kCGImagePropertyGIFHasGlobalColorMap as String: false]
                ]
                let imageSettings = [
                    kCGImagePropertyGIFDictionary as String: [kCGImagePropertyGIFDelayTime as String: delay]
                ]
                guard let destination = CGImageDestinationCreateWithURL(self.fileOutputURL as CFURL, kUTTypeGIF, count, nil) else {
                    completion(.failure(FlipBookGIFWriterError.couldNotCreateDestination))
                    return
                }

                CGImageDestinationSetProperties(destination, gifSettings as CFDictionary)
                for index in images.indices {
                    autoreleasepool {
                        let image = images[index]
                        if let cgImage = image?.cgI?.resize(with: sizeRatio) {
                            CGImageDestinationAddImage(destination, cgImage, imageSettings as CFDictionary)
                        }
                        // Release the frame as soon as it has been written.
                        images[index] = nil
                        progress?(CGFloat(index + 1) / CGFloat(count))
                    }
                }

                if CGImageDestinationFinalize(destination) == false {
                    // Finalization failed — report the dedicated error case
                    // (this previously mis-reported `.couldNotCreateDestination`).
                    completion(.failure(FlipBookGIFWriterError.failedToFinalizeDestination))
                } else {
                    completion(.success(self.fileOutputURL))
                }
            }
        }
    }

    /// Determines the frame rate of a gif by looking at the `delay` of the first image
    /// - Parameter gifURL: The file `URL` where the gif is located.
    /// - Returns: The frame rate as an `Int` or `nil` if data at url was invalid
    public func makeFrameRate(_ gifURL: URL) -> Int? {
        guard let gifData = try? Data(contentsOf: gifURL),
              let source = CGImageSourceCreateWithData(gifData as CFData, nil) else { return nil }
        let delay = getDelayForImageAtIndex(0, source: source)
        // Round 1/delay to the nearest whole frames-per-second value.
        return Int((1.0 / delay) + 0.5)
    }

    /// Creates an array of `Image`s that represent the frames of a gif
    /// - Parameter gifURL: The file `URL` where the gif is located.
    /// - Returns: The frames of the gif as an array of `Image`s, or `nil` if data at url was invalid
    public func makeImages(_ gifURL: URL) -> [Image]? {
        guard let gifData = try? Data(contentsOf: gifURL),
              let source = CGImageSourceCreateWithData(gifData as CFData, nil) else { return nil }
        var images = [Image]()
        let imageCount = CGImageSourceGetCount(source)
        for i in 0 ..< imageCount {
            if let image = CGImageSourceCreateImageAtIndex(source, i, nil) {
                images.append(Image.makeImage(cgImage: image))
            }
        }
        return images
    }

    /// Determines the delay of the frame of a gif at a given index
    /// - Parameters:
    ///   - index: The index to determine the delay for
    ///   - source: The `CGImageSource` of the gif
    /// - Returns: The frame delay in seconds, or 0.1 when it cannot be determined
    internal func getDelayForImageAtIndex(_ index: Int, source: CGImageSource) -> Double {
        var delay = 0.1

        // Get dictionaries; bail out with the default delay when the frame
        // has no properties at all.
        guard let cfProperties = CGImageSourceCopyPropertiesAtIndex(source, index, nil) else {
            return delay
        }
        // Capacity must be 1: CFDictionaryGetValueIfPresent writes one
        // pointer-sized value through this pointer. (It was previously
        // allocated with capacity 0, which is undefined behavior.)
        let gifPropertiesPointer = UnsafeMutablePointer<UnsafeRawPointer?>.allocate(capacity: 1)
        defer {
            gifPropertiesPointer.deallocate()
        }
        let unsafePointer = Unmanaged.passUnretained(kCGImagePropertyGIFDictionary).toOpaque()
        if CFDictionaryGetValueIfPresent(cfProperties, unsafePointer, gifPropertiesPointer) == false {
            return delay
        }

        let gifProperties: CFDictionary = unsafeBitCast(gifPropertiesPointer.pointee, to: CFDictionary.self)

        // Get delay time — prefer the unclamped value; fall back to the
        // clamped delay when the unclamped value is 0.
        var delayObject: AnyObject = unsafeBitCast(
            CFDictionaryGetValue(gifProperties,
                                 Unmanaged.passUnretained(kCGImagePropertyGIFUnclampedDelayTime).toOpaque()),
            to: AnyObject.self)
        if delayObject.doubleValue == 0 {
            delayObject = unsafeBitCast(CFDictionaryGetValue(gifProperties,
                                                             Unmanaged.passUnretained(kCGImagePropertyGIFDelayTime).toOpaque()), to: AnyObject.self)
        }

        if let delayObject = delayObject as? Double, delayObject > 0 {
            delay = delayObject
        } else {
            delay = 0.1
        }

        return delay
    }
}
168 |
169 | // MARK: - CGImage + Resize -
170 | /// Add resizing helper function
internal extension CGImage {

    /// Resizes image based on ratio to natural size
    /// - Parameter ratio: Ratio that represents the size of the image relative to its natural size
    /// - Returns: The resized image, or `nil` if a drawing context could not be created
    func resize(with ratio: Float) -> CGImage? {
        let imageWidth: Int = Int(Float(self.width) * ratio)
        let imageHeight: Int = Int(Float(self.height) * ratio)

        guard let colorSpace = self.colorSpace else { return nil }
        // bytesPerRow of 0 lets Core Graphics compute the stride for the
        // *new* width. (Passing `self.bytesPerRow` is only correct for the
        // original width and makes context creation fail for ratios > 1.)
        guard let context = CGContext(data: nil,
                                      width: imageWidth,
                                      height: imageHeight,
                                      bitsPerComponent: self.bitsPerComponent,
                                      bytesPerRow: 0,
                                      space: colorSpace,
                                      bitmapInfo: self.alphaInfo.rawValue) else { return nil }

        // Low interpolation keeps GIF frame generation fast.
        context.interpolationQuality = .low
        context.draw(self, in: CGRect(x: 0, y: 0, width: imageWidth, height: imageHeight))

        return context.makeImage()
    }
}
188 |
--------------------------------------------------------------------------------
/Tests/FlipBookTests/FlipBookGIFWriterUnitTests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FlipBookGIFWriterUnitTests.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 1/26/20.
6 | //
7 |
8 | import XCTest
9 | @testable import FlipBook
10 | #if os(OSX)
11 | import AppKit
12 | #else
13 | import UIKit
14 | #endif
15 |
16 | final class FlipBookGIFWriterUnitTests: XCTestCase {
17 |
    /// The writer's queue carries the expected label and the output URL
    /// points at a gif inside the Caches directory.
    func testInit() {
        let gifWriter = FlipBookGIFWriter(fileOutputURL: FlipBookAssetWriter().makeFileOutputURL(fileName: "output.gif"))

        XCTAssertEqual(FlipBookGIFWriter.queue.label, "com.FlipBook.gif.writer.queue")
        XCTAssertEqual(gifWriter?.fileOutputURL.absoluteString.contains("Caches"), true)
        XCTAssertEqual(gifWriter?.fileOutputURL.absoluteString.contains("output.gif"), true)
    }
25 |
    /// Snapshotting a 100×100 view yields a screen-scaled `CGImage`, and
    /// `resize(with: 0.5)` halves both pixel dimensions.
    func testImageResize() {
        let image: Image
        let scale: CGFloat
        #if os(OSX)
        let view: View = NSView(frame: NSRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        view.wantsLayer = true
        view.layer?.backgroundColor = NSColor.systemGray.cgColor
        scale = view.scale
        guard let img = view.fb_makeViewSnapshot() else {
            XCTFail("Could not make image")
            return
        }
        image = img
        #else
        let view: View = UIView(frame: CGRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        view.backgroundColor = UIColor.systemGray
        scale = view.scale
        guard let img = view.fb_makeViewSnapshot() else {
            XCTFail("Could not make image")
            return
        }
        image = img
        #endif
        let cgImage = image.cgI

        // Snapshot is sized in pixels: points × screen scale.
        XCTAssertEqual(cgImage != nil, true)
        XCTAssertEqual(cgImage?.width, 100 * Int(scale))
        XCTAssertEqual(cgImage?.height, 100 * Int(scale))

        let resizedCGImage = cgImage?.resize(with: 0.5)

        XCTAssertEqual(resizedCGImage != nil, true)
        XCTAssertEqual(resizedCGImage?.width, 100 * Int(scale) / 2)
        XCTAssertEqual(resizedCGImage?.height, 100 * Int(scale) / 2)
    }
61 |
    /// `makeGIF` reports progress, writes a gif containing one frame per
    /// input image, and honors the 0.5 size ratio.
    func testMakeGIF() {

        let images = makeImages()
        var prog: CGFloat = 0.0
        var assetURL: URL? = nil
        let expectation = self.expectation(description: "createAsset")
        let scale = View().scale

        let gifWriter = FlipBookGIFWriter(fileOutputURL: FlipBookAssetWriter().makeFileOutputURL(fileName: "output.gif"))
        gifWriter?.makeGIF(images, delay: 0.02, loop: 0, sizeRatio: 0.5, progress: { p in
            prog = p
        }, completion: { result in
            switch result {
            case .success(let url):
                assetURL = url
                expectation.fulfill()
            case .failure(let error):
                XCTFail(error.localizedDescription)
            }
        })

        waitForExpectations(timeout: 30) { (error) in
            if let error = error {
                XCTFail(error.localizedDescription)
            }
        }
        guard let aURL = assetURL else {
            XCTFail("could not get asset url")
            return
        }

        XCTAssertEqual(prog != 0.0, true)
        do {
            // Decode the written gif and verify frame count and frame size.
            let gifData = try Data(contentsOf: aURL)
            guard let source = CGImageSourceCreateWithData(gifData as CFData, nil) else {
                XCTFail("Could not make source")
                return
            }
            XCTAssertEqual(CGImageSourceGetCount(source), 3)
            if let image = CGImageSourceCreateImageAtIndex(source, 0, nil) {
                XCTAssertEqual(image.width, 100 * Int(scale) / 2)
                XCTAssertEqual(image.height, 100 * Int(scale) / 2)
            } else {
                XCTFail("No first image")
            }
        } catch {
            XCTFail(error.localizedDescription)
        }
    }
111 |
112 | func testGetDelay() {
113 | let images = makeImages()
114 | let gifWriter = FlipBookGIFWriter(fileOutputURL: FlipBookAssetWriter().makeFileOutputURL(fileName: "output.gif"))
115 | let expectation = self.expectation(description: "createAsset")
116 | var assetURL: URL? = nil
117 | gifWriter?.makeGIF(images, delay: 0.5, loop: 0, sizeRatio: 0.5, progress: nil, completion: { result in
118 | switch result {
119 | case .success(let url):
120 | assetURL = url
121 | expectation.fulfill()
122 | case .failure(let error):
123 | XCTFail(error.localizedDescription)
124 | }
125 | })
126 | waitForExpectations(timeout: 30) { (error) in
127 | if let error = error {
128 | XCTFail(error.localizedDescription)
129 | }
130 | }
131 | guard let aURL = assetURL else {
132 | XCTFail("failed to get asset url")
133 | return
134 | }
135 |
136 | guard let frameRate = gifWriter?.makeFrameRate(aURL) else {
137 | XCTFail("failed to get frameRate")
138 | return
139 | }
140 | XCTAssertEqual(frameRate, 2)
141 | }
142 |
143 | func testMakeImages() {
144 | let images = makeImages()
145 | let gifWriter = FlipBookGIFWriter(fileOutputURL: FlipBookAssetWriter().makeFileOutputURL(fileName: "output.gif"))
146 | let expectation = self.expectation(description: "createAsset")
147 | var assetURL: URL? = nil
148 | gifWriter?.makeGIF(images, delay: 0.5, loop: 0, sizeRatio: 0.5, progress: nil, completion: { result in
149 | switch result {
150 | case .success(let url):
151 | assetURL = url
152 | expectation.fulfill()
153 | case .failure(let error):
154 | XCTFail(error.localizedDescription)
155 | }
156 | })
157 | waitForExpectations(timeout: 30) { (error) in
158 | if let error = error {
159 | XCTFail(error.localizedDescription)
160 | }
161 | }
162 | guard let aURL = assetURL else {
163 | XCTFail("failed to get asset url")
164 | return
165 | }
166 | guard let gifFrames = gifWriter?.makeImages(aURL) else {
167 | XCTFail("Could not get images")
168 | return
169 | }
170 | XCTAssertEqual(gifFrames.count, images.count)
171 | }
172 |
173 | static var allTests = [
174 | ("testInit", testInit),
175 | ("testImageResize", testImageResize),
176 | ("testMakeGIF", testMakeGIF),
177 | ("testGetDelay", testGetDelay),
178 | ("testMakeImages", testMakeImages)
179 | ]
180 | }
181 |
182 | // MARK: - FlipBookGIFWriterUnitTests + MakeImages -
183 |
extension FlipBookGIFWriterUnitTests {
    
    /// Makes three 100x100 solid-color snapshot images (gray, blue, red) used
    /// as source frames for the GIF tests.
    ///
    /// Fails the current test and returns an empty array if any snapshot
    /// cannot be made.
    func makeImages() -> [Image] {
        var images = [Image]()
        #if os(OSX)
        let view: View = NSView(frame: NSRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        view.wantsLayer = true
        // One frame per color, snapshotted after recoloring the layer.
        for color in [NSColor.systemGray, NSColor.systemBlue, NSColor.systemRed] {
            view.layer?.backgroundColor = color.cgColor
            guard let img = view.fb_makeViewSnapshot() else {
                XCTFail("Could not make image")
                return []
            }
            images.append(img)
        }
        #else
        let view: View = UIView(frame: CGRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        // One frame per color, snapshotted after recoloring the view.
        for color in [UIColor.systemGray, UIColor.systemBlue, UIColor.systemRed] {
            view.backgroundColor = color
            guard let img = view.fb_makeViewSnapshot() else {
                XCTFail("Could not make image")
                return []
            }
            images.append(img)
        }
        #endif
        return images
    }
}
236 |
--------------------------------------------------------------------------------
/Tests/FlipBookTests/FlipBookUnitTests.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | import AVFoundation
3 | @testable import FlipBook
4 | #if os(OSX)
5 | import AppKit
6 | #else
7 | import UIKit
8 | #endif
9 |
/// Unit tests for the high level `FlipBook` recorder
final class FlipBookUnitTests: XCTestCase {
    
    /// A freshly created `FlipBook` should expose its documented defaults.
    func testInit() {
        let flipBook = FlipBook()
        
        XCTAssertEqual(flipBook.preferredFramesPerSecond, 60)
        XCTAssertEqual(flipBook.gifImageScale, 0.5)
        XCTAssertEqual(flipBook.assetType, .video)
        XCTAssertEqual(flipBook.onProgress == nil, true)
        XCTAssertEqual(flipBook.onCompletion == nil, true)
        XCTAssertEqual(flipBook.sourceView == nil, true)
        #if os(OSX)
        XCTAssertEqual(flipBook.queue == nil, true)
        XCTAssertEqual(flipBook.source == nil, true)
        #else
        XCTAssertEqual(flipBook.displayLink == nil, true)
        #endif
    }
    
    /// Starting a recording should capture the source view and callbacks and
    /// configure the writer plus the platform capture timer.
    func testStart() {
        let flipBook = FlipBook()
        flipBook.gifImageScale = 0.75
        flipBook.preferredFramesPerSecond = 12
        let view: View
        #if os(OSX)
        view = NSView(frame: NSRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        view.wantsLayer = true
        view.layer?.backgroundColor = NSColor.systemGray.cgColor
        #else
        view = UIView(frame: CGRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        view.backgroundColor = UIColor.systemGray
        #endif
        
        flipBook.startRecording(view, progress: { _ in }, completion: { _ in })
        
        XCTAssertEqual(flipBook.sourceView, view)
        XCTAssertEqual(flipBook.onProgress != nil, true)
        XCTAssertEqual(flipBook.onCompletion != nil, true)
        XCTAssertEqual(flipBook.writer.size, CGSize(width: 100.0 * view.scale, height: 100.0 * view.scale))
        XCTAssertEqual(flipBook.writer.startDate != nil, true)
        XCTAssertEqual(flipBook.writer.gifImageScale, 0.75)
        
        #if os(OSX)
        XCTAssertEqual(flipBook.queue != nil, true)
        XCTAssertEqual(flipBook.source != nil, true)
        XCTAssertEqual(flipBook.source?.isCancelled, false)
        #else
        XCTAssertEqual(flipBook.displayLink != nil, true)
        if #available(iOS 10.0, *) {
            XCTAssertEqual(flipBook.displayLink?.preferredFramesPerSecond, 12)
        }
        #endif
        flipBook.stop()
    }
    
    /// Stopping a short recording should tear down the capture machinery and
    /// produce a video whose natural size matches the recorded view.
    func testStop() {
        let flipBook = FlipBook()
        flipBook.gifImageScale = 0.75
        flipBook.preferredFramesPerSecond = 12
        let view: View
        #if os(OSX)
        view = NSView(frame: NSRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        view.wantsLayer = true
        view.layer?.backgroundColor = NSColor.systemGray.cgColor
        #else
        view = UIView(frame: CGRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        view.backgroundColor = UIColor.systemGray
        #endif
        
        let expectation = self.expectation(description: "makeAsset")
        var progress: CGFloat = 0.0
        var animationCallCount = 0
        var videoURL: URL? = nil
        
        flipBook.startRecording(view,
                                compositionAnimation: { _ in animationCallCount += 1 },
                                progress: { prog in progress = prog },
                                completion: { result in
                                    switch result {
                                    case .success(let asset):
                                        videoURL = asset.assetURL
                                    case .failure(let error):
                                        XCTFail("\(error)")
                                    }
                                    // Fulfill on failure too, so a failed
                                    // recording doesn't hang for the timeout.
                                    expectation.fulfill()
        })
        
        // Record briefly, then stop and verify the capture state is torn down.
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
            flipBook.stop()
            XCTAssertEqual(flipBook.writer.endDate != nil, true)
            #if os(OSX)
            XCTAssertEqual(flipBook.source?.isCancelled, true)
            #else
            XCTAssertEqual(flipBook.displayLink == nil, true)
            #endif
            XCTAssertEqual(flipBook.sourceView == nil, true)
        }
        
        waitForExpectations(timeout: 30) { (error) in
            if let error = error {
                XCTFail(error.localizedDescription)
            }
        }
        XCTAssertEqual(progress != 0.0, true)
        XCTAssertEqual(animationCallCount, 1)
        
        guard let url = videoURL else {
            XCTFail("Failed to get video url")
            return
        }
        let asset = AVURLAsset(url: url)
        guard let videoTrack = asset.tracks(withMediaType: .video).first else {
            XCTFail("No video track")
            return
        }
        XCTAssertEqual(videoTrack.naturalSize.width, view.bounds.width * view.scale)
        XCTAssertEqual(videoTrack.naturalSize.height, view.bounds.height * view.scale)
    }
    
    /// Creating a video from an array of images should call back on the main
    /// thread, report progress, and produce a track of the expected size.
    func testMakeAssetFromImages() {
        let flipBook = FlipBook()
        
        // Make three solid-color 100x100 snapshot images to act as frames.
        var images = [Image]()
        #if os(OSX)
        let view: View = NSView(frame: NSRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        view.wantsLayer = true
        for color in [NSColor.systemGray, NSColor.systemBlue, NSColor.systemRed] {
            view.layer?.backgroundColor = color.cgColor
            guard let img = view.fb_makeViewSnapshot() else {
                XCTFail("Could not make image")
                return
            }
            images.append(img)
        }
        #else
        let view: View = UIView(frame: CGRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        for color in [UIColor.systemGray, UIColor.systemBlue, UIColor.systemRed] {
            view.backgroundColor = color
            guard let img = view.fb_makeViewSnapshot() else {
                XCTFail("Could not make image")
                return
            }
            images.append(img)
        }
        #endif
        
        let expectation = self.expectation(description: "makeAsset")
        var prog: CGFloat = 0.0
        var assetURL: URL?
        var animationCallCount = 0
        
        flipBook.makeAsset(from: images, compositionAnimation: { _ in
            animationCallCount += 1
        }, progress: { (p) in
            prog = p
            XCTAssertEqual(Thread.isMainThread, true)
        }, completion: { result in
            XCTAssertEqual(Thread.isMainThread, true)
            switch result {
            case .success(let asset):
                switch asset {
                case .video(let url):
                    assetURL = url
                case .livePhoto, .gif:
                    XCTFail("wrong asset type")
                }
            case .failure(let error):
                XCTFail(error.localizedDescription)
            }
            // Fulfill on every path so a failure doesn't hang for the timeout.
            expectation.fulfill()
        })
        waitForExpectations(timeout: 30) { (error) in
            if let err = error {
                XCTFail(err.localizedDescription)
            }
        }
        
        XCTAssertEqual(prog != 0.0, true)
        XCTAssertEqual(animationCallCount, 1)
        
        guard let url = assetURL else {
            XCTFail("No asset url")
            return
        }
        let asset = AVURLAsset(url: url)
        guard let videoTrack = asset.tracks(withMediaType: .video).first else {
            XCTFail("No video track")
            return
        }
        XCTAssertEqual(videoTrack.naturalSize.width, 100.0 * View().scale)
        XCTAssertEqual(videoTrack.naturalSize.height, 100.0 * View().scale)
    }
    
    static var allTests = [
        ("testInit", testInit),
        ("testStart", testStart),
        ("testStop", testStop),
        ("testMakeAssetFromImages", testMakeAssetFromImages)
    ]
}
234 |
--------------------------------------------------------------------------------
/Sources/FlipBook/FlipBookCoreAnimationVideoEditor.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FlipBookCoreAnimationVideoEditor.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 1/30/20.
6 | //
7 |
8 | #if os(macOS)
9 | import AppKit
10 | #else
11 | import UIKit
12 | #endif
13 | import AVFoundation
14 |
15 | // MARK: - FlipBookCoreAnimationVideoEditor -
16 |
public final class FlipBookCoreAnimationVideoEditor: NSObject {
    
    // MARK: - Types -
    
    /// Errors that `FlipBookCoreAnimationVideoEditor` might throw
    enum FlipBookCoreAnimationVideoEditorError: String, Error {
        
        /// Compositing was cancelled
        case cancelled
        
        /// The composition could not be created
        case couldNotCreateComposition
        
        /// The export session could not be created
        case couldNotCreateExportSession
        
        /// The output URL could not be created
        case couldNotCreateOutputURL
        
        /// An unknown error occurred
        case unknown
    }
    
    // MARK: - Public Properties -
    
    /// The number of frames per second targeted
    /// **Default** 60 frames per second
    public var preferredFramesPerSecond: Int = 60
    
    // MARK: - Internal Properties -
    
    /// Timer source used for reporting the progress of the export session
    internal var source: DispatchSourceTimer?
    
    // MARK: - Public Methods -
    
    /// Makes a new video composition from a video and a Core Animation animation
    /// - Parameters:
    ///   - videoURL: The `URL` of the video that the Core Animation animation should be composited with
    ///   - animation: Closure for adding `AVVideoCompositionCoreAnimationTool` composition animations. Add `CALayer`s as sublayers to the passed in `CALayer`, then trigger animations with a `beginTime` of `AVCoreAnimationBeginTimeAtZero`. *Reminder that the `CALayer` origin for `AVVideoCompositionCoreAnimationTool` is the lower left; for `UIKit` setting `isGeometryFlipped = true` is suggested*
    ///   - progress: Optional closure that is called with a `CGFloat` representing the progress of composite generation. `CGFloat` is in the range `(0.0 ... 1.0)`. `progress` will be called from the main thread
    ///   - completion: Closure that is called when the video composite has been created, with the `URL` for the created video. `completion` will be called from the main thread
    public func makeVideo(fromVideoAt videoURL: URL,
                          animation: @escaping (CALayer) -> Void,
                          progress: ((CGFloat) -> Void)?,
                          completion: @escaping (Result<URL, Error>) -> Void) {
        
        let asset = AVURLAsset(url: videoURL)
        let composition = AVMutableComposition()
        
        // We need both a writable composition track and the source video track.
        guard let compositionTrack = composition.addMutableTrack(withMediaType: .video,
                                                                 preferredTrackID: kCMPersistentTrackID_Invalid),
            let assetTrack = asset.tracks(withMediaType: .video).first else {
                DispatchQueue.main.async { completion(.failure(FlipBookCoreAnimationVideoEditorError.couldNotCreateComposition))}
                return
        }
        
        do {
            let timeRange = CMTimeRange(start: .zero, duration: asset.duration)
            try compositionTrack.insertTimeRange(timeRange, of: assetTrack, at: .zero)
            
            // Carry over any audio the source video has.
            if let audioAssetTrack = asset.tracks(withMediaType: .audio).first,
                let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio,
                                                                        preferredTrackID: kCMPersistentTrackID_Invalid) {
                try compositionAudioTrack.insertTimeRange(timeRange, of: audioAssetTrack, at: .zero)
            }
            
        } catch {
            DispatchQueue.main.async { completion(.failure(error)) }
            return
        }
        
        compositionTrack.preferredTransform = assetTrack.preferredTransform
        let videoInfo = orientation(from: assetTrack.preferredTransform)
        let videoSize: CGSize
        
        // Portrait assets report a landscape natural size; swap the dimensions.
        if videoInfo.isPortrait {
            videoSize = CGSize(width: assetTrack.naturalSize.height, height: assetTrack.naturalSize.width)
        } else {
            videoSize = assetTrack.naturalSize
        }
        
        let videoLayer = CALayer()
        videoLayer.frame = CGRect(origin: .zero, size: videoSize)
        let overlayLayer = CALayer()
        overlayLayer.frame = CGRect(origin: .zero, size: videoSize)
        
        let outputLayer = CALayer()
        outputLayer.frame = CGRect(origin: .zero, size: videoSize)
        outputLayer.addSublayer(videoLayer)
        outputLayer.addSublayer(overlayLayer)
        
        // Let the caller attach its animation layers above the video layer.
        animation(overlayLayer)
        
        let videoComposition = AVMutableVideoComposition()
        videoComposition.renderSize = videoSize
        videoComposition.frameDuration = CMTime(value: 1, timescale: CMTimeScale(preferredFramesPerSecond))
        videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer,
                                                                             in: outputLayer)
        
        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRange(start: .zero,
                                            duration: composition.duration)
        
        videoComposition.instructions = [instruction]
        let layerInstruction = compositionLayerInstruction(for: compositionTrack,
                                                           assetTrack: assetTrack)
        instruction.layerInstructions = [layerInstruction]
        
        guard let export = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
            DispatchQueue.main.async { completion(.failure(FlipBookCoreAnimationVideoEditorError.couldNotCreateExportSession)) }
            return
        }
        
        guard let exportURL = FlipBookAssetWriter().makeFileOutputURL(fileName: "FlipBookVideoComposition.mov") else {
            DispatchQueue.main.async { completion(.failure(FlipBookCoreAnimationVideoEditorError.couldNotCreateOutputURL)) }
            return
        }
        
        // `AVAssetExportSession` fails if a file already exists at its output
        // URL, so clear out any leftover from a previous composition.
        try? FileManager.default.removeItem(at: exportURL)
        
        export.videoComposition = videoComposition
        export.outputFileType = .mov
        export.outputURL = exportURL
        
        if let progress = progress {
            source = DispatchSource.makeTimerSource(queue: DispatchQueue.main)
            source?.schedule(deadline: .now(), repeating: 1.0 / Double(self.preferredFramesPerSecond))
            source?.setEventHandler { [weak self] in
                progress(CGFloat(export.progress))
                if export.progress == 1.0 {
                    self?.source?.cancel()
                    self?.source = nil
                }
            }
            source?.resume()
        }
        
        export.exportAsynchronously { [weak self] in
            DispatchQueue.main.async {
                // Always tear down the progress timer here: on a failed or
                // cancelled export `export.progress` never reaches 1.0 and the
                // timer would otherwise keep firing forever.
                self?.source?.cancel()
                self?.source = nil
                switch export.status {
                case .completed:
                    completion(.success(exportURL))
                case .unknown, .exporting, .waiting:
                    completion(.failure(FlipBookCoreAnimationVideoEditorError.unknown))
                case .failed:
                    completion(.failure(export.error ?? FlipBookCoreAnimationVideoEditorError.unknown))
                case .cancelled:
                    completion(.failure(FlipBookCoreAnimationVideoEditorError.cancelled))
                @unknown default:
                    completion(.failure(FlipBookCoreAnimationVideoEditorError.unknown))
                }
            }
        }
    }
    
    //MARK: - Internal Methods -
    
    /// Determines the image orientation, and whether the rectangle is in "Portrait", from a transform
    /// - Parameter transform: The transform of the rectangle
    internal func orientation(from transform: CGAffineTransform) -> (orientation: CGImagePropertyOrientation, isPortrait: Bool) {
        var assetOrientation = CGImagePropertyOrientation.up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            // 90° rotation
            assetOrientation = .right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            // -90° rotation
            assetOrientation = .left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            // identity
            assetOrientation = .up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            // 180° rotation
            assetOrientation = .down
        }
        
        return (assetOrientation, isPortrait)
    }
    
    /// Makes the composition layer instruction for a given composition track, applying the asset track's preferred transform
    /// - Parameters:
    ///   - track: The track of the composition
    ///   - assetTrack: The track of the asset
    internal func compositionLayerInstruction(for track: AVCompositionTrack, assetTrack: AVAssetTrack) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let transform = assetTrack.preferredTransform
        
        instruction.setTransform(transform, at: .zero)
        
        return instruction
    }
}
206 |
--------------------------------------------------------------------------------
/Sources/FlipBook/RPScreenWriter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RPScreenWriter.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 2/2/20.
6 | //
7 | // Taken from https://gist.github.com/mspvirajpatel/f7e1e258f3c1fff96917d82fa9c4c137
8 |
9 | #if os(iOS)
10 | import Foundation
11 | import AVFoundation
12 | import ReplayKit
13 |
/// Writes ReplayKit sample buffers to separate video and audio files, then
/// merges them into a single .mp4 when recording finishes.
internal final class RPScreenWriter: NSObject {
    // Write video
    var videoOutputURL: URL
    var videoWriter: AVAssetWriter?
    var videoInput: AVAssetWriterInput?
    // Write audio
    var audioOutputURL: URL
    var audioWriter: AVAssetWriter?
    var micAudioInput:AVAssetWriterInput?
    var appAudioInput:AVAssetWriterInput?
    
    // Set when the corresponding writer's `finishWriting` completes; the merge
    // only happens once both are true.
    var isVideoWritingFinished = false
    var isAudioWritingFinished = false
    
    // While `true`, incoming sample buffers are dropped.
    var isPaused: Bool = false
    
    // Presentation time of the first video buffer; used to compute `currentTime`.
    var sessionStartTime: CMTime = .zero
    
    // Elapsed recording time relative to `sessionStartTime`.
    var currentTime: CMTime = .zero {
        didSet {
            didUpdateSeconds?(currentTime.seconds)
        }
    }
    
    /// Called with the elapsed recording time in seconds on every video buffer
    var didUpdateSeconds: ((Double) -> ())?
    
    override init() {
        let documentsPath = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true)[0] as NSString
        self.videoOutputURL = URL(fileURLWithPath: documentsPath.appendingPathComponent("FlipBookVideo.mp4"))
        self.audioOutputURL = URL(fileURLWithPath: documentsPath.appendingPathComponent("FlipBookAudio.mp4"))
        super.init()
        self.removeURLsIfNeeded()
    }
    
    /// Deletes intermediate files from any previous recording; a missing file
    /// is not an error.
    func removeURLsIfNeeded() {
        do {
            try FileManager.default.removeItem(at: self.videoOutputURL)
            try FileManager.default.removeItem(at: self.audioOutputURL)
        } catch {}
    }
    
    /// Lazily builds the video and audio asset writers and their inputs.
    /// Called from `writeBuffer` on the first sample buffer.
    func setUpWriter() {
        do {
            try videoWriter = AVAssetWriter(outputURL: self.videoOutputURL, fileType: .mp4)
        } catch let writerError as NSError {
            print("Error opening video file \(writerError)")
        }
        let videoSettings: [String: Any]
        if #available(iOS 11.0, *) {
            videoSettings = [
                AVVideoCodecKey : AVVideoCodecType.h264,
                AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill,
                AVVideoWidthKey : UIScreen.main.bounds.width * UIScreen.main.scale,
                AVVideoHeightKey : UIScreen.main.bounds.height * UIScreen.main.scale
            ] as [String : Any]
        } else {
            videoSettings = [
                AVVideoCodecKey : AVVideoCodecH264,
                AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill,
                AVVideoWidthKey : UIScreen.main.bounds.width * UIScreen.main.scale,
                AVVideoHeightKey : UIScreen.main.bounds.height * UIScreen.main.scale
            ] as [String : Any]
        }
        
        videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
        if let videoInput = self.videoInput,
            let canAddInput = videoWriter?.canAdd(videoInput),
            canAddInput {
            videoWriter?.add(videoInput)
        } else {
            print("couldn't add video input")
        }
        
        do {
            try audioWriter = AVAssetWriter(outputURL: self.audioOutputURL, fileType: .mp4)
        } catch let writerError as NSError {
            print("Error opening audio file \(writerError)")
        }
        
        let audioOutputSettings = [
            AVNumberOfChannelsKey : 2,
            AVFormatIDKey : kAudioFormatMPEG4AAC_HE,
            AVSampleRateKey : 44100
        ] as [String : Any]
        
        // Two audio inputs: one for app audio, one for the microphone.
        appAudioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
        if let appAudioInput = self.appAudioInput,
            let canAddInput = audioWriter?.canAdd(appAudioInput),
            canAddInput {
            audioWriter?.add(appAudioInput)
        } else {
            print("couldn't add app audio input")
        }
        micAudioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
        if let micAudioInput = self.micAudioInput,
            let canAddInput = audioWriter?.canAdd(micAudioInput),
            canAddInput {
            audioWriter?.add(micAudioInput)
        } else {
            print("couldn't add mic audio input")
        }
    }
    
    /// Appends a ReplayKit sample buffer to the appropriate writer input,
    /// starting the writer session on the first buffer of each kind.
    func writeBuffer(_ cmSampleBuffer: CMSampleBuffer, rpSampleType: RPSampleBufferType) {
        if self.videoWriter == nil {
            self.setUpWriter()
        }
        guard let videoWriter = self.videoWriter,
            let audioWriter = self.audioWriter,
            !isPaused else {
                return
        }
        let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(cmSampleBuffer)
        switch rpSampleType {
        case .video:
            if videoWriter.status == .unknown {
                if videoWriter.startWriting() {
                    print("video writing started")
                    self.sessionStartTime = presentationTimeStamp
                    videoWriter.startSession(atSourceTime: presentationTimeStamp)
                }
            } else if videoWriter.status == .writing {
                if let isReadyForMoreMediaData = videoInput?.isReadyForMoreMediaData,
                    isReadyForMoreMediaData {
                    self.currentTime = CMTimeSubtract(presentationTimeStamp, self.sessionStartTime)
                    if let appendInput = videoInput?.append(cmSampleBuffer),
                        !appendInput {
                        print("couldn't write video buffer")
                    }
                }
            }
        case .audioApp:
            if audioWriter.status == .unknown {
                if audioWriter.startWriting() {
                    print("audio writing started")
                    audioWriter.startSession(atSourceTime: presentationTimeStamp)
                }
            } else if audioWriter.status == .writing {
                if let isReadyForMoreMediaData = appAudioInput?.isReadyForMoreMediaData,
                    isReadyForMoreMediaData {
                    if let appendInput = appAudioInput?.append(cmSampleBuffer),
                        !appendInput {
                        print("couldn't write app audio buffer")
                    }
                }
            }
        case .audioMic:
            if audioWriter.status == .unknown {
                if audioWriter.startWriting() {
                    print("audio writing started")
                    audioWriter.startSession(atSourceTime: presentationTimeStamp)
                }
            } else if audioWriter.status == .writing {
                if let isReadyForMoreMediaData = micAudioInput?.isReadyForMoreMediaData,
                    isReadyForMoreMediaData {
                    if let appendInput = micAudioInput?.append(cmSampleBuffer),
                        !appendInput {
                        print("couldn't write mic audio buffer")
                    }
                }
            }
        @unknown default:
            break
        }
    }
    
    /// Finishes both writers and merges the resulting files into a single
    /// video, reporting the merged file's URL (or an error) via `handler`.
    func finishWriting(completionHandler handler: @escaping (URL?, Error?) -> Void) {
        
        /// Merges the finished video and audio files into one .mp4 and calls `handler` exactly once.
        func merge() {
            let mergeComposition = AVMutableComposition()
            
            let videoAsset = AVAsset(url: self.videoOutputURL)
            let videoTracks = videoAsset.tracks(withMediaType: .video)
            // Don't force unwrap: a zero-length recording has no video track.
            guard let firstVideoTrack = videoTracks.first else {
                removeURLsIfNeeded()
                handler(nil, NSError(domain: "RPScreenWriter",
                                     code: -2,
                                     userInfo: [NSLocalizedDescriptionKey: "Recorded file contains no video track"]))
                return
            }
            let videoCompositionTrack = mergeComposition.addMutableTrack(withMediaType: .video,
                                                                         preferredTrackID: kCMPersistentTrackID_Invalid)
            do {
                try videoCompositionTrack?.insertTimeRange(CMTimeRange(start: .zero, end: videoAsset.duration),
                                                           of: firstVideoTrack,
                                                           at: .zero)
            } catch let error {
                removeURLsIfNeeded()
                handler(nil, error)
                // Bail out so `handler` can't be called a second time below.
                return
            }
            videoCompositionTrack?.preferredTransform = firstVideoTrack.preferredTransform
            
            let audioAsset = AVAsset(url: self.audioOutputURL)
            let audioTracks = audioAsset.tracks(withMediaType: .audio)
            for audioTrack in audioTracks {
                let audioCompositionTrack = mergeComposition.addMutableTrack(withMediaType: .audio,
                                                                             preferredTrackID: kCMPersistentTrackID_Invalid)
                do {
                    try audioCompositionTrack?.insertTimeRange(CMTimeRange(start: .zero, end: audioAsset.duration),
                                                               of: audioTrack,
                                                               at: .zero)
                } catch let error {
                    print(error)
                }
            }
            let documentsPath = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true)[0] as NSString
            let outputURL = URL(fileURLWithPath: documentsPath.appendingPathComponent("FlipBookMergedVideo.mp4"))
            do {
                try FileManager.default.removeItem(at: outputURL)
            } catch {}
            
            let exportSession = AVAssetExportSession(asset: mergeComposition,
                                                     presetName: AVAssetExportPresetHighestQuality)
            exportSession?.outputFileType = .mp4
            exportSession?.shouldOptimizeForNetworkUse = true
            exportSession?.outputURL = outputURL
            exportSession?.exportAsynchronously {
                if let error = exportSession?.error {
                    self.removeURLsIfNeeded()
                    handler(nil, error)
                } else {
                    self.removeURLsIfNeeded()
                    handler(exportSession?.outputURL, nil)
                }
            }
        }
        
        /// Called after either writer finishes; once both are done, resets
        /// state and kicks off the merge.
        func completion() {
            if self.isVideoWritingFinished && self.isAudioWritingFinished {
                self.isVideoWritingFinished = false
                self.isAudioWritingFinished = false
                self.isPaused = false
                self.videoInput = nil
                self.videoWriter = nil
                self.appAudioInput = nil
                self.micAudioInput = nil
                self.audioWriter = nil
                merge()
            }
        }
        
        // If no buffer was ever written there is nothing to finish; report an
        // error instead of silently never calling `handler`
        // (`videoWriter?.finishWriting` on nil would never fire its closure).
        guard videoWriter != nil else {
            handler(nil, NSError(domain: "RPScreenWriter",
                                 code: -1,
                                 userInfo: [NSLocalizedDescriptionKey: "Recording was never started"]))
            return
        }
        
        self.videoInput?.markAsFinished()
        self.videoWriter?.finishWriting {
            self.isVideoWritingFinished = true
            completion()
        }
        
        // Only finish the audio writer if it actually started writing
        // (status stays `.unknown` when no audio buffer ever arrived).
        if let audioWriter = self.audioWriter, audioWriter.status != .unknown {
            self.appAudioInput?.markAsFinished()
            self.micAudioInput?.markAsFinished()
            audioWriter.finishWriting {
                self.isAudioWritingFinished = true
                completion()
            }
        } else {
            self.isAudioWritingFinished = true
        }
    }
}
266 | #endif
267 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | 
2 |
3 | # FlipBook
4 |
5 | A swift package for recording views. Record a view and write to video, gif, or Live Photo. Also, create videos, gifs, and Live Photos from an array of images.
6 |
7 | ## Features
8 |
9 | - Record a view over time
10 | - Write recording to video
11 | - Write recording to .gif
12 | - Compose recording into a Live Photo
13 | - Create asset (video, .gif, Live Photo) from an array of images
14 |
15 | ## Requirements
16 |
17 | - iOS 10.0
18 | - tvOS 10.0
19 | - macOS 10.15
20 | - Xcode 11
21 | - Swift 5.1
22 |
23 | ## Installation
24 |
25 | Use Xcode's built in integration with Swift Package Manager.
26 |
27 | - Open Xcode
28 | - Click File -> Swift Packages -> Add Package Dependency
29 | - In modal that says "Choose Package Repository" paste https://github.com/bgayman/FlipBook.git and press return
30 | - Select version range you desire (default selection works well)
31 | - Xcode will add the package to your project
32 | - In any file where you want to use FlipBook add `import FlipBook`
33 |
34 | ## Usage
35 |
36 | The main object of the package is the `FlipBook` object. With it, you can record a view, create an asset from an array of images, and save a Live Photo to the users photo library. There are other specific writer objects (`FlipBookAssetWriter`, `FlipBookLivePhotoWriter`, and `FlipBookGIFWriter`) for more control over how assets are generated. But, by and large, `FlipBook` is the class that you'll use for easy view capture and easy asset creation from images.
37 |
38 | ### Recording a View
39 |
40 | Begin by creating an instance of `FlipBook` and setting the `assetType` to desired. You'll next start the recording by calling `start`, passing in the view you wish to record, an optional progress closure that will be called when asset creation progress has been made, and a completion closure that will return the asset when you're done. To stop the recording, call `stop()` which will trigger the asset creation to begin. For example:
41 |
42 | ```swift
43 | import UIKit
44 | import FlipBook
45 |
46 | class ViewController: UIViewController {
    // Hold a reference to `flipBook` otherwise it will go out of scope
48 | let flipBook = FlipBook()
49 | @IBOutlet weak var myAnimatingView: UIView!
50 |
51 | override func viewDidLoad() {
52 | super.viewDidLoad()
53 |
54 | // Set the assetType we want to create
55 | flipBook.assetType = .video
56 | }
57 |
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
60 |
        // Start recording when we appear, here we're recording the root view of `ViewController` but could record any arbitrary view
62 | flipBook.startRecording(view) { [weak self] result in
63 |
64 | // Switch on result
65 | switch result {
66 | case .success(let asset):
67 | // Switch on the asset that's returned
68 | switch asset {
69 | case .video(let url):
70 | // Do something with the video
71 |
72 | // We expect a video so do nothing for .livePhoto and .gif
73 | case .livePhoto, .gif:
74 | break
75 | }
76 | case .failure(let error):
77 | // Handle error in recording
78 | print(error)
79 | }
80 | }
81 |
82 | // In this example we want to record some animation, so after we start recording we kick off the animation
83 | animateMyAnimatingView {
84 | // The animation is done so stop recording
85 | self.flipBook.stop()
86 | }
87 | }
88 |
89 | private func animateMyAnimatingView(_ completion: () -> Void) { ... }
90 | }
91 | ```
92 | You can checkout a complete [iOS example](https://github.com/bgayman/FlipBookExampleiOS) and [macOS example](https://github.com/bgayman/FlipBookExamplemacOS). On macOS, remember to set `wantsLayer` to `true` as FlipBook depends on rendering `CALayer`s for snapshotting.
93 |
94 | ### Creating an Asset from Images
95 |
96 | Similarly, begin by creating an instance of `FlipBook` and setting the `assetType` desired. When creating an asset from Images it is also important to set the `preferredFramesPerSecond` as this will determine the overall duration of the asset. For best results, it is also important that all of the images you wish to include are the same size. Finally, you call `makeAsset` passing in the images you want to include, a progress closure, and a completion closure. For example:
97 |
98 | ```swift
99 | import UIKit
100 | import FlipBook
101 |
102 | class ViewController: UIViewController {
103 |
    // Hold a reference to `flipBook` otherwise it will go out of scope
105 | let flipBook = FlipBook()
106 |
107 | override func viewDidLoad() {
108 | super.viewDidLoad()
109 |
110 | // Set `assetType` to the asset type you desire
111 | flipBook.assetType = .video
112 |
113 | // Set `preferredFramesPerSecond` to the frame rate of the animation images
114 | flipBook.preferredFramesPerSecond = 24
115 |
116 | // Load the images. More realistically these would likely be images the user created or ones that were stored remotely.
117 | let images = (1 ... 48).compactMap { UIImage(named: "animationImage\($0)") }
118 |
119 | // Make the asset
120 | flipBook.makeAsset(from: images) { [weak self] (result) in
121 | switch result {
122 | case .success(let asset):
123 | // handle asset
124 | case .failure(let error):
125 | // handle error
126 | }
127 | }
128 | }
129 | }
130 | ```
131 |
132 | ## Advanced Usage
133 |
134 | FlipBook will work for most view animations and interactions however many `CoreAnimation` animations and effects will not work with the simple start and stop method described above. However, there is an optional `animationComposition` closure of type `((CALayer) -> Void)?` that will allow you to composite `CALayer` animations and effects with a FlipBook video using the `AVVideoCompositionCoreAnimationTool`. For example:
135 |
136 | ```swift
137 | import UIKit
138 | import FlipBook
139 |
140 | class ViewController: UIViewController {
    // Hold a reference to `flipBook` otherwise it will go out of scope
142 | let flipBook = FlipBook()
143 | @IBOutlet weak var myBackgroundView: UIView!
144 |
145 | override func viewDidLoad() {
146 | super.viewDidLoad()
147 |
148 | // Set the assetType we want to create
149 | flipBook.assetType = .video
150 | }
151 |
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
154 |
        // Get the scale of the screen that we're capturing on as we'll want to apply the scale when animating for the composition
156 | let scale = view.window?.screen.scale ?? 1.0
157 |
158 | // Start recording when we appear, here we're recording a view that will act as the background for our layer animation
159 | flipBook.startRecording(myBackgroundView, compositionAnimation: { layer in
160 |
161 | // create a gradient layer
162 | let gradientLayer = CAGradientLayer()
163 | gradientLayer.frame = layer.bounds
164 | gradientLayer.colors = [UIColor.systemRed.cgColor, UIColor.systemBlue.cgColor]
165 | gradientLayer.locations = [0.0, 1.0]
166 | gradientLayer.startPoint = CGPoint.zero
167 | gradientLayer.endPoint = CGPoint(x: 1.0, y: 1.0)
168 |
169 | // create a shape layer
170 | let shapeLayer = CAShapeLayer()
171 | shapeLayer.frame = layer.bounds
172 |
173 | // remember that layer composition is in pixels not points so scale up
174 | shapeLayer.lineWidth = 10.0 * scale
175 | shapeLayer.lineCap = .round
176 | shapeLayer.fillColor = UIColor.clear.cgColor
177 | shapeLayer.strokeColor = UIColor.black.cgColor
178 | shapeLayer.path = UIBezierPath(ovalIn: layer.bounds.insetBy(dx: 150 * scale, dy: 150 * scale)).cgPath
179 | shapeLayer.strokeEnd = 0.0
180 | gradientLayer.mask = shapeLayer
181 |
182 | layer.addSublayer(gradientLayer)
183 |
184 | let strokeAnimation = CABasicAnimation(keyPath: "strokeEnd")
185 | strokeAnimation.fromValue = 0.0
186 | strokeAnimation.toValue = 1.0
187 | strokeAnimation.duration = 8.0
188 |
189 | // must start the animation at `AVCoreAnimationBeginTimeAtZero`
190 | strokeAnimation.beginTime = AVCoreAnimationBeginTimeAtZero
191 | strokeAnimation.isRemovedOnCompletion = false
192 | strokeAnimation.fillMode = .forwards
193 | shapeLayer.add(strokeAnimation, forKey: "strokeAnimation")
194 |
195 | }, completion: { [weak self] result in
196 |
197 | // Switch on result
198 | switch result {
199 | case .success(let asset):
200 | // Switch on the asset that's returned
201 | switch asset {
202 | case .video(let url):
203 | // Do something with the video
204 |
205 | // We expect a video so do nothing for .livePhoto and .gif
206 | case .livePhoto, .gif:
207 | break
208 | }
209 | case .failure(let error):
210 | // Handle error in recording
211 | print(error)
212 | }
213 | })
214 |
215 | // After 9 seconds stop recording. We'll have 8 seconds of animation and 1 second of final state
216 | DispatchQueue.main.asyncAfter(deadline: .now() + 9.0) {
217 | self.flipBook.stop()
218 | }
219 | }
220 | }
221 |
222 | ```
223 |
224 | Generating a gif with the code above you should get something like:
225 |
226 | 
227 |
228 | Where the card view is the background view recorded by FlipBook and the gradient stroke is the layer composited on top of the recording. Remember that `AVVideoCompositionCoreAnimationTool` has an origin in the lower left, not top left like `UIKit`.
229 |
230 | ## When to Use
231 |
232 | FlipBook is a great way to capture view animations and interactions or to compose a video, gif, or Live Photo from a loose collection of images. It's great for targeting just a portion of the screen or window. And for creating not just videos, but also animated gifs and Live Photos.
233 |
However, it is likely not the best choice for recording long user sessions or when performance is being pushed to the limits. For those situations [`ReplayKit`](https://developer.apple.com/documentation/replaykit) is likely a better solution. Also if system audio is important, FlipBook does not currently capture any audio whatsoever while `ReplayKit` does.
235 |
236 | It is important to also be mindful of sensitive user information and data; don't record screens that might have information a user wouldn't want recorded.
237 |
238 | ## Known Issues
239 |
240 | - Memory pressure when creating GIFs. GIF creation with large images or large views at a high framerate will cause the device to quickly run out of memory.
- Not all `CALayer` animations and effects are captured (see "Advanced Usage").
242 | - `UIView.transition`s don't capture animation.
- On macOS make sure `NSView` has `wantsLayer` set to `true`
244 | - With SwiftUI, the use of `View` and `Image` might be confusing. FlipBook uses `View` and `Image` to typealias between AppKit and UIKit.
245 |
246 | ## Examples of Generated Assets
247 |
248 | You can find a gallery of generated assets [here](https://bradgayman.com/FlipBook/). You can also [read](https://bradgayman.com/blog/recordingAView/) more about the motivation for FlipBook.
249 |
250 | ## Contact
251 |
252 | Brad Gayman
253 |
254 | [@bgayman](https://twitter.com/bgayman)
255 |
256 | ## Attributions
257 |
258 | Inspiration taken from:
259 |
260 | - [Glimpse](https://github.com/wess/Glimpse)
261 | - [Live Photo Demo](https://github.com/genadyo/LivePhotoDemo)
262 | - [AVFoundation Tutorial: Adding Overlays and Animations to Videos](https://www.raywenderlich.com/6236502-avfoundation-tutorial-adding-overlays-and-animations-to-videos)
263 |
264 | ## License
265 |
266 | FlipBook is released under an MIT license.
267 |
268 | Copyright (c) 2020 Brad Gayman
269 |
270 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
271 |
272 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
273 |
274 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
275 |
--------------------------------------------------------------------------------
/Tests/FlipBookTests/FlipBookLivePhotoWriterUnitTests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FlipBookLivePhotoWriterUnitTests.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 1/26/20.
6 | //
7 |
8 | import XCTest
9 | import Photos
10 | @testable import FlipBook
11 | #if os(OSX)
12 | import AppKit
13 | #else
14 | import UIKit
15 | #endif
16 |
17 | // MARK: - FlipBookLivePhotoWriterUnitTests -
18 |
final class FlipBookLivePhotoWriterUnitTests: XCTestCase {
    
    /// Verifies the writer's initial state: the shared queue has the expected
    /// label, a cache directory exists, and no readers or writer are allocated yet.
    func testInit() {
        
        let flipBookLivePhotoWriter = FlipBookLivePhotoWriter()
        
        XCTAssertEqual(FlipBookLivePhotoWriter.queue.label, "com.FlipBook.live.photo.writer.queue")
        XCTAssertNotNil(flipBookLivePhotoWriter.cacheDirectory)
        XCTAssertNil(flipBookLivePhotoWriter.audioReader)
        XCTAssertNil(flipBookLivePhotoWriter.videoReader)
        XCTAssertNil(flipBookLivePhotoWriter.assetWriter)
    }
    
    /// Verifies the cache directory URL lives under "Caches", contains the
    /// "FlipBook-LivePhoto" component, and that the directory exists on disk.
    func testMakeCacheDirectoryURL() {
        let flipBookLivePhotoWriter = FlipBookLivePhotoWriter()
        let url = flipBookLivePhotoWriter.makeCacheDirectoryURL()
        
        XCTAssertNotNil(url)
        XCTAssertEqual(url?.absoluteString.contains("Caches"), true)
        XCTAssertEqual(url?.absoluteString.contains("FlipBook-LivePhoto"), true)
        XCTAssertTrue(FileManager.default.fileExists(atPath: url?.path ?? ""))
    }
    
    /// Verifies `clearCache()` removes the cache directory from disk.
    func testClearCache() {
        let flipBookLivePhotoWriter = FlipBookLivePhotoWriter()
        let url = flipBookLivePhotoWriter.makeCacheDirectoryURL()
        XCTAssertTrue(FileManager.default.fileExists(atPath: url?.path ?? ""))
        flipBookLivePhotoWriter.clearCache()
        XCTAssertFalse(FileManager.default.fileExists(atPath: url?.path ?? ""))
    }
    
    /// Verifies a single still key photo can be extracted from a generated video.
    func testMakeKeyPhoto() {
        let flipBookLivePhotoWriter = FlipBookLivePhotoWriter()
        var assetURL: URL?
        let expectation = self.expectation(description: "createAsset")
        
        makeVideo { (url) in
            guard let url = url else {
                XCTFail("Could not make movie")
                return
            }
            assetURL = url
            expectation.fulfill()
        }
        
        waitForExpectations(timeout: 30) { (error) in
            if let error = error {
                XCTFail(error.localizedDescription)
            }
        }
        
        guard let url = assetURL else {
            XCTFail("Could not make movie")
            return
        }
        do {
            guard let imageURL = try flipBookLivePhotoWriter.makeKeyPhoto(from: url) else {
                XCTFail("Could not make url")
                return
            }
            // The key photo should decode as a valid single-frame image source.
            let imageData = try Data(contentsOf: imageURL)
            guard let source = CGImageSourceCreateWithData(imageData as CFData, nil) else {
                XCTFail("Could not make source")
                return
            }
            XCTAssertEqual(CGImageSourceGetCount(source), 1)
            XCTAssertNotNil(CGImageSourceCreateImageAtIndex(source, 0, nil))
        } catch {
            XCTFail(error.localizedDescription)
        }
    }
    
    /// Verifies the QuickTime still-image-time metadata item used to mark the
    /// Live Photo's key frame within the paired video.
    func testMakeMetadataItemForStillImageTime() {
        let flipBookLivePhotoWriter = FlipBookLivePhotoWriter()
        let item = flipBookLivePhotoWriter.makeMetadataItemForStillImageTime()
        XCTAssertNotNil(item.key)
        XCTAssertEqual(item.key as? NSString, "com.apple.quicktime.still-image-time" as NSString)
        XCTAssertEqual(item.keySpace, AVMetadataKeySpace("mdta"))
        XCTAssertEqual(item.value as? NSNumber, 0 as NSNumber)
        XCTAssertEqual(item.dataType, "com.apple.metadata.datatype.int8")
    }
    
    /// Verifies the metadata adaptor's input carries metadata with a source format hint.
    func testMakeMetadataAdaptorForStillImageTime() {
        let flipBookLivePhotoWriter = FlipBookLivePhotoWriter()
        let adaptor = flipBookLivePhotoWriter.makeMetadataAdaptorForStillImageTime()
        let input = adaptor.assetWriterInput
        
        XCTAssertEqual(input.mediaType, .metadata)
        XCTAssertNotNil(input.sourceFormatHint)
    }
    
    /// Verifies the content-identifier metadata item that pairs the photo and
    /// video halves of a Live Photo round-trips the supplied identifier.
    func testMakeMetadataForAssetID() {
        let flipBookLivePhotoWriter = FlipBookLivePhotoWriter()
        let idString = UUID().uuidString
        let item = flipBookLivePhotoWriter.makeMetadata(for: idString)
        
        XCTAssertEqual(item.key as? NSString, "com.apple.quicktime.content.identifier" as NSString)
        XCTAssertEqual(item.keySpace, AVMetadataKeySpace("mdta"))
        XCTAssertEqual(item.value as? NSString, idString as NSString)
        XCTAssertEqual(item.dataType, "com.apple.metadata.datatype.UTF-8")
    }
    
    /// Verifies an asset identifier is written into the JPEG's Apple maker-note
    /// dictionary (key "17") when copying from `startURL` to `destURL`.
    func testAddAssetIDToImage() {
        let flipBookLivePhotoWriter = FlipBookLivePhotoWriter()
        let flipBookAssetWriter = FlipBookAssetWriter()
        let idString = UUID().uuidString
        
        guard let startURL = flipBookAssetWriter.makeFileOutputURL(fileName: "startURL.jpg"),
            let destURL = flipBookAssetWriter.makeFileOutputURL(fileName: "destURL.jpg") else {
                XCTFail("Could not create URLs")
                return
        }
        
        // Render a solid-color snapshot to use as the source JPEG.
        let image: Image
        #if os(OSX)
        let view: View = NSView(frame: NSRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        view.wantsLayer = true
        view.layer?.backgroundColor = NSColor.systemGray.cgColor
        guard let img = view.fb_makeViewSnapshot() else {
            XCTFail("Could not make image")
            return
        }
        image = img
        #else
        let view: View = UIView(frame: CGRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        view.backgroundColor = UIColor.systemGray
        guard let img = view.fb_makeViewSnapshot() else {
            XCTFail("Could not make image")
            return
        }
        image = img
        #endif
        guard let data = image.jpegRep else {
            XCTFail("Could not make Image data")
            return
        }
        do {
            try data.write(to: startURL)
        } catch {
            XCTFail(error.localizedDescription)
        }
        
        _ = flipBookLivePhotoWriter.add(idString, toImage: startURL, saveTo: destURL)
        guard let imageSource = CGImageSourceCreateWithURL(destURL as CFURL, nil),
            let imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [AnyHashable: Any] else {
                XCTFail("Could not get properties")
                return
        }
        let assetProps = imageProperties[kCGImagePropertyMakerAppleDictionary] as? [AnyHashable: Any]
        XCTAssertNotNil(assetProps?["17"])
        XCTAssertEqual(assetProps?["17"] as? String, idString)
    }
    
    /// Verifies the photo and video resources backing a generated Live Photo
    /// can be extracted, live in Caches, and are valid media on disk.
    func testExtractResources() {
        let flipBookLivePhotoWriter = FlipBookLivePhotoWriter()
        var livePhotoResources: LivePhotoResources?
        let expectation = self.expectation(description: "createAsset")
        
        // Make video
        makeVideo { (url) in
            guard let url = url else {
                XCTFail("Could not make movie")
                return
            }
            flipBookLivePhotoWriter.makeLivePhoto(from: nil, videoURL: url, progress: { _ in }) { (result) in
                switch result {
                case .success(let lp, _):
                    flipBookLivePhotoWriter.extractResources(from: lp) { (result) in
                        switch result {
                        case .success(let resources):
                            livePhotoResources = resources
                            // Completion should be delivered on the main thread.
                            XCTAssertTrue(Thread.isMainThread)
                            expectation.fulfill()
                        case .failure(let error):
                            XCTFail(error.localizedDescription)
                        }
                    }
                case .failure(let error):
                    XCTFail(error.localizedDescription)
                }
            }
        }
        
        waitForExpectations(timeout: 30) { (error) in
            if let error = error {
                XCTFail(error.localizedDescription)
            }
        }
        
        guard let resources = livePhotoResources else {
            XCTFail("livePhotoResources should not be nil")
            return
        }
        XCTAssertTrue(resources.videoURL.absoluteString.contains("Caches"))
        XCTAssertTrue(resources.imageURL.absoluteString.contains("Caches"))
        
        let asset = AVURLAsset(url: resources.videoURL)
        XCTAssertNotNil(asset.tracks(withMediaType: .video).first)
        do {
            let data = try Data(contentsOf: resources.imageURL)
            let image = Image(data: data)
            XCTAssertNotNil(image)
        } catch {
            XCTFail("Could not get data")
        }
    }
    
    /// Verifies end-to-end Live Photo creation: progress is reported, both
    /// callbacks run on the main thread, and the result has the expected pixel
    /// size and valid backing resources.
    func testMakeLivePhoto() {
        let flipBookLivePhotoWriter = FlipBookLivePhotoWriter()
        var livePhotoResources: LivePhotoResources?
        var livePhoto: PHLivePhoto?
        var prog: CGFloat = 0.0
        let expectation = self.expectation(description: "makeLivePhoto")
        makeVideo { (url) in
            guard let url = url else {
                XCTFail("Could not make movie")
                return
            }
            flipBookLivePhotoWriter.makeLivePhoto(from: nil, videoURL: url, progress: { p in
                prog = p
                XCTAssertTrue(Thread.isMainThread)
            }) { (result) in
                XCTAssertTrue(Thread.isMainThread)
                switch result {
                case let .success(lp, resources):
                    livePhoto = lp
                    livePhotoResources = resources
                    expectation.fulfill()
                case .failure(let error):
                    XCTFail(error.localizedDescription)
                }
            }
        }
        
        waitForExpectations(timeout: 30) { (error) in
            if let error = error {
                XCTFail(error.localizedDescription)
            }
        }
        
        XCTAssertNotEqual(prog, 0.0)
        guard let liveP = livePhoto, let resources = livePhotoResources else {
            XCTFail("values nil")
            return
        }
        XCTAssertEqual(liveP.size.width, 100 * View().scale)
        XCTAssertEqual(liveP.size.height, 100 * View().scale)
        XCTAssertTrue(resources.videoURL.absoluteString.contains("Caches"))
        XCTAssertTrue(resources.imageURL.absoluteString.contains("Caches"))
        let asset = AVURLAsset(url: resources.videoURL)
        XCTAssertNotNil(asset.tracks(withMediaType: .video).first)
        do {
            let data = try Data(contentsOf: resources.imageURL)
            let image = Image(data: data)
            XCTAssertNotNil(image)
        } catch {
            XCTFail("Could not get data")
        }
    }
    
    static var allTests = [
        ("testInit", testInit),
        ("testMakeCacheDirectoryURL", testMakeCacheDirectoryURL),
        ("testClearCache", testClearCache),
        ("testMakeKeyPhoto", testMakeKeyPhoto),
        ("testMakeMetadataItemForStillImageTime", testMakeMetadataItemForStillImageTime),
        ("testMakeMetadataAdaptorForStillImageTime", testMakeMetadataAdaptorForStillImageTime),
        ("testMakeMetadataForAssetID", testMakeMetadataForAssetID),
        ("testExtractResources", testExtractResources),
        ("testMakeLivePhoto", testMakeLivePhoto)
    ]
}
291 |
292 | // MARK: - FlipBookLivePhotoWriterUnitTests + MakeVideo -
293 |
extension FlipBookLivePhotoWriterUnitTests {
    
    /// Renders three solid-color snapshots of a 100x100 view and writes them
    /// out as a video asset.
    /// - Parameter completion: called with the video's `URL`, or `nil` if
    ///   snapshotting or asset creation failed.
    func makeVideo(completion: @escaping (URL?) -> Void) {
        let flipBookAssetWriter = FlipBookAssetWriter()
        flipBookAssetWriter.size = CGSize(width: 100.0 * View().scale, height: 100.0 * View().scale)
        
        // Snapshot the same view with three different background colors so the
        // resulting video has distinct frames. The per-platform branches only
        // differ in view setup and how the color is applied.
        var images: [Image] = []
        #if os(OSX)
        let view: View = NSView(frame: NSRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        view.wantsLayer = true
        for color in [NSColor.systemGray, NSColor.systemBlue, NSColor.systemRed] {
            view.layer?.backgroundColor = color.cgColor
            guard let img = view.fb_makeViewSnapshot() else {
                completion(nil)
                return
            }
            images.append(img)
        }
        #else
        let view: View = UIView(frame: CGRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        for color in [UIColor.systemGray, UIColor.systemBlue, UIColor.systemRed] {
            view.backgroundColor = color
            guard let img = view.fb_makeViewSnapshot() else {
                completion(nil)
                return
            }
            images.append(img)
        }
        #endif
        
        flipBookAssetWriter.createAsset(from: images, progress: { (_) in }, completion: { result in
            switch result {
            case .success(let asset):
                switch asset {
                case .video(let url):
                    completion(url)
                case .livePhoto, .gif:
                    // Only `.video` is expected from the writer's default asset type.
                    completion(nil)
                }
            case .failure:
                completion(nil)
            }
        })
    }
}
363 |
--------------------------------------------------------------------------------
/Sources/FlipBook/FlipBook.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FlipBook.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 1/24/20.
6 | //
7 |
8 | #if os(OSX)
9 | import AppKit
10 | #else
11 | import UIKit
12 | import ReplayKit
13 | #endif
14 |
15 | // MARK: - FlipBook -
16 |
17 | /// Class that records a view
18 | public final class FlipBook: NSObject {
19 |
    // MARK: - Types -
    
    /// Enum that represents the errors that `FlipBook` can throw
    public enum FlipBookError: String, Error {
        
        /// Recording is already in progress. Stop current recording before beginning another.
        case recordingInProgress
        
        /// Recording is not available using `ReplayKit` with `assetType == .gif`.
        /// (The case name keeps its original spelling for source compatibility.)
        case recordingNotAvailible
    }
    
    // MARK: - Public Properties
    
    /// The number of frames per second targeted
    /// **Default** 60 frames per second on macOS and to the `maxFramesPerSecond` of the main screen of the device on iOS
    /// - Will be ignored if `shouldUseReplayKit` is set to true
    public var preferredFramesPerSecond: Int = Screen.maxFramesPerSecond
    
    /// The amount images in animated gifs should be scaled by. Full-size gif images can be memory intensive. **Default** `0.5`
    public var gifImageScale: Float = 0.5
    
    /// The asset type to be created
    /// **Default** `.video`
    public var assetType: FlipBookAssetWriter.AssetType = .video
    
    /// Boolean that when set to `true` will cause the entire screen to be captured using `ReplayKit` on iOS 11.0+ only and will otherwise be ignored
    public var shouldUseReplayKit: Bool = false
    
    #if os(iOS)
    
    /// The ReplayKit screen recorder used when `shouldUseReplayKit` is set to `true`.
    /// Camera and microphone capture are disabled; FlipBook records video frames only.
    public lazy var screenRecorder: RPScreenRecorder = {
        let recorder = RPScreenRecorder.shared()
        recorder.isCameraEnabled = false
        recorder.isMicrophoneEnabled = false
        return recorder
    }()
    #endif
    
    // MARK: - Internal Properties -
    
    /// Asset writer used to convert captured frames into the requested asset
    internal let writer = FlipBookAssetWriter()
    
    /// Closure to be called when the asset writing has progressed
    internal var onProgress: ((CGFloat) -> Void)?
    
    /// Closure to be called when compositing video with `CAAnimation`s
    internal var compositionAnimation: ((CALayer) -> Void)?
    
    /// Closure to be called when the video asset stops writing
    internal var onCompletion: ((Result) -> Void)?
    
    /// View that is currently being recorded
    internal var sourceView: View?
    
    #if os(OSX)
    
    /// Queue for capturing snapshots for view; non-nil while a recording is in progress
    internal var queue: DispatchQueue?
    
    /// Timer source for capturing snapshots for view
    internal var source: DispatchSourceTimer?
    #else
    
    /// Display link that drives view snapshotting; non-nil while a recording is in progress
    internal var displayLink: CADisplayLink?
    #endif
89 |
90 | // MARK: - Public Methods -
91 |
92 | /// Starts recording a view
93 | /// - Parameters:
94 | /// - view: view to be recorded. This value is ignored if `shouldUseReplayKit` is set to `true`
95 | /// - compositionAnimation: optional closure for adding `AVVideoCompositionCoreAnimationTool` composition animations. Add `CALayer`s as sublayers to the passed in `CALayer`. Then trigger animations with a `beginTime` of `AVCoreAnimationBeginTimeAtZero`. *Reminder that `CALayer` origin for `AVVideoCompositionCoreAnimationTool` is lower left for `UIKit` setting `isGeometryFlipped = true is suggested* **Default is `nil`**
96 | /// - progress: optional closure that is called with a `CGFloat` representing the progress of video generation. `CGFloat` is in the range `(0.0 ... 1.0)`. `progress` is called from the main thread. **Default is `nil`**
97 | /// - completion: closure that is called when the video has been created with the `URL` for the created video. `completion` will be called from the main thread
98 | public func startRecording(_ view: View,
99 | compositionAnimation: ((CALayer) -> Void)? = nil,
100 | progress: ((CGFloat) -> Void)? = nil,
101 | completion: @escaping (Result) -> Void) {
102 | if shouldUseReplayKit {
103 | #if os(macOS)
104 | shouldUseReplayKit = false
105 | startRecording(view, compositionAnimation: compositionAnimation, progress: progress, completion: completion)
106 | #else
107 | guard assetType != .gif else {
108 | completion(.failure(FlipBookError.recordingNotAvailible))
109 | return
110 | }
111 | onProgress = progress
112 | onCompletion = completion
113 | writer.gifImageScale = gifImageScale
114 | writer.preferredFramesPerSecond = preferredFramesPerSecond
115 | self.compositionAnimation = compositionAnimation
116 | if #available(iOS 11.0, *) {
117 | screenRecorder.startCapture(handler: { [weak self] (buffer, type, error) in
118 | if let error = error {
119 | print(error)
120 | }
121 | self?.writer.append(buffer, type: type)
122 | }, completionHandler: { error in
123 | guard let error = error else {
124 | return
125 | }
126 | print(error)
127 | })
128 | } else {
129 | shouldUseReplayKit = false
130 | startRecording(view, compositionAnimation: compositionAnimation, progress: progress, completion: completion)
131 | }
132 | #endif
133 | } else {
134 | #if os(OSX)
135 | guard queue == nil else {
136 | completion(.failure(FlipBookError.recordingInProgress))
137 | return
138 | }
139 | #else
140 | guard displayLink == nil else {
141 | completion(.failure(FlipBookError.recordingInProgress))
142 | return
143 | }
144 | #endif
145 | sourceView = view
146 | onProgress = progress
147 | onCompletion = completion
148 | self.compositionAnimation = compositionAnimation
149 | writer.size = CGSize(width: view.bounds.size.width * view.scale, height: view.bounds.size.height * view.scale)
150 | writer.startDate = Date()
151 | writer.gifImageScale = gifImageScale
152 | writer.preferredFramesPerSecond = preferredFramesPerSecond
153 |
154 | #if os(OSX)
155 | queue = DispatchQueue.global()
156 | source = DispatchSource.makeTimerSource(queue: queue)
157 | source?.schedule(deadline: .now(), repeating: 1.0 / Double(self.preferredFramesPerSecond))
158 | source?.setEventHandler { [weak self] in
159 | guard let self = self else {
160 | return
161 | }
162 | DispatchQueue.main.async {
163 | self.tick()
164 | }
165 | }
166 | source?.resume()
167 | #else
168 | displayLink = CADisplayLink(target: self, selector: #selector(tick(_:)))
169 | if #available(iOS 10.0, *) {
170 | displayLink?.preferredFramesPerSecond = preferredFramesPerSecond
171 | }
172 | displayLink?.add(to: RunLoop.main, forMode: .common)
173 | #endif
174 | }
175 | }
176 |
    /// Stops recording of view and begins writing frames to video
    ///
    /// Two capture paths converge here:
    /// - ReplayKit capture (iOS 11+): screen recording is stopped and the captured
    ///   movie is finalized via `writer.endLiveCapture`.
    /// - Snapshot capture: the timer (macOS) or display link (iOS) is torn down and
    ///   the buffered frames are written via `writer.createVideoFromCapturedFrames`.
    /// On macOS — and on iOS < 11, where ReplayKit capture is unavailable —
    /// `shouldUseReplayKit` is cleared and `stop()` recurses exactly once into the
    /// snapshot path. All callbacks (`onProgress`, `onCompletion`) are delivered on
    /// the main thread, and completion state is reset after the asset is produced.
    public func stop() {
        if shouldUseReplayKit {
            #if os(macOS)
            // ReplayKit path is not supported on macOS; fall through to the snapshot path.
            shouldUseReplayKit = false
            stop()
            #else
            if #available(iOS 11.0, *) {
                screenRecorder.stopCapture { [weak self] (error) in
                    guard let self = self else {
                        return
                    }
                    if let error = error {
                        // Stopping capture failed: report the error and clear all callback state.
                        self.onProgress = nil
                        self.compositionAnimation = nil
                        self.onCompletion?(.failure(error))
                        self.onCompletion = nil
                    } else {
                        // Wrap the stored animation closure weakly so `endLiveCapture`
                        // does not retain `self` through it.
                        let composition: ((CALayer) -> Void)?
                        if self.compositionAnimation != nil {
                            composition = { [weak self] layer in self?.compositionAnimation?(layer) }
                        } else {
                            composition = nil
                        }
                        self.writer.endLiveCapture(assetType: self.assetType,
                                                   compositionAnimation: composition,
                                                   progress: { [weak self] prog in DispatchQueue.main.async { self?.onProgress?(prog) }
                        }, completion: { [weak self] result in
                            guard let self = self else {
                                return
                            }
                            DispatchQueue.main.async {
                                // Reset the writer and callback state so the recorder can be reused.
                                self.writer.startDate = nil
                                self.writer.endDate = nil
                                self.onProgress = nil
                                self.compositionAnimation = nil
                                self.onCompletion?(result)
                                self.onCompletion = nil
                            }
                        })
                    }
                }
            } else {
                // ReplayKit capture requires iOS 11; fall through to the snapshot path.
                shouldUseReplayKit = false
                stop()
            }
            #endif
        } else {
            #if os(OSX)
            // Stop the dispatch timer that drove `tick()`.
            source?.cancel()
            queue = nil
            #else
            // No display link means recording never started (or already stopped).
            guard let displayLink = self.displayLink else {
                return
            }
            displayLink.invalidate()
            self.displayLink = nil
            #endif

            // Mark the end of capture and release the recorded view before encoding.
            writer.endDate = Date()
            sourceView = nil

            writer.createVideoFromCapturedFrames(assetType: assetType,
                                                 compositionAnimation: compositionAnimation,
                                                 progress: { [weak self] (prog) in
                                                    guard let self = self else {
                                                        return
                                                    }
                                                    DispatchQueue.main.async {
                                                        self.onProgress?(prog)
                                                    }
            }, completion: { [weak self] result in
                guard let self = self else {
                    return
                }
                DispatchQueue.main.async {
                    // Reset the writer and callback state so the recorder can be reused.
                    self.writer.startDate = nil
                    self.writer.endDate = nil
                    self.onProgress = nil
                    self.compositionAnimation = nil
                    self.onCompletion?(result)
                    self.onCompletion = nil
                }
            })
        }
    }
263 |
264 | /// Makes an asset of type `assetType` from a an array of images with a framerate equal to `preferredFramesPerSecond`. The asset will have a size equal to the first image's size.
265 | /// - Parameters:
266 | /// - images: The array of images
267 | /// - compositionAnimation: optional closure for adding `AVVideoCompositionCoreAnimationTool` composition animations. Add `CALayer`s as sublayers to the passed in `CALayer`. Then trigger animations with a `beginTime` of `AVCoreAnimationBeginTimeAtZero`. *Reminder that `CALayer` origin for `AVVideoCompositionCoreAnimationTool` is lower left for `UIKit` setting `isGeometryFlipped = true is suggested* **Default is `nil`**
268 | /// - progress: Closure called when progress is made. Called on the main thread. **Default is `nil`**
269 | /// - completion: Closure called when the asset has finished being created. Called on the main thread.
270 | public func makeAsset(from images: [Image],
271 | compositionAnimation: ((CALayer) -> Void)? = nil,
272 | progress: ((CGFloat) -> Void)? = nil,
273 | completion: @escaping (Result) -> Void) {
274 | writer.frames = images
275 | writer.preferredFramesPerSecond = preferredFramesPerSecond
276 | let firstCGImage = images.first?.cgI
277 | writer.size = CGSize(width: firstCGImage?.width ?? 0, height: firstCGImage?.height ?? 0)
278 | writer.createVideoFromCapturedFrames(assetType: assetType,
279 | compositionAnimation: compositionAnimation,
280 | progress: { (prog) in
281 | DispatchQueue.main.async { progress?(prog) }
282 | }, completion: { result in
283 | DispatchQueue.main.async { completion(result) }
284 | })
285 | }
286 |
    /// Saves a `LivePhotoResources` to photo library as a Live Photo. **You must request permission to modify photo library before attempting to save as well as add "Privacy - Photo Library Usage Description" key to your app's info.plist**
    /// - Parameters:
    ///   - resources: The resources of the Live Photo to be saved
    ///   - completion: Closure called after the resources have been saved. Called on the main thread.
    public func saveToLibrary(_ resources: LivePhotoResources, completion: @escaping (Result) -> Void) {
        // Thin convenience wrapper: delegates directly to the writer's Live Photo helper.
        writer.livePhotoWriter.saveToLibrary(resources, completion: completion)
    }
294 |
    /// Determines the frame rate of a gif by looking at the `delay` of the first image
    /// - Parameter gifURL: The file `URL` where the gif is located.
    /// - Returns: The frame rate as an `Int`, or `nil` if the data at the url was invalid
    public func makeFrameRate(_ gifURL: URL) -> Int? {
        // Delegates to the writer's gif helper; optional chaining also yields nil
        // when no `gifWriter` is available.
        return writer.gifWriter?.makeFrameRate(gifURL)
    }
301 |
    /// Creates an array of `Image`s that represent the frames of a gif
    /// - Parameter gifURL: The file `URL` where the gif is located.
    /// - Returns: The frames as an array of `Image`s, or `nil` if the data at the url was invalid
    public func makeImages(_ gifURL: URL) -> [Image]? {
        // Delegates to the writer's gif helper; optional chaining also yields nil
        // when no `gifWriter` is available.
        return writer.gifWriter?.makeImages(gifURL)
    }
308 |
309 | // MARK: - Internal Methods -
310 |
311 | #if os(OSX)
312 | internal func tick() {
313 | guard let viewImage = sourceView?.fb_makeViewSnapshot() else {
314 | return
315 | }
316 | writer.writeFrame(viewImage)
317 | }
318 |
319 | #else
320 |
321 | @objc internal func tick(_ displayLink: CADisplayLink) {
322 | guard let viewImage = sourceView?.fb_makeViewSnapshot() else {
323 | return
324 | }
325 | writer.writeFrame(viewImage)
326 | }
327 | #endif
328 | }
329 |
--------------------------------------------------------------------------------
/Tests/FlipBookTests/FlipBookAssetWriterUnitTests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FlipBookAssetWriterUnitTests.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 1/25/20.
6 | //
7 |
8 | import XCTest
9 | import Photos
10 | @testable import FlipBook
11 | #if os(OSX)
12 | import AppKit
13 | #else
14 | import UIKit
15 | #endif
16 |
final class FlipBookAssetWriterUnitTests: XCTestCase {
    
    // MARK: - Tests -
    
    /// Verifies the writer's default configuration after initialization.
    func testInit() {
        
        let flipBookAssetWriter = FlipBookAssetWriter()
        
        XCTAssertEqual(flipBookAssetWriter.preferredFramesPerSecond, 60)
        XCTAssertNotNil(flipBookAssetWriter.fileOutputURL)
        XCTAssertNil(flipBookAssetWriter.startDate)
        XCTAssertNil(flipBookAssetWriter.endDate)
        XCTAssertEqual(flipBookAssetWriter.gifImageScale, 0.5)
        XCTAssertTrue(flipBookAssetWriter.frames.isEmpty)
        XCTAssertEqual(flipBookAssetWriter.queue.label, "com.FlipBook.asset.writer.queue")
        XCTAssertNil(flipBookAssetWriter.videoInput)
        XCTAssertNil(flipBookAssetWriter.adapter)
        XCTAssertNotNil(flipBookAssetWriter.gifWriter)
    }
    
    /// Writing a single frame should append it to the writer's frame buffer.
    func testWriteToFrame() {
        let flipBookAssetWriter = FlipBookAssetWriter()
        guard let image = makeSnapshotImages(count: 1)?.first else {
            XCTFail("Could not make image")
            return
        }
        flipBookAssetWriter.writeFrame(image)
        XCTAssertEqual(flipBookAssetWriter.frames.count, 1)
    }
    
    /// Creates a video, a gif, and a Live Photo from the same three frames and
    /// verifies that each asset type is produced and that progress is reported.
    func testCreateAssetFromImages() {
        let flipBookAssetWriter = FlipBookAssetWriter()
        flipBookAssetWriter.size = CGSize(width: 100.0, height: 100.0)
        
        guard let images = makeSnapshotImages(count: 3) else {
            XCTFail("Could not make image")
            return
        }
        
        // Test video
        var prog: CGFloat = 0.0
        var assetURL: URL? = nil
        let expectation = self.expectation(description: "createAsset")
        
        flipBookAssetWriter.createAsset(from: images, progress: { (p) in
            prog = p
        }, completion: { result in
            switch result {
                
            case .success(let asset):
                switch asset {
                case .video(let url):
                    assetURL = url
                    expectation.fulfill()
                case .livePhoto, .gif:
                    XCTFail("Wrong asset type")
                }
            case .failure(let error):
                XCTFail(error.localizedDescription)
            }
        })
        
        waitForExpectations(timeout: 30) { (error) in
            if let error = error {
                XCTFail(error.localizedDescription)
            }
        }
        XCTAssertNotEqual(prog, 0.0)
        XCTAssertNotNil(assetURL)
        
        // Test GIF
        var prog1: CGFloat = 0.0
        var assetURLGIF: URL? = nil
        let expectationGIF = self.expectation(description: "createAssetGif")
        
        flipBookAssetWriter.createAsset(from: images, assetType: .gif, progress: { (p) in
            prog1 = p
        }, completion: { result in
            switch result {
                
            case .success(let asset):
                switch asset {
                case .gif(let url):
                    assetURLGIF = url
                    expectationGIF.fulfill()
                case .livePhoto, .video:
                    XCTFail("Wrong asset type")
                }
            case .failure(let error):
                XCTFail(error.localizedDescription)
            }
        })
        
        waitForExpectations(timeout: 30) { (error) in
            if let error = error {
                XCTFail(error.localizedDescription)
            }
        }
        XCTAssertNotEqual(prog1, 0.0)
        XCTAssertNotNil(assetURLGIF)
        
        // Test Live Photo
        var prog2: CGFloat = 0.0
        var livePhoto: PHLivePhoto? = nil
        let expectationLivePhoto = self.expectation(description: "createAssetLivePhoto")
        
        flipBookAssetWriter.createAsset(from: images, assetType: .livePhoto(nil), progress: { (p) in
            prog2 = p
        }, completion: { result in
            switch result {
                
            case .success(let asset):
                switch asset {
                case let .livePhoto(lp, _):
                    livePhoto = lp
                    expectationLivePhoto.fulfill()
                case .gif, .video:
                    XCTFail("Wrong asset type")
                }
            case .failure(let error):
                XCTFail(error.localizedDescription)
            }
        })
        
        waitForExpectations(timeout: 30) { (error) in
            if let error = error {
                XCTFail(error.localizedDescription)
            }
        }
        XCTAssertNotEqual(prog2, 0.0)
        XCTAssertNotNil(livePhoto)
    }
    
    /// The default output URL is "FlipBook.mov" in Caches; custom file names are honored.
    func testMakeFileOutputURL() {
        let flipBookAssetWriter = FlipBookAssetWriter()
        let urlString = flipBookAssetWriter.makeFileOutputURL()?.absoluteString
        XCTAssertEqual(urlString?.contains("FlipBook.mov"), true)
        XCTAssertEqual(urlString?.contains("Caches"), true)
        
        let urlString1 = flipBookAssetWriter.makeFileOutputURL(fileName: "myGreat.gif")?.absoluteString
        XCTAssertEqual(urlString1?.contains("myGreat.gif"), true)
        XCTAssertEqual(urlString1?.contains("Caches"), true)
    }
    
    /// Making a writer should configure the video input and pixel buffer adapter.
    func testMakeWriter() {
        let flipBookAssetWriter = FlipBookAssetWriter()
        flipBookAssetWriter.size = CGSize(width: 100.0, height: 100.0)
        do {
            let writer = try flipBookAssetWriter.makeWriter()
            XCTAssertNotNil(flipBookAssetWriter.videoInput)
            XCTAssertNotNil(flipBookAssetWriter.adapter)
            XCTAssertEqual(writer.inputs.contains(flipBookAssetWriter.videoInput!), true)
        } catch {
            XCTFail(error.localizedDescription)
        }
    }
    
    /// Frame rate is derived from capture dates when available,
    /// otherwise from `preferredFramesPerSecond`.
    func testMakeFrameRate() {
        let flipBookAssetWriter = FlipBookAssetWriter()
        flipBookAssetWriter.frames = Array(repeating: nil, count: 180)
        flipBookAssetWriter.startDate = Date(timeIntervalSinceNow: -3)
        flipBookAssetWriter.endDate = Date()
        
        // 180 frames captured over ~3 seconds => 60 fps.
        let frameRate = flipBookAssetWriter.makeFrameRate()
        XCTAssertEqual(frameRate, 60)
        
        let flipBookAssetWriter1 = FlipBookAssetWriter()
        flipBookAssetWriter1.frames = Array(repeating: nil, count: 180)
        flipBookAssetWriter1.preferredFramesPerSecond = 20
        
        // Without capture dates the preferred frame rate is used.
        let frameRate1 = flipBookAssetWriter1.makeFrameRate()
        XCTAssertEqual(frameRate1, 20)
    }
    
    /// A snapshot image's backing `CGImage` should convert to a pixel buffer.
    func testMakePixelBuffer() {
        guard let image = makeSnapshotImages(count: 1)?.first else {
            XCTFail("Could not make image")
            return
        }
        
        let pixelBuffer = image.cgI?.makePixelBuffer()
        XCTAssertNotNil(pixelBuffer)
    }
    
    /// Extracting frames from a generated three-frame video should yield three images.
    func testMakeFrames() {
        let assetWriter = FlipBookAssetWriter()
        let expectation = self.expectation(description: "makeVideo")
        var progress: CGFloat = 0.0
        var frames = [CGImage]()
        makeVideo { (url) in
            guard let url = url else {
                XCTFail("Could not make movie")
                return
            }
            assetWriter.makeFrames(from: url, progress: { (prog) in
                progress = prog
            }, completion: { images in
                frames = images
                expectation.fulfill()
            })
        }
        
        waitForExpectations(timeout: 30) { (error) in
            if let error = error {
                XCTFail(error.localizedDescription)
            }
        }
        
        XCTAssertNotEqual(progress, 0.0)
        XCTAssertFalse(frames.isEmpty)
        XCTAssertEqual(frames.count, 3)
    }
    
    /// The `Asset` convenience accessors should expose only the associated values of their case.
    func testAssetAssociatedValueAccessors() {
        guard let url = URL(string: "http://apple.com") else {
            XCTFail("Invalid url")
            return
        }
        
        var asset: FlipBookAssetWriter.Asset = .video(url)
        XCTAssertEqual(asset.assetURL, url)
        XCTAssertNil(asset.livePhoto)
        XCTAssertNil(asset.livePhotoResources)
        
        asset = .gif(url)
        XCTAssertEqual(asset.assetURL, url)
        XCTAssertNil(asset.livePhoto)
        XCTAssertNil(asset.livePhotoResources)
        
        let livePhotoWriter = FlipBookLivePhotoWriter()
        let expectation = self.expectation(description: "makeVideo")
        makeVideo { (url) in
            guard let url = url else {
                XCTFail("Could not make movie")
                return
            }
            livePhotoWriter.makeLivePhoto(from: nil, videoURL: url, progress: nil) { (result) in
                switch result {
                case let .success(livePhoto, resources):
                    asset = .livePhoto(livePhoto, resources)
                    expectation.fulfill()
                case .failure(let error):
                    XCTFail("Could not make Live Photo \(error)")
                }
            }
        }
        
        waitForExpectations(timeout: 30) { (error) in
            if let error = error {
                XCTFail(error.localizedDescription)
            }
        }
        
        if case let .livePhoto(livePhoto, resources) = asset {
            XCTAssertNil(asset.assetURL)
            XCTAssertEqual(asset.livePhoto, livePhoto)
            XCTAssertEqual(asset.livePhotoResources, resources)
        } else {
            XCTFail("Wrong asset type")
        }
    }
    
    static var allTests = [
        ("testInit", testInit),
        ("testWriteToFrame", testWriteToFrame),
        ("testCreateAssetFromImages", testCreateAssetFromImages),
        ("testMakeFileOutputURL", testMakeFileOutputURL),
        ("testMakeWriter", testMakeWriter),
        ("testMakeFrameRate", testMakeFrameRate),
        ("testMakePixelBuffer", testMakePixelBuffer),
        ("testMakeFrames", testMakeFrames),
        ("testAssetAssociatedValueAccessors", testAssetAssociatedValueAccessors)
    ]
    
    // MARK: - Helpers -
    
    /// Renders `count` 100×100 solid-color views and snapshots each one,
    /// cycling through gray, blue, and red. Replaces the snapshot boilerplate
    /// previously copy-pasted into several tests.
    /// - Parameter count: Number of snapshot images to produce.
    /// - Returns: The snapshot images, or `nil` if any snapshot fails.
    private func makeSnapshotImages(count: Int) -> [Image]? {
        var images = [Image]()
        #if os(OSX)
        let view = NSView(frame: NSRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        view.wantsLayer = true
        let colors: [NSColor] = [.systemGray, .systemBlue, .systemRed]
        for index in 0 ..< count {
            view.layer?.backgroundColor = colors[index % colors.count].cgColor
            guard let image = view.fb_makeViewSnapshot() else {
                return nil
            }
            images.append(image)
        }
        #else
        let view = UIView(frame: CGRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        let colors: [UIColor] = [.systemGray, .systemBlue, .systemRed]
        for index in 0 ..< count {
            view.backgroundColor = colors[index % colors.count]
            guard let image = view.fb_makeViewSnapshot() else {
                return nil
            }
            images.append(image)
        }
        #endif
        return images
    }
}
361 |
362 | // MARK: - FlipBookAssetWriterUnitTests + MakeVideo -
363 |
extension FlipBookAssetWriterUnitTests {
    
    /// Renders three solid-color snapshots and writes them out as a video file.
    /// - Parameter completion: Called with the file `URL` of the generated video,
    ///   or `nil` if any snapshot or the asset creation fails.
    func makeVideo(completion: @escaping (URL?) -> Void) {
        let assetWriter = FlipBookAssetWriter()
        assetWriter.size = CGSize(width: 100.0 * View().scale, height: 100.0 * View().scale)
        
        // Build three frames by recoloring one 100×100 view and snapshotting it each time.
        var frames = [Image]()
        #if os(OSX)
        let view = NSView(frame: NSRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        view.wantsLayer = true
        for color in [NSColor.systemGray, .systemBlue, .systemRed] {
            view.layer?.backgroundColor = color.cgColor
            guard let frame = view.fb_makeViewSnapshot() else {
                completion(nil)
                return
            }
            frames.append(frame)
        }
        #else
        let view = UIView(frame: CGRect(origin: .zero, size: CGSize(width: 100.0, height: 100.0)))
        for color in [UIColor.systemGray, .systemBlue, .systemRed] {
            view.backgroundColor = color
            guard let frame = view.fb_makeViewSnapshot() else {
                completion(nil)
                return
            }
            frames.append(frame)
        }
        #endif
        
        assetWriter.createAsset(from: frames, progress: { (_) in }, completion: { result in
            switch result {
            case .success(let asset):
                switch asset {
                case .video(let url):
                    completion(url)
                case .livePhoto, .gif:
                    completion(nil)
                }
            case .failure:
                completion(nil)
            }
        })
    }
}
433 |
--------------------------------------------------------------------------------
/Sources/FlipBook/FlipBookLivePhotoWriter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FlipBookLivePhotoWriter.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 1/25/20.
6 | //
7 |
8 | import Foundation
9 | import AVFoundation
10 | #if !os(macOS)
11 | import MobileCoreServices
12 | #else
13 | import CoreServices
14 | #endif
15 | import Photos
16 |
17 | // MARK: - LivePhotoResources -
18 |
/// Struct that represents the resources that comprise a Live Photo
///
/// Members are `public` because this type is vended through public API
/// (`FlipBookLivePhotoWriter.makeLivePhoto` / `saveToLibrary`); with the previous
/// internal access, clients outside the module could neither read the URLs nor
/// construct a value to save.
public struct LivePhotoResources: Equatable, Hashable {
    
    /// The url of the still image of a Live Photo
    public let imageURL: URL
    
    /// The url of the video of a Live Photo
    public let videoURL: URL
    
    /// Creates resources from the file URLs of a Live Photo's still image and video.
    /// - Parameters:
    ///   - imageURL: The file `URL` of the still image
    ///   - videoURL: The file `URL` of the video
    public init(imageURL: URL, videoURL: URL) {
        self.imageURL = imageURL
        self.videoURL = videoURL
    }
}
28 |
29 | // MARK: - FlipBookLivePhotoWriter -
30 |
31 | /// Class that performs common tasks with Live Photos
32 | public final class FlipBookLivePhotoWriter: NSObject {
33 |
34 | // MARK: - Types -
35 |
    /// Errors that `FlipBookLivePhotoWriter` can throw
    public enum FlipBookLivePhotoWriterError: Error {
        
        /// Unable to write to the caches directory
        case couldNotWriteToDirectory
        
        /// Could not find a video track in the source asset
        case couldNotAccessVideoTrack
        
        /// An unknown error occurred
        case unknownError
    }
48 |
    // MARK: - Public Properties -

    // MARK: - Private Properties -

    /// Queue on which Live Photo writing takes place.
    /// Shared across instances; concurrent, so independent requests may overlap.
    static internal let queue = DispatchQueue(label: "com.FlipBook.live.photo.writer.queue", attributes: .concurrent)

    /// `URL` to location in caches directory where files will be written to.
    /// `nil` when the directory could not be created.
    lazy internal var cacheDirectory: URL? = self.makeCacheDirectoryURL()

    /// Asset reader for the audio track of the source video (set only when the source has audio)
    internal var audioReader: AVAssetReader?

    /// Asset reader for the video track of the source video
    internal var videoReader: AVAssetReader?

    /// Asset writer that produces the paired Live Photo video
    internal var assetWriter: AVAssetWriter?
67 |
    // MARK: - Init / Deinit -

    deinit {
        // Best-effort cleanup of files written during this writer's lifetime.
        // NOTE(review): `clearCache()` is defined elsewhere in this file — presumably
        // it deletes the contents of `cacheDirectory`; confirm before relying on it.
        clearCache()
    }
73 |
74 | // MARK: - Public Methods -
75 |
76 | /// Makes Live Photo from image url and video url
77 | /// - Parameters:
78 | /// - imageURL: The `URL` of the still image. `imageURL` is `nil` still image is generated from middle of video.
79 | /// - videoURL: The `URL`of the video fo the Live Photo
80 | /// - progress: Closure that is called when progress is made on creating Live Photo. Called from the main thread.
81 | /// - completion: Closure call when the Live Photo has finished being created. Called from the main thread.
82 | public func makeLivePhoto(from imageURL: URL?,
83 | videoURL: URL,
84 | progress: ((CGFloat) -> Void)?,
85 | completion: @escaping (Result<(PHLivePhoto, LivePhotoResources), Error>) -> Void) {
86 | Self.queue.async { [weak self] in
87 | guard let self = self else { return }
88 | self.make(from: imageURL, videoURL: videoURL, progress: progress, completion: completion)
89 | }
90 | }
91 |
92 | /// Extracts out the still image and video from a Live Photo
93 | /// - Parameters:
94 | /// - livePhoto: The Live Photo to be decomposed
95 | /// - completion: Closure called with the resources are seporated and saved. Called on the main thread.
96 | public func extractResources(_ livePhoto: PHLivePhoto,
97 | completion: @escaping (Result) -> Void) {
98 | Self.queue.async {
99 | self.extractResources(from: livePhoto, completion: completion)
100 | }
101 | }
102 |
103 | /// Saves a `LivePhotoResources` to photo library as a Live Photo. **You must request permission to modify photo library before attempting to save as well as add "Privacy - Photo Library Usage Description" key to your app's info.plist**
104 | /// - Parameters:
105 | /// - resources: The resources of the Live Photo to be saved
106 | /// - completion: Closure called after the resources have been saved. Called on the main thread
107 | public func saveToLibrary(_ resources: LivePhotoResources, completion: @escaping (Result) -> Void) {
108 | PHPhotoLibrary.shared().performChanges({
109 | let creationRequest = PHAssetCreationRequest.forAsset()
110 | let options = PHAssetResourceCreationOptions()
111 | creationRequest.addResource(with: .pairedVideo, fileURL: resources.videoURL, options: options)
112 | creationRequest.addResource(with: .photo, fileURL: resources.imageURL, options: options)
113 | }, completionHandler: { success, error in
114 | DispatchQueue.main.async {
115 | if let error = error {
116 | completion(.failure(error))
117 | } else {
118 | completion(.success(success))
119 | }
120 | }
121 | })
122 | }
123 |
124 | // MARK: - Private Methods -
125 |
126 | /// Makes Live Photo from image url and video url
127 | /// - Parameters:
128 | /// - imageURL: The `URL` of the still image. `imageURL` is `nil` still image is generated from middle of video.
129 | /// - videoURL: The `URL`of the video fo the Live Photo
130 | /// - progress: Closure that is called when progress is made on creating Live Photo. Called from the main thread.
131 | /// - completion: Closure call when the Live Photo has finished being created. Called from the main thread.
132 | internal func make(from imageURL: URL?,
133 | videoURL: URL,
134 | progress: ((CGFloat) -> Void)?,
135 | completion: @escaping (Result<(PHLivePhoto, LivePhotoResources), Error>) -> Void) {
136 | guard let cacheDirectory = self.cacheDirectory else {
137 | DispatchQueue.main.async { completion(.failure(FlipBookLivePhotoWriterError.couldNotWriteToDirectory)) }
138 | return
139 | }
140 | let assetIdentifier = UUID().uuidString
141 | do {
142 | var kPhotoURL = imageURL
143 | if kPhotoURL == nil {
144 | kPhotoURL = try makeKeyPhoto(from: videoURL)
145 | }
146 | guard let keyPhotoURL = kPhotoURL, let pairImageURL = add(assetIdentifier, toImage: keyPhotoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("jpg")) else {
147 | DispatchQueue.main.async { completion(.failure(FlipBookLivePhotoWriterError.unknownError)) }
148 | return
149 | }
150 | add(assetIdentifier, to: videoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("mov"), progress: progress) { (result) in
151 | switch result {
152 | case .success(let url):
153 | _ = PHLivePhoto.request(withResourceFileURLs: [pairImageURL, url], placeholderImage: nil, targetSize: .zero, contentMode: .aspectFit) { (livePhoto, info) in
154 | guard let livePhoto = livePhoto, (info[PHLivePhotoInfoIsDegradedKey] as? Bool ?? false) == false else {
155 | return
156 | }
157 | DispatchQueue.main.async { completion(.success((livePhoto, LivePhotoResources(imageURL: pairImageURL, videoURL: url)))) }
158 | }
159 | case .failure(let error):
160 | DispatchQueue.main.async { completion(.failure(error)) }
161 | }
162 | }
163 | } catch {
164 | DispatchQueue.main.async { completion(.failure(error)) }
165 | }
166 | }
167 |
    /// Adds asset id to video and saves to destination
    ///
    /// Re-encodes the video at `videoURL` into a new `.mov` at `destination`,
    /// attaching the Live Photo pairing metadata: the asset identifier on the
    /// container, plus a timed "still image time" metadata track marking the
    /// key frame. Audio, when present, is passed through untranscoded.
    /// - Parameters:
    ///   - assetIdentifier: The asset identifier to be added
    ///   - videoURL: The `URL` of the video
    ///   - destination: Where the asset with the added identifier should be written
    ///   - progress: Closure that calls back with progress of writing. Called from the main thread.
    ///   - completion: Closure called when video with asset identifier has been written. Called from background thread.
    internal func add(_ assetIdentifier: String,
                      to videoURL: URL,
                      saveTo destination: URL,
                      progress: ((CGFloat) -> Void)?,
                      completion: @escaping (Result) -> Void) {
        
        var audioWriterInput: AVAssetWriterInput?
        var audioReaderOutput: AVAssetReaderOutput?
        let videoAsset = AVURLAsset(url: videoURL)
        // An approximate count is sufficient — it is only used for progress reporting.
        let frameCount = videoAsset.frameCount(exact: false)
        
        guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
            completion(.failure(FlipBookLivePhotoWriterError.couldNotAccessVideoTrack))
            return
        }
        do {
            
            // Create the Asset Writer
            assetWriter = try AVAssetWriter(outputURL: destination, fileType: .mov)
            
            // Create Video Reader Output
            videoReader = try AVAssetReader(asset: videoAsset)
            let videoReaderSettings = [
                kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA as NSNumber
            ]
            let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
            videoReader?.add(videoReaderOutput)
            
            // Create Video Writer Input
            // (H.264 at the source track's natural size; the codec constant differs by platform.)
            #if !os(macOS)
            let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
                AVVideoCodecKey: AVVideoCodecH264,
                AVVideoWidthKey: videoTrack.naturalSize.width,
                AVVideoHeightKey: videoTrack.naturalSize.height
            ])
            #else
            let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
                AVVideoCodecKey: AVVideoCodecType.h264,
                AVVideoWidthKey: videoTrack.naturalSize.width,
                AVVideoHeightKey: videoTrack.naturalSize.height
            ])
            #endif
            // Preserve the source orientation.
            videoWriterInput.transform = videoTrack.preferredTransform
            videoWriterInput.expectsMediaDataInRealTime = true
            assetWriter?.add(videoWriterInput)
            
            // Create Audio Reader Output & Writer Input
            // (Audio is optional: a failure here is logged and the video is written without it.)
            if let audioTrack = videoAsset.tracks(withMediaType: .audio).first {
                do {
                    let aReader = try AVAssetReader(asset: videoAsset)
                    let aReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
                    aReader.add(aReaderOutput)
                    audioReader = aReader
                    audioReaderOutput = aReaderOutput
                    let aWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
                    aWriterInput.expectsMediaDataInRealTime = false
                    assetWriter?.add(aWriterInput)
                    audioWriterInput = aWriterInput
                } catch {
                    print(error)
                }
            }
            
            // Create necessary indentifier metadata and still image time metadata
            let assetIdentifierMetadata = makeMetadata(for: assetIdentifier)
            let stillImageTimeMetadataAdapter = makeMetadataAdaptorForStillImageTime()
            assetWriter?.metadata = [assetIdentifierMetadata]
            assetWriter?.add(stillImageTimeMetadataAdapter.assetWriterInput)
            
            // Start the Asset Writer
            assetWriter?.startWriting()
            assetWriter?.startSession(atSourceTime: .zero)
            
            // Add still image metadata (key frame marked at the midpoint of the video)
            let sIPercent: Float = 0.5
            stillImageTimeMetadataAdapter.append(AVTimedMetadataGroup(items: [makeMetadataItemForStillImageTime()],
                                                                      timeRange: videoAsset.makeStillImageTimeRange(percent: sIPercent, in: frameCount)))
            
            // For end of writing / progress
            // NOTE(review): these flags are mutated from two different serial queues
            // ("videoWriterInputQueue" / "audioWriterInputQueue") without synchronization —
            // looks benign in practice but worth confirming.
            var writingVideoFinished = false
            var writingAudioFinished = false
            var currentFrameCount = 0
            
            // Create onCompletion function
            // Finalizes the writer only after BOTH the audio and video pumps have finished.
            func didCompleteWriting() {
                guard writingAudioFinished && writingVideoFinished else {
                    return
                }
                assetWriter?.finishWriting { [weak self] in
                    guard let self = self else {
                        completion(.failure(FlipBookLivePhotoWriterError.unknownError))
                        return
                    }
                    if self.assetWriter?.status == .completed {
                        completion(.success(destination))
                    } else if let error = self.assetWriter?.error {
                        completion(.failure(error))
                    } else {
                        completion(.failure(FlipBookLivePhotoWriterError.unknownError))
                    }
                }
            }
            
            // Start writing video
            // Pull-model pump: append samples while the input accepts them; a nil sample means EOF.
            if videoReader?.startReading() ?? false {
                videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "videoWriterInputQueue")) {
                    while videoWriterInput.isReadyForMoreMediaData {
                        if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
                            currentFrameCount += 1
                            let percent = CGFloat(currentFrameCount) / CGFloat(frameCount)
                            DispatchQueue.main.async { progress?(percent) }
                            if videoWriterInput.append(sampleBuffer) == false {
                                // Append failure: abort the read and surface the writer's error.
                                self.videoReader?.cancelReading()
                                completion(.failure(self.assetWriter?.error ?? FlipBookLivePhotoWriterError.unknownError))
                            }
                        } else {
                            videoWriterInput.markAsFinished()
                            writingVideoFinished = true
                            didCompleteWriting()
                        }
                    }
                }
            } else {
                writingVideoFinished = true
                didCompleteWriting()
            }
            
            // Start writing audio
            // (When there is no audio reader, `startReading()` is never attempted and
            // the audio side is immediately marked finished.)
            if audioReader?.startReading() ?? false {
                audioWriterInput?.requestMediaDataWhenReady(on: DispatchQueue(label: "audioWriterInputQueue")) {
                    while audioWriterInput?.isReadyForMoreMediaData ?? false {
                        guard let sampleBuffer = audioReaderOutput?.copyNextSampleBuffer() else {
                            audioWriterInput?.markAsFinished()
                            writingAudioFinished = true
                            didCompleteWriting()
                            return
                        }
                        audioWriterInput?.append(sampleBuffer)
                    }
                }
            } else {
                writingAudioFinished = true
                didCompleteWriting()
            }
        } catch {
            completion(.failure(error))
        }
    }
323 |
324 | /// Extracts out the still image and video from a Live Photo
325 | /// - Parameters:
326 | /// - livePhoto: The Live Photo to be decomposed
327 | /// - completion: Closure called with the resources are seporated and saved
328 | internal func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (Result) -> Void) {
329 | guard let url = cacheDirectory else {
330 | DispatchQueue.main.async {
331 | completion(.failure(FlipBookLivePhotoWriterError.couldNotWriteToDirectory))
332 | }
333 | return
334 | }
335 | extractResources(from: livePhoto, to: url, completion: completion)
336 | }
337 |
338 | /// Seporates still image and video from a Live Photo
339 | /// - Parameters:
340 | /// - livePhoto: The Live Photo to be decomposed
341 | /// - directoryURL: The `URL` of the directory to save the seporated resources
342 | /// - completion: Closure called with the resources are seporated and saved
343 | internal func extractResources(from livePhoto: PHLivePhoto, to directoryURL: URL, completion: @escaping (Result) -> Void) {
344 | let assetResources = PHAssetResource.assetResources(for: livePhoto)
345 | let group = DispatchGroup()
346 | var keyPhotoURL: URL?
347 | var videoURL: URL?
348 | var result: Result?
349 | for resource in assetResources {
350 | var buffer = Data()
351 | let options = PHAssetResourceRequestOptions()
352 | options.isNetworkAccessAllowed = true
353 | group.enter()
354 | PHAssetResourceManager.default().requestData(for: resource, options: options, dataReceivedHandler: { (data) in
355 | buffer.append(data)
356 | }, completionHandler: { err in
357 | if let error = err {
358 | result = .failure(error)
359 | } else if resource.type == .pairedVideo {
360 | do {
361 | videoURL = try self.save(resource, to: directoryURL, resourceData: buffer)
362 | } catch {
363 | result = .failure(error)
364 | }
365 | } else {
366 | do {
367 | keyPhotoURL = try self.save(resource, to: directoryURL, resourceData: buffer)
368 | } catch {
369 | result = .failure(error)
370 | }
371 | }
372 | group.leave()
373 | })
374 | }
375 | group.notify(queue: .main) {
376 | if let result = result {
377 | completion(result)
378 | } else if let pairedPhotoURL = keyPhotoURL, let pairedVideoURL = videoURL {
379 | completion(.success(LivePhotoResources(imageURL: pairedPhotoURL, videoURL: pairedVideoURL)))
380 | } else {
381 | completion(.failure(FlipBookLivePhotoWriterError.unknownError))
382 | }
383 | }
384 | }
385 |
386 | /// Saves a resource in a given directory
387 | /// - Parameters:
388 | /// - resource: The resource to be saved
389 | /// - directory: The directory in which the resource should be saved
390 | /// - resourceData: The data that the resource is composed of
391 | internal func save(_ resource: PHAssetResource, to directory: URL, resourceData: Data) throws -> URL? {
392 | let fileExtension = UTTypeCopyPreferredTagWithClass(resource.uniformTypeIdentifier as CFString,
393 | kUTTagClassFilenameExtension)?.takeRetainedValue()
394 |
395 | guard let ext = fileExtension else {
396 | return nil
397 | }
398 |
399 | var fileURL = directory.appendingPathComponent(UUID().uuidString)
400 | fileURL = fileURL.appendingPathExtension(ext as String)
401 |
402 | try resourceData.write(to: fileURL, options: [.atomic])
403 | return fileURL
404 | }
405 |
406 | /// Adds asset identifier to metadata of image
407 | /// - Parameters:
408 | /// - assetIdentifier: The asset identifier to be added
409 | /// - imageURL: The `URL` where the image is currently
410 | /// - saveTo: The `URL` where the image should be written to
411 | internal func add(_ assetIdentifier: String, toImage imageURL: URL, saveTo destinationURL: URL) -> URL? {
412 | guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, kUTTypeJPEG, 1, nil),
413 | let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil),
414 | var imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [AnyHashable: Any] else {
415 | return nil
416 | }
417 | let assetIdentifierKey = "17"
418 | let assetIdentifierInfo = [assetIdentifierKey: assetIdentifier]
419 | imageProperties[kCGImagePropertyMakerAppleDictionary] = assetIdentifierInfo
420 | CGImageDestinationAddImageFromSource(imageDestination, imageSource, 0, imageProperties as CFDictionary)
421 | CGImageDestinationFinalize(imageDestination)
422 | return destinationURL
423 | }
424 |
425 | /// Makes an `AVMetadataItem` for a given asset identifier
426 | /// - Parameter assetIdentifier: the asset identifier to be enclosed in the metadata item
427 | internal func makeMetadata(for assetIdentifier: String) -> AVMetadataItem {
428 | let item = AVMutableMetadataItem()
429 | let keyContentIdentifier = "com.apple.quicktime.content.identifier"
430 | let keySpaceQuickTimeMetadata = "mdta"
431 | item.key = keyContentIdentifier as NSString
432 | item.keySpace = AVMetadataKeySpace(keySpaceQuickTimeMetadata)
433 | item.value = assetIdentifier as NSString
434 | item.dataType = "com.apple.metadata.datatype.UTF-8"
435 | return item
436 | }
437 |
438 | /// Makes an `AVAssetWriterInputMetadataAdaptor` for the still image time
439 | internal func makeMetadataAdaptorForStillImageTime() -> AVAssetWriterInputMetadataAdaptor {
440 | let keyStillImageTime = "com.apple.quicktime.still-image-time"
441 | let keySpaceQuickTimeMetadata = "mdta"
442 | let spec: NSDictionary = [
443 | kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString: "\(keySpaceQuickTimeMetadata)/\(keyStillImageTime)",
444 | kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString: "com.apple.metadata.datatype.int8"
445 | ]
446 | var desc: CMFormatDescription? = nil
447 | CMMetadataFormatDescriptionCreateWithMetadataSpecifications(allocator: kCFAllocatorDefault,
448 | metadataType: kCMMetadataFormatType_Boxed,
449 | metadataSpecifications: [spec] as CFArray,
450 | formatDescriptionOut: &desc)
451 | let input = AVAssetWriterInput(mediaType: .metadata,
452 | outputSettings: nil,
453 | sourceFormatHint: desc)
454 | return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input)
455 | }
456 |
457 | /// Makes an `AVMetadataItem` for a still image time
458 | internal func makeMetadataItemForStillImageTime() -> AVMetadataItem {
459 | let item = AVMutableMetadataItem()
460 | let keyStillImageTime = "com.apple.quicktime.still-image-time"
461 | let keySpaceQuickTimeMetadata = "mdta"
462 | item.key = keyStillImageTime as NSString
463 | item.keySpace = AVMetadataKeySpace(keySpaceQuickTimeMetadata)
464 | item.value = 0 as NSNumber
465 | item.dataType = "com.apple.metadata.datatype.int8"
466 | return item
467 | }
468 |
469 | /// Makes a still image at the % mark of a video
470 | /// - Parameters:
471 | /// - videoURL: The `URL` of the video to make the still image from
472 | /// - percent: How far into the video the key photo should come from **Default** is 50%
473 | internal func makeKeyPhoto(from videoURL: URL, percent: Float = 0.5) throws -> URL? {
474 | var percent: Float = percent
475 | let videoAsset = AVURLAsset(url: videoURL)
476 | if let stillImageTime = videoAsset.getStillImageTime() {
477 | percent = Float(stillImageTime.value) / Float(videoAsset.duration.value)
478 | }
479 | guard let imageFrame = videoAsset.makeImageFromFrame(at: percent),
480 | let jpegData = imageFrame.jpegRep,
481 | let url = cacheDirectory?.appendingPathComponent(UUID().uuidString).appendingPathExtension("jpg") else {
482 | return nil
483 | }
484 | try jpegData.write(to: url)
485 | return url
486 | }
487 |
488 | /// Makes `URL` "FlipBook-LivePhoto" to directory in caches directory
489 | internal func makeCacheDirectoryURL() -> URL? {
490 | do {
491 | let cacheDirectoryURL = try FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: false)
492 | let fullDirectory = cacheDirectoryURL.appendingPathComponent("FlipBook-LivePhoto", isDirectory: true)
493 | if !FileManager.default.fileExists(atPath: fullDirectory.absoluteString) {
494 | try FileManager.default.createDirectory(at: fullDirectory, withIntermediateDirectories: true, attributes: nil)
495 | }
496 | return fullDirectory
497 | } catch {
498 | print(error)
499 | return nil
500 | }
501 | }
502 |
503 | /// Removes "FlipBook-LivePhoto" from caches directory
504 | internal func clearCache() {
505 | guard let url = cacheDirectory else { return }
506 | try? FileManager.default.removeItem(at: url)
507 | }
508 | }
509 |
510 | // MARK: - AVAsset + Live Photo -
511 |
/// Collection of helper functions for getting asset frames and stills
internal extension AVAsset {
    
    /// Returns the number of frames in the asset's first video track
    /// - Parameter exact: if `true` decodes and counts every sample buffer. If `false` estimates
    ///   using `duration * nominalFrameRate` of the video track
    /// - Returns: The frame count, or `0` if the asset has no video track or cannot be read
    func frameCount(exact: Bool) -> Int {
        guard let videoReader = try? AVAssetReader(asset: self),
            let videoTrack = tracks(withMediaType: .video).first else {
                return 0
        }
        
        // Cheap estimate; can be off for variable-frame-rate content.
        var frameCount = Int(CMTimeGetSeconds(duration) * Float64(videoTrack.nominalFrameRate))
        
        if exact {
            // Walk every sample buffer for an exact count.
            frameCount = 0
            let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil)
            videoReader.add(videoReaderOutput)
            videoReader.startReading()
            while videoReaderOutput.copyNextSampleBuffer() != nil {
                frameCount += 1
            }
            videoReader.cancelReading()
        }
        return frameCount
    }
    
    /// Looks through the asset's timed metadata and determines the `CMTime` for the still image of a Live Photo
    /// - Returns: The start time of the still-image-time metadata sample, or `nil` if none is present
    func getStillImageTime() -> CMTime? {
        guard let videoReader = try? AVAssetReader(asset: self),
            let metadataTrack = tracks(withMediaType: .metadata).first else {
                return nil
        }
        var stillTime: CMTime? = nil
        
        let videoReaderOutput = AVAssetReaderTrackOutput(track: metadataTrack, outputSettings: nil)
        videoReader.add(videoReaderOutput)
        videoReader.startReading()
        
        let keyStillImageTime = "com.apple.quicktime.still-image-time"
        let keySpaceQuickTimeMetadata = "mdta"
        
        // Scan metadata samples until the still-image-time entry is found.
        while let sampleBuffer = videoReaderOutput.copyNextSampleBuffer(), stillTime == nil {
            if CMSampleBufferGetNumSamples(sampleBuffer) != 0 {
                let group = AVTimedMetadataGroup(sampleBuffer: sampleBuffer)
                for item in group?.items ?? [] {
                    if item.key as? String == keyStillImageTime && item.keySpace?.rawValue == keySpaceQuickTimeMetadata {
                        stillTime = group?.timeRange.start
                        break
                    }
                }
            }
        }
        
        videoReader.cancelReading()
        
        return stillTime
    }
    
    /// Makes a `CMTimeRange` roughly one frame long, starting at the supplied fraction of the asset's duration
    /// - Parameters:
    ///   - percent: How far into the asset the range should start. Values should be in `(0.0 ... 1.0)`
    ///   - frameCount: The number of frames in the asset. **Default** 0. If `0` is passed in the
    ///     number of frames will be determined exactly
    func makeStillImageTimeRange(percent: Float, in frameCount: Int = 0) -> CMTimeRange {
        var time = duration
        let frameCount = frameCount == 0 ? self.frameCount(exact: true) : frameCount
        
        // Bug fix: guard against division by zero for assets with no countable frames
        // (previously Int64(Float.infinity) would trap at runtime).
        let frameDuration = frameCount > 0 ? Int64(Float(time.value) / Float(frameCount)) : 0
        
        time.value = Int64(Float(time.value) * percent)
        
        return CMTimeRangeMake(start: time, duration: CMTimeMake(value: frameDuration, timescale: time.timescale))
    }
    
    /// Makes a still image from the frame of the asset at a location determined by its percentage into the asset
    /// - Parameter percent: What percent of the way through the asset the image should come from
    /// - Returns: The extracted frame as a platform `Image`, or `nil` if generation failed
    func makeImageFromFrame(at percent: Float) -> Image? {
        let imageGenerator = AVAssetImageGenerator(asset: self)
        imageGenerator.appliesPreferredTrackTransform = true
        
        // Tight tolerances so the generated frame lands very close to the requested time.
        imageGenerator.requestedTimeToleranceAfter = CMTimeMake(value: 1, timescale: 100)
        imageGenerator.requestedTimeToleranceBefore = CMTimeMake(value: 1, timescale: 100)
        
        var time = duration
        time.value = Int64(Float(time.value) * percent)
        
        do {
            var actualTime = CMTime.zero
            let cgImage = try imageGenerator.copyCGImage(at: time, actualTime: &actualTime)
            
            #if os(OSX)
            return Image(cgImage: cgImage, size: NSSize(width: cgImage.width, height: cgImage.height))
            #else
            return Image(cgImage: cgImage)
            #endif
        } catch {
            print(error)
            return nil
        }
    }
}
612 |
--------------------------------------------------------------------------------
/Sources/FlipBook/FlipBookAssetWriter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FlipBookAssetWriter.swift
3 | //
4 | //
5 | // Created by Brad Gayman on 1/24/20.
6 | //
7 |
8 | import AVFoundation
9 | import CoreGraphics
10 | import VideoToolbox
11 | import Photos
12 | import CoreImage
13 | #if os(OSX)
14 | import AppKit
15 | #else
16 | import UIKit
17 | import ReplayKit
18 | #endif
19 |
20 | // MARK: - FlipBookAssetWriter -
21 |
22 | /// Class that converts a collection of images to an asset
23 | public final class FlipBookAssetWriter: NSObject {
24 |
25 | // MARK: - Types -
26 |
27 | /// The assets that can be writen
28 | public enum Asset {
29 |
30 | /// video with its associated `URL`
31 | case video(URL)
32 |
33 | /// Live Photo with its associated `PHLivePhoto`
34 | case livePhoto(PHLivePhoto, LivePhotoResources)
35 |
36 | /// Animated gif with its associated `URL`
37 | case gif(URL)
38 |
39 | /// The url of a video or animated gif. If the asset is a live photo `assetURL` is `nil`
40 | public var assetURL: URL? {
41 | switch self {
42 | case .video(let url): return url
43 | case .livePhoto: return nil
44 | case .gif(let url): return url
45 | }
46 | }
47 |
48 | /// The Live Photo of a live photo asset. If the asset is a gif or video `livePhoto` is `nil`
49 | public var livePhoto: PHLivePhoto? {
50 | switch self {
51 | case .video: return nil
52 | case .livePhoto(let lp, _): return lp
53 | case .gif: return nil
54 | }
55 | }
56 |
57 | /// The live photo resources of a live photo asset. If the asset is a gif or video `livePhotoResources` is `nil`
58 | public var livePhotoResources: LivePhotoResources? {
59 | switch self {
60 | case .video: return nil
61 | case .livePhoto(_, let resources): return resources
62 | case .gif: return nil
63 | }
64 | }
65 | }
66 |
67 | /// Enum that represents the different types of assets that can be created
68 | public enum AssetType: Equatable {
69 |
70 | /// `AssetType` that represents a conversion to an `.mov` video
71 | case video
72 |
73 | /// `AssetType` that represents a conversion to a Live Photo with an optional image that represents the still image of the Live Photo if associated type is `nil` the first frame is used
74 | case livePhoto(Image?)
75 |
76 | /// `AssetType` that represents a conversion to an animated `.gif`
77 | case gif
78 |
79 | public static func == (lhs: AssetType, rhs: AssetType) -> Bool {
80 | switch (lhs, rhs) {
81 | case (.video, .video): return true
82 | case (.gif, .gif): return true
83 | case let (.livePhoto(imgLHS), .livePhoto(imgRHS)): return imgLHS == imgRHS
84 | default: return false
85 | }
86 | }
87 | }
88 |
    /// Errors that `FlipBookAssetWriter` might produce
    public enum FlipBookAssetWriterError: Error {
        
        /// Attempted to create an asset from 0 images
        case noFrames
        
        /// `FlipBookAssetWriter` was unable to write the asset
        case couldNotWriteAsset
        
        /// `FlipBookAssetWriter` failed for an unknown reason
        case unknownError
    }
101 |
    // MARK: - Public Properties -
    
    /// The size of the recording area, in pixels (points multiplied by the screen scale).
    /// **Default** is the size of the device's main screen
    public var size: CGSize = {
        #if os(OSX)
        let size = NSScreen.main?.frame.size ?? CGSize(width: 400.0, height: 300.0)
        let scale = NSScreen.main?.backingScaleFactor ?? 1.0
        return CGSize(width: size.width * scale, height: size.height * scale)
        #else
        let size = UIScreen.main.bounds.size
        let scale = UIScreen.main.scale
        return CGSize(width: size.width * scale, height: size.height * scale)
        #endif
    }()
    
    /// The frame rate of a recording without a `startDate` and `endDate`.
    /// **Note** this value is ignored if both `startDate` and `endDate` are non-null. Also note, full framerate gifs can be memory intensive.
    /// **Default** is 60 frames per second
    public var preferredFramesPerSecond: Int = 60
    
    /// The URL for where the video is written.
    /// **Default** is `"FlipBook.mov"` in the caches directory
    public lazy var fileOutputURL: URL? = self.makeFileOutputURL()
    
    /// The `Date` for when the recording started
    public var startDate: Date?
    
    /// The `Date` for when the recording stopped
    public var endDate: Date?
    
    /// The amount images in animated gifs should be scaled by. Fullsize gif images can be memory intensive. **Default** `0.5`
    public var gifImageScale: Float = 0.5
    
    // MARK: - Internal Properties -
    
    /// The images that compose the frames of the final video
    internal var frames = [Image?]()
    
    /// The serial queue on which video asset writing is done
    internal let queue = DispatchQueue(label: "com.FlipBook.asset.writer.queue")
    
    /// The video writer input for the asset writer
    internal var videoInput: AVAssetWriterInput?
    
    /// The input pixel buffer adaptor for the asset writer
    internal var adapter: AVAssetWriterInputPixelBufferAdaptor?
    
    /// The writer used for making gifs; writes to "FlipBook.gif" in the caches directory
    internal lazy var gifWriter = FlipBookGIFWriter(fileOutputURL: self.makeFileOutputURL(fileName: "FlipBook.gif"))
    
    /// The writer used for making Live Photos
    internal lazy var livePhotoWriter = FlipBookLivePhotoWriter()
    
    /// The video editor used for making core animation compositions
    internal lazy var coreAnimationVideoEditor = FlipBookCoreAnimationVideoEditor()
    
    /// The core image context
    internal lazy var ciContext = CIContext()
    
    #if os(iOS)
    /// The writer that receives `ReplayKit` sample buffers during live capture (iOS only)
    internal lazy var rpScreenWriter = RPScreenWriter()
    #endif
165 |
166 | // MARK: - Public Methods -
167 |
168 | /// Appends image to collection images to be written to video
169 | /// - Parameter image: image to be written to video
170 | public func writeFrame(_ image: Image) {
171 | frames.append(image)
172 | }
173 |
174 | #if os(iOS)
    /// Appends a `ReplayKit` sample buffer to the underlying screen writer
    /// - Parameters:
    ///   - sampleBuffer: The sample buffer to be appended
    ///   - type: The `ReplayKit` type of the sample buffer to be appended
    public func append(_ sampleBuffer: CMSampleBuffer, type: RPSampleBufferType) {
        rpScreenWriter.writeBuffer(sampleBuffer, rpSampleType: type)
    }
182 |
183 | /// Ends live capture driven by `ReplayKit`
184 | /// - Parameters:
185 | /// - assetType: determines what type of asset is created. **Default** is video.
186 | /// - compositionAnimation: optional closure for adding `AVVideoCompositionCoreAnimationTool` composition animations. Add `CALayer`s as sublayers to the passed in `CALayer`. Then trigger animations with a `beginTime` of `AVCoreAnimationBeginTimeAtZero`. *Reminder that `CALayer` origin for `AVVideoCompositionCoreAnimationTool` is lower left for `UIKit` setting `isGeometryFlipped = true is suggested* **Default is `nil`**
187 | /// - progress: closure that is called with a `CGFloat` representing the progress of video generation. `CGFloat` is in the range `(0.0 ... 1.0)`. `progress` will be called from a background thread
188 | /// - completion: closure that is called when the video has been created with the `URL` for the created video. `completion` will be called from a background thread
189 | public func endLiveCapture(assetType: AssetType = .video,
190 | compositionAnimation: ((CALayer) -> Void)? = nil,
191 | progress: ((CGFloat) -> Void)?,
192 | completion: @escaping (Result) -> Void) {
193 | endLiveCaptureAndWrite { [weak self] (result) in
194 | guard let self = self else {
195 | completion(.failure(FlipBookAssetWriterError.unknownError))
196 | return
197 | }
198 | switch result {
199 | case .success(let url):
200 | if let animation = compositionAnimation {
201 | self.coreAnimationVideoEditor.preferredFramesPerSecond = self.preferredFramesPerSecond
202 | self.coreAnimationVideoEditor.makeVideo(fromVideoAt: url, animation: animation, progress: {prog in progress?(prog * 0.75) }) { [weak self] (result) in
203 | guard let self = self else {
204 | completion(.failure(FlipBookAssetWriterError.unknownError))
205 | return
206 | }
207 | switch result {
208 | case .success(let url):
209 | switch assetType {
210 | case .video:
211 | completion(.success(.video(url)))
212 | case .livePhoto(let img):
213 | // Make image URL for still aspect of Live Photo
214 | let imageURL: URL?
215 | if let image = img, let jpgData = image.jpegRep, let url = self.makeFileOutputURL(fileName: "img.jpg") {
216 | try? jpgData.write(to: url, options: [.atomic])
217 | imageURL = url
218 | } else {
219 | imageURL = try? self.livePhotoWriter.makeKeyPhoto(from: url, percent: 0.0)
220 | }
221 | self.livePhotoWriter.make(from: imageURL, videoURL: url, progress: { prog in progress?(prog) }, completion: { result in
222 | switch result {
223 | case let .success(livePhoto, resources):
224 | completion(.success(.livePhoto(livePhoto, resources)))
225 | case .failure(let error):
226 | completion(.failure(error))
227 | }
228 | })
229 | case .gif:
230 | self.makeFrames(from: url, progress: { (prog) in
231 | progress?(0.75 + prog * 0.125)
232 | }, completion: { [weak self] images in
233 | guard let self = self, let gWriter = self.gifWriter, self.preferredFramesPerSecond > 0 else {
234 | completion(.failure(FlipBookAssetWriterError.unknownError))
235 | return
236 | }
237 | // Make the gif
238 | gWriter.makeGIF(images.map(Image.makeImage),
239 | delay: CGFloat(1.0) / CGFloat(self.preferredFramesPerSecond),
240 | sizeRatio: self.gifImageScale,
241 | progress: { prog in progress?(0.875 + prog * 0.125) },
242 | completion: { result in
243 | switch result {
244 | case .success(let url):
245 | completion(.success(.gif(url)))
246 | case .failure(let error):
247 | completion(.failure(error))
248 | }
249 | })
250 | })
251 | }
252 | case .failure(let error):
253 | completion(.failure(error))
254 | }
255 | }
256 | } else {
257 | switch assetType {
258 | case .video:
259 | completion(.success(.video(url)))
260 | case .livePhoto(let img):
261 | let imageURL: URL?
262 | // Make image URL for still aspect of Live Photo
263 | if let image = img, let jpgData = image.jpegRep, let url = self.makeFileOutputURL(fileName: "img.jpg") {
264 | try? jpgData.write(to: url, options: [.atomic])
265 | imageURL = url
266 | } else {
267 | imageURL = try? self.livePhotoWriter.makeKeyPhoto(from: url, percent: 0.0)
268 | }
269 | self.livePhotoWriter.make(from: imageURL, videoURL: url, progress: { prog in progress?(prog) }, completion: { result in
270 | switch result {
271 | case let .success(livePhoto, resources):
272 | completion(.success(.livePhoto(livePhoto, resources)))
273 | case .failure(let error):
274 | completion(.failure(error))
275 | }
276 | })
277 | case .gif:
278 | self.makeFrames(from: url, progress: { (prog) in
279 | progress?(prog * 0.5)
280 | }, completion: { [weak self] images in
281 | guard let self = self, let gWriter = self.gifWriter, self.preferredFramesPerSecond > 0 else {
282 | completion(.failure(FlipBookAssetWriterError.unknownError))
283 | return
284 | }
285 | // Make the gif
286 | gWriter.makeGIF(images.map(Image.makeImage),
287 | delay: CGFloat(1.0) / CGFloat(self.preferredFramesPerSecond),
288 | sizeRatio: self.gifImageScale,
289 | progress: { prog in progress?(0.5 + prog * 0.5) },
290 | completion: { result in
291 | switch result {
292 | case .success(let url):
293 | completion(.success(.gif(url)))
294 | case .failure(let error):
295 | completion(.failure(error))
296 | }
297 | })
298 | })
299 | }
300 | }
301 | case .failure(let error):
302 | completion(.failure(error))
303 | }
304 |
305 | }
306 | }
307 | #endif
308 |
309 | /// Makes asset from array of `Image`s and writes to disk at `fileOutputURL`
310 | /// - Parameters:
311 | /// - images: images that comprise the video
312 | /// - assetType: determines what type of asset is created. **Default** is video.
313 | /// - compositionAnimation: optional closure for adding `AVVideoCompositionCoreAnimationTool` composition animations. Add `CALayer`s as sublayers to the passed in `CALayer`. Then trigger animations with a `beginTime` of `AVCoreAnimationBeginTimeAtZero`. *Reminder that `CALayer` origin for `AVVideoCompositionCoreAnimationTool` is lower left for `UIKit` setting `isGeometryFlipped = true is suggested* **Default is `nil`**
314 | /// - progress: closure that is called with a `CGFloat` representing the progress of video generation. `CGFloat` is in the range `(0.0 ... 1.0)`. `progress` will be called from a background thread
315 | /// - completion: closure that is called when the video has been created with the `URL` for the created video. `completion` will be called from a background thread
316 | public func createAsset(from images: [Image],
317 | assetType: AssetType = .video,
318 | compositionAnimation: ((CALayer) -> Void)? = nil,
319 | progress: ((CGFloat) -> Void)?,
320 | completion: @escaping (Result) -> Void) {
321 | frames = images
322 | createVideoFromCapturedFrames(assetType: assetType, compositionAnimation: compositionAnimation, progress: progress, completion: completion)
323 | }
324 |
325 | /// Makes asset from the images added using `writeFrame(_ image: Image)`
326 | /// - Parameters:
327 | /// - assetType: determines what type of asset is created. **Default** is video.
328 | /// - compositionAnimation: optional closure for adding `AVVideoCompositionCoreAnimationTool` composition animations. Add `CALayer`s as sublayers to the passed in `CALayer`. Then trigger animations with a `beginTime` of `AVCoreAnimationBeginTimeAtZero`. *Reminder that `CALayer` origin for `AVVideoCompositionCoreAnimationTool` is lower left for `UIKit` setting `isGeometryFlipped = true is suggested* **Default is `nil`**
329 | /// - progress: closure that is called with a `CGFloat` representing the progress of video generation. `CGFloat` is in the range `(0.0 ... 1.0)`. `progress` will be called from a background thread
330 | /// - completion: closure that is called when the video has been created with the `URL` for the created video. `completion` will be called from a background thread
331 | public func createVideoFromCapturedFrames(assetType: AssetType = .video,
332 | compositionAnimation: ((CALayer) -> Void)? = nil,
333 | progress: ((CGFloat) -> Void)?,
334 | completion: @escaping (Result) -> Void) {
335 | guard frames.isEmpty == false else {
336 | completion(.failure(FlipBookAssetWriterError.noFrames))
337 | return
338 | }
339 | switch assetType {
340 |
341 | // Handle Video
342 | case .video:
343 |
344 | // Begin by writing the video
345 | writeVideo(progress: { prog in
346 | let scale: CGFloat = compositionAnimation == nil ? 1.0 : 0.5
347 | progress?(prog * scale)
348 | }, completion: { [weak self] result in
349 | switch result {
350 | case .success(let url):
351 |
352 | // If we have to do a composition do that
353 | if let compositionAnimation = compositionAnimation {
354 | self?.coreAnimationVideoEditor.preferredFramesPerSecond = self?.preferredFramesPerSecond ?? 60
355 | self?.coreAnimationVideoEditor.makeVideo(fromVideoAt: url, animation: compositionAnimation, progress: { (prog) in
356 | progress?(0.5 + prog * 0.5)
357 | }, completion: { result in
358 | // Handle the composition result
359 | switch result {
360 | case .success(let url):
361 | completion(.success(.video(url)))
362 | case .failure(let error):
363 | completion(.failure(error))
364 | }
365 | })
366 | } else {
367 |
368 | // No composition return the video
369 | completion(.success(.video(url)))
370 | }
371 | case .failure(let error):
372 | completion(.failure(error))
373 | }
374 | })
375 |
376 | // Handle Live Photo
377 | case .livePhoto(let img):
378 | let image: Image? = img ?? frames[0]
379 | let imageURL: URL?
380 |
381 | // Make image URL for still aspect of Live Photo
382 | if let jpgData = image?.jpegRep, let url = makeFileOutputURL(fileName: "img.jpg") {
383 | try? jpgData.write(to: url, options: [.atomic])
384 | imageURL = url
385 | } else {
386 | imageURL = nil
387 | }
388 |
389 | // Write the video
390 | writeVideo(progress: { (prog) in
391 | let scale: CGFloat = compositionAnimation == nil ? 0.5 : 0.333333
392 | progress?(prog * scale)
393 | }, completion: { [weak self] result in
394 | guard let self = self else {
395 | completion(.failure(FlipBookAssetWriterError.unknownError))
396 | return
397 | }
398 | switch result {
399 | case .success(let url):
400 |
401 | // If we have a composition make that
402 | if let composition = compositionAnimation {
403 | self.coreAnimationVideoEditor.preferredFramesPerSecond = self.preferredFramesPerSecond
404 | self.coreAnimationVideoEditor.makeVideo(fromVideoAt: url, animation: composition, progress: { (prog) in
405 | progress?(0.333333 + prog * 0.333333)
406 | }, completion: { [weak self] result in
407 | switch result {
408 | case .success(let url):
409 |
410 | // Composition finished make Live Photo from image and video
411 | self?.livePhotoWriter.makeLivePhoto(from: imageURL, videoURL: url, progress: { (prog) in
412 | progress?(0.66666666 + prog * 0.333333)
413 | }, completion: { result in
414 |
415 | // Handle Live Photo result
416 | switch result {
417 | case let .success(livePhoto, resources):
418 | completion(.success(.livePhoto(livePhoto, resources)))
419 | case .failure(let error):
420 | completion(.failure(error))
421 | }
422 | })
423 | case .failure(let error):
424 | completion(.failure(error))
425 | }
426 | })
427 | } else {
428 |
429 | // No composition make Live Photo from video and image
430 | self.livePhotoWriter.makeLivePhoto(from: imageURL, videoURL: url, progress: { (prog) in
431 | progress?(0.5 + prog * 0.5)
432 | }, completion: { result in
433 |
434 | // Handle Live Photo result
435 | switch result {
436 | case let .success(livePhoto, resources):
437 | completion(.success(.livePhoto(livePhoto, resources)))
438 | case .failure(let error):
439 | completion(.failure(error))
440 | }
441 | })
442 | }
443 | case .failure(let error):
444 | completion(.failure(error))
445 | }
446 | })
447 |
448 | // Handle GIF
449 | case .gif:
450 | guard let gWriter = self.gifWriter else {
451 | completion(.failure(FlipBookAssetWriterError.couldNotWriteAsset))
452 | return
453 | }
454 | if let composition = compositionAnimation {
455 | coreAnimationVideoEditor.preferredFramesPerSecond = preferredFramesPerSecond
456 |
457 | // Write video
458 | writeVideo(progress: { (prog) in
459 | progress?(prog * 0.25)
460 | }, completion: { [weak self] result in
461 | guard let self = self else {
462 | completion(.failure(FlipBookAssetWriterError.unknownError))
463 | return
464 | }
465 | switch result {
466 | case .success(let url):
467 |
468 | // Add composition
469 | self.coreAnimationVideoEditor.makeVideo(fromVideoAt: url, animation: composition, progress: { (prog) in
470 | progress?(0.25 + prog * 0.25)
471 | }, completion: { [weak self] result in
472 | guard let self = self else {
473 | completion(.failure(FlipBookAssetWriterError.unknownError))
474 | return
475 | }
476 | switch result {
477 | case .success(let url):
478 |
479 | // Get the frames
480 | DispatchQueue.global().async {
481 | self.makeFrames(from: url, progress: { (prog) in
482 | progress?(0.50 + prog * 0.25)
483 | }, completion: { [weak self] images in
484 | guard images.isEmpty == false,
485 | let self = self,
486 | let gWriter = self.gifWriter,
487 | self.preferredFramesPerSecond > 0 else {
488 | completion(.failure(FlipBookAssetWriterError.unknownError))
489 | return
490 | }
491 |
492 | // Make the gif
493 | gWriter.makeGIF(images.map(Image.makeImage),
494 | delay: CGFloat(1.0) / CGFloat(self.preferredFramesPerSecond),
495 | sizeRatio: self.gifImageScale,
496 | progress: { prog in progress?(0.75 + prog * 0.25) },
497 | completion: { result in
498 | switch result {
499 | case .success(let url):
500 | completion(.success(.gif(url)))
501 | case .failure(let error):
502 | completion(.failure(error))
503 | }
504 | })
505 | })
506 | }
507 | case .failure(let error):
508 | completion(.failure(error))
509 | }
510 | })
511 | case .failure(let error):
512 | completion(.failure(error))
513 | }
514 | })
515 | } else {
516 |
517 | // No composition so make GIF directly
518 |
519 | // Make sure preferredFramesPerSecond is greater than 0
520 | guard preferredFramesPerSecond > 0 else {
521 | completion(.failure(FlipBookAssetWriterError.couldNotWriteAsset))
522 | return
523 | }
524 | gWriter.makeGIF(frames.compactMap { $0 },
525 | delay: CGFloat(1.0) / CGFloat(preferredFramesPerSecond),
526 | sizeRatio: gifImageScale,
527 | progress: progress,
528 | completion: { (result) in
529 | switch result {
530 | case .success(let url):
531 | completion(.success(.gif(url)))
532 | case .failure(let error):
533 | completion(.failure(error))
534 | }
535 | })
536 | frames = []
537 | }
538 |
539 | }
540 | }
541 |
542 | /// Gets frames as `CGImage` from a video asset
543 | /// - Parameters:
544 | /// - videoURL: The `URL` where the video is located
545 | /// - progress: A closure that is called when image generator makes progress. Called from a background thread.
546 | /// - completion: A closure called when image generation is complete. Called from a background thread.
547 | public func makeFrames(from videoURL: URL,
548 | progress: ((CGFloat) -> Void)?,
549 | completion: @escaping ([CGImage]) -> Void) {
550 | let asset = AVURLAsset(url: videoURL)
551 | guard let videoTrack = asset.tracks(withMediaType: .video).first,
552 | let videoReader = try? AVAssetReader(asset: asset) else {
553 | completion([])
554 | return
555 | }
556 |
557 | let videoReaderSettings = [
558 | kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA as NSNumber
559 | ]
560 | let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
561 | videoReader.add(videoReaderOutput)
562 | let duration = videoTrack.timeRange.duration.seconds
563 | let frameCount = Int(duration * Double(videoTrack.nominalFrameRate) + 0.5)
564 | var currentFrameCount = 0
565 | if videoReader.startReading() {
566 | var sampleBuffers = [CMSampleBuffer]()
567 | while let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
568 | currentFrameCount += 1
569 | sampleBuffers.append(sampleBuffer)
570 | progress?(CGFloat(currentFrameCount) / CGFloat(frameCount))
571 | }
572 | let cgImages = sampleBuffers
573 | .compactMap { CMSampleBufferGetImageBuffer($0) }
574 | .map { CIImage(cvImageBuffer: $0) }
575 | .compactMap { ciContext.createCGImage($0, from: $0.extent) }
576 | completion(cgImages)
577 | } else {
578 | completion([])
579 | }
580 | }
581 |
582 | // MARK: - Internal Methods -
583 |
584 | /// Function that returns the default file url for the generated video
585 | internal func makeFileOutputURL(fileName: String = "FlipBook.mov") -> URL? {
586 | do {
587 | var cachesDirectory: URL = try FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: true)
588 | cachesDirectory.appendPathComponent(fileName)
589 | if FileManager.default.fileExists(atPath: cachesDirectory.path) {
590 | try FileManager.default.removeItem(atPath: cachesDirectory.path)
591 | }
592 | return cachesDirectory
593 | } catch {
594 | print(error)
595 | return nil
596 | }
597 | }
598 |
    /// Function that returns a configured `AVAssetWriter`
    ///
    /// Side effects: assigns `self.videoInput` and `self.adapter` so that the
    /// caller can append pixel buffers to the returned writer.
    /// - Throws: `FlipBookAssetWriterError.couldNotWriteAsset` when no output URL
    ///           is available, or any error thrown by `AVAssetWriter.init`.
    internal func makeWriter() throws -> AVAssetWriter {
        guard let fileURL = self.fileOutputURL else {
            throw FlipBookAssetWriterError.couldNotWriteAsset
        }
        let writer = try AVAssetWriter(url: fileURL, fileType: .mov)
        // H.264 output sized to `size`. The non-macOS branch uses the older
        // `AVVideoCodecH264` constant (superseded by `AVVideoCodecType.h264`).
        #if !os(macOS)
        let settings: [String : Any] = [
            AVVideoCodecKey: AVVideoCodecH264,
            AVVideoWidthKey: size.width,
            AVVideoHeightKey: size.height
        ]
        #else
        let settings: [String : Any] = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: size.width,
            AVVideoHeightKey: size.height
        ]
        #endif
        
        
        videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
        
        // Source buffers are 32ARGB, the same format `CGImage.makePixelBuffer()` produces
        let attributes: [String: Any] = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32ARGB,
            kCVPixelBufferWidthKey as String: size.width,
            kCVPixelBufferHeightKey as String: size.height
        ]
        
        if let inp = self.videoInput {
            adapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: inp, sourcePixelBufferAttributes: attributes)
            writer.add(inp)
        }
        // NOTE(review): `true` is meant for live capture; `writeVideo` appends frames
        // offline and spin-waits on `isReadyForMoreMediaData` — confirm this
        // shouldn't be `false` for the offline path.
        videoInput?.expectsMediaDataInRealTime = true
        
        return writer
    }
636 |
637 | /// Writes `frames` to video
638 | /// - Parameters:
639 | /// - progress: Closure called when progress is made writing video. Called from background thread.
640 | /// - completion: Closure called when video is done writing. Called from background thread.
641 | internal func writeVideo(progress: ((CGFloat) -> Void)?, completion: @escaping (Result) -> Void) {
642 | guard let fileURL = self.fileOutputURL else {
643 | completion(.failure(FlipBookAssetWriterError.couldNotWriteAsset))
644 | return
645 | }
646 | do {
647 | if FileManager.default.fileExists(atPath: fileURL.path) {
648 | try FileManager.default.removeItem(atPath: fileURL.path)
649 | }
650 | let writer = try makeWriter()
651 | guard writer.startWriting() else {
652 | completion(.failure(writer.error ?? FlipBookAssetWriterError.couldNotWriteAsset))
653 | return
654 | }
655 | writer.startSession(atSourceTime: .zero)
656 | let frameRate = makeFrameRate()
657 |
658 | queue.async {
659 | var i = 0
660 | for index in self.frames.indices {
661 | autoreleasepool {
662 | while self.videoInput?.isReadyForMoreMediaData == false {
663 | }
664 | let time = CMTime(value: CMTimeValue(i), timescale: CMTimeScale(frameRate))
665 | if let buffer = self.frames[index]?.cgI?.makePixelBuffer() {
666 | guard self.adapter?.append(buffer, withPresentationTime: time) == true else {
667 | let error = writer.error ?? FlipBookAssetWriterError.couldNotWriteAsset
668 | completion(.failure(error))
669 | return
670 | }
671 | }
672 | self.frames[index] = nil
673 | progress?(CGFloat(index + 1) / CGFloat(self.frames.count))
674 | i += 1
675 | }
676 | }
677 |
678 | self.videoInput?.markAsFinished()
679 | writer.finishWriting {
680 | self.frames = []
681 | completion(.success(fileURL))
682 | }
683 | }
684 | } catch {
685 | completion(.failure(error))
686 | }
687 | }
688 |
689 | /// Ends the realtime writing of sample buffers and writes to `fileOutputPath`
690 | /// - Parameter completion: Closure called when writing is finished
691 | internal func endLiveCaptureAndWrite(completion: @escaping (Result) -> Void) {
692 | rpScreenWriter.finishWriting { (url, error) in
693 | if let url = url {
694 | completion(.success(url))
695 | } else if let error = error{
696 | completion(.failure(error))
697 | } else {
698 | completion(.failure(FlipBookAssetWriterError.unknownError))
699 | }
700 | }
701 | }
702 |
703 | /// Helper function that calculates the frame rate if `startDate` and `endDate` are set. Otherwise it returns `preferredFramesPerSecond`
704 | internal func makeFrameRate() -> Int {
705 | let startTimeDiff = startDate?.timeIntervalSinceNow ?? 0
706 | let endTimeDiff = endDate?.timeIntervalSinceNow ?? 0
707 | let diff = endTimeDiff - startTimeDiff
708 | let frameRate: Int
709 | if diff != 0 {
710 | frameRate = Int(Double(frames.count) / diff + 0.5)
711 | } else {
712 | frameRate = preferredFramesPerSecond
713 | }
714 | return frameRate
715 | }
716 | }
717 |
718 | // MARK: - CGImage + CVPixelBuffer -
719 |
/// Adds helper functions for converting from `CGImage` to `CVPixelBuffer`
internal extension CGImage {
    
    /// Creates and returns a pixel buffer for the image
    ///
    /// Produces a 32ARGB buffer at the image's own dimensions in the device RGB
    /// color space, the same attributes the asset writer's adaptor is configured with.
    func makePixelBuffer() -> CVPixelBuffer? {
        return pixelBuffer(width: self.width,
                           height: self.height,
                           pixelFormatType: kCVPixelFormatType_32ARGB,
                           colorSpace: CGColorSpaceCreateDeviceRGB(),
                           alphaInfo: .noneSkipFirst)
    }
    
    /// Creates and returns a pixel buffer for the image
    /// - Parameters:
    ///   - width: The desired width of the image represented by the image buffer
    ///   - height: The desired height of the image represented by the image buffer
    ///   - pixelFormatType: The desired pixel format type used by the image buffer
    ///   - colorSpace: The desired color space used by the image buffer
    ///   - alphaInfo: The desired alpha info used by the image buffer
    /// - Returns: A pixel buffer containing the image drawn at `width` x `height`,
    ///            or `nil` when any CoreVideo/CoreGraphics step fails.
    func pixelBuffer(width: Int, height: Int,
                     pixelFormatType: OSType,
                     colorSpace: CGColorSpace,
                     alphaInfo: CGImageAlphaInfo) -> CVPixelBuffer? {
        var maybePixelBuffer: CVPixelBuffer?
        let attrs = [
            kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue,
            kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue
        ]
        let status = CVPixelBufferCreate(kCFAllocatorDefault,
                                         width,
                                         height,
                                         pixelFormatType,
                                         attrs as CFDictionary,
                                         &maybePixelBuffer)
        
        guard status == kCVReturnSuccess, let pixelBuffer = maybePixelBuffer else {
            return nil
        }
        
        let flags = CVPixelBufferLockFlags(rawValue: 0)
        guard kCVReturnSuccess == CVPixelBufferLockBaseAddress(pixelBuffer, flags) else {
            return nil
        }
        
        // Unlock on every exit path once the base address is locked
        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, flags) }
        
        guard let context = CGContext(data: CVPixelBufferGetBaseAddress(pixelBuffer),
                                      width: width,
                                      height: height,
                                      bitsPerComponent: 8,
                                      bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
                                      space: colorSpace,
                                      bitmapInfo: alphaInfo.rawValue)
        else {
            return nil
        }
        
        context.draw(self, in: CGRect(x: 0, y: 0, width: width, height: height))
        return pixelBuffer
    }
    
    /// Creates a `CMSampleBuffer` wrapping this image
    /// - Parameter frameIdx: Index of the frame, used as the presentation time
    ///                       in a 100-ticks-per-second timescale.
    /// - Returns: A sample buffer, or `nil` when pixel-buffer, format-description,
    ///            or sample-buffer creation fails.
    func makeCMSampleBuffer(_ frameIdx: Int) -> CMSampleBuffer? {
        guard let pixelBuffer = makePixelBuffer() else { return nil }
        var newSampleBuffer: CMSampleBuffer? = nil
        let time = CMTime(value: CMTimeValue(frameIdx), timescale: 100)
        // NOTE(review): `duration` is set to the cumulative presentation time rather
        // than a per-frame duration — confirm this is what consumers expect.
        var timingInfo = CMSampleTimingInfo(duration: time,
                                            presentationTimeStamp: time,
                                            decodeTimeStamp: time)
        var videoInfo: CMVideoFormatDescription? = nil
        // Previously the OSStatus results of both Create calls were silently
        // ignored; check them explicitly so failures surface as `nil` by intent.
        let formatStatus = CMVideoFormatDescriptionCreateForImageBuffer(allocator: nil,
                                                                        imageBuffer: pixelBuffer,
                                                                        formatDescriptionOut: &videoInfo)
        guard formatStatus == noErr, let videoI = videoInfo else { return nil }
        let sampleStatus = CMSampleBufferCreateForImageBuffer(allocator: kCFAllocatorDefault,
                                                              imageBuffer: pixelBuffer,
                                                              dataReady: true,
                                                              makeDataReadyCallback: nil,
                                                              refcon: nil,
                                                              formatDescription: videoI,
                                                              sampleTiming: &timingInfo,
                                                              sampleBufferOut: &newSampleBuffer)
        guard sampleStatus == noErr else { return nil }
        return newSampleBuffer
    }
}
805 |
--------------------------------------------------------------------------------