├── .gitignore
├── AVMultiCamPiP.xcodeproj
│   ├── .xcodesamplecode.plist
│   ├── project.pbxproj
│   └── project.xcworkspace
│       └── xcshareddata
│           └── WorkspaceSettings.xcsettings
├── AVMultiCamPiP
│   ├── AppDelegate.swift
│   ├── Assets.xcassets
│   │   ├── AppIcon.appiconset
│   │   │   └── Contents.json
│   │   └── Contents.json
│   ├── Base.lproj
│   │   ├── LaunchScreen.storyboard
│   │   └── Main.storyboard
│   ├── CameraViewController.swift
│   ├── Info.plist
│   ├── MovieRecorder.swift
│   ├── PiPMixer.metal
│   ├── PiPVideoMixer.swift
│   ├── PreviewView.swift
│   ├── UIView+Extension.swift
│   └── Utilities.swift
├── Configuration
│   └── SampleCode.xcconfig
├── LICENSE
│   └── LICENSE.txt
└── README.md
/.gitignore:
--------------------------------------------------------------------------------
1 | # See LICENSE folder for this sample’s licensing information.
2 | #
3 | # Apple sample code gitignore configuration.
4 |
5 | # Finder
6 | .DS_Store
7 |
8 | # Xcode - User files
9 | xcuserdata/
10 |
11 | **/*.xcodeproj/project.xcworkspace/*
12 | !**/*.xcodeproj/project.xcworkspace/xcshareddata
13 |
14 | **/*.xcodeproj/project.xcworkspace/xcshareddata/*
15 | !**/*.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
16 |
17 | **/*.playground/playground.xcworkspace/*
18 | !**/*.playground/playground.xcworkspace/xcshareddata
19 |
20 | **/*.playground/playground.xcworkspace/xcshareddata/*
21 | !**/*.playground/playground.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
22 |
--------------------------------------------------------------------------------
/AVMultiCamPiP.xcodeproj/.xcodesamplecode.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <array/>
5 | </plist>
6 |
--------------------------------------------------------------------------------
/AVMultiCamPiP.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 50;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 26584C1D2282323E00ADA373 /* MovieRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 26584C1C2282323E00ADA373 /* MovieRecorder.swift */; };
11 | 2673E417227D09AB008DB6F7 /* PiPVideoMixer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2673E415227D09AA008DB6F7 /* PiPVideoMixer.swift */; };
12 | 2673E41A227D0AB7008DB6F7 /* Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2673E419227D0AB7008DB6F7 /* Utilities.swift */; };
13 | 2673E41C227D0F13008DB6F7 /* PiPMixer.metal in Sources */ = {isa = PBXBuildFile; fileRef = 2673E41B227D0F13008DB6F7 /* PiPMixer.metal */; };
14 | 42E159E2227A481B00F81016 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 42E159E1227A481B00F81016 /* AppDelegate.swift */; };
15 | 42E159E4227A481B00F81016 /* CameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 42E159E3227A481B00F81016 /* CameraViewController.swift */; };
16 | 42E159E7227A481B00F81016 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 42E159E5227A481B00F81016 /* Main.storyboard */; };
17 | 42E159E9227A481C00F81016 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 42E159E8227A481C00F81016 /* Assets.xcassets */; };
18 | 42E159EC227A481C00F81016 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 42E159EA227A481C00F81016 /* LaunchScreen.storyboard */; };
19 | 42E15A12227A5CB800F81016 /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 42E15A11227A5CB800F81016 /* PreviewView.swift */; };
20 | 42E15A51227B8B3900F81016 /* README.md in Resources */ = {isa = PBXBuildFile; fileRef = 42E15A50227B8B3900F81016 /* README.md */; };
21 | D41A14E222AAD7DE00867F3F /* UIView+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = D41A14E122AAD7DE00867F3F /* UIView+Extension.swift */; };
22 | /* End PBXBuildFile section */
23 |
24 | /* Begin PBXFileReference section */
25 | 26584C1C2282323E00ADA373 /* MovieRecorder.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MovieRecorder.swift; sourceTree = "<group>"; };
26 | 2673E415227D09AA008DB6F7 /* PiPVideoMixer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PiPVideoMixer.swift; sourceTree = "<group>"; };
27 | 2673E419227D0AB7008DB6F7 /* Utilities.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Utilities.swift; sourceTree = "<group>"; };
28 | 2673E41B227D0F13008DB6F7 /* PiPMixer.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; path = PiPMixer.metal; sourceTree = "<group>"; };
29 | 42E159DE227A481B00F81016 /* AVMultiCamPiP.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = AVMultiCamPiP.app; sourceTree = BUILT_PRODUCTS_DIR; };
30 | 42E159E1227A481B00F81016 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
31 | 42E159E3227A481B00F81016 /* CameraViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraViewController.swift; sourceTree = "<group>"; };
32 | 42E159E6227A481B00F81016 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
33 | 42E159E8227A481C00F81016 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
34 | 42E159EB227A481C00F81016 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
35 | 42E159ED227A481C00F81016 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
36 | 42E15A11227A5CB800F81016 /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
37 | 42E15A50227B8B3900F81016 /* README.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; path = README.md; sourceTree = "<group>"; };
38 | 714C7BB0714C78D000000001 /* LICENSE.txt */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; path = LICENSE.txt; sourceTree = "<group>"; };
39 | 7173EA707173ECA000000001 /* SampleCode.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = SampleCode.xcconfig; path = Configuration/SampleCode.xcconfig; sourceTree = "<group>"; };
40 | D41A14E122AAD7DE00867F3F /* UIView+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIView+Extension.swift"; sourceTree = "<group>"; };
41 | /* End PBXFileReference section */
42 |
43 | /* Begin PBXFrameworksBuildPhase section */
44 | 42E159DB227A481B00F81016 /* Frameworks */ = {
45 | isa = PBXFrameworksBuildPhase;
46 | buildActionMask = 2147483647;
47 | files = (
48 | );
49 | runOnlyForDeploymentPostprocessing = 0;
50 | };
51 | /* End PBXFrameworksBuildPhase section */
52 |
53 | /* Begin PBXGroup section */
54 | 42E159D5227A481B00F81016 = {
55 | isa = PBXGroup;
56 | children = (
57 | 42E15A50227B8B3900F81016 /* README.md */,
58 | 42E159E0227A481B00F81016 /* AVMultiCamPiP */,
59 | 42E159DF227A481B00F81016 /* Products */,
60 | 717428C07174296000000001 /* Configuration */,
61 | 714C7240714C74A000000001 /* LICENSE */,
62 | );
63 | sourceTree = "<group>";
64 | };
65 | 42E159DF227A481B00F81016 /* Products */ = {
66 | isa = PBXGroup;
67 | children = (
68 | 42E159DE227A481B00F81016 /* AVMultiCamPiP.app */,
69 | );
70 | name = Products;
71 | sourceTree = "<group>";
72 | };
73 | 42E159E0227A481B00F81016 /* AVMultiCamPiP */ = {
74 | isa = PBXGroup;
75 | children = (
76 | 42E159E1227A481B00F81016 /* AppDelegate.swift */,
77 | 42E159E3227A481B00F81016 /* CameraViewController.swift */,
78 | D41A14E122AAD7DE00867F3F /* UIView+Extension.swift */,
79 | 42E15A11227A5CB800F81016 /* PreviewView.swift */,
80 | 26584C1C2282323E00ADA373 /* MovieRecorder.swift */,
81 | 2673E419227D0AB7008DB6F7 /* Utilities.swift */,
82 | 2673E415227D09AA008DB6F7 /* PiPVideoMixer.swift */,
83 | 2673E41B227D0F13008DB6F7 /* PiPMixer.metal */,
84 | 42E159E5227A481B00F81016 /* Main.storyboard */,
85 | 42E159E8227A481C00F81016 /* Assets.xcassets */,
86 | 42E159EA227A481C00F81016 /* LaunchScreen.storyboard */,
87 | 42E159ED227A481C00F81016 /* Info.plist */,
88 | );
89 | path = AVMultiCamPiP;
90 | sourceTree = "<group>";
91 | };
92 | 714C7240714C74A000000001 /* LICENSE */ = {
93 | isa = PBXGroup;
94 | children = (
95 | 714C7BB0714C78D000000001 /* LICENSE.txt */,
96 | );
97 | path = LICENSE;
98 | sourceTree = "<group>";
99 | };
100 | 717428C07174296000000001 /* Configuration */ = {
101 | isa = PBXGroup;
102 | children = (
103 | 7173EA707173ECA000000001 /* SampleCode.xcconfig */,
104 | );
105 | name = Configuration;
106 | sourceTree = "<group>";
107 | };
108 | /* End PBXGroup section */
109 |
110 | /* Begin PBXNativeTarget section */
111 | 42E159DD227A481B00F81016 /* AVMultiCamPiP */ = {
112 | isa = PBXNativeTarget;
113 | buildConfigurationList = 42E159F0227A481C00F81016 /* Build configuration list for PBXNativeTarget "AVMultiCamPiP" */;
114 | buildPhases = (
115 | 42E159DA227A481B00F81016 /* Sources */,
116 | 42E159DB227A481B00F81016 /* Frameworks */,
117 | 42E159DC227A481B00F81016 /* Resources */,
118 | );
119 | buildRules = (
120 | );
121 | dependencies = (
122 | );
123 | name = AVMultiCamPiP;
124 | productName = AVMultiCamPiP;
125 | productReference = 42E159DE227A481B00F81016 /* AVMultiCamPiP.app */;
126 | productType = "com.apple.product-type.application";
127 | };
128 | /* End PBXNativeTarget section */
129 |
130 | /* Begin PBXProject section */
131 | 42E159D6227A481B00F81016 /* Project object */ = {
132 | isa = PBXProject;
133 | attributes = {
134 | LastSwiftUpdateCheck = 1100;
135 | LastUpgradeCheck = 1100;
136 | ORGANIZATIONNAME = Apple;
137 | TargetAttributes = {
138 | 42E159DD227A481B00F81016 = {
139 | CreatedOnToolsVersion = 11.0;
140 | };
141 | };
142 | };
143 | buildConfigurationList = 42E159D9227A481B00F81016 /* Build configuration list for PBXProject "AVMultiCamPiP" */;
144 | compatibilityVersion = "Xcode 9.3";
145 | developmentRegion = en;
146 | hasScannedForEncodings = 0;
147 | knownRegions = (
148 | en,
149 | Base,
150 | );
151 | mainGroup = 42E159D5227A481B00F81016;
152 | productRefGroup = 42E159DF227A481B00F81016 /* Products */;
153 | projectDirPath = "";
154 | projectRoot = "";
155 | targets = (
156 | 42E159DD227A481B00F81016 /* AVMultiCamPiP */,
157 | );
158 | };
159 | /* End PBXProject section */
160 |
161 | /* Begin PBXResourcesBuildPhase section */
162 | 42E159DC227A481B00F81016 /* Resources */ = {
163 | isa = PBXResourcesBuildPhase;
164 | buildActionMask = 2147483647;
165 | files = (
166 | 42E159EC227A481C00F81016 /* LaunchScreen.storyboard in Resources */,
167 | 42E159E9227A481C00F81016 /* Assets.xcassets in Resources */,
168 | 42E15A51227B8B3900F81016 /* README.md in Resources */,
169 | 42E159E7227A481B00F81016 /* Main.storyboard in Resources */,
170 | );
171 | runOnlyForDeploymentPostprocessing = 0;
172 | };
173 | /* End PBXResourcesBuildPhase section */
174 |
175 | /* Begin PBXSourcesBuildPhase section */
176 | 42E159DA227A481B00F81016 /* Sources */ = {
177 | isa = PBXSourcesBuildPhase;
178 | buildActionMask = 2147483647;
179 | files = (
180 | 2673E41C227D0F13008DB6F7 /* PiPMixer.metal in Sources */,
181 | 2673E417227D09AB008DB6F7 /* PiPVideoMixer.swift in Sources */,
182 | D41A14E222AAD7DE00867F3F /* UIView+Extension.swift in Sources */,
183 | 42E159E4227A481B00F81016 /* CameraViewController.swift in Sources */,
184 | 42E159E2227A481B00F81016 /* AppDelegate.swift in Sources */,
185 | 26584C1D2282323E00ADA373 /* MovieRecorder.swift in Sources */,
186 | 2673E41A227D0AB7008DB6F7 /* Utilities.swift in Sources */,
187 | 42E15A12227A5CB800F81016 /* PreviewView.swift in Sources */,
188 | );
189 | runOnlyForDeploymentPostprocessing = 0;
190 | };
191 | /* End PBXSourcesBuildPhase section */
192 |
193 | /* Begin PBXVariantGroup section */
194 | 42E159E5227A481B00F81016 /* Main.storyboard */ = {
195 | isa = PBXVariantGroup;
196 | children = (
197 | 42E159E6227A481B00F81016 /* Base */,
198 | );
199 | name = Main.storyboard;
200 | sourceTree = "<group>";
201 | };
202 | 42E159EA227A481C00F81016 /* LaunchScreen.storyboard */ = {
203 | isa = PBXVariantGroup;
204 | children = (
205 | 42E159EB227A481C00F81016 /* Base */,
206 | );
207 | name = LaunchScreen.storyboard;
208 | sourceTree = "<group>";
209 | };
210 | /* End PBXVariantGroup section */
211 |
212 | /* Begin XCBuildConfiguration section */
213 | 42E159EE227A481C00F81016 /* Debug */ = {
214 | isa = XCBuildConfiguration;
215 | baseConfigurationReference = 7173EA707173ECA000000001 /* SampleCode.xcconfig */;
216 | buildSettings = {
217 | ALWAYS_SEARCH_USER_PATHS = NO;
218 | CLANG_ANALYZER_NONNULL = YES;
219 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
220 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
221 | CLANG_CXX_LIBRARY = "libc++";
222 | CLANG_ENABLE_MODULES = YES;
223 | CLANG_ENABLE_OBJC_ARC = YES;
224 | CLANG_ENABLE_OBJC_WEAK = YES;
225 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
226 | CLANG_WARN_BOOL_CONVERSION = YES;
227 | CLANG_WARN_COMMA = YES;
228 | CLANG_WARN_CONSTANT_CONVERSION = YES;
229 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
230 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
231 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
232 | CLANG_WARN_EMPTY_BODY = YES;
233 | CLANG_WARN_ENUM_CONVERSION = YES;
234 | CLANG_WARN_INFINITE_RECURSION = YES;
235 | CLANG_WARN_INT_CONVERSION = YES;
236 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
237 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
238 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
239 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
240 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
241 | CLANG_WARN_STRICT_PROTOTYPES = YES;
242 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
243 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
244 | CLANG_WARN_UNREACHABLE_CODE = YES;
245 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
246 | CODE_SIGN_IDENTITY = "iPhone Developer";
247 | COPY_PHASE_STRIP = NO;
248 | DEBUG_INFORMATION_FORMAT = dwarf;
249 | ENABLE_STRICT_OBJC_MSGSEND = YES;
250 | ENABLE_TESTABILITY = YES;
251 | GCC_C_LANGUAGE_STANDARD = gnu11;
252 | GCC_DYNAMIC_NO_PIC = NO;
253 | GCC_NO_COMMON_BLOCKS = YES;
254 | GCC_OPTIMIZATION_LEVEL = 0;
255 | GCC_PREPROCESSOR_DEFINITIONS = (
256 | "DEBUG=1",
257 | "$(inherited)",
258 | );
259 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
260 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
261 | GCC_WARN_UNDECLARED_SELECTOR = YES;
262 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
263 | GCC_WARN_UNUSED_FUNCTION = YES;
264 | GCC_WARN_UNUSED_VARIABLE = YES;
265 | IPHONEOS_DEPLOYMENT_TARGET = 13.0;
266 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
267 | MTL_FAST_MATH = YES;
268 | ONLY_ACTIVE_ARCH = YES;
269 | SDKROOT = iphoneos;
270 | SUPPORTED_PLATFORMS = iphoneos;
271 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
272 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
273 | };
274 | name = Debug;
275 | };
276 | 42E159EF227A481C00F81016 /* Release */ = {
277 | isa = XCBuildConfiguration;
278 | baseConfigurationReference = 7173EA707173ECA000000001 /* SampleCode.xcconfig */;
279 | buildSettings = {
280 | ALWAYS_SEARCH_USER_PATHS = NO;
281 | CLANG_ANALYZER_NONNULL = YES;
282 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
283 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
284 | CLANG_CXX_LIBRARY = "libc++";
285 | CLANG_ENABLE_MODULES = YES;
286 | CLANG_ENABLE_OBJC_ARC = YES;
287 | CLANG_ENABLE_OBJC_WEAK = YES;
288 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
289 | CLANG_WARN_BOOL_CONVERSION = YES;
290 | CLANG_WARN_COMMA = YES;
291 | CLANG_WARN_CONSTANT_CONVERSION = YES;
292 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
293 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
294 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
295 | CLANG_WARN_EMPTY_BODY = YES;
296 | CLANG_WARN_ENUM_CONVERSION = YES;
297 | CLANG_WARN_INFINITE_RECURSION = YES;
298 | CLANG_WARN_INT_CONVERSION = YES;
299 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
300 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
301 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
302 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
303 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
304 | CLANG_WARN_STRICT_PROTOTYPES = YES;
305 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
306 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
307 | CLANG_WARN_UNREACHABLE_CODE = YES;
308 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
309 | CODE_SIGN_IDENTITY = "iPhone Developer";
310 | COPY_PHASE_STRIP = NO;
311 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
312 | ENABLE_NS_ASSERTIONS = NO;
313 | ENABLE_STRICT_OBJC_MSGSEND = YES;
314 | GCC_C_LANGUAGE_STANDARD = gnu11;
315 | GCC_NO_COMMON_BLOCKS = YES;
316 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
317 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
318 | GCC_WARN_UNDECLARED_SELECTOR = YES;
319 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
320 | GCC_WARN_UNUSED_FUNCTION = YES;
321 | GCC_WARN_UNUSED_VARIABLE = YES;
322 | IPHONEOS_DEPLOYMENT_TARGET = 13.0;
323 | MTL_ENABLE_DEBUG_INFO = NO;
324 | MTL_FAST_MATH = YES;
325 | SDKROOT = iphoneos;
326 | SUPPORTED_PLATFORMS = iphoneos;
327 | SWIFT_COMPILATION_MODE = wholemodule;
328 | SWIFT_OPTIMIZATION_LEVEL = "-O";
329 | VALIDATE_PRODUCT = YES;
330 | };
331 | name = Release;
332 | };
333 | 42E159F1227A481C00F81016 /* Debug */ = {
334 | isa = XCBuildConfiguration;
335 | baseConfigurationReference = 7173EA707173ECA000000001 /* SampleCode.xcconfig */;
336 | buildSettings = {
337 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
338 | CODE_SIGN_IDENTITY = "iPhone Developer";
339 | CODE_SIGN_STYLE = Automatic;
340 | DEVELOPMENT_TEAM = 5S9BE29758;
341 | INFOPLIST_FILE = AVMultiCamPiP/Info.plist;
342 | LD_RUNPATH_SEARCH_PATHS = (
343 | "$(inherited)",
344 | "@executable_path/Frameworks",
345 | );
346 | PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.AVMultiCamPiP${SAMPLE_CODE_DISAMBIGUATOR}";
347 | PRODUCT_NAME = "$(TARGET_NAME)";
348 | PROVISIONING_PROFILE_SPECIFIER = "";
349 | SWIFT_VERSION = 5.0;
350 | TARGETED_DEVICE_FAMILY = "1,2";
351 | };
352 | name = Debug;
353 | };
354 | 42E159F2227A481C00F81016 /* Release */ = {
355 | isa = XCBuildConfiguration;
356 | baseConfigurationReference = 7173EA707173ECA000000001 /* SampleCode.xcconfig */;
357 | buildSettings = {
358 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
359 | CODE_SIGN_IDENTITY = "iPhone Developer";
360 | CODE_SIGN_STYLE = Automatic;
361 | DEVELOPMENT_TEAM = 5S9BE29758;
362 | INFOPLIST_FILE = AVMultiCamPiP/Info.plist;
363 | LD_RUNPATH_SEARCH_PATHS = (
364 | "$(inherited)",
365 | "@executable_path/Frameworks",
366 | );
367 | PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.AVMultiCamPiP${SAMPLE_CODE_DISAMBIGUATOR}";
368 | PRODUCT_NAME = "$(TARGET_NAME)";
369 | PROVISIONING_PROFILE_SPECIFIER = "";
370 | SWIFT_VERSION = 5.0;
371 | TARGETED_DEVICE_FAMILY = "1,2";
372 | };
373 | name = Release;
374 | };
375 | /* End XCBuildConfiguration section */
376 |
377 | /* Begin XCConfigurationList section */
378 | 42E159D9227A481B00F81016 /* Build configuration list for PBXProject "AVMultiCamPiP" */ = {
379 | isa = XCConfigurationList;
380 | buildConfigurations = (
381 | 42E159EE227A481C00F81016 /* Debug */,
382 | 42E159EF227A481C00F81016 /* Release */,
383 | );
384 | defaultConfigurationIsVisible = 0;
385 | defaultConfigurationName = Release;
386 | };
387 | 42E159F0227A481C00F81016 /* Build configuration list for PBXNativeTarget "AVMultiCamPiP" */ = {
388 | isa = XCConfigurationList;
389 | buildConfigurations = (
390 | 42E159F1227A481C00F81016 /* Debug */,
391 | 42E159F2227A481C00F81016 /* Release */,
392 | );
393 | defaultConfigurationIsVisible = 0;
394 | defaultConfigurationName = Release;
395 | };
396 | /* End XCConfigurationList section */
397 | };
398 | rootObject = 42E159D6227A481B00F81016 /* Project object */;
399 | }
400 |
--------------------------------------------------------------------------------
/AVMultiCamPiP.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>BuildSystemType</key>
6 | <string>Latest</string>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/AVMultiCamPiP/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | /*
2 | See LICENSE folder for this sample’s licensing information.
3 |
4 | Abstract:
5 | Implements the application delegate.
6 | */
7 |
8 | import UIKit
9 |
10 | @UIApplicationMain
11 | class AppDelegate: UIResponder, UIApplicationDelegate {
12 | var window: UIWindow?
13 | }
14 |
--------------------------------------------------------------------------------
/AVMultiCamPiP/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | },
88 | {
89 | "idiom" : "ios-marketing",
90 | "size" : "1024x1024",
91 | "scale" : "1x"
92 | }
93 | ],
94 | "info" : {
95 | "version" : 1,
96 | "author" : "xcode"
97 | }
98 | }
--------------------------------------------------------------------------------
/AVMultiCamPiP/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/AVMultiCamPiP/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
[Interface Builder XML not preserved in this listing.]
--------------------------------------------------------------------------------
/AVMultiCamPiP/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
[Interface Builder XML not preserved in this listing. The scene's contents can be inferred from CameraViewController's connections: the full-screen and PiP PreviewViews with their front/back PiP constraint sets, the Record button, the Resume button, the camera-unavailable label, and the pan and pinch gesture recognizers.]
--------------------------------------------------------------------------------
/AVMultiCamPiP/CameraViewController.swift:
--------------------------------------------------------------------------------
1 | /*
2 | See LICENSE folder for this sample’s licensing information.
3 |
4 | Abstract:
5 | Implements the view controller for the camera interface.
6 | */
7 |
8 | import UIKit
9 | import AVFoundation
10 | import Photos
11 |
12 | // Utility operators: let CGPoint subtraction and addition be written with - and +.
13 | func -(_ left: CGPoint, _ right: CGPoint) -> CGPoint {
14 | return CGPoint(x: left.x - right.x, y: left.y - right.y)
15 | }
16 | func +(_ left: CGPoint, _ right: CGPoint) -> CGPoint {
17 | return CGPoint(x: left.x + right.x, y: left.y + right.y)
18 | }
19 |
20 | class ViewController: UIViewController, AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {
21 |
22 | // MARK: View Controller Life Cycle
23 |
24 | override func viewDidLoad() {
25 | super.viewDidLoad()
26 |
27 | // Allow users to double-tap to switch which camera (front or back) appears in the PiP
28 | let togglePiPDoubleTapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(togglePiP))
29 | togglePiPDoubleTapGestureRecognizer.numberOfTapsRequired = 2
30 | view.addGestureRecognizer(togglePiPDoubleTapGestureRecognizer)
31 |
32 | // Disable UI. Enable the UI later, if and only if the session starts running.
33 | recordButton.isEnabled = false
34 |
35 | // Set up the back and front video preview views.
36 | backCameraVideoPreviewView.videoPreviewLayer.setSessionWithNoConnection(session)
37 | frontCameraVideoPreviewView.videoPreviewLayer.setSessionWithNoConnection(session)
38 |
39 | // Store the back and front video preview layers so we can connect them to their inputs
40 | backCameraVideoPreviewLayer = backCameraVideoPreviewView.videoPreviewLayer
41 | frontCameraVideoPreviewLayer = frontCameraVideoPreviewView.videoPreviewLayer
42 |
43 | // Store the location of the pip's frame in relation to the full screen video preview
44 | updateNormalizedPiPFrame()
45 |
46 | UIDevice.current.beginGeneratingDeviceOrientationNotifications()
47 |
48 | /*
49 | Configure the capture session.
50 | In general it is not safe to mutate an AVCaptureSession or any of its
51 | inputs, outputs, or connections from multiple threads at the same time.
52 |
53 | Don't do this on the main queue, because AVCaptureMultiCamSession.startRunning()
54 | is a blocking call, which can take a long time. Dispatch session setup
55 | to the sessionQueue so as not to block the main queue, which keeps the UI responsive.
56 | */
57 | sessionQueue.async {
58 | self.configureSession()
59 | }
60 |
61 | // Keep the screen awake
62 | UIApplication.shared.isIdleTimerDisabled = true
63 | }
64 |
65 | override func viewWillAppear(_ animated: Bool) {
66 | super.viewWillAppear(animated)
67 |
68 | sessionQueue.async {
69 | switch self.setupResult {
70 | case .success:
71 | // Only setup observers and start the session running if setup succeeded.
72 | self.addObservers()
73 | self.session.startRunning()
74 | self.isSessionRunning = self.session.isRunning
75 |
76 | case .notAuthorized:
77 | DispatchQueue.main.async {
78 | let changePrivacySetting = "\(Bundle.main.applicationName) doesn't have permission to use the camera. Please change the privacy settings."
79 | let message = NSLocalizedString(changePrivacySetting, comment: "Alert message when the user has denied access to the camera")
80 | let alertController = UIAlertController(title: Bundle.main.applicationName, message: message, preferredStyle: .alert)
81 |
82 | alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
83 | style: .cancel,
84 | handler: nil))
85 |
86 | alertController.addAction(UIAlertAction(title: NSLocalizedString("Settings", comment: "Alert button to open Settings"),
87 | style: .`default`,
88 | handler: { _ in
89 | if let settingsURL = URL(string: UIApplication.openSettingsURLString) {
90 | UIApplication.shared.open(settingsURL,
91 | options: [:],
92 | completionHandler: nil)
93 | }
94 | }))
95 |
96 | self.present(alertController, animated: true, completion: nil)
97 | }
98 |
99 | case .configurationFailed:
100 | DispatchQueue.main.async {
101 | let alertMsg = "Alert message when something goes wrong during capture session configuration"
102 | let message = NSLocalizedString("Unable to capture media", comment: alertMsg)
103 | let alertController = UIAlertController(title: Bundle.main.applicationName, message: message, preferredStyle: .alert)
104 |
105 | alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
106 | style: .cancel,
107 | handler: nil))
108 |
109 | self.present(alertController, animated: true, completion: nil)
110 | }
111 |
112 | case .multiCamNotSupported:
113 | DispatchQueue.main.async {
114 | let alertMessage = "Alert message when multi cam is not supported"
115 | let message = NSLocalizedString("Multi Cam Not Supported", comment: alertMessage)
116 | let alertController = UIAlertController(title: Bundle.main.applicationName, message: message, preferredStyle: .alert)
117 |
118 | self.present(alertController, animated: true, completion: nil)
119 | }
120 | }
121 | }
122 | }
123 |
124 | override func viewWillDisappear(_ animated: Bool) {
125 | sessionQueue.async {
126 | if self.setupResult == .success {
127 | self.session.stopRunning()
128 | self.isSessionRunning = self.session.isRunning
129 | self.removeObservers()
130 | }
131 | }
132 |
133 | super.viewWillDisappear(animated)
134 | }
135 |
136 | @objc // Expose to Objective-C for use with #selector()
137 | private func didEnterBackground(notification: NSNotification) {
138 | // Free up resources.
139 | dataOutputQueue.async {
140 | self.renderingEnabled = false
141 | self.videoMixer.reset()
142 | self.currentPiPSampleBuffer = nil
143 | }
144 | }
145 |
146 | @objc // Expose to Objective-C for use with #selector()
147 | func willEnterForeground(notification: NSNotification) {
148 | dataOutputQueue.async {
149 | self.renderingEnabled = true
150 | }
151 | }
152 |
153 | // MARK: KVO and Notifications
154 |
155 | private var sessionRunningContext = 0
156 |
157 | private var keyValueObservations = [NSKeyValueObservation]()
158 |
159 | private func addObservers() {
160 | let keyValueObservation = session.observe(\.isRunning, options: .new) { _, change in
161 | guard let isSessionRunning = change.newValue else { return }
162 |
163 | DispatchQueue.main.async {
164 | self.recordButton.isEnabled = isSessionRunning
165 | }
166 | }
167 | keyValueObservations.append(keyValueObservation)
168 |
169 | let systemPressureStateObservation = observe(\.self.backCameraDeviceInput?.device.systemPressureState, options: .new) { _, change in
170 | guard let systemPressureState = change.newValue as? AVCaptureDevice.SystemPressureState else { return }
171 | self.setRecommendedFrameRateRangeForPressureState(systemPressureState)
172 | }
173 | keyValueObservations.append(systemPressureStateObservation)
174 |
175 | NotificationCenter.default.addObserver(self,
176 | selector: #selector(didEnterBackground),
177 | name: UIApplication.didEnterBackgroundNotification,
178 | object: nil)
179 |
180 | NotificationCenter.default.addObserver(self,
181 | selector: #selector(willEnterForeground),
182 | name: UIApplication.willEnterForegroundNotification,
183 | object: nil)
184 |
185 | NotificationCenter.default.addObserver(self,
186 | selector: #selector(sessionRuntimeError),
187 | name: .AVCaptureSessionRuntimeError,
188 | object: session)
189 |
190 | // A session can run only when the app is full screen. It will be interrupted in a multi-app layout.
191 | // Add observers to handle these session interruptions and inform the user.
192 | // See AVCaptureSessionWasInterruptedNotification for other interruption reasons.
193 |
194 | NotificationCenter.default.addObserver(self,
195 | selector: #selector(sessionWasInterrupted),
196 | name: .AVCaptureSessionWasInterrupted,
197 | object: session)
198 |
199 | NotificationCenter.default.addObserver(self,
200 | selector: #selector(sessionInterruptionEnded),
201 | name: .AVCaptureSessionInterruptionEnded,
202 | object: session)
203 | }
204 |
205 | private func removeObservers() {
206 | for keyValueObservation in keyValueObservations {
207 | keyValueObservation.invalidate()
208 | }
209 |
210 | keyValueObservations.removeAll()
211 | }
212 |
213 | // MARK: Video Preview PiP Management
214 |
215 | private var pipDevicePosition: AVCaptureDevice.Position = .front
216 |
217 | private var normalizedPipFrame = CGRect.zero
218 |
219 | @IBOutlet private var frontCameraPiPConstraints: [NSLayoutConstraint]!
220 |
221 | @IBOutlet private var backCameraPiPConstraints: [NSLayoutConstraint]!
222 |
223 | @objc // Expose to Objective-C for use with #selector()
224 | private func togglePiP() {
225 | // Disable animations so the views move immediately
226 | CATransaction.begin()
227 | UIView.setAnimationsEnabled(false)
228 | CATransaction.setDisableActions(true)
229 |
230 | if pipDevicePosition == .front {
231 | NSLayoutConstraint.deactivate(frontCameraPiPConstraints)
232 | NSLayoutConstraint.activate(backCameraPiPConstraints)
233 | view.sendSubviewToBack(frontCameraVideoPreviewView)
234 | pipDevicePosition = .back
235 | } else {
236 | NSLayoutConstraint.deactivate(backCameraPiPConstraints)
237 | NSLayoutConstraint.activate(frontCameraPiPConstraints)
238 | view.sendSubviewToBack(backCameraVideoPreviewView)
239 | pipDevicePosition = .front
240 | }
241 |
242 | CATransaction.commit()
243 | }
244 |
245 | var fullScreenVideoPreviewView: PreviewView = PreviewView() // Reassigned to the matching outlet in updateNormalizedPiPFrame()
246 | var pipVideoPreviewView: PreviewView = PreviewView() // Reassigned to the matching outlet in updateNormalizedPiPFrame()
247 |
248 |
249 | @IBOutlet var gesturedayo: UIPinchGestureRecognizer!
250 | // TODO
251 |
252 | var orgOrigin: CGPoint! // Origin of the dragged view when the touch began
253 | // Touch location in the parent view when the touch began
254 | var orgParentPoint: CGPoint!
255 |
256 |
257 |
258 |
259 | @IBAction func panGesture(_ sender: Any) {
260 | let sender = sender as! UIPanGestureRecognizer
261 |
262 | switch sender.state {
263 | case .began:
264 | // Touch began: record the touched view's origin and the touch location in the parent view
265 | orgOrigin = sender.view?.frame.origin
266 | orgParentPoint = sender.translation(in: self.view)
267 | // TODO
268 | break
269 | case .changed:
270 | // Get the current touch location in the parent view
271 | let newParentPoint = sender.translation(in: self.view)
272 | // Pan gesture continued: offset the view's origin at touch start by the translation accumulated since then
273 | sender.view?.frame.origin = orgOrigin + newParentPoint - orgParentPoint
274 |
275 | break
276 | default:
277 | break
278 | }
279 |
280 | }
281 |
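// One follow-up the pan handler above leaves open (an assumption about
// intent, not code from the sample): after a drag, the recorded composition
// still uses the previously computed normalizedPipFrame. An `.ended` case
// along these lines would keep the movie output in sync with the preview:
//
// case .ended:
// updateNormalizedPiPFrame()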
282 | @IBAction func pinchGesture(_ sender: Any) {
283 | let sender = sender as! UIPinchGestureRecognizer
284 | pipVideoPreviewView.transform = CGAffineTransform(scaleX: sender.scale, y: sender.scale)
285 |
286 | // pipVideoPreviewView.frame.size.width = 300.0
287 |
288 | }
289 |
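// A common refinement, sketched here as an alternative rather than taken
// from the sample: pinchGesture above replaces the view's transform with an
// absolute scale on every change, so each new pinch restarts from the
// recognizer's fresh scale. Concatenating the increment and resetting the
// scale makes successive pinches compose:
private func accumulatingPinch(_ sender: UIPinchGestureRecognizer) {
pipVideoPreviewView.transform = pipVideoPreviewView.transform.scaledBy(x: sender.scale, y: sender.scale)
sender.scale = 1.0
}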
290 |
291 | @IBAction func draguGesture(_ sender: UIPanGestureRecognizer) {
292 |
293 | pipVideoPreviewView.frame.size.width = 300.0
294 | }
295 |
296 |
297 |
298 | private func updateNormalizedPiPFrame() {
299 | // let fullScreenVideoPreviewView: PreviewView
300 | // let pipVideoPreviewView: PreviewView
301 | // // Create the pan gesture recognizer instances
302 | // let gesture1 = UIPanGestureRecognizer(target: self, action: #selector(panGesture1(_:)))
303 | // let gesture2 = UIPanGestureRecognizer(target: self, action: #selector(panGesture2(_:)))
304 |
305 | if pipDevicePosition == .back {
306 | fullScreenVideoPreviewView = frontCameraVideoPreviewView
307 | pipVideoPreviewView = backCameraVideoPreviewView
308 |
309 | } else if pipDevicePosition == .front {
310 | fullScreenVideoPreviewView = backCameraVideoPreviewView
311 | pipVideoPreviewView = frontCameraVideoPreviewView
312 | // pipVideoPreviewView.addGestureRecognizer(gesture1)
313 | // pipVideoPreviewView.addGestureRecognizer(gesture2)
314 | } else {
315 | fatalError("Unexpected pip device position: \(pipDevicePosition)")
316 | }
317 |
318 | let pipFrameInFullScreenVideoPreview = pipVideoPreviewView.convert(pipVideoPreviewView.bounds, to: fullScreenVideoPreviewView)
319 | let normalizedTransform = CGAffineTransform(scaleX: 1.0 / fullScreenVideoPreviewView.frame.width, y: 1.0 / fullScreenVideoPreviewView.frame.height)
320 |
321 | normalizedPipFrame = pipFrameInFullScreenVideoPreview.applying(normalizedTransform)
322 | }
323 |
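// A minimal sketch (the helper's name and use are assumptions, not part of
// the sample as shown) of how a normalized rect such as normalizedPipFrame
// maps back into a destination pixel buffer's pixel space when compositing:
private func pixelRect(for normalizedFrame: CGRect, in destinationSize: CGSize) -> CGRect {
return CGRect(x: normalizedFrame.origin.x * destinationSize.width,
y: normalizedFrame.origin.y * destinationSize.height,
width: normalizedFrame.width * destinationSize.width,
height: normalizedFrame.height * destinationSize.height)
}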
324 | // MARK: Capture Session Management
325 |
326 | @IBOutlet private var resumeButton: UIButton!
327 |
328 | @IBOutlet private var cameraUnavailableLabel: UILabel!
329 |
330 | private enum SessionSetupResult {
331 | case success
332 | case notAuthorized
333 | case configurationFailed
334 | case multiCamNotSupported
335 | }
336 |
337 | private let session = AVCaptureMultiCamSession()
338 |
339 | private var isSessionRunning = false
340 |
341 | private let sessionQueue = DispatchQueue(label: "session queue") // Communicate with the session and other session objects on this queue.
342 |
343 | private let dataOutputQueue = DispatchQueue(label: "data output queue")
344 |
345 | private var setupResult: SessionSetupResult = .success
346 |
347 | @objc dynamic private(set) var backCameraDeviceInput: AVCaptureDeviceInput?
348 | // TODO
349 | private let backCameraVideoDataOutput = AVCaptureVideoDataOutput()
350 |
351 | @IBOutlet private var backCameraVideoPreviewView: PreviewView!
352 |
353 | private weak var backCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?
354 |
355 | private var frontCameraDeviceInput: AVCaptureDeviceInput?
356 |
357 | private let frontCameraVideoDataOutput = AVCaptureVideoDataOutput()
358 |
359 | @IBOutlet private var frontCameraVideoPreviewView: PreviewView!
360 |
361 | private weak var frontCameraVideoPreviewLayer: AVCaptureVideoPreviewLayer?
362 |
363 | private var microphoneDeviceInput: AVCaptureDeviceInput?
364 |
365 | private let backMicrophoneAudioDataOutput = AVCaptureAudioDataOutput()
366 |
367 | private let frontMicrophoneAudioDataOutput = AVCaptureAudioDataOutput()
368 |
369 | // Must be called on the session queue
370 | private func configureSession() {
371 | guard setupResult == .success else { return }
372 |
373 | guard AVCaptureMultiCamSession.isMultiCamSupported else {
374 | print("MultiCam not supported on this device")
375 | setupResult = .multiCamNotSupported
376 | return
377 | }
378 |
379 | // When using AVCaptureMultiCamSession, it is best to manually add connections from AVCaptureInputs to AVCaptureOutputs
380 | session.beginConfiguration()
381 | defer {
382 | session.commitConfiguration()
383 | if setupResult == .success {
384 | checkSystemCost()
385 | }
386 | }
387 |
388 | guard configureBackCamera() else {
389 | setupResult = .configurationFailed
390 | return
391 | }
392 |
393 | guard configureFrontCamera() else {
394 | setupResult = .configurationFailed
395 | return
396 | }
397 |
398 | guard configureMicrophone() else {
399 | setupResult = .configurationFailed
400 | return
401 | }
402 | }
403 |
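// checkSystemCost() is defined elsewhere in the sample and is not shown in
// this listing. As a hedged sketch of the idea: AVCaptureMultiCamSession
// reports fractional costs, and a value above 1.0 means the configuration
// cannot run sustainably on the current hardware.
private func logSessionCost() {
print("Hardware cost: \(session.hardwareCost), system pressure cost: \(session.systemPressureCost)")
}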
404 | private func configureBackCamera() -> Bool {
405 | session.beginConfiguration()
406 | defer {
407 | session.commitConfiguration()
408 | }
409 |
410 | // Find the back camera
411 | guard let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else {
412 | print("Could not find the back camera")
413 | return false
414 | }
415 |
416 | // Add the back camera input to the session
417 | do {
418 | backCameraDeviceInput = try AVCaptureDeviceInput(device: backCamera)
419 |
420 | guard let backCameraDeviceInput = backCameraDeviceInput,
421 | session.canAddInput(backCameraDeviceInput) else {
422 | print("Could not add back camera device input")
423 | return false
424 | }
425 | session.addInputWithNoConnections(backCameraDeviceInput)
426 | } catch {
427 | print("Could not create back camera device input: \(error)")
428 | return false
429 | }
430 |
431 | // Find the back camera device input's video port
432 | guard let backCameraDeviceInput = backCameraDeviceInput,
433 | let backCameraVideoPort = backCameraDeviceInput.ports(for: .video,
434 | sourceDeviceType: backCamera.deviceType,
435 | sourceDevicePosition: backCamera.position).first else {
436 | print("Could not find the back camera device input's video port")
437 | return false
438 | }
439 |
440 | // Add the back camera video data output
441 | guard session.canAddOutput(backCameraVideoDataOutput) else {
442 | print("Could not add the back camera video data output")
443 | return false
444 | }
445 | session.addOutputWithNoConnections(backCameraVideoDataOutput)
446 | backCameraVideoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
447 | backCameraVideoDataOutput.setSampleBufferDelegate(self, queue: dataOutputQueue)
448 |
449 | // Connect the back camera device input to the back camera video data output
450 | let backCameraVideoDataOutputConnection = AVCaptureConnection(inputPorts: [backCameraVideoPort], output: backCameraVideoDataOutput)
451 | guard session.canAddConnection(backCameraVideoDataOutputConnection) else {
452 | print("Could not add a connection to the back camera video data output")
453 | return false
454 | }
455 | session.addConnection(backCameraVideoDataOutputConnection)
456 | backCameraVideoDataOutputConnection.videoOrientation = .portrait
457 |
458 | // Connect the back camera device input to the back camera video preview layer
459 | guard let backCameraVideoPreviewLayer = backCameraVideoPreviewLayer else {
460 | return false
461 | }
462 | let backCameraVideoPreviewLayerConnection = AVCaptureConnection(inputPort: backCameraVideoPort, videoPreviewLayer: backCameraVideoPreviewLayer)
463 | guard session.canAddConnection(backCameraVideoPreviewLayerConnection) else {
464 | print("Could not add a connection to the back camera video preview layer")
465 | return false
466 | }
467 | session.addConnection(backCameraVideoPreviewLayerConnection)
468 |
469 | return true
470 | }
471 |
472 | private func configureFrontCamera() -> Bool {
473 | session.beginConfiguration()
474 | defer {
475 | session.commitConfiguration()
476 | }
477 |
478 | // Find the front camera
479 | guard let frontCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else {
480 | print("Could not find the front camera")
481 | return false
482 | }
483 |
484 | // Add the front camera input to the session
485 | do {
486 | frontCameraDeviceInput = try AVCaptureDeviceInput(device: frontCamera)
487 |
488 | guard let frontCameraDeviceInput = frontCameraDeviceInput,
489 | session.canAddInput(frontCameraDeviceInput) else {
490 | print("Could not add front camera device input")
491 | return false
492 | }
493 | session.addInputWithNoConnections(frontCameraDeviceInput)
494 | } catch {
495 | print("Could not create front camera device input: \(error)")
496 | return false
497 | }
498 |
499 | // Find the front camera device input's video port
500 | guard let frontCameraDeviceInput = frontCameraDeviceInput,
501 | let frontCameraVideoPort = frontCameraDeviceInput.ports(for: .video,
502 | sourceDeviceType: frontCamera.deviceType,
503 | sourceDevicePosition: frontCamera.position).first else {
504 | print("Could not find the front camera device input's video port")
505 | return false
506 | }
507 |
508 | // Add the front camera video data output
509 | guard session.canAddOutput(frontCameraVideoDataOutput) else {
510 | print("Could not add the front camera video data output")
511 | return false
512 | }
513 | session.addOutputWithNoConnections(frontCameraVideoDataOutput)
514 | frontCameraVideoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
515 | frontCameraVideoDataOutput.setSampleBufferDelegate(self, queue: dataOutputQueue)
516 |
517 | // Connect the front camera device input to the front camera video data output
518 | let frontCameraVideoDataOutputConnection = AVCaptureConnection(inputPorts: [frontCameraVideoPort], output: frontCameraVideoDataOutput)
519 | guard session.canAddConnection(frontCameraVideoDataOutputConnection) else {
520 | print("Could not add a connection to the front camera video data output")
521 | return false
522 | }
523 | session.addConnection(frontCameraVideoDataOutputConnection)
524 | frontCameraVideoDataOutputConnection.videoOrientation = .portrait
525 | frontCameraVideoDataOutputConnection.automaticallyAdjustsVideoMirroring = false
526 | frontCameraVideoDataOutputConnection.isVideoMirrored = true
527 |
528 | // Connect the front camera device input to the front camera video preview layer
529 | guard let frontCameraVideoPreviewLayer = frontCameraVideoPreviewLayer else {
530 | return false
531 | }
532 | let frontCameraVideoPreviewLayerConnection = AVCaptureConnection(inputPort: frontCameraVideoPort, videoPreviewLayer: frontCameraVideoPreviewLayer)
533 | guard session.canAddConnection(frontCameraVideoPreviewLayerConnection) else {
534 | print("Could not add a connection to the front camera video preview layer")
535 | return false
536 | }
537 | session.addConnection(frontCameraVideoPreviewLayerConnection)
538 | frontCameraVideoPreviewLayerConnection.automaticallyAdjustsVideoMirroring = false
539 | frontCameraVideoPreviewLayerConnection.isVideoMirrored = true
540 |
541 | return true
542 | }
543 |
544 | private func configureMicrophone() -> Bool {
545 | session.beginConfiguration()
546 | defer {
547 | session.commitConfiguration()
548 | }
549 |
550 | // Find the microphone
551 | guard let microphone = AVCaptureDevice.default(for: .audio) else {
552 | print("Could not find the microphone")
553 | return false
554 | }
555 |
556 | // Add the microphone input to the session
557 | do {
558 | microphoneDeviceInput = try AVCaptureDeviceInput(device: microphone)
559 |
560 | guard let microphoneDeviceInput = microphoneDeviceInput,
561 | session.canAddInput(microphoneDeviceInput) else {
562 | print("Could not add microphone device input")
563 | return false
564 | }
565 | session.addInputWithNoConnections(microphoneDeviceInput)
566 | } catch {
567 | print("Could not create microphone input: \(error)")
568 | return false
569 | }
570 |
571 | // Find the audio device input's back audio port
572 | guard let microphoneDeviceInput = microphoneDeviceInput,
573 | let backMicrophonePort = microphoneDeviceInput.ports(for: .audio,
574 | sourceDeviceType: microphone.deviceType,
575 | sourceDevicePosition: .back).first else {
576 | print("Could not find the back camera device input's audio port")
577 | return false
578 | }
579 |
580 | // Find the audio device input's front audio port
581 | guard let frontMicrophonePort = microphoneDeviceInput.ports(for: .audio,
582 | sourceDeviceType: microphone.deviceType,
583 | sourceDevicePosition: .front).first else {
584 | print("Could not find the front camera device input's audio port")
585 | return false
586 | }
587 |
588 | // Add the back microphone audio data output
589 | guard session.canAddOutput(backMicrophoneAudioDataOutput) else {
590 | print("Could not add the back microphone audio data output")
591 | return false
592 | }
593 | session.addOutputWithNoConnections(backMicrophoneAudioDataOutput)
594 | backMicrophoneAudioDataOutput.setSampleBufferDelegate(self, queue: dataOutputQueue)
595 |
596 | // Add the front microphone audio data output
597 | guard session.canAddOutput(frontMicrophoneAudioDataOutput) else {
598 | print("Could not add the front microphone audio data output")
599 | return false
600 | }
601 | session.addOutputWithNoConnections(frontMicrophoneAudioDataOutput)
602 | frontMicrophoneAudioDataOutput.setSampleBufferDelegate(self, queue: dataOutputQueue)
603 |
604 | // Connect the back microphone to the back audio data output
605 | let backMicrophoneAudioDataOutputConnection = AVCaptureConnection(inputPorts: [backMicrophonePort], output: backMicrophoneAudioDataOutput)
606 | guard session.canAddConnection(backMicrophoneAudioDataOutputConnection) else {
607 | print("Could not add a connection to the back microphone audio data output")
608 | return false
609 | }
610 | session.addConnection(backMicrophoneAudioDataOutputConnection)
611 |
612 | // Connect the front microphone to the front audio data output
613 | let frontMicrophoneAudioDataOutputConnection = AVCaptureConnection(inputPorts: [frontMicrophonePort], output: frontMicrophoneAudioDataOutput)
614 | guard session.canAddConnection(frontMicrophoneAudioDataOutputConnection) else {
615 | print("Could not add a connection to the front microphone audio data output")
616 | return false
617 | }
618 | session.addConnection(frontMicrophoneAudioDataOutputConnection)
619 |
620 | return true
621 | }
622 |
623 | @objc // Expose to Objective-C for use with #selector()
624 | private func sessionWasInterrupted(notification: NSNotification) {
625 | // In iOS 9 and later, the userInfo dictionary contains information on why the session was interrupted.
626 | if let userInfoValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as AnyObject?,
627 | let reasonIntegerValue = userInfoValue.integerValue,
628 | let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) {
629 | print("Capture session was interrupted (\(reason))")
630 |
631 | if reason == .videoDeviceInUseByAnotherClient {
632 | // Simply fade-in a button to enable the user to try to resume the session running.
633 | resumeButton.isHidden = false
634 | resumeButton.alpha = 0.0
635 | UIView.animate(withDuration: 0.25) {
636 | self.resumeButton.alpha = 1.0
637 | }
638 | } else if reason == .videoDeviceNotAvailableWithMultipleForegroundApps {
639 | // Simply fade-in a label to inform the user that the camera is unavailable.
640 | cameraUnavailableLabel.isHidden = false
641 | cameraUnavailableLabel.alpha = 0.0
642 | UIView.animate(withDuration: 0.25) {
643 | self.cameraUnavailableLabel.alpha = 1.0
644 | }
645 | }
646 | }
647 | }
648 |
649 | @objc // Expose to Objective-C for use with #selector()
650 | private func sessionInterruptionEnded(notification: NSNotification) {
651 | if !resumeButton.isHidden {
652 | UIView.animate(withDuration: 0.25,
653 | animations: {
654 | self.resumeButton.alpha = 0
655 | }, completion: { _ in
656 | self.resumeButton.isHidden = true
657 | })
658 | }
659 | if !cameraUnavailableLabel.isHidden {
660 | UIView.animate(withDuration: 0.25,
661 | animations: {
662 | self.cameraUnavailableLabel.alpha = 0
663 | }, completion: { _ in
664 | self.cameraUnavailableLabel.isHidden = true
665 | })
666 | }
667 | }
668 |
669 | @objc // Expose to Objective-C for use with #selector()
670 | private func sessionRuntimeError(notification: NSNotification) {
671 | guard let errorValue = notification.userInfo?[AVCaptureSessionErrorKey] as? NSError else {
672 | return
673 | }
674 |
675 | let error = AVError(_nsError: errorValue)
676 | print("Capture session runtime error: \(error)")
677 |
678 | /*
679 | Automatically try to restart the session running if media services were
680 | reset and the last start running succeeded. Otherwise, enable the user
681 | to try to resume the session running.
682 | */
683 | if error.code == .mediaServicesWereReset {
684 | sessionQueue.async {
685 | if self.isSessionRunning {
686 | self.session.startRunning()
687 | self.isSessionRunning = self.session.isRunning
688 | } else {
689 | DispatchQueue.main.async {
690 | self.resumeButton.isHidden = false
691 | }
692 | }
693 | }
694 | } else {
695 | resumeButton.isHidden = false
696 | }
697 | }
698 |
699 | // TODO: unimplemented still-photo capture; the commented-out code below relies on the deprecated AVCaptureStillImageOutput API.
700 | func saveToCamera(sender: UITapGestureRecognizer) {
701 | // if let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo) {
702 | // stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection) {
703 | // (imageDataSampleBuffer, error) -> Void in
704 | // let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer)
705 | // UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData), nil, nil, nil)
706 | // }
707 | // }
708 | }
709 |
710 | @IBAction func shotButtonAction(_ sender: Any) {
711 | }
712 |
713 |
714 | @IBAction private func resumeInterruptedSession(_ sender: UIButton) {
715 | sessionQueue.async {
716 | /*
717 | The session might fail to start running, and that failure is communicated
718 | via a session runtime error notification. To avoid repeatedly failing to
719 | start, the runtime error handler only restarts the session when we aren't
720 | already trying to resume it here.
721 | */
722 | self.session.startRunning()
723 | self.isSessionRunning = self.session.isRunning
724 | if !self.session.isRunning {
725 | DispatchQueue.main.async {
726 | let message = NSLocalizedString("Unable to resume", comment: "Alert message when unable to resume the session running")
727 | let actions = [
728 | UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
729 | style: .cancel,
730 | handler: nil)]
731 | self.alert(title: Bundle.main.applicationName, message: message, actions: actions)
732 | }
733 | } else {
734 | DispatchQueue.main.async {
735 | self.resumeButton.isHidden = true
736 | }
737 | }
738 | }
739 | }
740 |
741 | func alert(title: String, message: String, actions: [UIAlertAction]) {
742 | let alertController = UIAlertController(title: title,
743 | message: message,
744 | preferredStyle: .alert)
745 |
746 | actions.forEach {
747 | alertController.addAction($0)
748 | }
749 |
750 | self.present(alertController, animated: true, completion: nil)
751 | }
752 |
753 | // MARK: Recording Movies
754 |
755 | private var movieRecorder: MovieRecorder?
756 |
757 | private var currentPiPSampleBuffer: CMSampleBuffer?
758 |
759 | private var backgroundRecordingID: UIBackgroundTaskIdentifier?
760 |
761 | @IBOutlet private var recordButton: UIButton!
762 |
763 | private var renderingEnabled = true
764 |
765 | private var videoMixer = PiPVideoMixer()
766 |
767 | private var videoTrackSourceFormatDescription: CMFormatDescription?
768 |
769 | private func updateRecordButtonWithRecordingState(_ isRecording: Bool) {
770 | let color = isRecording ? UIColor.red : UIColor.yellow
771 | let title = isRecording ? "Stop" : "Record"
772 |
773 | recordButton.tintColor = color
774 | recordButton.setTitleColor(color, for: .normal)
775 | recordButton.setTitle(title, for: .normal)
776 | }
777 |
778 | @IBAction private func toggleMovieRecording(_ recordButton: UIButton) {
779 | recordButton.isEnabled = false
780 |
781 | dataOutputQueue.async {
782 | defer {
783 | DispatchQueue.main.async {
784 | recordButton.isEnabled = true
785 |
786 | if let recorder = self.movieRecorder {
787 | self.updateRecordButtonWithRecordingState(recorder.isRecording)
788 | }
789 | }
790 | }
791 |
792 | let isRecording = self.movieRecorder?.isRecording ?? false
793 | if !isRecording {
794 | if UIDevice.current.isMultitaskingSupported {
795 | self.backgroundRecordingID = UIApplication.shared.beginBackgroundTask(expirationHandler: nil)
796 | }
797 |
798 | guard let audioSettings = self.createAudioSettings() else {
799 | print("Could not create audio settings")
800 | return
801 | }
802 |
803 | guard let videoSettings = self.createVideoSettings() else {
804 | print("Could not create video settings")
805 | return
806 | }
807 |
808 | guard let videoTransform = self.createVideoTransform() else {
809 | print("Could not create video transform")
810 | return
811 | }
812 |
813 | self.movieRecorder = MovieRecorder(audioSettings: audioSettings,
814 | videoSettings: videoSettings,
815 | videoTransform: videoTransform)
816 |
817 | self.movieRecorder?.startRecording()
818 | } else {
819 | self.movieRecorder?.stopRecording { movieURL in
820 | self.saveMovieToPhotoLibrary(movieURL)
821 | }
822 | }
823 | }
824 | }
825 |
826 | private func createAudioSettings() -> [String: NSObject]? {
827 | guard let backMicrophoneAudioSettings = backMicrophoneAudioDataOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mov) as? [String: NSObject] else {
828 | print("Could not get back microphone audio settings")
829 | return nil
830 | }
831 | guard let frontMicrophoneAudioSettings = frontMicrophoneAudioDataOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mov) as? [String: NSObject] else {
832 | print("Could not get front microphone audio settings")
833 | return nil
834 | }
835 |
836 | if backMicrophoneAudioSettings == frontMicrophoneAudioSettings {
837 | // The front and back microphone audio settings are equal, so return either one
838 | return backMicrophoneAudioSettings
839 | } else {
840 | print("Front and back microphone audio settings are not equal. Check your AVCaptureAudioDataOutput configuration.")
841 | return nil
842 | }
843 | }
844 |
845 | private func createVideoSettings() -> [String: NSObject]? {
846 | guard let backCameraVideoSettings = backCameraVideoDataOutput.recommendedVideoSettingsForAssetWriter(writingTo: .mov) as? [String: NSObject] else {
847 | print("Could not get back camera video settings")
848 | return nil
849 | }
850 | guard let frontCameraVideoSettings = frontCameraVideoDataOutput.recommendedVideoSettingsForAssetWriter(writingTo: .mov) as? [String: NSObject] else {
851 | print("Could not get front camera video settings")
852 | return nil
853 | }
854 |
855 | if backCameraVideoSettings == frontCameraVideoSettings {
856 | // The front and back camera video settings are equal, so return either one
857 | return backCameraVideoSettings
858 | } else {
859 | print("Front and back camera video settings are not equal. Check your AVCaptureVideoDataOutput configuration.")
860 | return nil
861 | }
862 | }
863 |
864 | private func createVideoTransform() -> CGAffineTransform? {
865 | guard let backCameraVideoConnection = backCameraVideoDataOutput.connection(with: .video) else {
866 | print("Could not find the back and front camera video connections")
867 | return nil
868 | }
869 |
870 | let deviceOrientation = UIDevice.current.orientation
871 | let videoOrientation = AVCaptureVideoOrientation(deviceOrientation: deviceOrientation) ?? .portrait
872 |
873 | // Compute transforms from the back camera's video orientation to the device's orientation
874 | let backCameraTransform = backCameraVideoConnection.videoOrientationTransform(relativeTo: videoOrientation)
875 |
876 | return backCameraTransform
877 |
878 | }
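        // Illustration (not part of the original sample): MovieRecorder applies the
        // transform returned here to its asset-writer video input, which is what
        // rotates the recorded movie to match the orientation at record time:
        //
        //     assetWriterVideoInput.transform = videoTransform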
879 |
880 |     // Save the recorded movie to the photo library
881 | private func saveMovieToPhotoLibrary(_ movieURL: URL) {
882 | PHPhotoLibrary.requestAuthorization { status in
883 | if status == .authorized {
884 | // Save the movie file to the photo library and clean up.
885 | PHPhotoLibrary.shared().performChanges({
886 | let options = PHAssetResourceCreationOptions()
887 | options.shouldMoveFile = true
888 | let creationRequest = PHAssetCreationRequest.forAsset()
889 | creationRequest.addResource(with: .video, fileURL: movieURL, options: options)
890 | }, completionHandler: { success, error in
891 | if !success {
892 | print("\(Bundle.main.applicationName) couldn't save the movie to your photo library: \(String(describing: error))")
893 | } else {
894 | // Clean up
895 | if FileManager.default.fileExists(atPath: movieURL.path) {
896 | do {
897 | try FileManager.default.removeItem(atPath: movieURL.path)
898 | } catch {
899 | print("Could not remove file at url: \(movieURL)")
900 | }
901 | }
902 |
903 | if let currentBackgroundRecordingID = self.backgroundRecordingID {
904 | self.backgroundRecordingID = UIBackgroundTaskIdentifier.invalid
905 |
906 | if currentBackgroundRecordingID != UIBackgroundTaskIdentifier.invalid {
907 | UIApplication.shared.endBackgroundTask(currentBackgroundRecordingID)
908 | }
909 | }
910 | }
911 | })
912 | } else {
913 | DispatchQueue.main.async {
914 | let alertMessage = "Alert message when the user has not authorized photo library access"
915 | let message = NSLocalizedString("\(Bundle.main.applicationName) does not have permission to access the photo library", comment: alertMessage)
917 |                     let okAction = UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"), style: .cancel, handler: nil)
917 |                     self.alert(title: Bundle.main.applicationName, message: message, actions: [okAction])
918 | }
919 | }
920 | }
921 | }
922 |
923 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
924 | if let videoDataOutput = output as? AVCaptureVideoDataOutput {
925 | processVideoSampleBuffer(sampleBuffer, fromOutput: videoDataOutput)
926 | } else if let audioDataOutput = output as? AVCaptureAudioDataOutput {
927 |             processAudioSampleBuffer(sampleBuffer, fromOutput: audioDataOutput)
928 | }
929 | }
930 |
931 | private func processVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer, fromOutput videoDataOutput: AVCaptureVideoDataOutput) {
932 | if videoTrackSourceFormatDescription == nil {
933 |             videoTrackSourceFormatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)
934 | }
935 |
936 | // Determine:
937 | // - which camera the sample buffer came from
938 | // - if the sample buffer is for the PiP
939 | var fullScreenSampleBuffer: CMSampleBuffer?
940 | var pipSampleBuffer: CMSampleBuffer?
941 |
942 | if pipDevicePosition == .back && videoDataOutput == backCameraVideoDataOutput {
943 | pipSampleBuffer = sampleBuffer
944 | } else if pipDevicePosition == .back && videoDataOutput == frontCameraVideoDataOutput {
945 | fullScreenSampleBuffer = sampleBuffer
946 | } else if pipDevicePosition == .front && videoDataOutput == backCameraVideoDataOutput {
947 | fullScreenSampleBuffer = sampleBuffer
948 | } else if pipDevicePosition == .front && videoDataOutput == frontCameraVideoDataOutput {
949 | pipSampleBuffer = sampleBuffer
950 | }
951 |
952 | if let fullScreenSampleBuffer = fullScreenSampleBuffer {
953 | processFullScreenSampleBuffer(fullScreenSampleBuffer)
954 | }
955 |
956 | if let pipSampleBuffer = pipSampleBuffer {
957 | processPiPSampleBuffer(pipSampleBuffer)
958 | }
959 | }
960 |
961 | private func processFullScreenSampleBuffer(_ fullScreenSampleBuffer: CMSampleBuffer) {
962 | guard renderingEnabled else {
963 | return
964 | }
965 |
966 | guard let fullScreenPixelBuffer = CMSampleBufferGetImageBuffer(fullScreenSampleBuffer),
967 | let formatDescription = CMSampleBufferGetFormatDescription(fullScreenSampleBuffer) else {
968 | return
969 | }
970 |
971 | guard let pipSampleBuffer = currentPiPSampleBuffer,
972 | let pipPixelBuffer = CMSampleBufferGetImageBuffer(pipSampleBuffer) else {
973 | return
974 | }
975 |
976 | if !videoMixer.isPrepared {
977 | videoMixer.prepare(with: formatDescription, outputRetainedBufferCountHint: 3)
978 | }
979 |
980 | videoMixer.pipFrame = normalizedPipFrame
981 |
982 |         // Mix the full-screen pixel buffer with the PiP pixel buffer.
983 |         // When the PiP is the back camera, the full-screen buffer is the front camera.
984 |         // TODO: Derive the PiP rect from normalizedPipFrame and the texture
985 |         // dimensions rather than from the preview view's frame. The unmodified
986 |         // sample computed, for example:
987 |         //     let pipPosition = SIMD2(Float(pipFrame.origin.x) * Float(fullScreenTexture.width),
988 |         //                             Float(pipFrame.origin.y) * Float(fullScreenTexture.height))
989 |         //     let pipSize = SIMD2(Float(pipFrame.size.width) * Float(pipTexture.width),
990 |         //                         Float(pipFrame.size.height) * Float(pipTexture.height))
991 | 
992 |         // The mixer scales this view-coordinate rect into pixel coordinates.
993 |         let pipView = CGRect(x: pipVideoPreviewView.right, y: pipVideoPreviewView.bottom, width: pipVideoPreviewView.frame.width, height: pipVideoPreviewView.frame.height)
994 |
995 | guard let mixedPixelBuffer = videoMixer.mix(fullScreenPixelBuffer: fullScreenPixelBuffer,
996 | pipPixelBuffer: pipPixelBuffer,
997 | fullScreenPixelBufferIsFrontCamera: pipDevicePosition == .back, pipView: pipView) else {
998 | print("Unable to combine video")
999 | return
1000 | }
1001 |
1002 | // If we're recording, append this buffer to the movie
1003 | if let recorder = movieRecorder,
1004 | recorder.isRecording {
1005 | guard let finalVideoSampleBuffer = createVideoSampleBufferWithPixelBuffer(mixedPixelBuffer,
1006 | presentationTime: CMSampleBufferGetPresentationTimeStamp(fullScreenSampleBuffer)) else {
1007 | print("Error: Unable to create sample buffer from pixelbuffer")
1008 | return
1009 | }
1010 |
1011 | recorder.recordVideo(sampleBuffer: finalVideoSampleBuffer)
1012 | }
1013 | }
1014 |
1015 | private func processPiPSampleBuffer(_ pipSampleBuffer: CMSampleBuffer) {
1016 | guard renderingEnabled else {
1017 | return
1018 | }
1019 |
1020 | currentPiPSampleBuffer = pipSampleBuffer
1021 | }
1022 |
1023 |     private func processAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer, fromOutput audioDataOutput: AVCaptureAudioDataOutput) {
1024 |
1025 | guard (pipDevicePosition == .back && audioDataOutput == backMicrophoneAudioDataOutput) ||
1026 | (pipDevicePosition == .front && audioDataOutput == frontMicrophoneAudioDataOutput) else {
1027 | // Ignoring audio sample buffer
1028 | return
1029 | }
1030 |
1031 | // If we're recording, append this buffer to the movie
1032 | if let recorder = movieRecorder,
1033 | recorder.isRecording {
1034 | recorder.recordAudio(sampleBuffer: sampleBuffer)
1035 | }
1036 | }
1037 |
1038 | private func createVideoSampleBufferWithPixelBuffer(_ pixelBuffer: CVPixelBuffer, presentationTime: CMTime) -> CMSampleBuffer? {
1039 | guard let videoTrackSourceFormatDescription = videoTrackSourceFormatDescription else {
1040 | return nil
1041 | }
1042 |
1043 | var sampleBuffer: CMSampleBuffer?
1044 | var timingInfo = CMSampleTimingInfo(duration: .invalid, presentationTimeStamp: presentationTime, decodeTimeStamp: .invalid)
1045 |
1046 | let err = CMSampleBufferCreateForImageBuffer(allocator: kCFAllocatorDefault,
1047 | imageBuffer: pixelBuffer,
1048 | dataReady: true,
1049 | makeDataReadyCallback: nil,
1050 | refcon: nil,
1051 | formatDescription: videoTrackSourceFormatDescription,
1052 | sampleTiming: &timingInfo,
1053 | sampleBufferOut: &sampleBuffer)
1054 | if sampleBuffer == nil {
1055 | print("Error: Sample buffer creation failed (error code: \(err))")
1056 | }
1057 |
1058 | return sampleBuffer
1059 | }
1060 |
1061 | // MARK: - Session Cost Check
1062 |
1063 | struct ExceededCaptureSessionCosts: OptionSet {
1064 | let rawValue: Int
1065 |
1066 | static let systemPressureCost = ExceededCaptureSessionCosts(rawValue: 1 << 0)
1067 | static let hardwareCost = ExceededCaptureSessionCosts(rawValue: 1 << 1)
1068 | }
1069 |
1070 | func checkSystemCost() {
1071 | var exceededSessionCosts: ExceededCaptureSessionCosts = []
1072 |
1073 | if session.systemPressureCost > 1.0 {
1074 | exceededSessionCosts.insert(.systemPressureCost)
1075 | }
1076 |
1077 | if session.hardwareCost > 1.0 {
1078 | exceededSessionCosts.insert(.hardwareCost)
1079 | }
1080 |
1081 | switch exceededSessionCosts {
1082 |
1083 | case .systemPressureCost:
1084 | // Choice #1: Reduce front camera resolution
1085 | if reduceResolutionForCamera(.front) {
1086 | checkSystemCost()
1087 | }
1088 |
1089 |             // Choice #2: Reduce the number of video input ports
1090 | else if reduceVideoInputPorts() {
1091 | checkSystemCost()
1092 | }
1093 |
1094 | // Choice #3: Reduce back camera resolution
1095 | else if reduceResolutionForCamera(.back) {
1096 | checkSystemCost()
1097 | }
1098 |
1099 | // Choice #4: Reduce front camera frame rate
1100 | else if reduceFrameRateForCamera(.front) {
1101 | checkSystemCost()
1102 | }
1103 |
1104 |             // Choice #5: Reduce back camera frame rate
1105 | else if reduceFrameRateForCamera(.back) {
1106 | checkSystemCost()
1107 | } else {
1108 | print("Unable to further reduce session cost.")
1109 | }
1110 |
1111 | case .hardwareCost:
1112 | // Choice #1: Reduce front camera resolution
1113 | if reduceResolutionForCamera(.front) {
1114 | checkSystemCost()
1115 | }
1116 |
1117 |             // Choice #2: Reduce back camera resolution
1118 | else if reduceResolutionForCamera(.back) {
1119 | checkSystemCost()
1120 | }
1121 |
1122 | // Choice #3: Reduce front camera frame rate
1123 | else if reduceFrameRateForCamera(.front) {
1124 | checkSystemCost()
1125 | }
1126 |
1127 | // Choice #4: Reduce back camera frame rate
1128 | else if reduceFrameRateForCamera(.back) {
1129 | checkSystemCost()
1130 | } else {
1131 | print("Unable to further reduce session cost.")
1132 | }
1133 |
1134 | case [.systemPressureCost, .hardwareCost]:
1135 | // Choice #1: Reduce front camera resolution
1136 | if reduceResolutionForCamera(.front) {
1137 | checkSystemCost()
1138 | }
1139 |
1140 | // Choice #2: Reduce back camera resolution
1141 | else if reduceResolutionForCamera(.back) {
1142 | checkSystemCost()
1143 | }
1144 |
1145 | // Choice #3: Reduce front camera frame rate
1146 | else if reduceFrameRateForCamera(.front) {
1147 | checkSystemCost()
1148 | }
1149 |
1150 | // Choice #4: Reduce back camera frame rate
1151 | else if reduceFrameRateForCamera(.back) {
1152 | checkSystemCost()
1153 | } else {
1154 | print("Unable to further reduce session cost.")
1155 | }
1156 |
1157 | default:
1158 | break
1159 | }
1160 | }
1161 |
1162 | func reduceResolutionForCamera(_ position: AVCaptureDevice.Position) -> Bool {
1163 | for connection in session.connections {
1164 | for inputPort in connection.inputPorts {
1165 | if inputPort.mediaType == .video && inputPort.sourceDevicePosition == position {
1166 | guard let videoDeviceInput: AVCaptureDeviceInput = inputPort.input as? AVCaptureDeviceInput else {
1167 | return false
1168 | }
1169 |
1170 | var dims: CMVideoDimensions
1171 |
1172 | var width: Int32
1173 | var height: Int32
1174 | var activeWidth: Int32
1175 | var activeHeight: Int32
1176 |
1177 | dims = CMVideoFormatDescriptionGetDimensions(videoDeviceInput.device.activeFormat.formatDescription)
1178 | activeWidth = dims.width
1179 | activeHeight = dims.height
1180 |
1181 | if ( activeHeight <= 480 ) && ( activeWidth <= 640 ) {
1182 | return false
1183 | }
1184 |
1185 | let formats = videoDeviceInput.device.formats
1186 | if let formatIndex = formats.firstIndex(of: videoDeviceInput.device.activeFormat) {
1187 |
1188 |                     for index in (0..<formatIndex).reversed() {
1189 |                         let format = videoDeviceInput.device.formats[index]
1190 |                         if format.isMultiCamSupported {
1191 |                             dims = CMVideoFormatDescriptionGetDimensions(format.formatDescription)
1192 |                             width = dims.width
1193 |                             height = dims.height
1194 | 
1195 |                             // Activate the first lower-resolution format that still supports multi-cam.
1196 |                             if width < activeWidth || height < activeHeight {
1197 |                                 do {
1198 |                                     try videoDeviceInput.device.lockForConfiguration()
1199 | 
1200 |                                     videoDeviceInput.device.activeFormat = format
1201 | 
1202 |                                     videoDeviceInput.device.unlockForConfiguration()
1203 | 
1204 |                                     print("reduced width = \(width), reduced height = \(height)")
1205 | 
1206 |                                     return true
1207 |                                 } catch {
1208 |                                     print("Could not lock device for configuration: \(error)")
1209 | 
1210 |                                     return false
1211 |                                 }
1212 |                             }
1213 |                         }
1214 |                     }
1215 |                 }
1216 |             }
1217 |         }
1218 |     }
1219 | 
1220 |     return false
1221 | }
1222 | 
1223 | /// Reduce the active frame rate of the camera at the given position by 10 fps, to a floor of 15 fps.
1224 | func reduceFrameRateForCamera(_ position: AVCaptureDevice.Position) -> Bool {
1225 | for connection in session.connections {
1226 | for inputPort in connection.inputPorts {
1227 |
1228 | if inputPort.mediaType == .video && inputPort.sourceDevicePosition == position {
1229 | guard let videoDeviceInput: AVCaptureDeviceInput = inputPort.input as? AVCaptureDeviceInput else {
1230 | return false
1231 | }
1232 | let activeMinFrameDuration = videoDeviceInput.device.activeVideoMinFrameDuration
1233 | var activeMaxFrameRate: Double = Double(activeMinFrameDuration.timescale) / Double(activeMinFrameDuration.value)
1234 | activeMaxFrameRate -= 10.0
1235 |
1236 | // Cap the device frame rate to this new max, never allowing it to go below 15 fps
1237 | if activeMaxFrameRate >= 15.0 {
1238 | do {
1239 | try videoDeviceInput.device.lockForConfiguration()
1240 | videoDeviceInput.videoMinFrameDurationOverride = CMTimeMake(value: 1, timescale: Int32(activeMaxFrameRate))
1241 |
1242 | videoDeviceInput.device.unlockForConfiguration()
1243 |
1244 | print("reduced fps = \(activeMaxFrameRate)")
1245 |
1246 | return true
1247 | } catch {
1248 | print("Could not lock device for configuration: \(error)")
1249 | return false
1250 | }
1251 | } else {
1252 | return false
1253 | }
1254 | }
1255 | }
1256 | }
1257 |
1258 | return false
1259 | }
1260 |
1261 | func reduceVideoInputPorts() -> Bool {
1262 | var newConnection: AVCaptureConnection
1263 | var result = false
1264 |
1265 | for connection in session.connections {
1266 | for inputPort in connection.inputPorts where inputPort.sourceDeviceType == .builtInDualCamera {
1267 | print("Changing input from dual to single camera")
1268 |
1269 | guard let videoDeviceInput: AVCaptureDeviceInput = inputPort.input as? AVCaptureDeviceInput,
1270 | let wideCameraPort: AVCaptureInput.Port = videoDeviceInput.ports(for: .video,
1271 | sourceDeviceType: .builtInWideAngleCamera,
1272 | sourceDevicePosition: videoDeviceInput.device.position).first else {
1273 | return false
1274 | }
1275 |
1276 | if let previewLayer = connection.videoPreviewLayer {
1277 | newConnection = AVCaptureConnection(inputPort: wideCameraPort, videoPreviewLayer: previewLayer)
1278 | } else if let savedOutput = connection.output {
1279 | newConnection = AVCaptureConnection(inputPorts: [wideCameraPort], output: savedOutput)
1280 | } else {
1281 | continue
1282 | }
1283 | session.beginConfiguration()
1284 |
1285 | session.removeConnection(connection)
1286 |
1287 | if session.canAddConnection(newConnection) {
1288 | session.addConnection(newConnection)
1289 |
1290 | session.commitConfiguration()
1291 | result = true
1292 | } else {
1293 | print("Could not add new connection to the session")
1294 | session.commitConfiguration()
1295 | return false
1296 | }
1297 | }
1298 | }
1299 | return result
1300 | }
1301 |
1302 | private func setRecommendedFrameRateRangeForPressureState(_ systemPressureState: AVCaptureDevice.SystemPressureState) {
1303 | // The frame rates used here are for demonstrative purposes only for this app.
1304 | // Your frame rate throttling may be different depending on your app's camera configuration.
1305 | let pressureLevel = systemPressureState.level
1306 | if pressureLevel == .serious || pressureLevel == .critical {
1307 | if self.movieRecorder == nil || self.movieRecorder?.isRecording == false {
1308 | do {
1309 | try self.backCameraDeviceInput?.device.lockForConfiguration()
1310 |
1311 | print("WARNING: Reached elevated system pressure level: \(pressureLevel). Throttling frame rate.")
1312 |
1313 | self.backCameraDeviceInput?.device.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: 20 )
1314 | self.backCameraDeviceInput?.device.activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: 15 )
1315 |
1316 | self.backCameraDeviceInput?.device.unlockForConfiguration()
1317 | } catch {
1318 | print("Could not lock device for configuration: \(error)")
1319 | }
1320 | }
1321 | } else if pressureLevel == .shutdown {
1322 | print("Session stopped running due to system pressure level.")
1323 | }
1324 | }
1325 | }
1326 |
1327 |
1328 | extension ViewController {
1329 |     // Save an image to the app's Library directory
1330 |     private func saveImage(image: UIImage, fileName: String = "hoge") -> Bool {
1331 |         // To save as PNG instead:
1332 |         // let pngImageData = image.pngData()
1333 |         // Save as JPEG
1334 |         guard let jpgImageData = image.jpegData(compressionQuality: 1.0) else { return false }
1335 |         let libraryURL = FileManager.default.urls(for: .libraryDirectory, in: .userDomainMask)[0]
1336 |         let fileURL = libraryURL.appendingPathComponent(fileName)
1337 |         do {
1338 |             try jpgImageData.write(to: fileURL)
1339 |         } catch {
1340 |             // Error handling
1341 |             return false
1342 |         }
1343 |         return true
1344 |     }
1345 |
1346 | func save2(image: UIImage, fileName: String = "hoge") {
1347 |
1348 |         // To capture a view as an image first:
1349 |         // let image: UIImage = self.viewToImage(view)
1350 | 
1351 |         // Save to the camera roll
1352 | UIImageWriteToSavedPhotosAlbum(image,
1353 | self,
1354 | #selector(self.didFinishSavingImage(_:didFinishSavingWithError:contextInfo:)),
1355 | nil)
1356 | }
1357 |
1358 |     // Receive the result of the save attempt
1359 | @objc func didFinishSavingImage(_ image: UIImage, didFinishSavingWithError error: NSError!, contextInfo: UnsafeMutableRawPointer) {
1360 |
1361 |         // Choose the alert contents based on the result
1362 |         var title = "Save Complete"
1363 |         var message = "Saved to the camera roll"
1364 | 
1365 |         if error != nil {
1366 |             title = "Error"
1367 |             message = "Failed to save"
1368 |         }
1369 |
1370 | let alertController = UIAlertController(title: title, message: message, preferredStyle: .alert)
1371 | alertController.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
1372 | self.present(alertController, animated: true, completion: nil)
1373 | }
1374 | }
1375 |
--------------------------------------------------------------------------------
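A note on the frame-rate throttling above: `reduceFrameRateForCamera(_:)` converts the device's `activeVideoMinFrameDuration` into a maximum frame rate, subtracts 10 fps, and refuses to go below 15 fps. A self-contained sketch of that arithmetic (the function name is hypothetical):

    import CoreMedia

    // 1/60 s -> 60 fps; reducing by 10 fps yields an override duration of 1/50 s.
    func reducedFrameDuration(from activeMinFrameDuration: CMTime) -> CMTime? {
        let activeMaxFrameRate = Double(activeMinFrameDuration.timescale) / Double(activeMinFrameDuration.value)
        let reducedRate = activeMaxFrameRate - 10.0
        guard reducedRate >= 15.0 else { return nil } // never drop below 15 fps
        return CMTimeMake(value: 1, timescale: Int32(reducedRate))
    }

    print(reducedFrameDuration(from: CMTimeMake(value: 1, timescale: 60)) as Any) // 1/50 s, i.e. 50 fps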
/AVMultiCamPiP/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>CFBundleDevelopmentRegion</key>
6 | 	<string>$(DEVELOPMENT_LANGUAGE)</string>
7 | 	<key>CFBundleExecutable</key>
8 | 	<string>$(EXECUTABLE_NAME)</string>
9 | 	<key>CFBundleIdentifier</key>
10 | 	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | 	<key>CFBundleInfoDictionaryVersion</key>
12 | 	<string>6.0</string>
13 | 	<key>CFBundleName</key>
14 | 	<string>$(PRODUCT_NAME)</string>
15 | 	<key>CFBundlePackageType</key>
16 | 	<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
17 | 	<key>CFBundleShortVersionString</key>
18 | 	<string>1.0</string>
19 | 	<key>CFBundleVersion</key>
20 | 	<string>1</string>
21 | 	<key>LSRequiresIPhoneOS</key>
22 | 	<true/>
23 | 	<key>NSCameraUsageDescription</key>
24 | 	<string>$(PRODUCT_NAME) uses the camera to record video.</string>
25 | 	<key>NSMicrophoneUsageDescription</key>
26 | 	<string>$(PRODUCT_NAME) uses the microphone to record audio.</string>
27 | 	<key>NSPhotoLibraryUsageDescription</key>
28 | 	<string>$(PRODUCT_NAME) saves recorded movies to your photo library.</string>
29 | 	<key>UILaunchStoryboardName</key>
30 | 	<string>LaunchScreen</string>
31 | 	<key>UIMainStoryboardFile</key>
32 | 	<string>Main</string>
33 | 	<key>UIRequiredDeviceCapabilities</key>
34 | 	<array>
35 | 		<string>armv7</string>
36 | 	</array>
37 | 	<key>UIRequiresFullScreen</key>
38 | 	<true/>
39 | 	<key>UIStatusBarHidden</key>
40 | 	<true/>
41 | 	<key>UISupportedInterfaceOrientations</key>
42 | 	<array>
43 | 		<string>UIInterfaceOrientationPortrait</string>
44 | 	</array>
45 | 	<key>UISupportedInterfaceOrientations~ipad</key>
46 | 	<array>
47 | 		<string>UIInterfaceOrientationPortrait</string>
48 | 	</array>
49 | 	<key>UIViewControllerBasedStatusBarAppearance</key>
50 | 	<false/>
51 | </dict>
52 | </plist>
53 | 
--------------------------------------------------------------------------------
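The three usage-description strings in this Info.plist back the system permission prompts. A minimal pre-flight sketch (the helper name is hypothetical) that requests both camera and microphone access before configuring a capture session:

    import AVFoundation

    func requestCaptureAccess(_ completion: @escaping (Bool) -> Void) {
        AVCaptureDevice.requestAccess(for: .video) { videoGranted in
            AVCaptureDevice.requestAccess(for: .audio) { audioGranted in
                completion(videoGranted && audioGranted)
            }
        }
    }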
/AVMultiCamPiP/MovieRecorder.swift:
--------------------------------------------------------------------------------
1 | /*
2 | See LICENSE folder for this sample’s licensing information.
3 |
4 | Abstract:
5 | Records movies using AVAssetWriter.
6 | */
7 |
8 | import Foundation
9 | import AVFoundation
10 |
11 | class MovieRecorder {
12 |
13 | private var assetWriter: AVAssetWriter?
14 |
15 | private var assetWriterVideoInput: AVAssetWriterInput?
16 |
17 | private var assetWriterAudioInput: AVAssetWriterInput?
18 |
19 | private var videoTransform: CGAffineTransform
20 |
21 | private var videoSettings: [String: Any]
22 |
23 | private var audioSettings: [String: Any]
24 |
25 | private(set) var isRecording = false
26 |
27 | init(audioSettings: [String: Any], videoSettings: [String: Any], videoTransform: CGAffineTransform) {
28 | self.audioSettings = audioSettings
29 | self.videoSettings = videoSettings
30 | self.videoTransform = videoTransform
31 | }
32 |
33 | func startRecording() {
34 | 
35 | // Create an asset writer that records to a temporary file
36 | let outputFileName = NSUUID().uuidString
37 | let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(outputFileName).appendingPathExtension("MOV")
38 | guard let assetWriter = try? AVAssetWriter(url: outputFileURL, fileType: .mov) else {
39 | return
40 | }
41 |
42 | // Add an audio input
43 | let assetWriterAudioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
44 | assetWriterAudioInput.expectsMediaDataInRealTime = true
45 | assetWriter.add(assetWriterAudioInput)
46 |
47 | // Add a video input
48 | let assetWriterVideoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
49 | assetWriterVideoInput.expectsMediaDataInRealTime = true
50 | assetWriterVideoInput.transform = videoTransform
51 | assetWriter.add(assetWriterVideoInput)
52 |
53 | self.assetWriter = assetWriter
54 | self.assetWriterAudioInput = assetWriterAudioInput
55 | self.assetWriterVideoInput = assetWriterVideoInput
56 |
57 | isRecording = true
58 | }
59 |
60 | func stopRecording(completion: @escaping (URL) -> Void) {
61 | guard let assetWriter = assetWriter else {
62 | return
63 | }
64 |
65 | self.isRecording = false
66 | self.assetWriter = nil
67 |
68 | assetWriter.finishWriting {
69 | completion(assetWriter.outputURL)
70 | }
71 | }
72 |
73 | func recordVideo(sampleBuffer: CMSampleBuffer) {
74 | guard isRecording,
75 | let assetWriter = assetWriter else {
76 | return
77 | }
78 |
79 | if assetWriter.status == .unknown {
80 | assetWriter.startWriting()
81 | assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
82 | } else if assetWriter.status == .writing {
83 | if let input = assetWriterVideoInput,
84 | input.isReadyForMoreMediaData {
85 | input.append(sampleBuffer)
86 | }
87 | }
88 | }
89 |
90 | func recordAudio(sampleBuffer: CMSampleBuffer) {
91 | guard isRecording,
92 | let assetWriter = assetWriter,
93 | assetWriter.status == .writing,
94 | let input = assetWriterAudioInput,
95 | input.isReadyForMoreMediaData else {
96 | return
97 | }
98 |
99 | input.append(sampleBuffer)
100 | }
101 | }
102 |
--------------------------------------------------------------------------------
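A minimal driver sketch for the MovieRecorder class above. The settings shown here are illustrative placeholders; the app itself derives them from the data outputs' recommended settings, as CameraViewController does:

    import AVFoundation

    let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecType.h264,
                                        AVVideoWidthKey: 1280,
                                        AVVideoHeightKey: 720]
    let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
                                        AVSampleRateKey: 44_100,
                                        AVNumberOfChannelsKey: 1]

    let recorder = MovieRecorder(audioSettings: audioSettings,
                                 videoSettings: videoSettings,
                                 videoTransform: .identity)
    recorder.startRecording()
    // ...append sample buffers from the capture callbacks...
    recorder.stopRecording { url in
        print("Finished writing movie to \(url)")
    }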
/AVMultiCamPiP/PiPMixer.metal:
--------------------------------------------------------------------------------
1 | /*
2 | See LICENSE folder for this sample’s licensing information.
3 |
4 | Abstract:
5 | Shader that renders two input textures with one as a PiP and the other full screen.
6 | */
7 |
8 | #include <metal_stdlib>
9 | using namespace metal;
10 |
11 | struct MixerParameters
12 | {
13 | float2 pipPosition;
14 | float2 pipSize;
15 | };
16 |
17 | constant sampler kBilinearSampler(filter::linear, coord::pixel, address::clamp_to_edge);
18 |
19 | // Compute kernel
20 | kernel void reporterMixer(texture2d<half, access::read> fullScreenInput [[ texture(0) ]],
21 |                           texture2d<half, access::sample> pipInput [[ texture(1) ]],
22 |                           texture2d<half, access::write> outputTexture [[ texture(2) ]],
23 | const device MixerParameters& mixerParameters [[ buffer(0) ]],
24 | uint2 gid [[thread_position_in_grid]])
25 |
26 | {
27 | uint2 pipPosition = uint2(mixerParameters.pipPosition);
28 | uint2 pipSize = uint2(mixerParameters.pipSize);
29 |
30 | half4 output;
31 |
32 | // Check if the output pixel should be from full screen or PIP
33 | if ( (gid.x >= pipPosition.x) && (gid.y >= pipPosition.y) &&
34 | (gid.x < (pipPosition.x + pipSize.x)) && (gid.y < (pipPosition.y + pipSize.y)) )
35 | {
36 | // Position and scale the PIP window
37 | float2 pipSamplingCoord = float2(gid - pipPosition) * float2(pipInput.get_width(), pipInput.get_height()) / float2(pipSize);
38 | output = pipInput.sample(kBilinearSampler, pipSamplingCoord + 0.5);
39 | }
40 | else
41 | {
42 | output = fullScreenInput.read(gid);
43 | }
44 |
45 | outputTexture.write(output, gid);
46 | }
47 |
48 |
--------------------------------------------------------------------------------
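The kernel above places the PiP in output pixel coordinates. A Swift-side sketch of deriving `pipPosition`/`pipSize` from a normalized PiP frame and the texture dimensions, which is the computation the unmodified sample performed (function and parameter names are hypothetical):

    import simd
    import CoreGraphics

    func mixerParameters(pipFrame: CGRect,
                         fullScreenSize: SIMD2<Float>,
                         pipTextureSize: SIMD2<Float>) -> (position: SIMD2<Float>, size: SIMD2<Float>) {
        // The normalized origin scales by the full-screen texture; the normalized size by the PiP texture.
        let position = SIMD2(Float(pipFrame.origin.x), Float(pipFrame.origin.y)) * fullScreenSize
        let size = SIMD2(Float(pipFrame.width), Float(pipFrame.height)) * pipTextureSize
        return (position, size)
    }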
/AVMultiCamPiP/PiPVideoMixer.swift:
--------------------------------------------------------------------------------
1 | /*
2 | See LICENSE folder for this sample’s licensing information.
3 |
4 | Abstract:
5 | Combines video frames from two different sources.
6 | */
7 |
8 | import CoreMedia
9 | import CoreVideo
10 |
11 | class PiPVideoMixer {
12 |
13 | var description = "Video Mixer"
14 |
15 | private(set) var isPrepared = false
16 |
17 | /// A normalized CGRect representing the position and size of the PiP in relation to the full screen video preview
18 | var pipFrame = CGRect.zero
19 |
20 | private(set) var inputFormatDescription: CMFormatDescription?
21 |
22 | private(set) var outputFormatDescription: CMFormatDescription?
23 |
24 | private var outputPixelBufferPool: CVPixelBufferPool?
25 |
26 | private let metalDevice = MTLCreateSystemDefaultDevice()
27 |
28 | private var textureCache: CVMetalTextureCache?
29 |
30 | private lazy var commandQueue: MTLCommandQueue? = {
31 | guard let metalDevice = metalDevice else {
32 | return nil
33 | }
34 |
35 | return metalDevice.makeCommandQueue()
36 | }()
37 |
38 | private var fullRangeVertexBuffer: MTLBuffer?
39 |
40 | private var computePipelineState: MTLComputePipelineState?
41 |
42 | init() {
43 | guard let metalDevice = metalDevice,
44 | let defaultLibrary = metalDevice.makeDefaultLibrary(),
45 | let kernelFunction = defaultLibrary.makeFunction(name: "reporterMixer") else {
46 | return
47 | }
48 |
49 | do {
50 | computePipelineState = try metalDevice.makeComputePipelineState(function: kernelFunction)
51 | } catch {
52 | print("Could not create compute pipeline state: \(error)")
53 | }
54 | }
55 |
56 | func prepare(with videoFormatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) {
57 | reset()
58 |
59 | (outputPixelBufferPool, _, outputFormatDescription) = allocateOutputBufferPool(with: videoFormatDescription,
60 | outputRetainedBufferCountHint: outputRetainedBufferCountHint)
61 | if outputPixelBufferPool == nil {
62 | return
63 | }
64 | inputFormatDescription = videoFormatDescription
65 |
66 | guard let metalDevice = metalDevice else {
67 | return
68 | }
69 |
70 | var metalTextureCache: CVMetalTextureCache?
71 | if CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, metalDevice, nil, &metalTextureCache) != kCVReturnSuccess {
72 | assertionFailure("Unable to allocate video mixer texture cache")
73 | } else {
74 | textureCache = metalTextureCache
75 | }
76 |
77 | isPrepared = true
78 | }
79 |
80 | func reset() {
81 | outputPixelBufferPool = nil
82 | outputFormatDescription = nil
83 | inputFormatDescription = nil
84 | textureCache = nil
85 | isPrepared = false
86 | }
87 |
88 | struct MixerParameters {
89 |         var pipPosition: SIMD2<Float>
90 |         var pipSize: SIMD2<Float>
91 | }
92 |
93 |     func mix(fullScreenPixelBuffer: CVPixelBuffer, pipPixelBuffer: CVPixelBuffer, fullScreenPixelBufferIsFrontCamera: Bool, pipView: CGRect) -> CVPixelBuffer? {
94 | guard isPrepared,
95 | let outputPixelBufferPool = outputPixelBufferPool else {
96 | assertionFailure("Invalid state: Not prepared")
97 | return nil
98 | }
99 |
100 | var newPixelBuffer: CVPixelBuffer?
101 | CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool, &newPixelBuffer)
102 | guard let outputPixelBuffer = newPixelBuffer else {
103 | print("Allocation failure: Could not get pixel buffer from pool (\(self.description))")
104 | return nil
105 | }
106 |
107 | guard let outputTexture = makeTextureFromCVPixelBuffer(pixelBuffer: outputPixelBuffer),
108 | let fullScreenTexture = makeTextureFromCVPixelBuffer(pixelBuffer: fullScreenPixelBuffer),
109 | let pipTexture = makeTextureFromCVPixelBuffer(pixelBuffer: pipPixelBuffer) else {
110 | return nil
111 | }
112 |
113 |         // TODO: Derive the PiP rect from the normalized pipFrame instead of
114 |         // scaling the preview view's frame by a hard-coded factor of 2, e.g.:
115 |         // let pipSize = SIMD2(Float(pipFrame.size.width) * Float(pipTexture.width), Float(pipFrame.size.height) * Float(pipTexture.height))
116 | let pipPosition = SIMD2(Float(pipView.origin.x)*2, Float(pipView.origin.y)*2+10.0)
117 | let pipSize = SIMD2(Float(pipView.size.width)*2, Float(pipView.size.height)*2)
118 |
119 | var parameters = MixerParameters(pipPosition: pipPosition, pipSize: pipSize)
120 |
121 | // Set up command queue, buffer, and encoder
122 | guard let commandQueue = commandQueue,
123 | let commandBuffer = commandQueue.makeCommandBuffer(),
124 | let commandEncoder = commandBuffer.makeComputeCommandEncoder(),
125 | let computePipelineState = computePipelineState else {
126 | print("Failed to create Metal command encoder")
127 |
128 | if let textureCache = textureCache {
129 | CVMetalTextureCacheFlush(textureCache, 0)
130 | }
131 |
132 | return nil
133 | }
134 |
135 | commandEncoder.label = "pip Video Mixer"
136 | commandEncoder.setComputePipelineState(computePipelineState)
137 | commandEncoder.setTexture(fullScreenTexture, index: 0)
138 | commandEncoder.setTexture(pipTexture, index: 1)
139 | commandEncoder.setTexture(outputTexture, index: 2)
140 |         commandEncoder.setBytes(UnsafeMutableRawPointer(&parameters), length: MemoryLayout<MixerParameters>.size, index: 0)
141 |
142 | // Set up thread groups as described in https://developer.apple.com/reference/metal/mtlcomputecommandencoder
143 | let width = computePipelineState.threadExecutionWidth
144 | let height = computePipelineState.maxTotalThreadsPerThreadgroup / width
145 | let threadsPerThreadgroup = MTLSizeMake(width, height, 1)
146 | let threadgroupsPerGrid = MTLSize(width: (fullScreenTexture.width + width - 1) / width,
147 | height: (fullScreenTexture.height + height - 1) / height,
148 | depth: 1)
149 | commandEncoder.dispatchThreadgroups(threadgroupsPerGrid, threadsPerThreadgroup: threadsPerThreadgroup)
150 |
151 | commandEncoder.endEncoding()
152 | commandBuffer.commit()
153 |
154 | return outputPixelBuffer
155 | }
156 |
157 | private func makeTextureFromCVPixelBuffer(pixelBuffer: CVPixelBuffer) -> MTLTexture? {
158 | guard let textureCache = textureCache else {
159 | print("No texture cache")
160 | return nil
161 | }
162 |
163 | let width = CVPixelBufferGetWidth(pixelBuffer)
164 | let height = CVPixelBufferGetHeight(pixelBuffer)
165 |
166 | // Create a Metal texture from the image buffer
167 | var cvTextureOut: CVMetalTexture?
168 | CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer, nil, .bgra8Unorm, width, height, 0, &cvTextureOut)
169 | guard let cvTexture = cvTextureOut, let texture = CVMetalTextureGetTexture(cvTexture) else {
170 | print("Video mixer failed to create preview texture")
171 |
172 | CVMetalTextureCacheFlush(textureCache, 0)
173 | return nil
174 | }
175 |
176 | return texture
177 | }
178 | }
179 |
--------------------------------------------------------------------------------
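A sketch of a call site for PiPVideoMixer (argument names hypothetical). `prepare(with:outputRetainedBufferCountHint:)` must run once before mixing, as CameraViewController does in `processFullScreenSampleBuffer(_:)`:

    import CoreGraphics
    import CoreMedia
    import CoreVideo

    let mixer = PiPVideoMixer()

    func mixFrames(fullScreen: CVPixelBuffer, pip: CVPixelBuffer,
                   formatDescription: CMFormatDescription, pipRect: CGRect) -> CVPixelBuffer? {
        if !mixer.isPrepared {
            mixer.prepare(with: formatDescription, outputRetainedBufferCountHint: 3)
        }
        return mixer.mix(fullScreenPixelBuffer: fullScreen,
                         pipPixelBuffer: pip,
                         fullScreenPixelBufferIsFrontCamera: true,
                         pipView: pipRect)
    }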
/AVMultiCamPiP/PreviewView.swift:
--------------------------------------------------------------------------------
1 | /*
2 | See LICENSE folder for this sample’s licensing information.
3 |
4 | Abstract:
5 | Application preview view.
6 | */
7 |
8 | import UIKit
9 | import AVFoundation
10 |
11 | class PreviewView: UIView {
12 | var videoPreviewLayer: AVCaptureVideoPreviewLayer {
13 | guard let layer = layer as? AVCaptureVideoPreviewLayer else {
14 | fatalError("Expected `AVCaptureVideoPreviewLayer` type for layer. Check PreviewView.layerClass implementation.")
15 | }
16 |
17 | return layer
18 | }
19 |
20 | override class var layerClass: AnyClass {
21 | return AVCaptureVideoPreviewLayer.self
22 | }
23 | }
24 |
25 |
--------------------------------------------------------------------------------
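One way to drive PreviewView (a sketch; the sample itself wires its preview layers up inside CameraViewController):

    import AVFoundation
    import UIKit

    let previewView = PreviewView(frame: UIScreen.main.bounds)
    let session = AVCaptureSession() // the app uses an AVCaptureMultiCamSession
    previewView.videoPreviewLayer.session = session
    previewView.videoPreviewLayer.videoGravity = .resizeAspectFill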
/AVMultiCamPiP/UIView+Extension.swift:
--------------------------------------------------------------------------------
1 | import UIKit
2 |
3 | extension UIView {
4 |
5 |     var top: CGFloat {
6 |         get {
7 |             return self.frame.origin.y
8 |         }
9 |         set {
10 |             var frame = self.frame
11 |             frame.origin.y = newValue
12 |             self.frame = frame
13 |         }
14 |     }
15 | 
16 |     var bottom: CGFloat {
17 |         get {
18 |             return self.frame.origin.y + self.frame.size.height
19 |         }
20 |         set {
21 |             var frame = self.frame
22 |             frame.origin.y = newValue - self.frame.size.height
23 |             self.frame = frame
24 |         }
25 |     }
26 | 
27 |     var right: CGFloat {
28 |         get {
29 |             return self.frame.origin.x + self.frame.size.width
30 |         }
31 |         set {
32 |             var frame = self.frame
33 |             frame.origin.x = newValue - self.frame.size.width
34 |             self.frame = frame
35 |         }
36 |     }
37 | 
38 |     var left: CGFloat {
39 |         get {
40 |             return self.frame.origin.x
41 |         }
42 |         set {
43 |             var frame = self.frame
44 |             frame.origin.x = newValue
45 |             self.frame = frame
46 |         }
47 |     }
48 | }
49 |
--------------------------------------------------------------------------------
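A quick check of the frame accessors above: setting `right`/`bottom` repositions a view by its trailing and bottom edges while preserving its size:

    import UIKit

    let view = UIView(frame: CGRect(x: 0, y: 0, width: 100, height: 50))
    view.right = 320   // origin.x becomes 220
    view.bottom = 480  // origin.y becomes 430
    print(view.left, view.top) // 220.0 430.0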
/AVMultiCamPiP/Utilities.swift:
--------------------------------------------------------------------------------
1 | /*
2 | See LICENSE folder for this sample’s licensing information.
3 |
4 | Abstract:
5 | Utilities
6 | */
7 |
8 | import AVFoundation
9 | import CoreMedia
10 | import Foundation
11 | import UIKit
12 |
13 | // Use bundle name instead of hard-coding app name in alerts
14 | extension Bundle {
15 |
16 | var applicationName: String {
17 | if let name = object(forInfoDictionaryKey: "CFBundleDisplayName") as? String {
18 | return name
19 | } else if let name = object(forInfoDictionaryKey: "CFBundleName") as? String {
20 | return name
21 | }
22 |
23 | return "-"
24 | }
25 | }
26 |
27 | extension AVCaptureVideoOrientation {
28 |
29 | init?(deviceOrientation: UIDeviceOrientation) {
30 | switch deviceOrientation {
31 | case .portrait: self = .portrait
32 | case .portraitUpsideDown: self = .portraitUpsideDown
33 | case .landscapeLeft: self = .landscapeRight
34 | case .landscapeRight: self = .landscapeLeft
35 | default: return nil
36 | }
37 | }
38 |
39 | init?(interfaceOrientation: UIInterfaceOrientation) {
40 | switch interfaceOrientation {
41 | case .portrait: self = .portrait
42 | case .portraitUpsideDown: self = .portraitUpsideDown
43 | case .landscapeLeft: self = .landscapeLeft
44 | case .landscapeRight: self = .landscapeRight
45 | default: return nil
46 | }
47 | }
48 |
49 | func angleOffsetFromPortraitOrientation(at position: AVCaptureDevice.Position) -> Double {
50 | switch self {
51 | case .portrait:
52 | return position == .front ? .pi : 0
53 | case .portraitUpsideDown:
54 | return position == .front ? 0 : .pi
55 | case .landscapeRight:
56 | return -.pi / 2.0
57 | case .landscapeLeft:
58 | return .pi / 2.0
59 | default:
60 | return 0
61 | }
62 | }
63 | }
64 |
65 | extension AVCaptureConnection {
66 | func videoOrientationTransform(relativeTo destinationVideoOrientation: AVCaptureVideoOrientation) -> CGAffineTransform {
67 | let videoDevice: AVCaptureDevice
68 | if let deviceInput = inputPorts.first?.input as? AVCaptureDeviceInput, deviceInput.device.hasMediaType(.video) {
69 | videoDevice = deviceInput.device
70 | } else {
71 | // Fatal error? Programmer error?
72 | print("Video data output's video connection does not have a video device")
73 | return .identity
74 | }
75 |
76 | let fromAngleOffset = videoOrientation.angleOffsetFromPortraitOrientation(at: videoDevice.position)
77 | let toAngleOffset = destinationVideoOrientation.angleOffsetFromPortraitOrientation(at: videoDevice.position)
78 | let angleOffset = CGFloat(toAngleOffset - fromAngleOffset)
79 | let transform = CGAffineTransform(rotationAngle: angleOffset)
80 |
81 | return transform
82 | }
83 | }
84 |
85 | extension AVCaptureSession.InterruptionReason: CustomStringConvertible {
86 | public var description: String {
87 | var descriptionString = ""
88 |
89 | switch self {
90 | case .videoDeviceNotAvailableInBackground:
91 | descriptionString = "video device is not available in the background"
92 | case .audioDeviceInUseByAnotherClient:
93 | descriptionString = "audio device is in use by another client"
94 | case .videoDeviceInUseByAnotherClient:
95 | descriptionString = "video device is in use by another client"
96 | case .videoDeviceNotAvailableWithMultipleForegroundApps:
97 | descriptionString = "video device is not available with multiple foreground apps"
98 | case .videoDeviceNotAvailableDueToSystemPressure:
99 | descriptionString = "video device is not available due to system pressure"
100 | @unknown default:
101 | descriptionString = "unknown (\(self.rawValue)"
102 | }
103 |
104 | return descriptionString
105 | }
106 | }
107 |
108 | func allocateOutputBufferPool(with inputFormatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) -> (
109 | outputBufferPool: CVPixelBufferPool?,
110 | outputColorSpace: CGColorSpace?,
111 | outputFormatDescription: CMFormatDescription?) {
112 |
113 | let inputMediaSubType = CMFormatDescriptionGetMediaSubType(inputFormatDescription)
114 | if inputMediaSubType != kCVPixelFormatType_32BGRA {
115 | assertionFailure("Invalid input pixel buffer type \(inputMediaSubType)")
116 | return (nil, nil, nil)
117 | }
118 |
119 | let inputDimensions = CMVideoFormatDescriptionGetDimensions(inputFormatDescription)
120 | var pixelBufferAttributes: [String: Any] = [
121 | kCVPixelBufferPixelFormatTypeKey as String: UInt(inputMediaSubType),
122 | kCVPixelBufferWidthKey as String: Int(inputDimensions.width),
123 | kCVPixelBufferHeightKey as String: Int(inputDimensions.height),
124 | kCVPixelBufferIOSurfacePropertiesKey as String: [:]
125 | ]
126 |
127 | // Get pixel buffer attributes and color space from the input format description
128 | var cgColorSpace: CGColorSpace? = CGColorSpaceCreateDeviceRGB()
129 | if let inputFormatDescriptionExtension = CMFormatDescriptionGetExtensions(inputFormatDescription) as Dictionary? {
130 | let colorPrimaries = inputFormatDescriptionExtension[kCVImageBufferColorPrimariesKey]
131 |
132 | if let colorPrimaries = colorPrimaries {
133 | var colorSpaceProperties: [String: AnyObject] = [kCVImageBufferColorPrimariesKey as String: colorPrimaries]
134 |
135 | if let yCbCrMatrix = inputFormatDescriptionExtension[kCVImageBufferYCbCrMatrixKey] {
136 | colorSpaceProperties[kCVImageBufferYCbCrMatrixKey as String] = yCbCrMatrix
137 | }
138 |
139 | if let transferFunction = inputFormatDescriptionExtension[kCVImageBufferTransferFunctionKey] {
140 | colorSpaceProperties[kCVImageBufferTransferFunctionKey as String] = transferFunction
141 | }
142 |
143 | pixelBufferAttributes[kCVBufferPropagatedAttachmentsKey as String] = colorSpaceProperties
144 | }
145 |
146 | if let cvColorspace = inputFormatDescriptionExtension[kCVImageBufferCGColorSpaceKey],
147 | CFGetTypeID(cvColorspace) == CGColorSpace.typeID {
148 | cgColorSpace = (cvColorspace as! CGColorSpace)
149 | } else if (colorPrimaries as? String) == (kCVImageBufferColorPrimaries_P3_D65 as String) {
150 | cgColorSpace = CGColorSpace(name: CGColorSpace.displayP3)
151 | }
152 | }
153 |
154 | // Create a pixel buffer pool with the same pixel attributes as the input format description.
155 | let poolAttributes = [kCVPixelBufferPoolMinimumBufferCountKey as String: outputRetainedBufferCountHint]
156 | var cvPixelBufferPool: CVPixelBufferPool?
157 | CVPixelBufferPoolCreate(kCFAllocatorDefault, poolAttributes as NSDictionary?, pixelBufferAttributes as NSDictionary?, &cvPixelBufferPool)
158 | guard let pixelBufferPool = cvPixelBufferPool else {
159 | assertionFailure("Allocation failure: Could not allocate pixel buffer pool.")
160 | return (nil, nil, nil)
161 | }
162 |
163 | preallocateBuffers(pool: pixelBufferPool, allocationThreshold: outputRetainedBufferCountHint)
164 |
165 | // Get the output format description
166 | var pixelBuffer: CVPixelBuffer?
167 | var outputFormatDescription: CMFormatDescription?
168 | let auxAttributes = [kCVPixelBufferPoolAllocationThresholdKey as String: outputRetainedBufferCountHint] as NSDictionary
169 | CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pixelBufferPool, auxAttributes, &pixelBuffer)
170 | if let pixelBuffer = pixelBuffer {
171 | CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault,
172 | imageBuffer: pixelBuffer,
173 | formatDescriptionOut: &outputFormatDescription)
174 | }
175 | pixelBuffer = nil
176 |
177 | return (pixelBufferPool, cgColorSpace, outputFormatDescription)
178 | }
179 |
180 | private func preallocateBuffers(pool: CVPixelBufferPool, allocationThreshold: Int) {
181 | var pixelBuffers = [CVPixelBuffer]()
182 | var error: CVReturn = kCVReturnSuccess
183 | let auxAttributes = [kCVPixelBufferPoolAllocationThresholdKey as String: allocationThreshold] as NSDictionary
184 | var pixelBuffer: CVPixelBuffer?
185 | while error == kCVReturnSuccess {
186 | error = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pool, auxAttributes, &pixelBuffer)
187 | if let pixelBuffer = pixelBuffer {
188 | pixelBuffers.append(pixelBuffer)
189 | }
190 | pixelBuffer = nil
191 | }
192 | pixelBuffers.removeAll()
193 | }
194 |
--------------------------------------------------------------------------------
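The device-to-capture orientation mapping above deliberately crosses the landscape cases, because UIDeviceOrientation and AVCaptureVideoOrientation name landscape from opposite reference points. A small sanity check of that mapping:

    import AVFoundation
    import UIKit

    assert(AVCaptureVideoOrientation(deviceOrientation: .landscapeLeft) == .landscapeRight)
    assert(AVCaptureVideoOrientation(deviceOrientation: .portrait) == .portrait)
    assert(AVCaptureVideoOrientation(deviceOrientation: .faceUp) == nil) // unmapped orientations return nil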
/Configuration/SampleCode.xcconfig:
--------------------------------------------------------------------------------
1 | //
2 | // See LICENSE folder for this sample’s licensing information.
3 | //
4 | // SampleCode.xcconfig
5 | //
6 |
7 | // The `SAMPLE_CODE_DISAMBIGUATOR` configuration is to make it easier to build
8 | // and run a sample code project. Once you set your project's development team,
9 | // you'll have a unique bundle identifier. This is because the bundle identifier
10 | // is derived based on the 'SAMPLE_CODE_DISAMBIGUATOR' value. Do not use this
11 | // approach in your own projects—it's only useful for sample code projects because
12 | // they are frequently downloaded and don't have a development team set.
13 | SAMPLE_CODE_DISAMBIGUATOR=${DEVELOPMENT_TEAM}
14 |
--------------------------------------------------------------------------------
/LICENSE/LICENSE.txt:
--------------------------------------------------------------------------------
1 | iPhone, iPad, iOS, and Xcode are trademarks of Apple Inc.
2 |
3 | Copyright © 2019 Apple Inc.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
6 |
7 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
8 |
9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
10 |
11 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # AVMultiCamPiP: Capturing from Multiple Cameras
2 | Simultaneously record the output from the front and back cameras into a single movie file by using a multi-camera capture session.
3 |
4 | ## Overview
5 |
6 | - Note: This sample code project is associated with WWDC 2019 session [225: Advances in Camera Capture & Photo Segmentation](https://developer.apple.com/videos/play/wwdc19/225/).
7 |
8 |
9 | ## Configure the Sample Code Project
10 |
11 | You must run this sample code on one of these devices:
12 | - An iPhone with an A12 or later processor
13 | - An iPad Pro with an A12X or later processor
14 |
15 |
--------------------------------------------------------------------------------