├── LICENSE ├── README.md ├── ZHCamera.xcodeproj ├── project.pbxproj ├── project.xcworkspace │ ├── contents.xcworkspacedata │ └── xcshareddata │ │ └── IDEWorkspaceChecks.plist └── xcuserdata │ └── xuzhenhao.xcuserdatad │ └── xcschemes │ └── xcschememanagement.plist └── ZHCamera ├── AppDelegate.swift ├── Assets.xcassets ├── AppIcon.appiconset │ └── Contents.json └── Contents.json ├── Base.lproj ├── LaunchScreen.storyboard └── Main.storyboard ├── Capture ├── Controller │ └── CameraViewController.swift ├── Manager │ ├── CaptureManager.swift │ └── VideoWriteManager.swift └── View │ └── CapturePreview.swift ├── Composition └── Controller │ └── CompositionViewController.swift ├── Info.plist ├── Resource ├── 01_nebula.mp4 ├── 02_blackhole.mp4 ├── 03_nebula.mp4 ├── 04_quasar.mp4 └── 05_blackhole.mp4 └── ViewController.swift /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 cloud 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # LearningAVFoundation 2 | 3 | ## Preface 4 | The AVFoundation framework is extremely powerful, but it is also packed with classes: all kinds of sessions, inputs, and outputs. I had never gone through the framework systematically; although I could piece the various classes together to meet requirements, things always got difficult once a feature needed deep customization or I hit a pitfall. Recently I read "Learning AV Foundation", and after finishing it I tried to map out the overall structure and consolidated what I had learned into a demo rewritten in Swift, covering capture + real-time filters + real-time writing + custom export resolution. This article covers the video capture part; the next installment will cover video composition/editing and audio mixing. 5 | 6 | ## Overall structure 7 | 8 | ![Overall architecture](https://user-gold-cdn.xitu.io/2018/11/10/166fc73b59861193?w=1202&h=716&f=png&s=72981) 9 | 10 | As the diagram shows, the whole pipeline breaks down into three stages: data capture, data processing, and data saving. 11 | 12 | Data capture: capture is not limited to this article's scenario of grabbing frames from the camera and microphone; it also includes, for example, reading from an AVAsset instance with AVAssetReader. Whatever the source, the output of this stage is CMSampleBuffer. Note that the camera captures a compressed YUV video signal, which must be converted back into a processable format on output; this is configured through the videoSettings property of AVCaptureVideoDataOutput. 13 | 14 | Data processing: in this stage all kinds of processing, including filters, can be applied on top of the CMSampleBuffer. The sample buffer contains a CVPixelBuffer, a Core Video object holding the raw pixel data of a single video frame, which allows pixel-level processing. 15 | 16 | Data saving: this stage encodes the processed media and writes it into a container file such as .mp4 or .mov. Here we use AVAssetWriter, which supports real-time writing and also expects data in CMSampleBuffer format. Other kinds of data can be passed in as well by adapting them into the expected format through a pixel buffer adaptor; for example, the demo in this article feeds in CIImage, which is first rendered into a CVPixelBuffer and then written. 17 | 18 | That is the overall structure, and the demo in this article is organized around it. CaptureManager handles data capture and delivers CMSampleBuffer data through a block callback. VideoWriteManager handles data saving: it receives CMSampleBuffer data and returns the path of the generated file through a block callback. CameraViewController coordinates the two, performs the data processing, and presents the processed data to the user as a live preview. 19 | 20 | ## Device capture 21 | 22 | The core class for capturing from devices is AVCaptureSession. Its inputs are AVCaptureDeviceInput objects and its outputs are AVCaptureOutput objects; it manages the data streams coming from the physical devices and produces files or data according to the output configuration. AVCaptureOutput is an abstract base class: if little customization is needed, the high-level AVCaptureMovieFileOutput can write a file directly, but for low-level data processing or custom configuration, AVCaptureVideoDataOutput and AVCaptureAudioDataOutput are used to output raw CMSampleBuffer data. In addition, to avoid blocking the main thread, a dedicated serial queue is usually assigned to each AVCaptureOutput. 23 | 24 | ## Data processing: adding filters 25 | 26 | Filters are most often implemented with the GPUImage framework, but that is not the focus of this article, so the demo only uses the Core Image framework for its filter effects. To achieve real-time filtering, the current filter has to be applied to the image data of every frame inside the per-frame callback, so the user can keep switching filters while recording. 27 | 28 | In the demo from "Learning AV Foundation", the raw data is handed separately to the preview view and to the writer, and each processes it on its own. That effectively performs the same processing twice, which is undesirable for both code maintenance and performance. Therefore, in this demo the data is processed once and the processed result is then handed to both the preview view for display and the writer for saving. 29 | 30 | ## Data saving 31 | 32 | AVAssetWriter is configured with multiple AVAssetWriterInput objects (audio, video, and so on). An AVAssetWriterInput is initialized with a mediaType and outputSettings; the outputSettings allow fine-grained control such as video bit rate, video width and height, and key-frame interval, which is a clear advantage of AVAssetWriter over AVAssetExportSession. After data has been appended, each AVAssetWriterInput produces a separate AVAssetTrack in the final output. 33 | 34 | An AVAssetWriterInputPixelBufferAdaptor is used here to append CVPixelBuffer data; it provides the best performance when appending video samples as CVPixelBuffer objects. 35 | 36 | That is the overall approach to the AVFoundation capture part. For more details on pitfalls such as video rotation, see the [Demo](https://github.com/xuzhenhao/LearningAVFoundation.git), which is thoroughly commented. 37 | -------------------------------------------------------------------------------- /ZHCamera.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$!
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 50; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | E7252167219581B200BD8FEF /* CompositionViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = E7252166219581B200BD8FEF /* CompositionViewController.swift */; }; 11 | E7531545219D70D500FC1243 /* 01_nebula.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = E7531540219D70D400FC1243 /* 01_nebula.mp4 */; }; 12 | E7531546219D70D500FC1243 /* 02_blackhole.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = E7531541219D70D400FC1243 /* 02_blackhole.mp4 */; }; 13 | E7531547219D70D500FC1243 /* 03_nebula.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = E7531542219D70D500FC1243 /* 03_nebula.mp4 */; }; 14 | E7531548219D70D500FC1243 /* 04_quasar.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = E7531543219D70D500FC1243 /* 04_quasar.mp4 */; }; 15 | E7531549219D70D500FC1243 /* 05_blackhole.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = E7531544219D70D500FC1243 /* 05_blackhole.mp4 */; }; 16 | E76CEAEA219404EB0080F456 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = E76CEAE9219404EB0080F456 /* AppDelegate.swift */; }; 17 | E76CEAEC219404EB0080F456 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = E76CEAEB219404EB0080F456 /* ViewController.swift */; }; 18 | E76CEAEF219404EB0080F456 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E76CEAED219404EB0080F456 /* Main.storyboard */; }; 19 | E76CEAF1219404ED0080F456 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = E76CEAF0219404ED0080F456 /* Assets.xcassets */; }; 20 | E76CEAF4219404ED0080F456 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E76CEAF2219404ED0080F456 /* LaunchScreen.storyboard */; }; 21 | E76CEB002194062A0080F456 /* CameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = E76CEAFF2194062A0080F456 /* CameraViewController.swift */; }; 22 | E76CEB02219408FC0080F456 /* CapturePreview.swift in Sources */ = {isa = PBXBuildFile; fileRef = E76CEB01219408FC0080F456 /* CapturePreview.swift */; }; 23 | E76CEB0521940EEE0080F456 /* CaptureManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = E76CEB0421940EEE0080F456 /* CaptureManager.swift */; }; 24 | E772E8B6219442A50035E819 /* VideoWriteManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = E772E8B5219442A50035E819 /* VideoWriteManager.swift */; }; 25 | /* End PBXBuildFile section */ 26 | 27 | /* Begin PBXFileReference section */ 28 | E7252166219581B200BD8FEF /* CompositionViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CompositionViewController.swift; sourceTree = ""; }; 29 | E7531540219D70D400FC1243 /* 01_nebula.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = 01_nebula.mp4; sourceTree = ""; }; 30 | E7531541219D70D400FC1243 /* 02_blackhole.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = 02_blackhole.mp4; sourceTree = ""; }; 31 | E7531542219D70D500FC1243 /* 03_nebula.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = 03_nebula.mp4; sourceTree = ""; }; 32 | E7531543219D70D500FC1243 /* 04_quasar.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = 04_quasar.mp4; sourceTree = ""; }; 33 | E7531544219D70D500FC1243 /* 05_blackhole.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = 05_blackhole.mp4; sourceTree = ""; }; 34 | E76CEAE6219404EB0080F456 /* ZHCamera.app */ = {isa = 
PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = ZHCamera.app; sourceTree = BUILT_PRODUCTS_DIR; }; 35 | E76CEAE9219404EB0080F456 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 36 | E76CEAEB219404EB0080F456 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; 37 | E76CEAEE219404EB0080F456 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 38 | E76CEAF0219404ED0080F456 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 39 | E76CEAF3219404ED0080F456 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 40 | E76CEAF5219404ED0080F456 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 41 | E76CEAFF2194062A0080F456 /* CameraViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraViewController.swift; sourceTree = ""; }; 42 | E76CEB01219408FC0080F456 /* CapturePreview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CapturePreview.swift; sourceTree = ""; }; 43 | E76CEB0421940EEE0080F456 /* CaptureManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CaptureManager.swift; sourceTree = ""; }; 44 | E772E8B5219442A50035E819 /* VideoWriteManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoWriteManager.swift; sourceTree = ""; }; 45 | /* End PBXFileReference section */ 46 | 47 | /* Begin PBXFrameworksBuildPhase section */ 48 | E76CEAE3219404EB0080F456 /* Frameworks */ = { 49 | isa = PBXFrameworksBuildPhase; 50 | buildActionMask = 2147483647; 51 | files = ( 52 | ); 53 | runOnlyForDeploymentPostprocessing = 0; 54 | }; 55 | /* End PBXFrameworksBuildPhase section */ 56 | 57 | /* Begin PBXGroup section */ 58 | E725216321957A0B00BD8FEF /* Composition */ = { 59 | isa = PBXGroup; 60 | children = ( 61 | E7B9173A219BC40200F1DD8C /* View */, 62 | E7B91739219BC3B100F1DD8C /* Model */, 63 | E72521652195818A00BD8FEF /* Controller */, 64 | ); 65 | path = Composition; 66 | sourceTree = ""; 67 | }; 68 | E72521642195817300BD8FEF /* Resource */ = { 69 | isa = PBXGroup; 70 | children = ( 71 | E7531540219D70D400FC1243 /* 01_nebula.mp4 */, 72 | E7531541219D70D400FC1243 /* 02_blackhole.mp4 */, 73 | E7531542219D70D500FC1243 /* 03_nebula.mp4 */, 74 | E7531543219D70D500FC1243 /* 04_quasar.mp4 */, 75 | E7531544219D70D500FC1243 /* 05_blackhole.mp4 */, 76 | ); 77 | path = Resource; 78 | sourceTree = ""; 79 | }; 80 | E72521652195818A00BD8FEF /* Controller */ = { 81 | isa = PBXGroup; 82 | children = ( 83 | E7252166219581B200BD8FEF /* CompositionViewController.swift */, 84 | ); 85 | path = Controller; 86 | sourceTree = ""; 87 | }; 88 | E76CEADD219404EB0080F456 = { 89 | isa = PBXGroup; 90 | children = ( 91 | E76CEAE8219404EB0080F456 /* ZHCamera */, 92 | E76CEAE7219404EB0080F456 /* Products */, 93 | ); 94 | sourceTree = ""; 95 | }; 96 | E76CEAE7219404EB0080F456 /* Products */ = { 97 | isa = PBXGroup; 98 | children = ( 99 | E76CEAE6219404EB0080F456 /* ZHCamera.app */, 100 | ); 101 | name = Products; 102 | sourceTree = ""; 103 | }; 104 | 
E76CEAE8219404EB0080F456 /* ZHCamera */ = { 105 | isa = PBXGroup; 106 | children = ( 107 | E72521642195817300BD8FEF /* Resource */, 108 | E725216321957A0B00BD8FEF /* Composition */, 109 | E76CEAFB219405B70080F456 /* Capture */, 110 | E76CEAE9219404EB0080F456 /* AppDelegate.swift */, 111 | E76CEAEB219404EB0080F456 /* ViewController.swift */, 112 | E76CEAED219404EB0080F456 /* Main.storyboard */, 113 | E76CEAF0219404ED0080F456 /* Assets.xcassets */, 114 | E76CEAF2219404ED0080F456 /* LaunchScreen.storyboard */, 115 | E76CEAF5219404ED0080F456 /* Info.plist */, 116 | ); 117 | path = ZHCamera; 118 | sourceTree = ""; 119 | }; 120 | E76CEAFB219405B70080F456 /* Capture */ = { 121 | isa = PBXGroup; 122 | children = ( 123 | E76CEB0321940E990080F456 /* Manager */, 124 | E76CEAFE219406150080F456 /* Controller */, 125 | E76CEAFD2194060F0080F456 /* View */, 126 | E76CEAFC219406080080F456 /* Model */, 127 | ); 128 | path = Capture; 129 | sourceTree = ""; 130 | }; 131 | E76CEAFC219406080080F456 /* Model */ = { 132 | isa = PBXGroup; 133 | children = ( 134 | ); 135 | path = Model; 136 | sourceTree = ""; 137 | }; 138 | E76CEAFD2194060F0080F456 /* View */ = { 139 | isa = PBXGroup; 140 | children = ( 141 | E76CEB01219408FC0080F456 /* CapturePreview.swift */, 142 | ); 143 | path = View; 144 | sourceTree = ""; 145 | }; 146 | E76CEAFE219406150080F456 /* Controller */ = { 147 | isa = PBXGroup; 148 | children = ( 149 | E76CEAFF2194062A0080F456 /* CameraViewController.swift */, 150 | ); 151 | path = Controller; 152 | sourceTree = ""; 153 | }; 154 | E76CEB0321940E990080F456 /* Manager */ = { 155 | isa = PBXGroup; 156 | children = ( 157 | E76CEB0421940EEE0080F456 /* CaptureManager.swift */, 158 | E772E8B5219442A50035E819 /* VideoWriteManager.swift */, 159 | ); 160 | path = Manager; 161 | sourceTree = ""; 162 | }; 163 | E7B91739219BC3B100F1DD8C /* Model */ = { 164 | isa = PBXGroup; 165 | children = ( 166 | ); 167 | path = Model; 168 | sourceTree = ""; 169 | }; 170 | E7B9173A219BC40200F1DD8C /* View */ = { 171 | isa = PBXGroup; 172 | children = ( 173 | ); 174 | path = View; 175 | sourceTree = ""; 176 | }; 177 | /* End PBXGroup section */ 178 | 179 | /* Begin PBXNativeTarget section */ 180 | E76CEAE5219404EB0080F456 /* ZHCamera */ = { 181 | isa = PBXNativeTarget; 182 | buildConfigurationList = E76CEAF8219404ED0080F456 /* Build configuration list for PBXNativeTarget "ZHCamera" */; 183 | buildPhases = ( 184 | E76CEAE2219404EB0080F456 /* Sources */, 185 | E76CEAE3219404EB0080F456 /* Frameworks */, 186 | E76CEAE4219404EB0080F456 /* Resources */, 187 | ); 188 | buildRules = ( 189 | ); 190 | dependencies = ( 191 | ); 192 | name = ZHCamera; 193 | productName = ZHCamera; 194 | productReference = E76CEAE6219404EB0080F456 /* ZHCamera.app */; 195 | productType = "com.apple.product-type.application"; 196 | }; 197 | /* End PBXNativeTarget section */ 198 | 199 | /* Begin PBXProject section */ 200 | E76CEADE219404EB0080F456 /* Project object */ = { 201 | isa = PBXProject; 202 | attributes = { 203 | LastSwiftUpdateCheck = 1010; 204 | LastUpgradeCheck = 1010; 205 | ORGANIZATIONNAME = xuzhenhao; 206 | TargetAttributes = { 207 | E76CEAE5219404EB0080F456 = { 208 | CreatedOnToolsVersion = 10.1; 209 | }; 210 | }; 211 | }; 212 | buildConfigurationList = E76CEAE1219404EB0080F456 /* Build configuration list for PBXProject "ZHCamera" */; 213 | compatibilityVersion = "Xcode 9.3"; 214 | developmentRegion = en; 215 | hasScannedForEncodings = 0; 216 | knownRegions = ( 217 | en, 218 | Base, 219 | ); 220 | mainGroup = E76CEADD219404EB0080F456; 221 | 
productRefGroup = E76CEAE7219404EB0080F456 /* Products */; 222 | projectDirPath = ""; 223 | projectRoot = ""; 224 | targets = ( 225 | E76CEAE5219404EB0080F456 /* ZHCamera */, 226 | ); 227 | }; 228 | /* End PBXProject section */ 229 | 230 | /* Begin PBXResourcesBuildPhase section */ 231 | E76CEAE4219404EB0080F456 /* Resources */ = { 232 | isa = PBXResourcesBuildPhase; 233 | buildActionMask = 2147483647; 234 | files = ( 235 | E7531545219D70D500FC1243 /* 01_nebula.mp4 in Resources */, 236 | E7531546219D70D500FC1243 /* 02_blackhole.mp4 in Resources */, 237 | E76CEAF4219404ED0080F456 /* LaunchScreen.storyboard in Resources */, 238 | E7531547219D70D500FC1243 /* 03_nebula.mp4 in Resources */, 239 | E7531548219D70D500FC1243 /* 04_quasar.mp4 in Resources */, 240 | E76CEAF1219404ED0080F456 /* Assets.xcassets in Resources */, 241 | E76CEAEF219404EB0080F456 /* Main.storyboard in Resources */, 242 | E7531549219D70D500FC1243 /* 05_blackhole.mp4 in Resources */, 243 | ); 244 | runOnlyForDeploymentPostprocessing = 0; 245 | }; 246 | /* End PBXResourcesBuildPhase section */ 247 | 248 | /* Begin PBXSourcesBuildPhase section */ 249 | E76CEAE2219404EB0080F456 /* Sources */ = { 250 | isa = PBXSourcesBuildPhase; 251 | buildActionMask = 2147483647; 252 | files = ( 253 | E76CEAEC219404EB0080F456 /* ViewController.swift in Sources */, 254 | E772E8B6219442A50035E819 /* VideoWriteManager.swift in Sources */, 255 | E76CEB02219408FC0080F456 /* CapturePreview.swift in Sources */, 256 | E7252167219581B200BD8FEF /* CompositionViewController.swift in Sources */, 257 | E76CEAEA219404EB0080F456 /* AppDelegate.swift in Sources */, 258 | E76CEB002194062A0080F456 /* CameraViewController.swift in Sources */, 259 | E76CEB0521940EEE0080F456 /* CaptureManager.swift in Sources */, 260 | ); 261 | runOnlyForDeploymentPostprocessing = 0; 262 | }; 263 | /* End PBXSourcesBuildPhase section */ 264 | 265 | /* Begin PBXVariantGroup section */ 266 | E76CEAED219404EB0080F456 /* Main.storyboard */ = { 267 | isa = PBXVariantGroup; 268 | children = ( 269 | E76CEAEE219404EB0080F456 /* Base */, 270 | ); 271 | name = Main.storyboard; 272 | sourceTree = ""; 273 | }; 274 | E76CEAF2219404ED0080F456 /* LaunchScreen.storyboard */ = { 275 | isa = PBXVariantGroup; 276 | children = ( 277 | E76CEAF3219404ED0080F456 /* Base */, 278 | ); 279 | name = LaunchScreen.storyboard; 280 | sourceTree = ""; 281 | }; 282 | /* End PBXVariantGroup section */ 283 | 284 | /* Begin XCBuildConfiguration section */ 285 | E76CEAF6219404ED0080F456 /* Debug */ = { 286 | isa = XCBuildConfiguration; 287 | buildSettings = { 288 | ALWAYS_SEARCH_USER_PATHS = NO; 289 | CLANG_ANALYZER_NONNULL = YES; 290 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 291 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 292 | CLANG_CXX_LIBRARY = "libc++"; 293 | CLANG_ENABLE_MODULES = YES; 294 | CLANG_ENABLE_OBJC_ARC = YES; 295 | CLANG_ENABLE_OBJC_WEAK = YES; 296 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 297 | CLANG_WARN_BOOL_CONVERSION = YES; 298 | CLANG_WARN_COMMA = YES; 299 | CLANG_WARN_CONSTANT_CONVERSION = YES; 300 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 301 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 302 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 303 | CLANG_WARN_EMPTY_BODY = YES; 304 | CLANG_WARN_ENUM_CONVERSION = YES; 305 | CLANG_WARN_INFINITE_RECURSION = YES; 306 | CLANG_WARN_INT_CONVERSION = YES; 307 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 308 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 309 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 310 | 
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 311 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 312 | CLANG_WARN_STRICT_PROTOTYPES = YES; 313 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 314 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 315 | CLANG_WARN_UNREACHABLE_CODE = YES; 316 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 317 | CODE_SIGN_IDENTITY = "iPhone Developer"; 318 | COPY_PHASE_STRIP = NO; 319 | DEBUG_INFORMATION_FORMAT = dwarf; 320 | ENABLE_STRICT_OBJC_MSGSEND = YES; 321 | ENABLE_TESTABILITY = YES; 322 | GCC_C_LANGUAGE_STANDARD = gnu11; 323 | GCC_DYNAMIC_NO_PIC = NO; 324 | GCC_NO_COMMON_BLOCKS = YES; 325 | GCC_OPTIMIZATION_LEVEL = 0; 326 | GCC_PREPROCESSOR_DEFINITIONS = ( 327 | "DEBUG=1", 328 | "$(inherited)", 329 | ); 330 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 331 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 332 | GCC_WARN_UNDECLARED_SELECTOR = YES; 333 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 334 | GCC_WARN_UNUSED_FUNCTION = YES; 335 | GCC_WARN_UNUSED_VARIABLE = YES; 336 | IPHONEOS_DEPLOYMENT_TARGET = 12.1; 337 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; 338 | MTL_FAST_MATH = YES; 339 | ONLY_ACTIVE_ARCH = YES; 340 | SDKROOT = iphoneos; 341 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; 342 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 343 | }; 344 | name = Debug; 345 | }; 346 | E76CEAF7219404ED0080F456 /* Release */ = { 347 | isa = XCBuildConfiguration; 348 | buildSettings = { 349 | ALWAYS_SEARCH_USER_PATHS = NO; 350 | CLANG_ANALYZER_NONNULL = YES; 351 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 352 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 353 | CLANG_CXX_LIBRARY = "libc++"; 354 | CLANG_ENABLE_MODULES = YES; 355 | CLANG_ENABLE_OBJC_ARC = YES; 356 | CLANG_ENABLE_OBJC_WEAK = YES; 357 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 358 | CLANG_WARN_BOOL_CONVERSION = YES; 359 | CLANG_WARN_COMMA = YES; 360 | CLANG_WARN_CONSTANT_CONVERSION = YES; 361 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 362 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 363 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 364 | CLANG_WARN_EMPTY_BODY = YES; 365 | CLANG_WARN_ENUM_CONVERSION = YES; 366 | CLANG_WARN_INFINITE_RECURSION = YES; 367 | CLANG_WARN_INT_CONVERSION = YES; 368 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 369 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 370 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 371 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 372 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 373 | CLANG_WARN_STRICT_PROTOTYPES = YES; 374 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 375 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 376 | CLANG_WARN_UNREACHABLE_CODE = YES; 377 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 378 | CODE_SIGN_IDENTITY = "iPhone Developer"; 379 | COPY_PHASE_STRIP = NO; 380 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 381 | ENABLE_NS_ASSERTIONS = NO; 382 | ENABLE_STRICT_OBJC_MSGSEND = YES; 383 | GCC_C_LANGUAGE_STANDARD = gnu11; 384 | GCC_NO_COMMON_BLOCKS = YES; 385 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 386 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 387 | GCC_WARN_UNDECLARED_SELECTOR = YES; 388 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 389 | GCC_WARN_UNUSED_FUNCTION = YES; 390 | GCC_WARN_UNUSED_VARIABLE = YES; 391 | IPHONEOS_DEPLOYMENT_TARGET = 12.1; 392 | MTL_ENABLE_DEBUG_INFO = NO; 393 | MTL_FAST_MATH = YES; 394 | SDKROOT = iphoneos; 395 | SWIFT_COMPILATION_MODE = wholemodule; 396 | SWIFT_OPTIMIZATION_LEVEL = "-O"; 397 | VALIDATE_PRODUCT = YES; 398 | }; 399 | name = Release; 400 | }; 401 | E76CEAF9219404ED0080F456 /* Debug */ = { 402 | isa = 
XCBuildConfiguration; 403 | buildSettings = { 404 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 405 | CODE_SIGN_STYLE = Automatic; 406 | DEVELOPMENT_TEAM = F3X6AR78KA; 407 | INFOPLIST_FILE = ZHCamera/Info.plist; 408 | IPHONEOS_DEPLOYMENT_TARGET = 9.0; 409 | LD_RUNPATH_SEARCH_PATHS = ( 410 | "$(inherited)", 411 | "@executable_path/Frameworks", 412 | ); 413 | PRODUCT_BUNDLE_IDENTIFIER = xuzhenhao.ZHCamera; 414 | PRODUCT_NAME = "$(TARGET_NAME)"; 415 | SWIFT_VERSION = 4.2; 416 | TARGETED_DEVICE_FAMILY = 1; 417 | }; 418 | name = Debug; 419 | }; 420 | E76CEAFA219404ED0080F456 /* Release */ = { 421 | isa = XCBuildConfiguration; 422 | buildSettings = { 423 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 424 | CODE_SIGN_STYLE = Automatic; 425 | DEVELOPMENT_TEAM = F3X6AR78KA; 426 | INFOPLIST_FILE = ZHCamera/Info.plist; 427 | IPHONEOS_DEPLOYMENT_TARGET = 9.0; 428 | LD_RUNPATH_SEARCH_PATHS = ( 429 | "$(inherited)", 430 | "@executable_path/Frameworks", 431 | ); 432 | PRODUCT_BUNDLE_IDENTIFIER = xuzhenhao.ZHCamera; 433 | PRODUCT_NAME = "$(TARGET_NAME)"; 434 | SWIFT_VERSION = 4.2; 435 | TARGETED_DEVICE_FAMILY = 1; 436 | }; 437 | name = Release; 438 | }; 439 | /* End XCBuildConfiguration section */ 440 | 441 | /* Begin XCConfigurationList section */ 442 | E76CEAE1219404EB0080F456 /* Build configuration list for PBXProject "ZHCamera" */ = { 443 | isa = XCConfigurationList; 444 | buildConfigurations = ( 445 | E76CEAF6219404ED0080F456 /* Debug */, 446 | E76CEAF7219404ED0080F456 /* Release */, 447 | ); 448 | defaultConfigurationIsVisible = 0; 449 | defaultConfigurationName = Release; 450 | }; 451 | E76CEAF8219404ED0080F456 /* Build configuration list for PBXNativeTarget "ZHCamera" */ = { 452 | isa = XCConfigurationList; 453 | buildConfigurations = ( 454 | E76CEAF9219404ED0080F456 /* Debug */, 455 | E76CEAFA219404ED0080F456 /* Release */, 456 | ); 457 | defaultConfigurationIsVisible = 0; 458 | defaultConfigurationName = Release; 459 | }; 460 | /* End XCConfigurationList section */ 461 | }; 462 | rootObject = E76CEADE219404EB0080F456 /* Project object */; 463 | } 464 | -------------------------------------------------------------------------------- /ZHCamera.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /ZHCamera.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /ZHCamera.xcodeproj/xcuserdata/xuzhenhao.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | ZHCamera.xcscheme_^#shared#^_ 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /ZHCamera/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.swift 3 | // ZHCamera 4 | // 5 | // Created by xuzhenhao on 2018/11/8. 6 | // Copyright © 2018年 xuzhenhao. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import AVFoundation 11 | 12 | @UIApplicationMain 13 | class AppDelegate: UIResponder, UIApplicationDelegate { 14 | 15 | var window: UIWindow? 
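// NOTE: application(_:didFinishLaunchingWithOptions:) below sets the shared AVAudioSession mode to .videoRecording so microphone capture is tuned for video recording; a failure in setMode is silently ignored by the empty catch block.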
16 | 17 | 18 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { 19 | // Override point for customization after application launch. 20 | do { 21 | try AVAudioSession.sharedInstance().setMode(.videoRecording) 22 | } catch { 23 | 24 | } 25 | 26 | 27 | return true 28 | } 29 | 30 | func applicationWillResignActive(_ application: UIApplication) { 31 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 32 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 33 | } 34 | 35 | func applicationDidEnterBackground(_ application: UIApplication) { 36 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 37 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 38 | } 39 | 40 | func applicationWillEnterForeground(_ application: UIApplication) { 41 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 42 | } 43 | 44 | func applicationDidBecomeActive(_ application: UIApplication) { 45 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 46 | } 47 | 48 | func applicationWillTerminate(_ application: UIApplication) { 49 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 
50 | } 51 | 52 | 53 | } 54 | 55 | -------------------------------------------------------------------------------- /ZHCamera/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | }, 88 | { 89 | "idiom" : "ios-marketing", 90 | "size" : "1024x1024", 91 | "scale" : "1x" 92 | } 93 | ], 94 | "info" : { 95 | "version" : 1, 96 | "author" : "xcode" 97 | } 98 | } -------------------------------------------------------------------------------- /ZHCamera/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /ZHCamera/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /ZHCamera/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 76 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | -------------------------------------------------------------------------------- /ZHCamera/Capture/Controller/CameraViewController.swift: 
-------------------------------------------------------------------------------- 1 | // 2 | // CameraViewController.swift 3 | // ZHCamera 4 | // 5 | // Created by xuzhenhao on 2018/11/8. 6 | // Copyright © 2018年 xuzhenhao. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import AVFoundation 11 | import Photos 12 | 13 | class CameraViewController: UIViewController { 14 | 15 | let captureManager = CaptureManager() 16 | var videoWriteManager : VideoWriteManager? 17 | var isRecording = false 18 | 19 | @IBOutlet weak var preview: CapturePreview! 20 | let filters = ["CIPhotoEffectChrome", 21 | "CIPhotoEffectFade", 22 | "CIPhotoEffectInstant", 23 | "CIPhotoEffectMono", 24 | "CIPhotoEffectNoir", 25 | "CIPhotoEffectProcess", 26 | "CIPhotoEffectTransfer"] 27 | var currentFilter: String = "CIPhotoEffectTonal" 28 | 29 | class func cameraViewController() -> UIViewController { 30 | return UIStoryboard.init(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "CameraViewController") 31 | } 32 | 33 | override func viewDidLoad() { 34 | super.viewDidLoad() 35 | 36 | setupCaptureManager() 37 | } 38 | func setupCaptureManager() { 39 | captureManager.setupSession { (isSuccess, error) in 40 | if isSuccess { 41 | captureManager.startSession() 42 | } 43 | } 44 | captureManager.videoDataCallback = { [weak self] (sampleBuffer) in 45 | guard let strongSelf = self,let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } 46 | //1. 处理图像数据,输出结果为CIImage,作为后续展示和写入的基础数据 47 | let ciImage = CIImage.init(cvImageBuffer: imageBuffer) 48 | //加滤镜 49 | let filter = CIFilter.init(name: strongSelf.currentFilter)! 50 | filter.setValue(ciImage, forKey: kCIInputImageKey) 51 | 52 | guard let finalImage = filter.outputImage else { 53 | return 54 | } 55 | //2. 用户界面展示 56 | let image = UIImage.init(ciImage: finalImage) 57 | DispatchQueue.main.async { 58 | strongSelf.preview.ciImage = image 59 | } 60 | //3. 保存写入文件 61 | strongSelf.videoWriteManager?.processImageData(CIImage: finalImage, atTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) 62 | } 63 | captureManager.audioDataCallback = {[weak self] sample in 64 | guard let strongSelf = self else { return } 65 | strongSelf.videoWriteManager?.processAudioData(CMSampleBuffer: sample) 66 | } 67 | 68 | } 69 | func setupMoiveWriter() { 70 | //输出视频的参数设置,如果要自定义视频分辨率,在此设置。否则可使用相应格式的推荐参数 71 | guard let videoSetings = self.captureManager.recommendedVideoSettingsForAssetWriter(writingTo: .mp4), 72 | let audioSetings = self.captureManager.recommendedAudioSettingsForAssetWriter(writingTo: .mp4) 73 | else{ 74 | return 75 | } 76 | videoWriteManager = VideoWriteManager(videoSetting: videoSetings, audioSetting: audioSetings, fileType: .mp4) 77 | //录制成功回调 78 | videoWriteManager?.finishWriteCallback = { [weak self] url in 79 | guard let strongSelf = self else {return} 80 | strongSelf.saveToAlbum(atURL: url, complete: { (success) in 81 | DispatchQueue.main.async { 82 | strongSelf.showSaveResult(isSuccess: success) 83 | } 84 | 85 | }) 86 | } 87 | } 88 | 89 | 90 | @IBAction func didClickChangeFilter(_ sender: UIButton) { 91 | self.currentFilter = self.filters.randomElement()! 
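// The newly picked filter is not applied here: videoDataCallback reads currentFilter for every frame, so both the live preview and the file being written switch to the new filter on the next frame.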
92 | } 93 | 94 | @IBAction func didClickCaptureButton(_ sender: UIButton) { 95 | //未开始录制,开始录制 96 | if !isRecording { 97 | //连续拍摄多段时,每次都需要重新生成一个实例。之前的writer会因为已经完成写入,无法再次使用 98 | setupMoiveWriter() 99 | videoWriteManager?.startWriting() 100 | isRecording = true 101 | sender.isSelected = true 102 | }else { 103 | //录制中,停止录制 104 | videoWriteManager?.stopWriting() 105 | isRecording = false 106 | sender.isSelected = false 107 | } 108 | } 109 | func saveToAlbum(atURL url: URL,complete: @escaping ((Bool) -> Void)){ 110 | 111 | PHPhotoLibrary.shared().performChanges({ 112 | PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url) 113 | }, completionHandler: { (success, error) in 114 | complete(success) 115 | }) 116 | } 117 | func showSaveResult(isSuccess: Bool) { 118 | let message = isSuccess ? "保存成功" : "保存失败" 119 | 120 | let alertController = UIAlertController.init(title: nil, message: message, preferredStyle: .alert) 121 | alertController.addAction(UIAlertAction.init(title: "确定", style: .default, handler: { (action) in 122 | 123 | })) 124 | self .present(alertController, animated: true, completion: nil) 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /ZHCamera/Capture/Manager/CaptureManager.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CaptureManager.swift 3 | // ZHCamera 4 | // 5 | // Created by xuzhenhao on 2018/11/8. 6 | // Copyright © 2018年 xuzhenhao. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import AVFoundation 11 | 12 | /// 录制管理类,管理录制输入、录制过程,输出SampleBuffer数据 13 | class CaptureManager: NSObject { 14 | let captureSession = AVCaptureSession() 15 | //当前正在使用的输入设备(摄像头) 16 | weak var activeCamera : AVCaptureDeviceInput? 17 | //视频数据处理队列 18 | let videoDataQueue = DispatchQueue(label: "com.xzh.videoDataCaptureQueue") 19 | //音频数据处理队列 20 | let audioDataQueue = DispatchQueue(label: "com.xzh.audioDataCaptureQueue") 21 | //捕捉的视频数据输出对象 22 | let videoDataOutput = AVCaptureVideoDataOutput() 23 | //捕捉的音频数据输出对象 24 | let audioDataOutput = AVCaptureAudioDataOutput() 25 | //视频数据回调 26 | var videoDataCallback: ((CMSampleBuffer) -> Void)? 27 | //音频数据回调 28 | var audioDataCallback: ((CMSampleBuffer) -> Void)? 29 | 30 | // MARK: - SessionConfig 31 | 32 | typealias SetupCompletionHandler = ((Bool,Error?) 
-> Void) 33 | public func setupSession(completion:SetupCompletionHandler){ 34 | captureSession.sessionPreset = .hd1920x1080 35 | setupSessionInput { (isSuccess, error) in 36 | if !isSuccess { 37 | completion(isSuccess,error); 38 | return; 39 | } 40 | } 41 | setupSessionOutput { (isSuccess, error) in 42 | completion(isSuccess,error) 43 | } 44 | } 45 | private func setupSessionInput(completion:SetupCompletionHandler) { 46 | let deviceError = NSError.init( 47 | domain: "com.session.error", 48 | code: 0, 49 | userInfo: [NSLocalizedDescriptionKey:NSLocalizedString("配置录制设备出错", comment: "")]) 50 | 51 | //配置摄像头 52 | guard let videoDevice = AVCaptureDevice.default(for: .video) else { 53 | completion(false,deviceError) 54 | return 55 | } 56 | do { 57 | let videoInput = try AVCaptureDeviceInput.init(device: videoDevice) 58 | if captureSession.canAddInput(videoInput) { 59 | captureSession.addInput(videoInput) 60 | activeCamera = videoInput 61 | } 62 | } catch { 63 | completion(false,deviceError) 64 | return 65 | } 66 | 67 | //配置麦克风 68 | guard let audioDevice = AVCaptureDevice.default(for: .audio) else { 69 | completion(false,deviceError) 70 | return 71 | } 72 | do { 73 | let audioInput = try AVCaptureDeviceInput.init(device: audioDevice) 74 | if captureSession.canAddInput(audioInput) { 75 | captureSession.addInput(audioInput) 76 | } 77 | } catch { 78 | completion(false,deviceError) 79 | return 80 | } 81 | completion(true,nil) 82 | } 83 | private func setupSessionOutput(completion: SetupCompletionHandler){ 84 | let outputError = NSError.init( 85 | domain: "com.session.error", 86 | code: 0, 87 | userInfo: [NSLocalizedDescriptionKey:NSLocalizedString("输出设置出错", comment: "")]) 88 | 89 | //摄像头采集的yuv是压缩的视频信号,要还原成可以处理的数字信号 90 | let outputSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA] 91 | videoDataOutput.videoSettings = outputSettings 92 | //不丢弃迟到帧,但会增加内存开销 93 | videoDataOutput.alwaysDiscardsLateVideoFrames = false 94 | videoDataOutput.setSampleBufferDelegate(self, queue: videoDataQueue) 95 | if captureSession.canAddOutput(videoDataOutput){ 96 | captureSession.addOutput(videoDataOutput) 97 | }else{ 98 | completion(false,outputError) 99 | return 100 | } 101 | 102 | audioDataOutput.setSampleBufferDelegate(self, queue: audioDataQueue) 103 | if captureSession.canAddOutput(audioDataOutput) { 104 | captureSession.addOutput(audioDataOutput) 105 | }else{ 106 | completion(false,outputError) 107 | return 108 | } 109 | 110 | completion(true,nil) 111 | } 112 | // MARK: - Session operation 113 | public func startSession() { 114 | //防止阻塞主线程 115 | videoDataQueue.async { 116 | if !self.captureSession.isRunning { 117 | self.captureSession.startRunning() 118 | } 119 | } 120 | } 121 | public func stopSession() { 122 | videoDataQueue.async { 123 | if self.captureSession.isRunning { 124 | self.captureSession.stopRunning() 125 | } 126 | } 127 | } 128 | // MARK: - utils 129 | public func recommendedVideoSettingsForAssetWriter(writingTo outputFileType: AVFileType) -> [String: Any]? { 130 | return videoDataOutput.recommendedVideoSettingsForAssetWriter(writingTo: outputFileType) 131 | } 132 | public func recommendedAudioSettingsForAssetWriter(writingTo outputFileType: AVFileType) -> [String: Any]? { 133 | return audioDataOutput.recommendedAudioSettingsForAssetWriter(writingTo: outputFileType) as? 
[String: Any] 134 | } 135 | } 136 | 137 | extension CaptureManager : AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate{ 138 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 139 | if output == videoDataOutput { 140 | //数据处理 141 | guard let callback = videoDataCallback else { 142 | return; 143 | } 144 | callback(sampleBuffer) 145 | 146 | }else{ 147 | guard let callback = audioDataCallback else { 148 | return; 149 | } 150 | callback(sampleBuffer) 151 | } 152 | } 153 | func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 154 | 155 | } 156 | } 157 | -------------------------------------------------------------------------------- /ZHCamera/Capture/Manager/VideoWriteManager.swift: -------------------------------------------------------------------------------- 1 | // 2 | // VideoWriteManager.swift 3 | // ZHCamera 4 | // 5 | // Created by xuzhenhao on 2018/11/8. 6 | // Copyright © 2018年 xuzhenhao. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import AVFoundation 11 | 12 | class VideoWriteManager: NSObject { 13 | var videoSettings: [String:Any] 14 | var audioSettings: [String:Any] 15 | let fileType: AVFileType 16 | let assetWriter: AVAssetWriter 17 | let videoInput: AVAssetWriterInput 18 | let audioInput: AVAssetWriterInput 19 | let pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor 20 | let processQueue = DispatchQueue(label: "com.xzh.vieoWriteQueue") 21 | let ciContext: CIContext = { 22 | let eaglContext = EAGLContext.init(api: .openGLES2)! 23 | //因为需要实时处理图像,通过EAGL上下文来生成CIContext对象。此时,渲染的对象被保存在GPU,并且不会被拷贝到CPU内存。 24 | return CIContext.init(eaglContext: eaglContext, options: [CIContextOption.workingColorSpace: NSNull()]) 25 | 26 | }() 27 | let colorSpace = CGColorSpaceCreateDeviceRGB() 28 | //是否正在写入 29 | var isWriting = false 30 | //标记接下来接收到的作为第一帧数据 31 | var firstSampleFlag = true 32 | var finishWriteCallback: ((URL) -> Void)? 
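// Write lifecycle: startWriting() only arms isWriting; the first video frame handed to processImageData triggers assetWriter.startWriting() and startSession(atSourceTime:), and stopWriting() finishes the file and delivers the output URL to finishWriteCallback on the main queue.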
33 | 34 | init(videoSetting: [String:Any],audioSetting: [String:Any],fileType: AVFileType) { 35 | 36 | self.videoSettings = videoSetting 37 | self.audioSettings = audioSetting 38 | self.fileType = fileType 39 | //如果要修改输出视频的宽高等,可修改videoInput配置中的AVVideoHeightKey,AVVideoWidthKey 40 | self.videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: self.videoSettings) 41 | self.audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSetting) 42 | //针对实时性进行优化 43 | self.videoInput.expectsMediaDataInRealTime = true 44 | self.audioInput.expectsMediaDataInRealTime = true 45 | //手机默认是头部向左拍摄的,需要旋转调整 46 | self.videoInput.transform = VideoWriteManager.fixTransform(deviceOrientation: UIDevice.current.orientation) 47 | //每个AssetWriterInput都期望接收CMSampelBufferRef格式的数据,如果是CVPixelBuffer格式的数据,就需要通过adaptor来格式化后再写入 48 | let attributes = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA, 49 | kCVPixelBufferWidthKey: videoSetting[AVVideoWidthKey]!, 50 | kCVPixelBufferHeightKey: videoSetting[AVVideoHeightKey]!, 51 | kCVPixelFormatOpenGLCompatibility: true] as [String : Any] 52 | self.pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.videoInput, sourcePixelBufferAttributes: attributes ) 53 | 54 | let outputURL = VideoWriteManager.createTemplateFileURL() 55 | do { 56 | self.assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: fileType) 57 | if self.assetWriter.canAdd(videoInput) { 58 | self.assetWriter.add(videoInput) 59 | } 60 | if self.assetWriter.canAdd(audioInput) { 61 | self.assetWriter.add(audioInput) 62 | } 63 | } catch { 64 | fatalError() 65 | } 66 | 67 | super.init() 68 | } 69 | 70 | //MARK: - Operation 71 | public func startWriting() { 72 | processQueue.async { 73 | self.isWriting = true 74 | } 75 | } 76 | public func stopWriting() { 77 | isWriting = false 78 | processQueue.async { 79 | self.assetWriter.finishWriting(completionHandler: { 80 | if self.assetWriter.status.rawValue == 2 { 81 | DispatchQueue.main.async { 82 | guard let callback = self.finishWriteCallback else { 83 | return 84 | } 85 | callback(self.assetWriter.outputURL) 86 | } 87 | } 88 | }) 89 | } 90 | } 91 | public func processImageData(CIImage image: CIImage,atTime time: CMTime) { 92 | guard isWriting != false else { return } 93 | 94 | if firstSampleFlag { 95 | //收到第一帧视频数据,开始写入 96 | let result = assetWriter.startWriting() 97 | guard result != false else { 98 | print("开启录制失败") 99 | return 100 | } 101 | assetWriter.startSession(atSourceTime: time) 102 | firstSampleFlag = false 103 | } 104 | 105 | var outputRenderBuffer: CVPixelBuffer? 
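// Obtain a reusable pixel buffer from the adaptor's pool, render the filtered CIImage into it with the GPU-backed CIContext, then append it through pixelBufferAdaptor using the sample's original presentation timestamp.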
106 | guard let pixelBufferPool = pixelBufferAdaptor.pixelBufferPool else { 107 | return 108 | } 109 | let result = CVPixelBufferPoolCreatePixelBuffer(nil, pixelBufferPool, &outputRenderBuffer) 110 | if result != kCVReturnSuccess { 111 | return 112 | } 113 | ciContext.render(image, to: outputRenderBuffer!, bounds: image.extent, colorSpace: colorSpace) 114 | 115 | if videoInput.isReadyForMoreMediaData { 116 | let result = pixelBufferAdaptor.append(outputRenderBuffer!, withPresentationTime: time) 117 | if !result { 118 | print("拼接视频数据失败") 119 | } 120 | } 121 | 122 | } 123 | public func processAudioData(CMSampleBuffer buffer: CMSampleBuffer) { 124 | guard firstSampleFlag == false else { 125 | return 126 | } 127 | if audioInput.isReadyForMoreMediaData { 128 | let result = audioInput.append(buffer) 129 | if !result { 130 | print("拼接音频数据失败") 131 | } 132 | } 133 | } 134 | 135 | //MARK: - utils 136 | private class func createTemplateFileURL() -> URL { 137 | 138 | NSHomeDirectory() 139 | let path = NSTemporaryDirectory() + "writeTemp.mp4" 140 | let fileURL = URL(fileURLWithPath: path) 141 | if FileManager.default.fileExists(atPath: fileURL.path) { 142 | do { try FileManager.default.removeItem(at: fileURL) } catch { 143 | 144 | } 145 | } 146 | return fileURL 147 | } 148 | private class func fixTransform(deviceOrientation: UIDeviceOrientation) -> CGAffineTransform { 149 | let orientation = deviceOrientation == .unknown ? .portrait : deviceOrientation 150 | var result: CGAffineTransform 151 | 152 | switch orientation { 153 | case .landscapeRight: 154 | result = CGAffineTransform(rotationAngle: CGFloat(Double.pi)) 155 | case .portraitUpsideDown: 156 | result = CGAffineTransform(rotationAngle: CGFloat(Double.pi / 2 * 3)) 157 | case .portrait,.faceUp,.faceDown: 158 | result = CGAffineTransform(rotationAngle: CGFloat(Double.pi / 2)) 159 | default: 160 | result = CGAffineTransform.identity 161 | } 162 | return result; 163 | } 164 | } 165 | -------------------------------------------------------------------------------- /ZHCamera/Capture/View/CapturePreview.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CapturePreview.swift 3 | // ZHCamera 4 | // 5 | // Created by xuzhenhao on 2018/11/8. 6 | // Copyright © 2018年 xuzhenhao. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | /// 录制时的用户预览界面 12 | class CapturePreview: UIView { 13 | 14 | var image:Data? 15 | var ciImage: UIImage?{ 16 | didSet{ 17 | guard let image = ciImage else { 18 | return 19 | } 20 | imageView.image = image 21 | imageView.frame = self.bounds 22 | } 23 | } 24 | let imageView: UIImageView = { 25 | let view = UIImageView() 26 | view.contentMode = .scaleAspectFit 27 | view.transform = CGAffineTransform(rotationAngle: CGFloat(Double.pi / 2)) 28 | return view 29 | }() 30 | override init(frame: CGRect) { 31 | super.init(frame: frame) 32 | } 33 | 34 | required init?(coder aDecoder: NSCoder) { 35 | super.init(coder: aDecoder) 36 | self.addSubview(imageView) 37 | } 38 | 39 | } 40 | -------------------------------------------------------------------------------- /ZHCamera/Composition/Controller/CompositionViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CompositionViewController.swift 3 | // ZHCamera 4 | // 5 | // Created by xuzhenhao on 2018/11/9. 6 | // Copyright © 2018年 xuzhenhao. All rights reserved. 
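//  Builds an A/B-track AVMutableComposition from the bundled clips, applies dissolve/push transitions through AVMutableVideoComposition, overlays an animated CALayer, and exports the result with AVAssetExportSession.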
7 | // 8 | 9 | import UIKit 10 | import AVFoundation 11 | import Photos 12 | 13 | enum TransitionType { 14 | case Dissolve//溶解效果 15 | case Push 16 | } 17 | 18 | class CompositionViewController: UIViewController { 19 | var videos: [AVAsset] = [] 20 | let composition = AVMutableComposition() 21 | var videoComposition: AVMutableVideoComposition! 22 | var overLayer: CALayer? 23 | 24 | class func compositionViewController() -> UIViewController { 25 | return UIStoryboard.init(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "CompositionViewController") 26 | } 27 | override func viewDidLoad() { 28 | super.viewDidLoad() 29 | //加载bundle中的视频 30 | prepareResource() 31 | //构建视频轨道 32 | buildCompositionVideoTracks() 33 | //构建音频轨道 34 | buildCompositionAudioTracks() 35 | //设置视频效果 36 | buildVideoComposition() 37 | //添加贴纸效果 38 | buildOverLayer() 39 | export() 40 | } 41 | 42 | func prepareResource() { 43 | guard let urls = Bundle.main.urls(forResourcesWithExtension: ".mp4", subdirectory: nil) else { 44 | return 45 | } 46 | for url in urls { 47 | let asset = AVAsset(url: url) 48 | videos.append(asset) 49 | } 50 | } 51 | // MARK: - 编辑视频 52 | func buildCompositionVideoTracks() { 53 | //使用invalid,系统会自动分配一个有效的trackId 54 | let trackId = kCMPersistentTrackID_Invalid 55 | //创建AB两条视频轨道,视频片段交叉插入到轨道中,通过对两条轨道的叠加编辑各种效果。如0-5秒内,A轨道内容alpha逐渐到0,B轨道内容alpha逐渐到1 56 | guard let trackA = composition.addMutableTrack(withMediaType: .video, preferredTrackID: trackId) else { 57 | return 58 | } 59 | guard let trackB = composition.addMutableTrack(withMediaType: .video, preferredTrackID: trackId) else { 60 | return 61 | } 62 | let videoTracks = [trackA,trackB] 63 | 64 | //视频片段插入时间轴时的起始点 65 | var cursorTime = CMTime.zero 66 | //转场动画时间 67 | let transitionDuration = CMTime(value: 2, timescale: 1) 68 | for (index,value) in videos.enumerated() { 69 | //交叉循环A,B轨道 70 | let trackIndex = index % 2 71 | let currentTrack = videoTracks[trackIndex] 72 | //获取视频资源中的视频轨道 73 | guard let assetTrack = value.tracks(withMediaType: .video).first else { 74 | continue 75 | } 76 | do { 77 | //插入提取的视频轨道到 空白(编辑)轨道的指定位置中 78 | try currentTrack.insertTimeRange(CMTimeRange(start: .zero, duration: value.duration), of: assetTrack, at: cursorTime) 79 | //光标移动到视频末尾处,以便插入下一段视频 80 | cursorTime = CMTimeAdd(cursorTime, value.duration) 81 | //光标回退转场动画时长的距离,这一段前后视频重叠部分组合成转场动画 82 | cursorTime = CMTimeSubtract(cursorTime, transitionDuration) 83 | } catch { 84 | 85 | } 86 | } 87 | } 88 | func buildCompositionAudioTracks() { 89 | let trackId = kCMPersistentTrackID_Invalid 90 | guard let trackAudio = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: trackId) else { 91 | return 92 | } 93 | var cursorTime = CMTime.zero 94 | for (_,value) in videos.enumerated() { 95 | //获取视频资源中的音频轨道 96 | guard let assetTrack = value.tracks(withMediaType: .audio).first else { 97 | continue 98 | } 99 | do { 100 | try trackAudio.insertTimeRange(CMTimeRange(start: .zero, duration: value.duration), of: assetTrack, at: cursorTime) 101 | cursorTime = CMTimeAdd(cursorTime, value.duration) 102 | } catch { 103 | 104 | } 105 | } 106 | } 107 | 108 | /// 设置videoComposition来描述A、B轨道该如何显示 109 | func buildVideoComposition() { 110 | //创建默认配置的videoComposition 111 | let videoComposition = AVMutableVideoComposition.init(propertiesOf: composition) 112 | self.videoComposition = videoComposition 113 | filterTransitionInstructions(of: videoComposition) 114 | } 115 | /// 过滤出转场动画指令 116 | func filterTransitionInstructions(of videoCompostion: AVMutableVideoComposition) -> Void { 117 | let 
instructions = videoCompostion.instructions as! [AVMutableVideoCompositionInstruction] 118 | for (index,instruct) in instructions.enumerated() { 119 | //非转场动画区域只有单轨道(另一个的空的),只有两个轨道重叠的情况是我们要处理的转场区域 120 | guard instruct.layerInstructions.count > 1 else { 121 | continue 122 | } 123 | var transitionType: TransitionType 124 | //需要判断转场动画是从A轨道到B轨道,还是B-A 125 | var fromLayerInstruction: AVMutableVideoCompositionLayerInstruction 126 | var toLayerInstruction: AVMutableVideoCompositionLayerInstruction 127 | //获取前一段画面的轨道id 128 | let beforeTrackId = instructions[index - 1].layerInstructions[0].trackID; 129 | //跟前一段画面同一轨道的为转场起点,另一轨道为终点 130 | let tempTrackId = instruct.layerInstructions[0].trackID 131 | if beforeTrackId == tempTrackId { 132 | fromLayerInstruction = instruct.layerInstructions[0] as! AVMutableVideoCompositionLayerInstruction 133 | toLayerInstruction = instruct.layerInstructions[1] as! AVMutableVideoCompositionLayerInstruction 134 | transitionType = TransitionType.Dissolve 135 | }else{ 136 | fromLayerInstruction = instruct.layerInstructions[1] as! AVMutableVideoCompositionLayerInstruction 137 | toLayerInstruction = instruct.layerInstructions[0] as! AVMutableVideoCompositionLayerInstruction 138 | transitionType = TransitionType.Push 139 | } 140 | 141 | setupTransition(for: instruct, fromLayer: fromLayerInstruction, toLayer: toLayerInstruction,type: transitionType) 142 | } 143 | } 144 | /// 设置转场动画 145 | func setupTransition(for instruction: AVMutableVideoCompositionInstruction, fromLayer: AVMutableVideoCompositionLayerInstruction, toLayer: AVMutableVideoCompositionLayerInstruction ,type: TransitionType) { 146 | let identityTransform = CGAffineTransform.identity 147 | let timeRange = instruction.timeRange 148 | let videoWidth = self.videoComposition.renderSize.width 149 | if type == TransitionType.Push{ 150 | let fromEndTranform = CGAffineTransform(translationX: -videoWidth, y: 0) 151 | let toStartTranform = CGAffineTransform(translationX: videoWidth, y: 0) 152 | 153 | fromLayer.setTransformRamp(fromStart: identityTransform, toEnd: fromEndTranform, timeRange: timeRange) 154 | toLayer.setTransformRamp(fromStart: toStartTranform, toEnd: identityTransform, timeRange: timeRange) 155 | }else { 156 | fromLayer.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: timeRange) 157 | } 158 | 159 | //重新赋值 160 | instruction.layerInstructions = [fromLayer,toLayer] 161 | } 162 | func buildOverLayer() { 163 | let layer = CALayer() 164 | layer.frame = CGRect(x: 0, y: 0, width: 40, height: 40) 165 | layer.opacity = 0; 166 | layer.backgroundColor = UIColor.yellow.cgColor 167 | 168 | let fadeInFadeOutAni = CAKeyframeAnimation(keyPath: "opacity") 169 | fadeInFadeOutAni.values = [0.0,1.0,1.0,0.0] 170 | fadeInFadeOutAni.keyTimes = [0.0,0.25,0.75,1] 171 | //动画时间与时间轴时间绑定 172 | fadeInFadeOutAni.beginTime = CMTimeGetSeconds(CMTime(seconds: 3, preferredTimescale: 1)) 173 | fadeInFadeOutAni.duration = CMTimeGetSeconds(CMTime(seconds: 5, preferredTimescale: 1)) 174 | fadeInFadeOutAni.isRemovedOnCompletion = false 175 | 176 | layer.add(fadeInFadeOutAni, forKey: nil) 177 | overLayer = layer 178 | 179 | } 180 | // MARK: - 导出合成的视频 181 | func export(){ 182 | let session = AVAssetExportSession.init(asset: composition.copy() as! 
AVAsset, presetName: AVAssetExportPreset640x480) 183 | 184 | session?.outputURL = CompositionViewController.createTemplateFileURL() 185 | session?.outputFileType = AVFileType.mp4 186 | if overLayer != nil { 187 | let videoLayer = CALayer() 188 | videoLayer.frame = CGRect(x: 0, y: 0, width: 1280, height: 720) 189 | let animateLayer = CALayer() 190 | animateLayer.frame = CGRect(x: 0, y: 0, width: 1280, height: 720) 191 | //videoLayer必须在animateLayer层级中 192 | animateLayer.addSublayer(videoLayer) 193 | animateLayer.addSublayer(overLayer!) 194 | animateLayer.isGeometryFlipped = true 195 | 196 | let animateTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: animateLayer) 197 | videoComposition.animationTool = animateTool 198 | } 199 | session?.videoComposition = videoComposition 200 | 201 | session?.exportAsynchronously(completionHandler: {[weak self] in 202 | guard let strongSelf = self else {return} 203 | let status = session?.status 204 | if status == AVAssetExportSession.Status.completed { 205 | strongSelf.saveToAlbum(atURL: session!.outputURL!, complete: { (success) in 206 | DispatchQueue.main.async { 207 | strongSelf.showSaveResult(isSuccess: success) 208 | } 209 | }) 210 | } 211 | }) 212 | } 213 | // MARK: - utils 214 | private class func createTemplateFileURL() -> URL { 215 | 216 | NSHomeDirectory() 217 | let path = NSTemporaryDirectory() + "composition.mp4" 218 | let fileURL = URL(fileURLWithPath: path) 219 | if FileManager.default.fileExists(atPath: fileURL.path) { 220 | do { try FileManager.default.removeItem(at: fileURL) } catch { 221 | 222 | } 223 | } 224 | return fileURL 225 | } 226 | private func saveToAlbum(atURL url: URL,complete: @escaping ((Bool) -> Void)){ 227 | 228 | PHPhotoLibrary.shared().performChanges({ 229 | PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url) 230 | }, completionHandler: { (success, error) in 231 | complete(success) 232 | }) 233 | } 234 | private func showSaveResult(isSuccess: Bool) { 235 | let message = isSuccess ? 
"已保存到相册" : "保存失败" 236 | 237 | let alertController = UIAlertController.init(title: nil, message: message, preferredStyle: .alert) 238 | alertController.addAction(UIAlertAction.init(title: "确定", style: .default, handler: { (action) in 239 | })) 240 | self .present(alertController, animated: true, completion: nil) 241 | } 242 | } 243 | -------------------------------------------------------------------------------- /ZHCamera/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | NSCameraUsageDescription 24 | 25 | NSMicrophoneUsageDescription 26 | 27 | NSPhotoLibraryAddUsageDescription 28 | 29 | NSPhotoLibraryUsageDescription 30 | 31 | UILaunchStoryboardName 32 | LaunchScreen 33 | UIMainStoryboardFile 34 | Main 35 | UIRequiredDeviceCapabilities 36 | 37 | armv7 38 | 39 | UISupportedInterfaceOrientations 40 | 41 | UIInterfaceOrientationPortrait 42 | 43 | UISupportedInterfaceOrientations~ipad 44 | 45 | UIInterfaceOrientationPortrait 46 | UIInterfaceOrientationPortraitUpsideDown 47 | UIInterfaceOrientationLandscapeLeft 48 | UIInterfaceOrientationLandscapeRight 49 | 50 | 51 | 52 | -------------------------------------------------------------------------------- /ZHCamera/Resource/01_nebula.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xuzhenhao/LearningAVFoundation/18ddadc2f6a9e74e2f9538d473559592ffac7b09/ZHCamera/Resource/01_nebula.mp4 -------------------------------------------------------------------------------- /ZHCamera/Resource/02_blackhole.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xuzhenhao/LearningAVFoundation/18ddadc2f6a9e74e2f9538d473559592ffac7b09/ZHCamera/Resource/02_blackhole.mp4 -------------------------------------------------------------------------------- /ZHCamera/Resource/03_nebula.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xuzhenhao/LearningAVFoundation/18ddadc2f6a9e74e2f9538d473559592ffac7b09/ZHCamera/Resource/03_nebula.mp4 -------------------------------------------------------------------------------- /ZHCamera/Resource/04_quasar.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xuzhenhao/LearningAVFoundation/18ddadc2f6a9e74e2f9538d473559592ffac7b09/ZHCamera/Resource/04_quasar.mp4 -------------------------------------------------------------------------------- /ZHCamera/Resource/05_blackhole.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xuzhenhao/LearningAVFoundation/18ddadc2f6a9e74e2f9538d473559592ffac7b09/ZHCamera/Resource/05_blackhole.mp4 -------------------------------------------------------------------------------- /ZHCamera/ViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.swift 3 | // ZHCamera 4 | // 5 | // Created by xuzhenhao on 2018/11/8. 6 | // Copyright © 2018年 xuzhenhao. 
All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | class ViewController: UIViewController { 12 | 13 | @IBOutlet weak var tableView: UITableView! 14 | let items = ["拍摄+滤镜+导出", 15 | "视频片段合成+转场动画"] 16 | 17 | override func viewDidLoad() { 18 | super.viewDidLoad() 19 | 20 | } 21 | 22 | 23 | } 24 | 25 | extension ViewController: UITableViewDataSource,UITableViewDelegate{ 26 | func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int { 27 | return items.count 28 | } 29 | 30 | func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell { 31 | let cell = UITableViewCell(style: .default, reuseIdentifier: "entrance") 32 | cell.textLabel?.text = items[indexPath.row] 33 | return cell 34 | } 35 | func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) { 36 | tableView.deselectRow(at: indexPath, animated: true) 37 | 38 | let vc = viewController(at: indexPath) 39 | self.navigationController? 40 | .pushViewController(vc, animated: true) 41 | } 42 | 43 | func viewController(at indexPath:IndexPath) -> UIViewController { 44 | 45 | let index = indexPath.row 46 | if index == 0 { 47 | return CameraViewController.cameraViewController() 48 | }else { 49 | return CompositionViewController.compositionViewController() 50 | } 51 | } 52 | } 53 | --------------------------------------------------------------------------------