├── .DS_Store
├── CCCamera.xcodeproj
├── project.pbxproj
├── project.xcworkspace
│   ├── contents.xcworkspacedata
│   ├── xcshareddata
│   │   └── IDEWorkspaceChecks.plist
│   └── xcuserdata
│   │   ├── baidaojuhe.xcuserdatad
│   │   └── UserInterfaceState.xcuserstate
│   │   ├── cyd.xcuserdatad
│   │   └── UserInterfaceState.xcuserstate
│   │   └── wsk.xcuserdatad
│   │   └── UserInterfaceState.xcuserstate
└── xcuserdata
│   ├── baidaojuhe.xcuserdatad
│   ├── xcdebugger
│   │   └── Breakpoints_v2.xcbkptlist
│   └── xcschemes
│   │   ├── CCCamera.xcscheme
│   │   └── xcschememanagement.plist
│   ├── cyd.xcuserdatad
│   ├── xcdebugger
│   │   └── Breakpoints_v2.xcbkptlist
│   └── xcschemes
│   │   └── xcschememanagement.plist
│   └── wsk.xcuserdatad
│   ├── xcdebugger
│   └── Breakpoints_v2.xcbkptlist
│   └── xcschemes
│   ├── CCCamera.xcscheme
│   └── xcschememanagement.plist
├── CCCamera
├── AppDelegate.h
├── AppDelegate.m
├── Assets.xcassets
│   └── AppIcon.appiconset
│   │   ├── Contents.json
│   │   ├── IMG_0772.png
│   │   ├── Icon-120.png
│   │   ├── Icon-121.png
│   │   ├── Icon-180.png
│   │   ├── Icon-40.png
│   │   ├── Icon-58.png
│   │   ├── Icon-60.png
│   │   ├── Icon-80.png
│   │   └── Icon-87.png
├── Base.lproj
│   ├── LaunchScreen.storyboard
│   └── Main.storyboard
├── CCBaseNavigationController.h
├── CCBaseNavigationController.m
├── CCBaseViewController.h
├── CCBaseViewController.m
├── CCCamera-Bridging-Header.h
├── CCCamera.pch
├── CCCamera.xcdatamodeld
│   ├── .xccurrentversion
│   └── CCCamera.xcdatamodel
│   │   └── contents
├── CCCameraManager.h
├── CCCameraManager.m
├── CCCameraView.h
├── CCCameraView.m
├── CCCameraViewController.h
├── CCCameraViewController.m
├── CCFilterRenderer.swift
├── CCFilterViewController.swift
├── CCGLRenderCameraViewController.h
├── CCGLRenderCameraViewController.m
├── CCImagePreviewController.h
├── CCImagePreviewController.m
├── CCMotionManager.h
├── CCMotionManager.m
├── CCMovieManager.h
├── CCMovieManager.m
├── CCPhotoRenderer.swift
├── CCPreviewView.swift
├── CCTools.h
├── CCTools.m
├── CCVideoPreview.h
├── CCVideoPreview.m
├── Info.plist
├── PassThrough.metal
├── UIView+CCAdditions.h
├── UIView+CCAdditions.m
├── UIView+CCHUD.h
├── UIView+CCHUD.m
├── ViewController.h
├── ViewController.m
└── main.m
├── CCCameraTests
├── CCCameraTests.m
└── Info.plist
├── CCCameraUITests
├── CCCameraUITests.m
└── Info.plist
└── README.md

/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cdcyd/CCCamera/846b57d972a938bc559cc86db959c39edde5c956/.DS_Store
--------------------------------------------------------------------------------
/CCCamera.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 46; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 0D2DF7CB2146017200F98BE0 /* CCPreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0D2DF7CA2146017200F98BE0 /* CCPreviewView.swift */; }; 11 | 0D2DF7CD2146145A00F98BE0 /* PassThrough.metal in Sources */ = {isa = PBXBuildFile; fileRef = 0D2DF7CC2146145A00F98BE0 /* PassThrough.metal */; }; 12 | 0D69AF61214211B20046CD43 /* CCFilterViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0D69AF60214211B20046CD43 /* CCFilterViewController.swift */; }; 13 | 0D69AF63214213DE0046CD43 /* CCFilterRenderer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0D69AF62214213DE0046CD43 /* CCFilterRenderer.swift */; }; 14 | 0D69AF65214215260046CD43 /* CCPhotoRenderer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0D69AF64214215260046CD43 /* CCPhotoRenderer.swift */; }; 15 | 0DB3C60821211EE500B54690 /* CCCameraManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 0DB3C60721211EE500B54690 /* CCCameraManager.m */; }; 16 | 0DB3C60E2121370000B54690 /* CCMovieManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 0DB3C60D2121370000B54690 /* CCMovieManager.m */; }; 17 | 7C4B0F341D73EC5D005935EE /* CCGLRenderCameraViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C4B0F331D73EC5D005935EE /* CCGLRenderCameraViewController.m */; }; 18 | 7C4B0F381D73F642005935EE /* CCMotionManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C4B0F371D73F642005935EE /* CCMotionManager.m */; }; 19 | 7C7E42BC1D6D943F0001AF0F /* UIView+CCHUD.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C7E42BB1D6D943F0001AF0F /* UIView+CCHUD.m */; }; 20 | 7C85FB771D6AAAE4002374C2 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C85FB761D6AAAE4002374C2 /* main.m */; }; 21 | 7C85FB7A1D6AAAE4002374C2 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C85FB791D6AAAE4002374C2 /* AppDelegate.m */; }; 22 | 7C85FB7D1D6AAAE4002374C2 /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C85FB7C1D6AAAE4002374C2 /* ViewController.m */; }; 23 | 7C85FB801D6AAAE4002374C2 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7C85FB7E1D6AAAE4002374C2 /* Main.storyboard */; }; 24 | 7C85FB831D6AAAE4002374C2 /* CCCamera.xcdatamodeld in Sources */ = {isa = PBXBuildFile; fileRef = 7C85FB811D6AAAE4002374C2 /* CCCamera.xcdatamodeld */; }; 25 | 7C85FB851D6AAAE4002374C2 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 7C85FB841D6AAAE4002374C2 /* Assets.xcassets */; }; 26 | 7C85FB881D6AAAE4002374C2 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7C85FB861D6AAAE4002374C2 /* LaunchScreen.storyboard */; }; 27 | 7C85FB931D6AAAE4002374C2 /* CCCameraTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C85FB921D6AAAE4002374C2 /* CCCameraTests.m */; }; 28 | 7C85FB9E1D6AAAE4002374C2 /* CCCameraUITests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C85FB9D1D6AAAE4002374C2 /* CCCameraUITests.m */; }; 29 | 7C85FBB81D6AABC4002374C2 /* CCBaseNavigationController.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C85FBB71D6AABC4002374C2 /* CCBaseNavigationController.m */; }; 30 | 7C85FBC31D6AACB9002374C2 /* UIView+CCAdditions.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C85FBC21D6AACB9002374C2 /* UIView+CCAdditions.m */; }; 31 | 7C85FBCB1D6AADBF002374C2 /* CCTools.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C85FBCA1D6AADBF002374C2 /* CCTools.m */; }; 32 | 7C85FBD21D6AAE49002374C2 /* 
CCCameraViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C85FBD11D6AAE49002374C2 /* CCCameraViewController.m */; }; 33 | 7C85FBD51D6AAE7B002374C2 /* CCImagePreviewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C85FBD41D6AAE7B002374C2 /* CCImagePreviewController.m */; }; 34 | 7C85FBD91D6AAECE002374C2 /* CCVideoPreview.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C85FBD81D6AAECE002374C2 /* CCVideoPreview.m */; }; 35 | 7C85FBDC1D6AB380002374C2 /* CCBaseViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 7C85FBDB1D6AB380002374C2 /* CCBaseViewController.m */; }; 36 | D95283E01F0CBFDF006EE61E /* CCCameraView.m in Sources */ = {isa = PBXBuildFile; fileRef = D95283DF1F0CBFDF006EE61E /* CCCameraView.m */; }; 37 | /* End PBXBuildFile section */ 38 | 39 | /* Begin PBXContainerItemProxy section */ 40 | 7C85FB8F1D6AAAE4002374C2 /* PBXContainerItemProxy */ = { 41 | isa = PBXContainerItemProxy; 42 | containerPortal = 7C85FB6A1D6AAAE4002374C2 /* Project object */; 43 | proxyType = 1; 44 | remoteGlobalIDString = 7C85FB711D6AAAE4002374C2; 45 | remoteInfo = CCCamera; 46 | }; 47 | 7C85FB9A1D6AAAE4002374C2 /* PBXContainerItemProxy */ = { 48 | isa = PBXContainerItemProxy; 49 | containerPortal = 7C85FB6A1D6AAAE4002374C2 /* Project object */; 50 | proxyType = 1; 51 | remoteGlobalIDString = 7C85FB711D6AAAE4002374C2; 52 | remoteInfo = CCCamera; 53 | }; 54 | /* End PBXContainerItemProxy section */ 55 | 56 | /* Begin PBXFileReference section */ 57 | 0D2DF7CA2146017200F98BE0 /* CCPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCPreviewView.swift; sourceTree = ""; }; 58 | 0D2DF7CC2146145A00F98BE0 /* PassThrough.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; path = PassThrough.metal; sourceTree = ""; }; 59 | 0D69AF5F214211B20046CD43 /* CCCamera-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "CCCamera-Bridging-Header.h"; sourceTree = ""; }; 60 | 0D69AF60214211B20046CD43 /* CCFilterViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCFilterViewController.swift; sourceTree = ""; }; 61 | 0D69AF62214213DE0046CD43 /* CCFilterRenderer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCFilterRenderer.swift; sourceTree = ""; }; 62 | 0D69AF64214215260046CD43 /* CCPhotoRenderer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CCPhotoRenderer.swift; sourceTree = ""; }; 63 | 0DB3C60621211EE500B54690 /* CCCameraManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CCCameraManager.h; sourceTree = ""; }; 64 | 0DB3C60721211EE500B54690 /* CCCameraManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CCCameraManager.m; sourceTree = ""; }; 65 | 0DB3C60C2121370000B54690 /* CCMovieManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CCMovieManager.h; sourceTree = ""; }; 66 | 0DB3C60D2121370000B54690 /* CCMovieManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CCMovieManager.m; sourceTree = ""; }; 67 | 7C4B0F321D73EC5D005935EE /* CCGLRenderCameraViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CCGLRenderCameraViewController.h; sourceTree = ""; }; 68 | 7C4B0F331D73EC5D005935EE /* CCGLRenderCameraViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType 
= sourcecode.c.objc; path = CCGLRenderCameraViewController.m; sourceTree = ""; }; 69 | 7C4B0F361D73F642005935EE /* CCMotionManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CCMotionManager.h; sourceTree = ""; }; 70 | 7C4B0F371D73F642005935EE /* CCMotionManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CCMotionManager.m; sourceTree = ""; }; 71 | 7C7E42BA1D6D943F0001AF0F /* UIView+CCHUD.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "UIView+CCHUD.h"; sourceTree = ""; }; 72 | 7C7E42BB1D6D943F0001AF0F /* UIView+CCHUD.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "UIView+CCHUD.m"; sourceTree = ""; }; 73 | 7C85FB721D6AAAE4002374C2 /* CCCamera.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = CCCamera.app; sourceTree = BUILT_PRODUCTS_DIR; }; 74 | 7C85FB761D6AAAE4002374C2 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 75 | 7C85FB781D6AAAE4002374C2 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 76 | 7C85FB791D6AAAE4002374C2 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 77 | 7C85FB7B1D6AAAE4002374C2 /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; }; 78 | 7C85FB7C1D6AAAE4002374C2 /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = ""; }; 79 | 7C85FB7F1D6AAAE4002374C2 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 80 | 7C85FB821D6AAAE4002374C2 /* CCCamera.xcdatamodel */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcdatamodel; path = CCCamera.xcdatamodel; sourceTree = ""; }; 81 | 7C85FB841D6AAAE4002374C2 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 82 | 7C85FB871D6AAAE4002374C2 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 83 | 7C85FB891D6AAAE4002374C2 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 84 | 7C85FB8E1D6AAAE4002374C2 /* CCCameraTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = CCCameraTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 85 | 7C85FB921D6AAAE4002374C2 /* CCCameraTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CCCameraTests.m; sourceTree = ""; }; 86 | 7C85FB941D6AAAE4002374C2 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 87 | 7C85FB991D6AAAE4002374C2 /* CCCameraUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = CCCameraUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 88 | 7C85FB9D1D6AAAE4002374C2 /* CCCameraUITests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CCCameraUITests.m; sourceTree = ""; }; 89 | 7C85FB9F1D6AAAE4002374C2 /* 
Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 90 | 7C85FBB61D6AABC4002374C2 /* CCBaseNavigationController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CCBaseNavigationController.h; sourceTree = ""; }; 91 | 7C85FBB71D6AABC4002374C2 /* CCBaseNavigationController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CCBaseNavigationController.m; sourceTree = ""; }; 92 | 7C85FBBC1D6AAC14002374C2 /* CCCamera.pch */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CCCamera.pch; sourceTree = ""; }; 93 | 7C85FBC11D6AACB9002374C2 /* UIView+CCAdditions.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "UIView+CCAdditions.h"; sourceTree = ""; }; 94 | 7C85FBC21D6AACB9002374C2 /* UIView+CCAdditions.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "UIView+CCAdditions.m"; sourceTree = ""; }; 95 | 7C85FBC91D6AADBF002374C2 /* CCTools.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CCTools.h; sourceTree = ""; }; 96 | 7C85FBCA1D6AADBF002374C2 /* CCTools.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CCTools.m; sourceTree = ""; }; 97 | 7C85FBD01D6AAE49002374C2 /* CCCameraViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CCCameraViewController.h; sourceTree = ""; }; 98 | 7C85FBD11D6AAE49002374C2 /* CCCameraViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CCCameraViewController.m; sourceTree = ""; }; 99 | 7C85FBD31D6AAE7B002374C2 /* CCImagePreviewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CCImagePreviewController.h; sourceTree = ""; }; 100 | 7C85FBD41D6AAE7B002374C2 /* CCImagePreviewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CCImagePreviewController.m; sourceTree = ""; }; 101 | 7C85FBD71D6AAECE002374C2 /* CCVideoPreview.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CCVideoPreview.h; sourceTree = ""; }; 102 | 7C85FBD81D6AAECE002374C2 /* CCVideoPreview.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CCVideoPreview.m; sourceTree = ""; }; 103 | 7C85FBDA1D6AB380002374C2 /* CCBaseViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CCBaseViewController.h; sourceTree = ""; }; 104 | 7C85FBDB1D6AB380002374C2 /* CCBaseViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CCBaseViewController.m; sourceTree = ""; }; 105 | D95283DE1F0CBFDF006EE61E /* CCCameraView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CCCameraView.h; sourceTree = ""; }; 106 | D95283DF1F0CBFDF006EE61E /* CCCameraView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CCCameraView.m; sourceTree = ""; }; 107 | /* End PBXFileReference section */ 108 | 109 | /* Begin PBXFrameworksBuildPhase section */ 110 | 7C85FB6F1D6AAAE4002374C2 /* Frameworks */ = { 111 | isa = PBXFrameworksBuildPhase; 112 | buildActionMask = 2147483647; 113 | files = ( 114 | ); 115 | runOnlyForDeploymentPostprocessing = 0; 116 | 
}; 117 | 7C85FB8B1D6AAAE4002374C2 /* Frameworks */ = { 118 | isa = PBXFrameworksBuildPhase; 119 | buildActionMask = 2147483647; 120 | files = ( 121 | ); 122 | runOnlyForDeploymentPostprocessing = 0; 123 | }; 124 | 7C85FB961D6AAAE4002374C2 /* Frameworks */ = { 125 | isa = PBXFrameworksBuildPhase; 126 | buildActionMask = 2147483647; 127 | files = ( 128 | ); 129 | runOnlyForDeploymentPostprocessing = 0; 130 | }; 131 | /* End PBXFrameworksBuildPhase section */ 132 | 133 | /* Begin PBXGroup section */ 134 | 0D69AF5D214210ED0046CD43 /* Filter */ = { 135 | isa = PBXGroup; 136 | children = ( 137 | 0D2DF7CC2146145A00F98BE0 /* PassThrough.metal */, 138 | 0D2DF7CA2146017200F98BE0 /* CCPreviewView.swift */, 139 | 0D69AF62214213DE0046CD43 /* CCFilterRenderer.swift */, 140 | 0D69AF64214215260046CD43 /* CCPhotoRenderer.swift */, 141 | 0D69AF60214211B20046CD43 /* CCFilterViewController.swift */, 142 | ); 143 | name = Filter; 144 | sourceTree = ""; 145 | }; 146 | 0D69AF5E2142111C0046CD43 /* AppDelegate */ = { 147 | isa = PBXGroup; 148 | children = ( 149 | 7C85FB781D6AAAE4002374C2 /* AppDelegate.h */, 150 | 7C85FB791D6AAAE4002374C2 /* AppDelegate.m */, 151 | ); 152 | name = AppDelegate; 153 | sourceTree = ""; 154 | }; 155 | 0DB3C60521211EBA00B54690 /* Manager */ = { 156 | isa = PBXGroup; 157 | children = ( 158 | 0DB3C60C2121370000B54690 /* CCMovieManager.h */, 159 | 0DB3C60D2121370000B54690 /* CCMovieManager.m */, 160 | 0DB3C60621211EE500B54690 /* CCCameraManager.h */, 161 | 0DB3C60721211EE500B54690 /* CCCameraManager.m */, 162 | ); 163 | name = Manager; 164 | sourceTree = ""; 165 | }; 166 | 7C4B0F311D73EC2F005935EE /* GLRender */ = { 167 | isa = PBXGroup; 168 | children = ( 169 | 7C4B0F321D73EC5D005935EE /* CCGLRenderCameraViewController.h */, 170 | 7C4B0F331D73EC5D005935EE /* CCGLRenderCameraViewController.m */, 171 | ); 172 | name = GLRender; 173 | sourceTree = ""; 174 | }; 175 | 7C85FB691D6AAAE4002374C2 = { 176 | isa = PBXGroup; 177 | children = ( 178 | 7C85FB741D6AAAE4002374C2 /* CCCamera */, 179 | 7C85FB911D6AAAE4002374C2 /* CCCameraTests */, 180 | 7C85FB9C1D6AAAE4002374C2 /* CCCameraUITests */, 181 | 7C85FB731D6AAAE4002374C2 /* Products */, 182 | ); 183 | sourceTree = ""; 184 | }; 185 | 7C85FB731D6AAAE4002374C2 /* Products */ = { 186 | isa = PBXGroup; 187 | children = ( 188 | 7C85FB721D6AAAE4002374C2 /* CCCamera.app */, 189 | 7C85FB8E1D6AAAE4002374C2 /* CCCameraTests.xctest */, 190 | 7C85FB991D6AAAE4002374C2 /* CCCameraUITests.xctest */, 191 | ); 192 | name = Products; 193 | sourceTree = ""; 194 | }; 195 | 7C85FB741D6AAAE4002374C2 /* CCCamera */ = { 196 | isa = PBXGroup; 197 | children = ( 198 | 7C85FBB01D6AAB55002374C2 /* Base */, 199 | 7C85FBC81D6AADAC002374C2 /* Manager */, 200 | 7C85FBBD1D6AAC7B002374C2 /* Categories */, 201 | 0D69AF5E2142111C0046CD43 /* AppDelegate */, 202 | 0D69AF5D214210ED0046CD43 /* Filter */, 203 | 7C85FBC71D6AAD61002374C2 /* Camera */, 204 | 7C4B0F311D73EC2F005935EE /* GLRender */, 205 | 7C85FB7B1D6AAAE4002374C2 /* ViewController.h */, 206 | 7C85FB7C1D6AAAE4002374C2 /* ViewController.m */, 207 | 7C85FB7E1D6AAAE4002374C2 /* Main.storyboard */, 208 | 7C85FB861D6AAAE4002374C2 /* LaunchScreen.storyboard */, 209 | 7C85FB841D6AAAE4002374C2 /* Assets.xcassets */, 210 | 7C85FB811D6AAAE4002374C2 /* CCCamera.xcdatamodeld */, 211 | 7C85FB751D6AAAE4002374C2 /* Supporting Files */, 212 | ); 213 | path = CCCamera; 214 | sourceTree = ""; 215 | }; 216 | 7C85FB751D6AAAE4002374C2 /* Supporting Files */ = { 217 | isa = PBXGroup; 218 | children = ( 219 | 7C85FB761D6AAAE4002374C2 /* main.m */, 
220 | 7C85FB891D6AAAE4002374C2 /* Info.plist */, 221 | 7C85FBBC1D6AAC14002374C2 /* CCCamera.pch */, 222 | 0D69AF5F214211B20046CD43 /* CCCamera-Bridging-Header.h */, 223 | ); 224 | name = "Supporting Files"; 225 | sourceTree = ""; 226 | }; 227 | 7C85FB911D6AAAE4002374C2 /* CCCameraTests */ = { 228 | isa = PBXGroup; 229 | children = ( 230 | 7C85FB921D6AAAE4002374C2 /* CCCameraTests.m */, 231 | 7C85FB941D6AAAE4002374C2 /* Info.plist */, 232 | ); 233 | path = CCCameraTests; 234 | sourceTree = ""; 235 | }; 236 | 7C85FB9C1D6AAAE4002374C2 /* CCCameraUITests */ = { 237 | isa = PBXGroup; 238 | children = ( 239 | 7C85FB9D1D6AAAE4002374C2 /* CCCameraUITests.m */, 240 | 7C85FB9F1D6AAAE4002374C2 /* Info.plist */, 241 | ); 242 | path = CCCameraUITests; 243 | sourceTree = ""; 244 | }; 245 | 7C85FBB01D6AAB55002374C2 /* Base */ = { 246 | isa = PBXGroup; 247 | children = ( 248 | 7C85FBB21D6AAB6D002374C2 /* ViewController */, 249 | 7C85FBB11D6AAB5C002374C2 /* NavigationController */, 250 | ); 251 | name = Base; 252 | sourceTree = ""; 253 | }; 254 | 7C85FBB11D6AAB5C002374C2 /* NavigationController */ = { 255 | isa = PBXGroup; 256 | children = ( 257 | 7C85FBB61D6AABC4002374C2 /* CCBaseNavigationController.h */, 258 | 7C85FBB71D6AABC4002374C2 /* CCBaseNavigationController.m */, 259 | ); 260 | name = NavigationController; 261 | sourceTree = ""; 262 | }; 263 | 7C85FBB21D6AAB6D002374C2 /* ViewController */ = { 264 | isa = PBXGroup; 265 | children = ( 266 | 7C85FBDA1D6AB380002374C2 /* CCBaseViewController.h */, 267 | 7C85FBDB1D6AB380002374C2 /* CCBaseViewController.m */, 268 | ); 269 | name = ViewController; 270 | sourceTree = ""; 271 | }; 272 | 7C85FBBD1D6AAC7B002374C2 /* Categories */ = { 273 | isa = PBXGroup; 274 | children = ( 275 | 7C7E42BA1D6D943F0001AF0F /* UIView+CCHUD.h */, 276 | 7C7E42BB1D6D943F0001AF0F /* UIView+CCHUD.m */, 277 | 7C85FBC11D6AACB9002374C2 /* UIView+CCAdditions.h */, 278 | 7C85FBC21D6AACB9002374C2 /* UIView+CCAdditions.m */, 279 | ); 280 | name = Categories; 281 | sourceTree = ""; 282 | }; 283 | 7C85FBC71D6AAD61002374C2 /* Camera */ = { 284 | isa = PBXGroup; 285 | children = ( 286 | 7C85FBD61D6AAE89002374C2 /* View */, 287 | 0DB3C60521211EBA00B54690 /* Manager */, 288 | 7C85FBCF1D6AAE10002374C2 /* ViewController */, 289 | ); 290 | name = Camera; 291 | sourceTree = ""; 292 | }; 293 | 7C85FBC81D6AADAC002374C2 /* Manager */ = { 294 | isa = PBXGroup; 295 | children = ( 296 | 7C85FBC91D6AADBF002374C2 /* CCTools.h */, 297 | 7C85FBCA1D6AADBF002374C2 /* CCTools.m */, 298 | 7C4B0F361D73F642005935EE /* CCMotionManager.h */, 299 | 7C4B0F371D73F642005935EE /* CCMotionManager.m */, 300 | ); 301 | name = Manager; 302 | sourceTree = ""; 303 | }; 304 | 7C85FBCF1D6AAE10002374C2 /* ViewController */ = { 305 | isa = PBXGroup; 306 | children = ( 307 | 7C85FBD01D6AAE49002374C2 /* CCCameraViewController.h */, 308 | 7C85FBD11D6AAE49002374C2 /* CCCameraViewController.m */, 309 | ); 310 | name = ViewController; 311 | sourceTree = ""; 312 | }; 313 | 7C85FBD61D6AAE89002374C2 /* View */ = { 314 | isa = PBXGroup; 315 | children = ( 316 | D95283DE1F0CBFDF006EE61E /* CCCameraView.h */, 317 | D95283DF1F0CBFDF006EE61E /* CCCameraView.m */, 318 | 7C85FBD71D6AAECE002374C2 /* CCVideoPreview.h */, 319 | 7C85FBD81D6AAECE002374C2 /* CCVideoPreview.m */, 320 | 7C85FBD31D6AAE7B002374C2 /* CCImagePreviewController.h */, 321 | 7C85FBD41D6AAE7B002374C2 /* CCImagePreviewController.m */, 322 | ); 323 | name = View; 324 | sourceTree = ""; 325 | }; 326 | /* End PBXGroup section */ 327 | 328 | /* Begin PBXNativeTarget section */ 329 | 
7C85FB711D6AAAE4002374C2 /* CCCamera */ = { 330 | isa = PBXNativeTarget; 331 | buildConfigurationList = 7C85FBA21D6AAAE4002374C2 /* Build configuration list for PBXNativeTarget "CCCamera" */; 332 | buildPhases = ( 333 | 7C85FB6E1D6AAAE4002374C2 /* Sources */, 334 | 7C85FB6F1D6AAAE4002374C2 /* Frameworks */, 335 | 7C85FB701D6AAAE4002374C2 /* Resources */, 336 | ); 337 | buildRules = ( 338 | ); 339 | dependencies = ( 340 | ); 341 | name = CCCamera; 342 | productName = CCCamera; 343 | productReference = 7C85FB721D6AAAE4002374C2 /* CCCamera.app */; 344 | productType = "com.apple.product-type.application"; 345 | }; 346 | 7C85FB8D1D6AAAE4002374C2 /* CCCameraTests */ = { 347 | isa = PBXNativeTarget; 348 | buildConfigurationList = 7C85FBA51D6AAAE4002374C2 /* Build configuration list for PBXNativeTarget "CCCameraTests" */; 349 | buildPhases = ( 350 | 7C85FB8A1D6AAAE4002374C2 /* Sources */, 351 | 7C85FB8B1D6AAAE4002374C2 /* Frameworks */, 352 | 7C85FB8C1D6AAAE4002374C2 /* Resources */, 353 | ); 354 | buildRules = ( 355 | ); 356 | dependencies = ( 357 | 7C85FB901D6AAAE4002374C2 /* PBXTargetDependency */, 358 | ); 359 | name = CCCameraTests; 360 | productName = CCCameraTests; 361 | productReference = 7C85FB8E1D6AAAE4002374C2 /* CCCameraTests.xctest */; 362 | productType = "com.apple.product-type.bundle.unit-test"; 363 | }; 364 | 7C85FB981D6AAAE4002374C2 /* CCCameraUITests */ = { 365 | isa = PBXNativeTarget; 366 | buildConfigurationList = 7C85FBA81D6AAAE4002374C2 /* Build configuration list for PBXNativeTarget "CCCameraUITests" */; 367 | buildPhases = ( 368 | 7C85FB951D6AAAE4002374C2 /* Sources */, 369 | 7C85FB961D6AAAE4002374C2 /* Frameworks */, 370 | 7C85FB971D6AAAE4002374C2 /* Resources */, 371 | ); 372 | buildRules = ( 373 | ); 374 | dependencies = ( 375 | 7C85FB9B1D6AAAE4002374C2 /* PBXTargetDependency */, 376 | ); 377 | name = CCCameraUITests; 378 | productName = CCCameraUITests; 379 | productReference = 7C85FB991D6AAAE4002374C2 /* CCCameraUITests.xctest */; 380 | productType = "com.apple.product-type.bundle.ui-testing"; 381 | }; 382 | /* End PBXNativeTarget section */ 383 | 384 | /* Begin PBXProject section */ 385 | 7C85FB6A1D6AAAE4002374C2 /* Project object */ = { 386 | isa = PBXProject; 387 | attributes = { 388 | LastUpgradeCheck = 1130; 389 | ORGANIZATIONNAME = cyd; 390 | TargetAttributes = { 391 | 7C85FB711D6AAAE4002374C2 = { 392 | CreatedOnToolsVersion = 7.3.1; 393 | DevelopmentTeam = 25AP33B4X5; 394 | LastSwiftMigration = 1000; 395 | }; 396 | 7C85FB8D1D6AAAE4002374C2 = { 397 | CreatedOnToolsVersion = 7.3.1; 398 | TestTargetID = 7C85FB711D6AAAE4002374C2; 399 | }; 400 | 7C85FB981D6AAAE4002374C2 = { 401 | CreatedOnToolsVersion = 7.3.1; 402 | TestTargetID = 7C85FB711D6AAAE4002374C2; 403 | }; 404 | }; 405 | }; 406 | buildConfigurationList = 7C85FB6D1D6AAAE4002374C2 /* Build configuration list for PBXProject "CCCamera" */; 407 | compatibilityVersion = "Xcode 3.2"; 408 | developmentRegion = en; 409 | hasScannedForEncodings = 0; 410 | knownRegions = ( 411 | en, 412 | Base, 413 | ); 414 | mainGroup = 7C85FB691D6AAAE4002374C2; 415 | productRefGroup = 7C85FB731D6AAAE4002374C2 /* Products */; 416 | projectDirPath = ""; 417 | projectRoot = ""; 418 | targets = ( 419 | 7C85FB711D6AAAE4002374C2 /* CCCamera */, 420 | 7C85FB8D1D6AAAE4002374C2 /* CCCameraTests */, 421 | 7C85FB981D6AAAE4002374C2 /* CCCameraUITests */, 422 | ); 423 | }; 424 | /* End PBXProject section */ 425 | 426 | /* Begin PBXResourcesBuildPhase section */ 427 | 7C85FB701D6AAAE4002374C2 /* Resources */ = { 428 | isa = PBXResourcesBuildPhase; 
429 | buildActionMask = 2147483647; 430 | files = ( 431 | 7C85FB881D6AAAE4002374C2 /* LaunchScreen.storyboard in Resources */, 432 | 7C85FB851D6AAAE4002374C2 /* Assets.xcassets in Resources */, 433 | 7C85FB801D6AAAE4002374C2 /* Main.storyboard in Resources */, 434 | ); 435 | runOnlyForDeploymentPostprocessing = 0; 436 | }; 437 | 7C85FB8C1D6AAAE4002374C2 /* Resources */ = { 438 | isa = PBXResourcesBuildPhase; 439 | buildActionMask = 2147483647; 440 | files = ( 441 | ); 442 | runOnlyForDeploymentPostprocessing = 0; 443 | }; 444 | 7C85FB971D6AAAE4002374C2 /* Resources */ = { 445 | isa = PBXResourcesBuildPhase; 446 | buildActionMask = 2147483647; 447 | files = ( 448 | ); 449 | runOnlyForDeploymentPostprocessing = 0; 450 | }; 451 | /* End PBXResourcesBuildPhase section */ 452 | 453 | /* Begin PBXSourcesBuildPhase section */ 454 | 7C85FB6E1D6AAAE4002374C2 /* Sources */ = { 455 | isa = PBXSourcesBuildPhase; 456 | buildActionMask = 2147483647; 457 | files = ( 458 | 0DB3C60E2121370000B54690 /* CCMovieManager.m in Sources */, 459 | 0D69AF65214215260046CD43 /* CCPhotoRenderer.swift in Sources */, 460 | 7C85FBC31D6AACB9002374C2 /* UIView+CCAdditions.m in Sources */, 461 | 7C85FB831D6AAAE4002374C2 /* CCCamera.xcdatamodeld in Sources */, 462 | 7C4B0F381D73F642005935EE /* CCMotionManager.m in Sources */, 463 | 0D69AF63214213DE0046CD43 /* CCFilterRenderer.swift in Sources */, 464 | 0D2DF7CD2146145A00F98BE0 /* PassThrough.metal in Sources */, 465 | D95283E01F0CBFDF006EE61E /* CCCameraView.m in Sources */, 466 | 7C85FB7D1D6AAAE4002374C2 /* ViewController.m in Sources */, 467 | 7C85FBB81D6AABC4002374C2 /* CCBaseNavigationController.m in Sources */, 468 | 0DB3C60821211EE500B54690 /* CCCameraManager.m in Sources */, 469 | 0D2DF7CB2146017200F98BE0 /* CCPreviewView.swift in Sources */, 470 | 7C4B0F341D73EC5D005935EE /* CCGLRenderCameraViewController.m in Sources */, 471 | 7C85FBCB1D6AADBF002374C2 /* CCTools.m in Sources */, 472 | 7C7E42BC1D6D943F0001AF0F /* UIView+CCHUD.m in Sources */, 473 | 7C85FBD91D6AAECE002374C2 /* CCVideoPreview.m in Sources */, 474 | 7C85FBDC1D6AB380002374C2 /* CCBaseViewController.m in Sources */, 475 | 7C85FB7A1D6AAAE4002374C2 /* AppDelegate.m in Sources */, 476 | 0D69AF61214211B20046CD43 /* CCFilterViewController.swift in Sources */, 477 | 7C85FBD21D6AAE49002374C2 /* CCCameraViewController.m in Sources */, 478 | 7C85FB771D6AAAE4002374C2 /* main.m in Sources */, 479 | 7C85FBD51D6AAE7B002374C2 /* CCImagePreviewController.m in Sources */, 480 | ); 481 | runOnlyForDeploymentPostprocessing = 0; 482 | }; 483 | 7C85FB8A1D6AAAE4002374C2 /* Sources */ = { 484 | isa = PBXSourcesBuildPhase; 485 | buildActionMask = 2147483647; 486 | files = ( 487 | 7C85FB931D6AAAE4002374C2 /* CCCameraTests.m in Sources */, 488 | ); 489 | runOnlyForDeploymentPostprocessing = 0; 490 | }; 491 | 7C85FB951D6AAAE4002374C2 /* Sources */ = { 492 | isa = PBXSourcesBuildPhase; 493 | buildActionMask = 2147483647; 494 | files = ( 495 | 7C85FB9E1D6AAAE4002374C2 /* CCCameraUITests.m in Sources */, 496 | ); 497 | runOnlyForDeploymentPostprocessing = 0; 498 | }; 499 | /* End PBXSourcesBuildPhase section */ 500 | 501 | /* Begin PBXTargetDependency section */ 502 | 7C85FB901D6AAAE4002374C2 /* PBXTargetDependency */ = { 503 | isa = PBXTargetDependency; 504 | target = 7C85FB711D6AAAE4002374C2 /* CCCamera */; 505 | targetProxy = 7C85FB8F1D6AAAE4002374C2 /* PBXContainerItemProxy */; 506 | }; 507 | 7C85FB9B1D6AAAE4002374C2 /* PBXTargetDependency */ = { 508 | isa = PBXTargetDependency; 509 | target = 7C85FB711D6AAAE4002374C2 /* CCCamera 
*/; 510 | targetProxy = 7C85FB9A1D6AAAE4002374C2 /* PBXContainerItemProxy */; 511 | }; 512 | /* End PBXTargetDependency section */ 513 | 514 | /* Begin PBXVariantGroup section */ 515 | 7C85FB7E1D6AAAE4002374C2 /* Main.storyboard */ = { 516 | isa = PBXVariantGroup; 517 | children = ( 518 | 7C85FB7F1D6AAAE4002374C2 /* Base */, 519 | ); 520 | name = Main.storyboard; 521 | sourceTree = ""; 522 | }; 523 | 7C85FB861D6AAAE4002374C2 /* LaunchScreen.storyboard */ = { 524 | isa = PBXVariantGroup; 525 | children = ( 526 | 7C85FB871D6AAAE4002374C2 /* Base */, 527 | ); 528 | name = LaunchScreen.storyboard; 529 | sourceTree = ""; 530 | }; 531 | /* End PBXVariantGroup section */ 532 | 533 | /* Begin XCBuildConfiguration section */ 534 | 7C85FBA01D6AAAE4002374C2 /* Debug */ = { 535 | isa = XCBuildConfiguration; 536 | buildSettings = { 537 | ALWAYS_SEARCH_USER_PATHS = NO; 538 | CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES; 539 | CLANG_ANALYZER_NONNULL = YES; 540 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 541 | CLANG_CXX_LIBRARY = "libc++"; 542 | CLANG_ENABLE_MODULES = YES; 543 | CLANG_ENABLE_OBJC_ARC = YES; 544 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 545 | CLANG_WARN_BOOL_CONVERSION = YES; 546 | CLANG_WARN_COMMA = YES; 547 | CLANG_WARN_CONSTANT_CONVERSION = YES; 548 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 549 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 550 | CLANG_WARN_EMPTY_BODY = YES; 551 | CLANG_WARN_ENUM_CONVERSION = YES; 552 | CLANG_WARN_INFINITE_RECURSION = YES; 553 | CLANG_WARN_INT_CONVERSION = YES; 554 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 555 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 556 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 557 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 558 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 559 | CLANG_WARN_STRICT_PROTOTYPES = YES; 560 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 561 | CLANG_WARN_UNREACHABLE_CODE = YES; 562 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 563 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 564 | COPY_PHASE_STRIP = NO; 565 | DEBUG_INFORMATION_FORMAT = dwarf; 566 | ENABLE_STRICT_OBJC_MSGSEND = YES; 567 | ENABLE_TESTABILITY = YES; 568 | GCC_C_LANGUAGE_STANDARD = gnu99; 569 | GCC_DYNAMIC_NO_PIC = NO; 570 | GCC_NO_COMMON_BLOCKS = YES; 571 | GCC_OPTIMIZATION_LEVEL = 0; 572 | GCC_PREPROCESSOR_DEFINITIONS = ( 573 | "DEBUG=1", 574 | "$(inherited)", 575 | ); 576 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 577 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 578 | GCC_WARN_UNDECLARED_SELECTOR = YES; 579 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 580 | GCC_WARN_UNUSED_FUNCTION = YES; 581 | GCC_WARN_UNUSED_VARIABLE = YES; 582 | IPHONEOS_DEPLOYMENT_TARGET = 9.3; 583 | MTL_ENABLE_DEBUG_INFO = YES; 584 | ONLY_ACTIVE_ARCH = YES; 585 | SDKROOT = iphoneos; 586 | }; 587 | name = Debug; 588 | }; 589 | 7C85FBA11D6AAAE4002374C2 /* Release */ = { 590 | isa = XCBuildConfiguration; 591 | buildSettings = { 592 | ALWAYS_SEARCH_USER_PATHS = NO; 593 | CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES; 594 | CLANG_ANALYZER_NONNULL = YES; 595 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 596 | CLANG_CXX_LIBRARY = "libc++"; 597 | CLANG_ENABLE_MODULES = YES; 598 | CLANG_ENABLE_OBJC_ARC = YES; 599 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 600 | CLANG_WARN_BOOL_CONVERSION = YES; 601 | CLANG_WARN_COMMA = YES; 602 | CLANG_WARN_CONSTANT_CONVERSION = YES; 603 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 604 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 605 | CLANG_WARN_EMPTY_BODY = YES; 606 | CLANG_WARN_ENUM_CONVERSION = YES; 
607 | CLANG_WARN_INFINITE_RECURSION = YES; 608 | CLANG_WARN_INT_CONVERSION = YES; 609 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 610 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 611 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 612 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 613 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 614 | CLANG_WARN_STRICT_PROTOTYPES = YES; 615 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 616 | CLANG_WARN_UNREACHABLE_CODE = YES; 617 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 618 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 619 | COPY_PHASE_STRIP = NO; 620 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 621 | ENABLE_NS_ASSERTIONS = NO; 622 | ENABLE_STRICT_OBJC_MSGSEND = YES; 623 | GCC_C_LANGUAGE_STANDARD = gnu99; 624 | GCC_NO_COMMON_BLOCKS = YES; 625 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 626 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 627 | GCC_WARN_UNDECLARED_SELECTOR = YES; 628 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 629 | GCC_WARN_UNUSED_FUNCTION = YES; 630 | GCC_WARN_UNUSED_VARIABLE = YES; 631 | IPHONEOS_DEPLOYMENT_TARGET = 9.3; 632 | MTL_ENABLE_DEBUG_INFO = NO; 633 | SDKROOT = iphoneos; 634 | SWIFT_COMPILATION_MODE = wholemodule; 635 | VALIDATE_PRODUCT = YES; 636 | }; 637 | name = Release; 638 | }; 639 | 7C85FBA31D6AAAE4002374C2 /* Debug */ = { 640 | isa = XCBuildConfiguration; 641 | buildSettings = { 642 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 643 | CLANG_ENABLE_MODULES = YES; 644 | DEVELOPMENT_TEAM = 25AP33B4X5; 645 | FRAMEWORK_SEARCH_PATHS = ( 646 | "$(inherited)", 647 | "$(PROJECT_DIR)", 648 | ); 649 | GCC_PREFIX_HEADER = "$(SRCROOT)/$(PRODUCT_NAME)/CCCamera.pch"; 650 | GCC_WARN_ABOUT_DEPRECATED_FUNCTIONS = NO; 651 | INFOPLIST_FILE = CCCamera/Info.plist; 652 | IPHONEOS_DEPLOYMENT_TARGET = 11.0; 653 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 654 | PRODUCT_BUNDLE_IDENTIFIER = com.cyd.ios.CCCamera; 655 | PRODUCT_NAME = "$(TARGET_NAME)"; 656 | SWIFT_OBJC_BRIDGING_HEADER = "CCCamera/CCCamera-Bridging-Header.h"; 657 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 658 | SWIFT_VERSION = 4.0; 659 | TARGETED_DEVICE_FAMILY = 1; 660 | }; 661 | name = Debug; 662 | }; 663 | 7C85FBA41D6AAAE4002374C2 /* Release */ = { 664 | isa = XCBuildConfiguration; 665 | buildSettings = { 666 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 667 | CLANG_ENABLE_MODULES = YES; 668 | DEVELOPMENT_TEAM = 25AP33B4X5; 669 | FRAMEWORK_SEARCH_PATHS = ( 670 | "$(inherited)", 671 | "$(PROJECT_DIR)", 672 | ); 673 | GCC_PREFIX_HEADER = "$(SRCROOT)/$(PRODUCT_NAME)/CCCamera.pch"; 674 | GCC_WARN_ABOUT_DEPRECATED_FUNCTIONS = NO; 675 | INFOPLIST_FILE = CCCamera/Info.plist; 676 | IPHONEOS_DEPLOYMENT_TARGET = 11.0; 677 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 678 | PRODUCT_BUNDLE_IDENTIFIER = com.cyd.ios.CCCamera; 679 | PRODUCT_NAME = "$(TARGET_NAME)"; 680 | SWIFT_OBJC_BRIDGING_HEADER = "CCCamera/CCCamera-Bridging-Header.h"; 681 | SWIFT_VERSION = 4.0; 682 | TARGETED_DEVICE_FAMILY = 1; 683 | }; 684 | name = Release; 685 | }; 686 | 7C85FBA61D6AAAE4002374C2 /* Debug */ = { 687 | isa = XCBuildConfiguration; 688 | buildSettings = { 689 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 690 | BUNDLE_LOADER = "$(TEST_HOST)"; 691 | INFOPLIST_FILE = CCCameraTests/Info.plist; 692 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 693 | PRODUCT_BUNDLE_IDENTIFIER = com.cyd.ios.CCCameraTests; 694 | PRODUCT_NAME = "$(TARGET_NAME)"; 695 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/CCCamera.app/CCCamera"; 696 | }; 697 | 
name = Debug; 698 | }; 699 | 7C85FBA71D6AAAE4002374C2 /* Release */ = { 700 | isa = XCBuildConfiguration; 701 | buildSettings = { 702 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 703 | BUNDLE_LOADER = "$(TEST_HOST)"; 704 | INFOPLIST_FILE = CCCameraTests/Info.plist; 705 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 706 | PRODUCT_BUNDLE_IDENTIFIER = com.cyd.ios.CCCameraTests; 707 | PRODUCT_NAME = "$(TARGET_NAME)"; 708 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/CCCamera.app/CCCamera"; 709 | }; 710 | name = Release; 711 | }; 712 | 7C85FBA91D6AAAE4002374C2 /* Debug */ = { 713 | isa = XCBuildConfiguration; 714 | buildSettings = { 715 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 716 | INFOPLIST_FILE = CCCameraUITests/Info.plist; 717 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 718 | PRODUCT_BUNDLE_IDENTIFIER = com.cyd.ios.CCCameraUITests; 719 | PRODUCT_NAME = "$(TARGET_NAME)"; 720 | TEST_TARGET_NAME = CCCamera; 721 | }; 722 | name = Debug; 723 | }; 724 | 7C85FBAA1D6AAAE4002374C2 /* Release */ = { 725 | isa = XCBuildConfiguration; 726 | buildSettings = { 727 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 728 | INFOPLIST_FILE = CCCameraUITests/Info.plist; 729 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 730 | PRODUCT_BUNDLE_IDENTIFIER = com.cyd.ios.CCCameraUITests; 731 | PRODUCT_NAME = "$(TARGET_NAME)"; 732 | TEST_TARGET_NAME = CCCamera; 733 | }; 734 | name = Release; 735 | }; 736 | /* End XCBuildConfiguration section */ 737 | 738 | /* Begin XCConfigurationList section */ 739 | 7C85FB6D1D6AAAE4002374C2 /* Build configuration list for PBXProject "CCCamera" */ = { 740 | isa = XCConfigurationList; 741 | buildConfigurations = ( 742 | 7C85FBA01D6AAAE4002374C2 /* Debug */, 743 | 7C85FBA11D6AAAE4002374C2 /* Release */, 744 | ); 745 | defaultConfigurationIsVisible = 0; 746 | defaultConfigurationName = Release; 747 | }; 748 | 7C85FBA21D6AAAE4002374C2 /* Build configuration list for PBXNativeTarget "CCCamera" */ = { 749 | isa = XCConfigurationList; 750 | buildConfigurations = ( 751 | 7C85FBA31D6AAAE4002374C2 /* Debug */, 752 | 7C85FBA41D6AAAE4002374C2 /* Release */, 753 | ); 754 | defaultConfigurationIsVisible = 0; 755 | defaultConfigurationName = Release; 756 | }; 757 | 7C85FBA51D6AAAE4002374C2 /* Build configuration list for PBXNativeTarget "CCCameraTests" */ = { 758 | isa = XCConfigurationList; 759 | buildConfigurations = ( 760 | 7C85FBA61D6AAAE4002374C2 /* Debug */, 761 | 7C85FBA71D6AAAE4002374C2 /* Release */, 762 | ); 763 | defaultConfigurationIsVisible = 0; 764 | defaultConfigurationName = Release; 765 | }; 766 | 7C85FBA81D6AAAE4002374C2 /* Build configuration list for PBXNativeTarget "CCCameraUITests" */ = { 767 | isa = XCConfigurationList; 768 | buildConfigurations = ( 769 | 7C85FBA91D6AAAE4002374C2 /* Debug */, 770 | 7C85FBAA1D6AAAE4002374C2 /* Release */, 771 | ); 772 | defaultConfigurationIsVisible = 0; 773 | defaultConfigurationName = Release; 774 | }; 775 | /* End XCConfigurationList section */ 776 | 777 | /* Begin XCVersionGroup section */ 778 | 7C85FB811D6AAAE4002374C2 /* CCCamera.xcdatamodeld */ = { 779 | isa = XCVersionGroup; 780 | children = ( 781 | 7C85FB821D6AAAE4002374C2 /* CCCamera.xcdatamodel */, 782 | ); 783 | currentVersion = 7C85FB821D6AAAE4002374C2 /* CCCamera.xcdatamodel */; 784 | path = CCCamera.xcdatamodeld; 785 | sourceTree = ""; 786 | versionGroupType = wrapper.xcdatamodel; 787 | }; 788 | /* End XCVersionGroup section */ 789 | 
}; 790 | rootObject = 7C85FB6A1D6AAAE4002374C2 /* Project object */; 791 | } 792 | -------------------------------------------------------------------------------- /CCCamera.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /CCCamera.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /CCCamera.xcodeproj/project.xcworkspace/xcuserdata/baidaojuhe.xcuserdatad/UserInterfaceState.xcuserstate: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cdcyd/CCCamera/846b57d972a938bc559cc86db959c39edde5c956/CCCamera.xcodeproj/project.xcworkspace/xcuserdata/baidaojuhe.xcuserdatad/UserInterfaceState.xcuserstate -------------------------------------------------------------------------------- /CCCamera.xcodeproj/project.xcworkspace/xcuserdata/cyd.xcuserdatad/UserInterfaceState.xcuserstate: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cdcyd/CCCamera/846b57d972a938bc559cc86db959c39edde5c956/CCCamera.xcodeproj/project.xcworkspace/xcuserdata/cyd.xcuserdatad/UserInterfaceState.xcuserstate -------------------------------------------------------------------------------- /CCCamera.xcodeproj/project.xcworkspace/xcuserdata/wsk.xcuserdatad/UserInterfaceState.xcuserstate: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cdcyd/CCCamera/846b57d972a938bc559cc86db959c39edde5c956/CCCamera.xcodeproj/project.xcworkspace/xcuserdata/wsk.xcuserdatad/UserInterfaceState.xcuserstate -------------------------------------------------------------------------------- /CCCamera.xcodeproj/xcuserdata/baidaojuhe.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | -------------------------------------------------------------------------------- /CCCamera.xcodeproj/xcuserdata/baidaojuhe.xcuserdatad/xcschemes/CCCamera.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 24 | 25 | 30 | 31 | 33 | 39 | 40 | 41 | 43 | 49 | 50 | 51 | 52 | 53 | 59 | 60 | 61 | 62 | 63 | 64 | 74 | 76 | 82 | 83 | 84 | 85 | 86 | 87 | 93 | 95 | 101 | 102 | 103 | 104 | 106 | 107 | 110 | 111 | 112 | -------------------------------------------------------------------------------- /CCCamera.xcodeproj/xcuserdata/baidaojuhe.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | CCCamera.xcscheme 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | SuppressBuildableAutocreation 14 | 15 | 7C85FB711D6AAAE4002374C2 16 | 17 | primary 18 | 19 | 20 | 7C85FB8D1D6AAAE4002374C2 21 | 22 | primary 23 | 24 | 25 | 7C85FB981D6AAAE4002374C2 26 | 27 | primary 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /CCCamera.xcodeproj/xcuserdata/cyd.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist: 
-------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | -------------------------------------------------------------------------------- /CCCamera.xcodeproj/xcuserdata/cyd.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | CCCamera.xcscheme 8 | 9 | orderHint 10 | 0 11 | 12 | CCCamera.xcscheme_^#shared#^_ 13 | 14 | orderHint 15 | 0 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /CCCamera.xcodeproj/xcuserdata/wsk.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | -------------------------------------------------------------------------------- /CCCamera.xcodeproj/xcuserdata/wsk.xcuserdatad/xcschemes/CCCamera.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 24 | 25 | 30 | 31 | 33 | 39 | 40 | 41 | 43 | 49 | 50 | 51 | 52 | 53 | 59 | 60 | 61 | 62 | 63 | 64 | 74 | 76 | 82 | 83 | 84 | 85 | 86 | 87 | 93 | 95 | 101 | 102 | 103 | 104 | 106 | 107 | 110 | 111 | 112 | -------------------------------------------------------------------------------- /CCCamera.xcodeproj/xcuserdata/wsk.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | CCCamera.xcscheme 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | SuppressBuildableAutocreation 14 | 15 | 7C85FB711D6AAAE4002374C2 16 | 17 | primary 18 | 19 | 20 | 7C85FB8D1D6AAAE4002374C2 21 | 22 | primary 23 | 24 | 25 | 7C85FB981D6AAAE4002374C2 26 | 27 | primary 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /CCCamera/AppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.h 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import 10 | #import 11 | 12 | @interface AppDelegate : UIResponder 13 | 14 | @property (strong, nonatomic) UIWindow *window; 15 | 16 | @property (readonly, strong, nonatomic) NSManagedObjectContext *managedObjectContext; 17 | @property (readonly, strong, nonatomic) NSManagedObjectModel *managedObjectModel; 18 | @property (readonly, strong, nonatomic) NSPersistentStoreCoordinator *persistentStoreCoordinator; 19 | 20 | - (void)saveContext; 21 | - (NSURL *)applicationDocumentsDirectory; 22 | 23 | 24 | @end 25 | 26 | -------------------------------------------------------------------------------- /CCCamera/AppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.m 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 
7 | // 8 | 9 | #import "AppDelegate.h" 10 | #import "ViewController.h" 11 | #import "CCBaseNavigationController.h" 12 | 13 | @interface AppDelegate () 14 | 15 | @end 16 | 17 | @implementation AppDelegate 18 | 19 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 20 | ViewController *vc = [[ViewController alloc]init]; 21 | CCBaseNavigationController *nav = [[CCBaseNavigationController alloc]initWithRootViewController:vc]; 22 | self.window.rootViewController = nav; 23 | [self.window makeKeyAndVisible]; 24 | return YES; 25 | } 26 | 27 | - (void)applicationWillResignActive:(UIApplication *)application { 28 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 29 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game. 30 | } 31 | 32 | - (void)applicationDidEnterBackground:(UIApplication *)application { 33 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 34 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 35 | } 36 | 37 | - (void)applicationWillEnterForeground:(UIApplication *)application { 38 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background. 39 | } 40 | 41 | - (void)applicationDidBecomeActive:(UIApplication *)application { 42 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 43 | } 44 | 45 | - (void)applicationWillTerminate:(UIApplication *)application { 46 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 47 | // Saves changes in the application's managed object context before the application terminates. 48 | [self saveContext]; 49 | } 50 | 51 | #pragma mark - Core Data stack 52 | 53 | @synthesize managedObjectContext = _managedObjectContext; 54 | @synthesize managedObjectModel = _managedObjectModel; 55 | @synthesize persistentStoreCoordinator = _persistentStoreCoordinator; 56 | 57 | - (NSURL *)applicationDocumentsDirectory { 58 | // The directory the application uses to store the Core Data store file. This code uses a directory named "com.cyd.ios.CCCamera" in the application's documents directory. 59 | return [[[NSFileManager defaultManager] URLsForDirectory:NSDocumentDirectory inDomains:NSUserDomainMask] lastObject]; 60 | } 61 | 62 | - (NSManagedObjectModel *)managedObjectModel { 63 | // The managed object model for the application. It is a fatal error for the application not to be able to find and load its model. 
64 | if (_managedObjectModel != nil) { 65 | return _managedObjectModel; 66 | } 67 | NSURL *modelURL = [[NSBundle mainBundle] URLForResource:@"CCCamera" withExtension:@"momd"]; 68 | _managedObjectModel = [[NSManagedObjectModel alloc] initWithContentsOfURL:modelURL]; 69 | return _managedObjectModel; 70 | } 71 | 72 | - (NSPersistentStoreCoordinator *)persistentStoreCoordinator { 73 | // The persistent store coordinator for the application. This implementation creates and returns a coordinator, having added the store for the application to it. 74 | if (_persistentStoreCoordinator != nil) { 75 | return _persistentStoreCoordinator; 76 | } 77 | 78 | // Create the coordinator and store 79 | 80 | _persistentStoreCoordinator = [[NSPersistentStoreCoordinator alloc] initWithManagedObjectModel:[self managedObjectModel]]; 81 | NSURL *storeURL = [[self applicationDocumentsDirectory] URLByAppendingPathComponent:@"CCCamera.sqlite"]; 82 | NSError *error = nil; 83 | NSString *failureReason = @"There was an error creating or loading the application's saved data."; 84 | if (![_persistentStoreCoordinator addPersistentStoreWithType:NSSQLiteStoreType configuration:nil URL:storeURL options:nil error:&error]) { 85 | // Report any error we got. 86 | NSMutableDictionary *dict = [NSMutableDictionary dictionary]; 87 | dict[NSLocalizedDescriptionKey] = @"Failed to initialize the application's saved data"; 88 | dict[NSLocalizedFailureReasonErrorKey] = failureReason; 89 | dict[NSUnderlyingErrorKey] = error; 90 | error = [NSError errorWithDomain:@"YOUR_ERROR_DOMAIN" code:9999 userInfo:dict]; 91 | // Replace this with code to handle the error appropriately. 92 | // abort() causes the application to generate a crash log and terminate. You should not use this function in a shipping application, although it may be useful during development. 93 | NSLog(@"Unresolved error %@, %@", error, [error userInfo]); 94 | abort(); 95 | } 96 | 97 | return _persistentStoreCoordinator; 98 | } 99 | 100 | 101 | - (NSManagedObjectContext *)managedObjectContext { 102 | // Returns the managed object context for the application (which is already bound to the persistent store coordinator for the application.) 103 | if (_managedObjectContext != nil) { 104 | return _managedObjectContext; 105 | } 106 | 107 | NSPersistentStoreCoordinator *coordinator = [self persistentStoreCoordinator]; 108 | if (!coordinator) { 109 | return nil; 110 | } 111 | _managedObjectContext = [[NSManagedObjectContext alloc] initWithConcurrencyType:NSMainQueueConcurrencyType]; 112 | [_managedObjectContext setPersistentStoreCoordinator:coordinator]; 113 | return _managedObjectContext; 114 | } 115 | 116 | #pragma mark - Core Data Saving support 117 | 118 | - (void)saveContext { 119 | NSManagedObjectContext *managedObjectContext = self.managedObjectContext; 120 | if (managedObjectContext != nil) { 121 | NSError *error = nil; 122 | if ([managedObjectContext hasChanges] && ![managedObjectContext save:&error]) { 123 | // Replace this implementation with code to handle the error appropriately. 124 | // abort() causes the application to generate a crash log and terminate. You should not use this function in a shipping application, although it may be useful during development. 
125 | NSLog(@"Unresolved error %@, %@", error, [error userInfo]); 126 | abort(); 127 | } 128 | } 129 | } 130 | 131 | @end 132 | -------------------------------------------------------------------------------- /CCCamera/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "size" : "20x20", 5 | "idiom" : "iphone", 6 | "filename" : "Icon-40.png", 7 | "scale" : "2x" 8 | }, 9 | { 10 | "size" : "20x20", 11 | "idiom" : "iphone", 12 | "filename" : "Icon-60.png", 13 | "scale" : "3x" 14 | }, 15 | { 16 | "size" : "29x29", 17 | "idiom" : "iphone", 18 | "filename" : "Icon-58.png", 19 | "scale" : "2x" 20 | }, 21 | { 22 | "size" : "29x29", 23 | "idiom" : "iphone", 24 | "filename" : "Icon-87.png", 25 | "scale" : "3x" 26 | }, 27 | { 28 | "size" : "40x40", 29 | "idiom" : "iphone", 30 | "filename" : "Icon-80.png", 31 | "scale" : "2x" 32 | }, 33 | { 34 | "size" : "40x40", 35 | "idiom" : "iphone", 36 | "filename" : "Icon-120.png", 37 | "scale" : "3x" 38 | }, 39 | { 40 | "size" : "60x60", 41 | "idiom" : "iphone", 42 | "filename" : "Icon-121.png", 43 | "scale" : "2x" 44 | }, 45 | { 46 | "size" : "60x60", 47 | "idiom" : "iphone", 48 | "filename" : "Icon-180.png", 49 | "scale" : "3x" 50 | }, 51 | { 52 | "size" : "1024x1024", 53 | "idiom" : "ios-marketing", 54 | "filename" : "IMG_0772.png", 55 | "scale" : "1x" 56 | } 57 | ], 58 | "info" : { 59 | "version" : 1, 60 | "author" : "xcode" 61 | } 62 | } -------------------------------------------------------------------------------- /CCCamera/Assets.xcassets/AppIcon.appiconset/IMG_0772.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cdcyd/CCCamera/846b57d972a938bc559cc86db959c39edde5c956/CCCamera/Assets.xcassets/AppIcon.appiconset/IMG_0772.png -------------------------------------------------------------------------------- /CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-120.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cdcyd/CCCamera/846b57d972a938bc559cc86db959c39edde5c956/CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-120.png -------------------------------------------------------------------------------- /CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-121.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cdcyd/CCCamera/846b57d972a938bc559cc86db959c39edde5c956/CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-121.png -------------------------------------------------------------------------------- /CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-180.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cdcyd/CCCamera/846b57d972a938bc559cc86db959c39edde5c956/CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-180.png -------------------------------------------------------------------------------- /CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-40.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cdcyd/CCCamera/846b57d972a938bc559cc86db959c39edde5c956/CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-40.png -------------------------------------------------------------------------------- /CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-58.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/cdcyd/CCCamera/846b57d972a938bc559cc86db959c39edde5c956/CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-58.png -------------------------------------------------------------------------------- /CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-60.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cdcyd/CCCamera/846b57d972a938bc559cc86db959c39edde5c956/CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-60.png -------------------------------------------------------------------------------- /CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-80.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cdcyd/CCCamera/846b57d972a938bc559cc86db959c39edde5c956/CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-80.png -------------------------------------------------------------------------------- /CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-87.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cdcyd/CCCamera/846b57d972a938bc559cc86db959c39edde5c956/CCCamera/Assets.xcassets/AppIcon.appiconset/Icon-87.png -------------------------------------------------------------------------------- /CCCamera/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /CCCamera/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /CCCamera/CCBaseNavigationController.h: -------------------------------------------------------------------------------- 1 | // 2 | // CCBaseNavigationController.h 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface CCBaseNavigationController : UINavigationController 12 | 13 | @end 14 | -------------------------------------------------------------------------------- /CCCamera/CCBaseNavigationController.m: -------------------------------------------------------------------------------- 1 | // 2 | // CCBaseNavigationController.m 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import "CCBaseNavigationController.h" 10 | 11 | @interface CCBaseNavigationController () 12 | 13 | @end 14 | 15 | @implementation CCBaseNavigationController 16 | 17 | - (void)viewDidLoad { 18 | [super viewDidLoad]; 19 | self.navigationBar.translucent = NO; 20 | } 21 | 22 | - (void)didReceiveMemoryWarning { 23 | [super didReceiveMemoryWarning]; 24 | // Dispose of any resources that can be recreated. 25 | } 26 | 27 | @end 28 | -------------------------------------------------------------------------------- /CCCamera/CCBaseViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // CCBaseViewController.h 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 
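// Common base class (implementation follows below): it disables extended layout,
// sets the default background color via the UIColor() macro from CCCamera.pch,
// and logs dealloc, behavior inherited by subclasses such as
// CCCameraViewController and CCGLRenderCameraViewController.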
6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface CCBaseViewController : UIViewController 12 | 13 | @end 14 | -------------------------------------------------------------------------------- /CCCamera/CCBaseViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // CCBaseViewController.m 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import "CCBaseViewController.h" 10 | 11 | @interface CCBaseViewController () 12 | 13 | @end 14 | 15 | @implementation CCBaseViewController 16 | 17 | - (void)viewDidLoad { 18 | [super viewDidLoad]; 19 | self.edgesForExtendedLayout = UIRectEdgeNone; 20 | self.view.backgroundColor = UIColor(0xebf5ff, 1); 21 | } 22 | 23 | - (void)dealloc { 24 | NSLog(@"dealloc: %@", self); 25 | } 26 | 27 | - (void)didReceiveMemoryWarning { 28 | [super didReceiveMemoryWarning]; 29 | // Dispose of any resources that can be recreated. 30 | } 31 | 32 | @end 33 | -------------------------------------------------------------------------------- /CCCamera/CCCamera-Bridging-Header.h: -------------------------------------------------------------------------------- 1 | // 2 | // Use this file to import your target's public headers that you would like to expose to Swift. 3 | // 4 | 5 | -------------------------------------------------------------------------------- /CCCamera/CCCamera.pch: -------------------------------------------------------------------------------- 1 | // 2 | // CCCamera.pch 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #ifndef CCCamera_pch 10 | #define CCCamera_pch 11 | 12 | @import UIKit; 13 | @import Foundation; 14 | 15 | #import "UIView+CCHUD.h" 16 | #import "UIView+CCAdditions.h" 17 | 18 | #ifdef DEBUG 19 | #define NSLog(fmt, ...) NSLog((@"function:%s [Line:%d] " fmt), __PRETTY_FUNCTION__, __LINE__, ##__VA_ARGS__); 20 | #else 21 | #define NSLog(...) 
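// Illustrative effect of the DEBUG branch above: a plain call such as
//     NSLog(@"session started");
// also prints the calling function and line, roughly
//     function:-[CCCameraViewController viewDidLoad] [Line:NN] session started
// whereas in release builds the variadic no-op above compiles NSLog calls away entirely.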
22 | #endif 23 | 24 | #ifndef weakify 25 | #if __has_feature(objc_arc) 26 | #define weakify( x ) \ 27 | _Pragma("clang diagnostic push") \ 28 | _Pragma("clang diagnostic ignored \"-Wshadow\"") \ 29 | autoreleasepool{} __weak __typeof__(x) __weak_##x##__ = x; \ 30 | _Pragma("clang diagnostic pop") 31 | #else 32 | #define weakify( x ) \ 33 | _Pragma("clang diagnostic push") \ 34 | _Pragma("clang diagnostic ignored \"-Wshadow\"") \ 35 | autoreleasepool{} __block __typeof__(x) __block_##x##__ = x; \ 36 | _Pragma("clang diagnostic pop") 37 | #endif 38 | #endif 39 | 40 | #ifndef strongify 41 | #if __has_feature(objc_arc) 42 | #define strongify( x ) \ 43 | _Pragma("clang diagnostic push") \ 44 | _Pragma("clang diagnostic ignored \"-Wshadow\"") \ 45 | try{} @finally{} __typeof__(x) x = __weak_##x##__; \ 46 | _Pragma("clang diagnostic pop") 47 | #else 48 | #define strongify( x ) \ 49 | _Pragma("clang diagnostic push") \ 50 | _Pragma("clang diagnostic ignored \"-Wshadow\"") \ 51 | try{} @finally{} __typeof__(x) x = __block_##x##__; \ 52 | _Pragma("clang diagnostic pop") 53 | #endif 54 | #endif 55 | 56 | // Color 57 | #define UIColor(rgbValue, alphaValue) [UIColor colorWithRed:((float)((rgbValue & 0xFF0000) >> 16))/255.0 \ 58 | green:((float)((rgbValue & 0x00FF00) >> 8))/255.0 \ 59 | blue:((float)(rgbValue & 0x0000FF)) / 255.0 \ 60 | alpha:alphaValue] 61 | 62 | // Screen width and height 63 | #define CD_SCREEN_WIDTH ([UIScreen mainScreen].bounds.size.width) 64 | #define CD_SCREEN_HEIGHT ([UIScreen mainScreen].bounds.size.height) 65 | 66 | #endif /* CCCamera_pch */ 67 | -------------------------------------------------------------------------------- /CCCamera/CCCamera.xcdatamodeld/.xccurrentversion: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | _XCCurrentVersionName 6 | CCCamera.xcdatamodel 7 | 8 | 9 | -------------------------------------------------------------------------------- /CCCamera/CCCamera.xcdatamodeld/CCCamera.xcdatamodel/contents: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /CCCamera/CCCameraManager.h: -------------------------------------------------------------------------------- 1 | // 2 | // CCCameraManager.h 3 | // CCCamera 4 | // 5 | // Created by cyd on 2018/8/13. 6 | // Copyright © 2018 cyd. All rights reserved.
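// The -(id) configuration methods declared below return the NSError produced
// while configuring the device (or a custom com.cc.camera error), and nil on
// success. Illustrative call site, assuming a caller-owned manager and device:
//
//     NSError *error = [manager zoom:device factor:2.0];
//     if (error) { NSLog(@"%@", error); }
//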
7 | // 8 | 9 | #import 10 | #import 11 | 12 | NS_ASSUME_NONNULL_BEGIN 13 | 14 | @interface CCCameraManager : NSObject 15 | 16 | - (AVCaptureDeviceInput *)switchCamera:(AVCaptureSession *)session 17 | old:(AVCaptureDeviceInput *)oldinput 18 | new:(AVCaptureDeviceInput *)newinput; 19 | 20 | - (id)resetFocusAndExposure:(AVCaptureDevice *)device; 21 | 22 | - (id)zoom:(AVCaptureDevice *)device factor:(CGFloat)factor; 23 | 24 | - (id)focus:(AVCaptureDevice *)device point:(CGPoint)point; 25 | 26 | - (id)expose:(AVCaptureDevice *)device point:(CGPoint)point; 27 | 28 | - (id)changeFlash:(AVCaptureDevice *)device mode:(AVCaptureFlashMode)mode; 29 | 30 | - (id)changeTorch:(AVCaptureDevice *)device model:(AVCaptureTorchMode)mode; 31 | 32 | - (AVCaptureFlashMode)flashMode:(AVCaptureDevice *)device; 33 | 34 | - (AVCaptureTorchMode)torchMode:(AVCaptureDevice *)device; 35 | 36 | @end 37 | 38 | NS_ASSUME_NONNULL_END 39 | -------------------------------------------------------------------------------- /CCCamera/CCCameraManager.m: -------------------------------------------------------------------------------- 1 | // 2 | // CCCameraManager.m 3 | // CCCamera 4 | // 5 | // Created by cyd on 2018/8/13. 6 | // Copyright © 2018 cyd. All rights reserved. 7 | // 8 | 9 | #import "CCCameraManager.h" 10 | 11 | @implementation CCCameraManager 12 | 13 | #pragma mark - -转换摄像头 14 | - (AVCaptureDeviceInput *)switchCamera:(AVCaptureSession *)session old:(AVCaptureDeviceInput *)oldinput new:(AVCaptureDeviceInput *)newinput { 15 | [session beginConfiguration]; 16 | [session removeInput:oldinput]; 17 | if ([session canAddInput:newinput]) { 18 | [session addInput:newinput]; 19 | [session commitConfiguration]; 20 | return newinput; 21 | } else { 22 | [session addInput:oldinput]; 23 | [session commitConfiguration]; 24 | return oldinput; 25 | } 26 | } 27 | 28 | #pragma mark - -缩放 29 | - (id)zoom:(AVCaptureDevice *)device factor:(CGFloat)factor { 30 | if (device.activeFormat.videoMaxZoomFactor > factor && factor >= 1.0) { 31 | NSError *error; 32 | if ([device lockForConfiguration:&error]) { 33 | [device rampToVideoZoomFactor:factor withRate:4.0]; 34 | [device unlockForConfiguration]; 35 | } 36 | return error; 37 | } 38 | return [self error:@"不支持的缩放倍数" code:2000]; 39 | } 40 | 41 | #pragma mark - -聚焦 42 | - (id)focus:(AVCaptureDevice *)device point:(CGPoint)point{ 43 | BOOL supported = [device isFocusPointOfInterestSupported] && 44 | [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]; 45 | if (supported){ 46 | NSError *error; 47 | if ([device lockForConfiguration:&error]) { 48 | device.focusPointOfInterest = point; 49 | device.focusMode = AVCaptureFocusModeAutoFocus; 50 | [device unlockForConfiguration]; 51 | } 52 | return error; 53 | } 54 | return [self error:@"设备不支持对焦" code:2001]; 55 | } 56 | 57 | #pragma mark - -曝光 58 | static const NSString *CameraAdjustingExposureContext; 59 | - (id)expose:(AVCaptureDevice *)device point:(CGPoint)point{ 60 | BOOL supported = [device isExposurePointOfInterestSupported] && 61 | [device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]; 62 | if (supported) { 63 | NSError *error; 64 | if ([device lockForConfiguration:&error]) { 65 | device.exposurePointOfInterest = point; 66 | device.exposureMode = AVCaptureExposureModeContinuousAutoExposure; 67 | if ([device isExposureModeSupported:AVCaptureExposureModeLocked]) { 68 | [device addObserver:self forKeyPath:@"adjustingExposure" options:NSKeyValueObservingOptionNew context:&CameraAdjustingExposureContext]; 69 | } 70 | [device 
unlockForConfiguration]; 71 | } 72 | return error; 73 | } 74 | return [self error:@"设备不支持曝光" code:2002]; 75 | } 76 | 77 | - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context{ 78 | if (context == &CameraAdjustingExposureContext) { 79 | AVCaptureDevice *device = (AVCaptureDevice *)object; 80 | if (!device.isAdjustingExposure && [device isExposureModeSupported:AVCaptureExposureModeLocked]) { 81 | [object removeObserver:self forKeyPath:@"adjustingExposure" context:&CameraAdjustingExposureContext]; 82 | dispatch_async(dispatch_get_main_queue(), ^{ 83 | NSError *error; 84 | if ([device lockForConfiguration:&error]) { 85 | device.exposureMode = AVCaptureExposureModeLocked; 86 | [device unlockForConfiguration]; 87 | } else { 88 | NSLog(@"%@", error); 89 | } 90 | }); 91 | } 92 | } else { 93 | [super observeValueForKeyPath:keyPath ofObject:object change:change context:context]; 94 | } 95 | } 96 | 97 | #pragma mark - -自动聚焦、曝光 98 | - (id)resetFocusAndExposure:(AVCaptureDevice *)device { 99 | AVCaptureFocusMode focusMode = AVCaptureFocusModeContinuousAutoFocus; 100 | AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure; 101 | BOOL canResetFocus = [device isFocusPointOfInterestSupported] && 102 | [device isFocusModeSupported:focusMode]; 103 | BOOL canResetExposure = [device isExposurePointOfInterestSupported] && 104 | [device isExposureModeSupported:exposureMode]; 105 | CGPoint centerPoint = CGPointMake(0.5f, 0.5f); 106 | NSError *error; 107 | if ([device lockForConfiguration:&error]) { 108 | if (canResetFocus) { 109 | device.focusMode = focusMode; 110 | device.focusPointOfInterest = centerPoint; 111 | } 112 | if (canResetExposure) { 113 | device.exposureMode = exposureMode; 114 | device.exposurePointOfInterest = centerPoint; 115 | } 116 | [device unlockForConfiguration]; 117 | } 118 | return error; 119 | } 120 | 121 | #pragma mark - -闪光灯 122 | - (AVCaptureFlashMode)flashMode:(AVCaptureDevice *)device{ 123 | return [device flashMode]; 124 | } 125 | 126 | - (id)changeFlash:(AVCaptureDevice *)device mode:(AVCaptureFlashMode)mode{ 127 | if (![device hasFlash]) { 128 | return [self error:@"不支持闪光灯" code:2003]; 129 | } 130 | if ([self torchMode:device] == AVCaptureTorchModeOn) { 131 | [self setTorch:device model:AVCaptureTorchModeOff]; 132 | } 133 | return [self setFlash:device mode:mode]; 134 | } 135 | 136 | - (id)setFlash:(AVCaptureDevice *)device mode:(AVCaptureFlashMode)mode { 137 | if ([device isFlashModeSupported:mode]) { 138 | NSError *error; 139 | if ([device lockForConfiguration:&error]) { 140 | device.flashMode = mode; 141 | [device unlockForConfiguration]; 142 | } 143 | return error; 144 | } 145 | return [self error:@"不支持闪光灯" code:2003]; 146 | } 147 | 148 | #pragma mark - -手电筒 149 | - (AVCaptureTorchMode)torchMode:(AVCaptureDevice *)device { 150 | return [device torchMode]; 151 | } 152 | 153 | - (id)changeTorch:(AVCaptureDevice *)device model:(AVCaptureTorchMode)mode{ 154 | if (![device hasTorch]) { 155 | return [self error:@"不支持手电筒" code:2004]; 156 | } 157 | if ([self flashMode:device] == AVCaptureFlashModeOn) { 158 | [self setFlash:device mode:AVCaptureFlashModeOff]; 159 | } 160 | return [self setTorch:device model:mode]; 161 | } 162 | 163 | - (id)setTorch:(AVCaptureDevice *)device model:(AVCaptureTorchMode)mode { 164 | if ([device isTorchModeSupported:mode]) { 165 | NSError *error; 166 | if ([device lockForConfiguration:&error]) { 167 | device.torchMode = mode; 168 | [device 
unlockForConfiguration]; 169 | } 170 | return error; 171 | } 172 | return [self error:@"不支持手电筒" code:2004]; 173 | } 174 | 175 | #pragma mark - 176 | - (NSError *)error:(NSString *)text code:(NSInteger)code { 177 | NSDictionary *desc = @{NSLocalizedDescriptionKey: text}; 178 | NSError *error = [NSError errorWithDomain:@"com.cc.camera" code:code userInfo:desc]; 179 | return error; 180 | } 181 | 182 | @end 183 | -------------------------------------------------------------------------------- /CCCamera/CCCameraView.h: -------------------------------------------------------------------------------- 1 | // 2 | // CCCameraView.h 3 | // CCCamera 4 | // 5 | // Created by 佰道聚合 on 2017/7/5. 6 | // Copyright © 2017年 cyd. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "CCVideoPreview.h" 11 | 12 | @class CCCameraView; 13 | @protocol CCCameraViewDelegate 14 | @optional; 15 | 16 | /// 闪光灯 17 | -(void)flashLightAction:(CCCameraView *)cameraView handle:(void(^)(NSError *error))handle; 18 | /// 补光 19 | -(void)torchLightAction:(CCCameraView *)cameraView handle:(void(^)(NSError *error))handle; 20 | /// 转换摄像头 21 | -(void)swicthCameraAction:(CCCameraView *)cameraView handle:(void(^)(NSError *error))handle; 22 | /// 自动聚焦曝光 23 | -(void)autoFocusAndExposureAction:(CCCameraView *)cameraView handle:(void(^)(NSError *error))handle; 24 | /// 聚焦 25 | -(void)focusAction:(CCCameraView *)cameraView point:(CGPoint)point handle:(void(^)(NSError *error))handle; 26 | /// 曝光 27 | -(void)exposAction:(CCCameraView *)cameraView point:(CGPoint)point handle:(void(^)(NSError *error))handle; 28 | /// 缩放 29 | -(void)zoomAction:(CCCameraView *)cameraView factor:(CGFloat)factor; 30 | 31 | /// 取消 32 | -(void)cancelAction:(CCCameraView *)cameraView; 33 | /// 拍照 34 | -(void)takePhotoAction:(CCCameraView *)cameraView; 35 | /// 停止录制视频 36 | -(void)stopRecordVideoAction:(CCCameraView *)cameraView; 37 | /// 开始录制视频 38 | -(void)startRecordVideoAction:(CCCameraView *)cameraView; 39 | /// 改变拍摄类型 1:拍照 2:视频 40 | -(void)didChangeTypeAction:(CCCameraView *)cameraView type:(NSInteger)type; 41 | 42 | @end 43 | 44 | @interface CCCameraView : UIView 45 | 46 | @property(nonatomic, weak) id delegate; 47 | 48 | @property(nonatomic, strong, readonly) CCVideoPreview *previewView; 49 | 50 | @property(nonatomic, assign, readonly) NSInteger type; // 1:拍照 2:视频 51 | 52 | -(void)changeTorch:(BOOL)on; 53 | 54 | -(void)changeFlash:(BOOL)on; 55 | 56 | @end 57 | -------------------------------------------------------------------------------- /CCCamera/CCCameraView.m: -------------------------------------------------------------------------------- 1 | // 2 | // CCCameraView.m 3 | // CCCamera 4 | // 5 | // Created by 佰道聚合 on 2017/7/5. 6 | // Copyright © 2017年 cyd. All rights reserved. 
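// This view is UI-only: every button and gesture is forwarded through
// CCCameraViewDelegate. Illustrative host-side setup (mirrors what
// CCCameraViewController does later in this project):
//
//     CCCameraView *cameraView = [[CCCameraView alloc] initWithFrame:self.view.bounds];
//     cameraView.delegate = self;   // implement the CCCameraViewDelegate callbacks
//     [self.view addSubview:cameraView];
//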
7 | // 8 | 9 | #import "CCCameraView.h" 10 | #import "UIView+CCHUD.h" 11 | 12 | @interface CCCameraView() 13 | 14 | @property(nonatomic, assign) NSInteger type; // 1:拍照 2:视频 15 | @property(nonatomic, strong) CCVideoPreview *previewView; 16 | @property(nonatomic, strong) UIView *topView; // 上面的bar 17 | @property(nonatomic, strong) UIView *bottomView; // 下面的bar 18 | @property(nonatomic, strong) UIView *focusView; // 聚焦动画view 19 | @property(nonatomic, strong) UIView *exposureView; // 曝光动画view 20 | 21 | @property(nonatomic, strong) UISlider *slider; 22 | @property(nonatomic, strong) UIButton *torchBtn; 23 | @property(nonatomic, strong) UIButton *flashBtn; 24 | @property(nonatomic, strong) UIButton *photoBtn; 25 | 26 | @end 27 | 28 | @implementation CCCameraView 29 | 30 | -(instancetype)initWithFrame:(CGRect)frame 31 | { 32 | NSAssert(frame.size.height>164 || frame.size.width>374, @"相机视图的高不小于164,宽不小于375"); 33 | self = [super initWithFrame:frame]; 34 | if (self) { 35 | _type = 1; 36 | [self setupUI]; 37 | } 38 | return self; 39 | } 40 | 41 | -(UIView *)topView{ 42 | if (_topView == nil) { 43 | _topView = [[UIView alloc]initWithFrame:CGRectMake(0, 0, self.width, 64)]; 44 | _topView.backgroundColor = [UIColor blackColor]; 45 | } 46 | return _topView; 47 | } 48 | 49 | -(UIView *)bottomView{ 50 | if (_bottomView == nil) { 51 | _bottomView = [[UIView alloc]initWithFrame:CGRectMake(0, self.height-100, self.width, 100)]; 52 | _bottomView.backgroundColor = [UIColor blackColor]; 53 | } 54 | return _bottomView; 55 | } 56 | 57 | -(UIView *)focusView{ 58 | if (_focusView == nil) { 59 | _focusView = [[UIView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 150, 150.0f)]; 60 | _focusView.backgroundColor = [UIColor clearColor]; 61 | _focusView.layer.borderColor = [UIColor blueColor].CGColor; 62 | _focusView.layer.borderWidth = 5.0f; 63 | _focusView.hidden = YES; 64 | } 65 | return _focusView; 66 | } 67 | 68 | -(UIView *)exposureView{ 69 | if (_exposureView == nil) { 70 | _exposureView = [[UIView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 150, 150.0f)]; 71 | _exposureView.backgroundColor = [UIColor clearColor]; 72 | _exposureView.layer.borderColor = [UIColor purpleColor].CGColor; 73 | _exposureView.layer.borderWidth = 5.0f; 74 | _exposureView.hidden = YES; 75 | } 76 | return _exposureView; 77 | } 78 | 79 | -(UISlider *)slider{ 80 | if (_slider == nil) { 81 | _slider = [[UISlider alloc] init]; 82 | _slider.minimumValue = 0; 83 | _slider.maximumValue = 1; 84 | _slider.maximumTrackTintColor = [UIColor whiteColor]; 85 | _slider.minimumTrackTintColor = [UIColor whiteColor]; 86 | _slider.alpha = 0.0; 87 | } 88 | return _slider; 89 | } 90 | 91 | -(void)setupUI{ 92 | self.previewView = [[CCVideoPreview alloc]initWithFrame:CGRectMake(0, 64, self.width, self.height-64-100)]; 93 | [self addSubview:self.previewView]; 94 | [self addSubview:self.topView]; 95 | [self addSubview:self.bottomView]; 96 | [self.previewView addSubview:self.focusView]; 97 | [self.previewView addSubview:self.exposureView]; 98 | [self.previewView addSubview:self.slider]; 99 | 100 | // ----------------------- 手势 101 | // 点击-->聚焦 双击-->曝光 102 | UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapAction:)]; 103 | UITapGestureRecognizer *doubleTap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(doubleTapAction:)]; 104 | doubleTap.numberOfTapsRequired = 2; 105 | [self.previewView addGestureRecognizer:tap]; 106 | [self.previewView addGestureRecognizer:doubleTap]; 107 | [tap 
requireGestureRecognizerToFail:doubleTap]; 108 | 109 | // 捏合-->缩放 110 | UIPinchGestureRecognizer *pinch = [[UIPinchGestureRecognizer alloc] initWithTarget:self action: @selector(pinchAction:)]; 111 | [self.previewView addGestureRecognizer:pinch]; 112 | 113 | // ----------------------- UI 114 | // 缩放 115 | self.slider.transform = CGAffineTransformMakeRotation(M_PI_2); 116 | self.slider.frame = CGRectMake(CD_SCREEN_WIDTH-50, 50, 15, 200); 117 | 118 | // 拍照 119 | UIButton *photoButton = [UIButton buttonWithType:UIButtonTypeCustom]; 120 | [photoButton setTitle:@"拍照" forState:UIControlStateNormal]; 121 | [photoButton setTitleColor:[UIColor whiteColor] forState:UIControlStateNormal]; 122 | [photoButton addTarget:self action:@selector(takePicture:) forControlEvents:UIControlEventTouchUpInside]; 123 | [photoButton sizeToFit]; 124 | photoButton.center = CGPointMake(_bottomView.centerX-20, _bottomView.height/2); 125 | [self.bottomView addSubview:photoButton]; 126 | _photoBtn = photoButton; 127 | 128 | // 取消 129 | UIButton *cancelButton = [UIButton buttonWithType:UIButtonTypeCustom]; 130 | [cancelButton setTitle:@"取消" forState:UIControlStateNormal]; 131 | [cancelButton setTitleColor:[UIColor whiteColor] forState:UIControlStateNormal]; 132 | [cancelButton addTarget:self action:@selector(cancel:) forControlEvents:UIControlEventTouchUpInside]; 133 | [cancelButton sizeToFit]; 134 | cancelButton.center = CGPointMake(40, _bottomView.height/2); 135 | [self.bottomView addSubview:cancelButton]; 136 | 137 | // 照片类型 138 | UIButton *typeButton = [UIButton buttonWithType:UIButtonTypeCustom]; 139 | [typeButton setTitle:@"照片" forState:UIControlStateNormal]; 140 | [typeButton setTitle:@"视频" forState:UIControlStateSelected]; 141 | [typeButton setTitleColor:[UIColor whiteColor] forState:UIControlStateNormal]; 142 | [typeButton addTarget:self action:@selector(changeType:) forControlEvents:UIControlEventTouchUpInside]; 143 | [typeButton sizeToFit]; 144 | typeButton.center = CGPointMake(_bottomView.width-60, _bottomView.height/2); 145 | [self.bottomView addSubview:typeButton]; 146 | 147 | // 转换前后摄像头 148 | UIButton *switchCameraButton = [UIButton buttonWithType:UIButtonTypeCustom]; 149 | [switchCameraButton setTitle:@"转换摄像头" forState:UIControlStateNormal]; 150 | [switchCameraButton setTitleColor:[UIColor whiteColor] forState:UIControlStateNormal]; 151 | [switchCameraButton addTarget:self action:@selector(switchCameraClick:) forControlEvents:UIControlEventTouchUpInside]; 152 | [switchCameraButton sizeToFit]; 153 | switchCameraButton.center = CGPointMake(switchCameraButton.width/2+10, _topView.height/2); 154 | [self.topView addSubview:switchCameraButton]; 155 | 156 | // 补光 157 | UIButton *lightButton = [UIButton buttonWithType:UIButtonTypeCustom]; 158 | [lightButton setTitle:@"补光" forState:UIControlStateNormal]; 159 | [lightButton setTitleColor:[UIColor whiteColor] forState:UIControlStateNormal]; 160 | [lightButton setTitleColor:[UIColor blueColor] forState:UIControlStateSelected]; 161 | [lightButton addTarget:self action:@selector(torchClick:) forControlEvents:UIControlEventTouchUpInside]; 162 | [lightButton sizeToFit]; 163 | lightButton.center = CGPointMake(lightButton.width/2 + switchCameraButton.right+10, _topView.height/2); 164 | [self.topView addSubview:lightButton]; 165 | _torchBtn = lightButton; 166 | 167 | // 闪光灯 168 | UIButton *flashButton = [UIButton buttonWithType:UIButtonTypeCustom]; 169 | [flashButton setTitle:@"闪光灯" forState:UIControlStateNormal]; 170 | [flashButton setTitleColor:[UIColor whiteColor] 
forState:UIControlStateNormal]; 171 | [flashButton setTitleColor:[UIColor blueColor] forState:UIControlStateSelected]; 172 | [flashButton addTarget:self action:@selector(flashClick:) forControlEvents:UIControlEventTouchUpInside]; 173 | [flashButton sizeToFit]; 174 | flashButton.center = CGPointMake(flashButton.width/2 + lightButton.right+10, _topView.height/2); 175 | [self.topView addSubview:flashButton]; 176 | _flashBtn = flashButton; 177 | 178 | // 重置对焦、曝光 179 | UIButton *focusAndExposureButton = [UIButton buttonWithType:UIButtonTypeCustom]; 180 | [focusAndExposureButton setTitle:@"自动聚焦/曝光" forState:UIControlStateNormal]; 181 | [focusAndExposureButton setTitleColor:[UIColor whiteColor] forState:UIControlStateNormal]; 182 | [focusAndExposureButton addTarget:self action:@selector(focusAndExposureClick:) forControlEvents:UIControlEventTouchUpInside]; 183 | [focusAndExposureButton sizeToFit]; 184 | focusAndExposureButton.center = CGPointMake(focusAndExposureButton.width/2 + flashButton.right+10, _topView.height/2); 185 | [self.topView addSubview:focusAndExposureButton]; 186 | } 187 | 188 | -(void)changeTorch:(BOOL)on{ 189 | _torchBtn.selected = on; 190 | } 191 | 192 | -(void)changeFlash:(BOOL)on{ 193 | _flashBtn.selected = on; 194 | } 195 | 196 | -(void)pinchAction:(UIPinchGestureRecognizer *)pinch { 197 | if ([_delegate respondsToSelector:@selector(zoomAction:factor:)]) { 198 | if (pinch.state == UIGestureRecognizerStateBegan) { 199 | [UIView animateWithDuration:0.1 animations:^{ 200 | self->_slider.alpha = 1; 201 | }]; 202 | } else if (pinch.state == UIGestureRecognizerStateChanged) { 203 | if (pinch.velocity > 0) { 204 | _slider.value += pinch.velocity/100; 205 | } else { 206 | _slider.value += pinch.velocity/20; 207 | } 208 | [_delegate zoomAction:self factor: powf(5, _slider.value)]; 209 | } else { 210 | [UIView animateWithDuration:0.1 animations:^{ 211 | self->_slider.alpha = 0.0; 212 | }]; 213 | } 214 | } 215 | } 216 | 217 | // 聚焦 218 | -(void)tapAction:(UIGestureRecognizer *)tap { 219 | if ([_delegate respondsToSelector:@selector(focusAction:point:handle:)]) { 220 | CGPoint point = [tap locationInView:self.previewView]; 221 | [self runFocusAnimation:self.focusView point:point]; 222 | [_delegate focusAction:self point:[self.previewView captureDevicePointForPoint:point] handle:^(NSError *error) { 223 | if (error) [self showError:error]; 224 | }]; 225 | } 226 | } 227 | 228 | // 曝光 229 | -(void)doubleTapAction:(UIGestureRecognizer *)tap { 230 | if ([_delegate respondsToSelector:@selector(exposAction:point:handle:)]) { 231 | CGPoint point = [tap locationInView:self.previewView]; 232 | [self runFocusAnimation:self.exposureView point:point]; 233 | [_delegate exposAction:self point:[self.previewView captureDevicePointForPoint:point] handle:^(NSError *error) { 234 | if (error) [self showError:error]; 235 | }]; 236 | } 237 | } 238 | 239 | // 自动聚焦和曝光 240 | -(void)focusAndExposureClick:(UIButton *)button { 241 | if ([_delegate respondsToSelector:@selector(autoFocusAndExposureAction:handle:)]) { 242 | [self runResetAnimation]; 243 | [_delegate autoFocusAndExposureAction:self handle:^(NSError *error) { 244 | if (error) [self showError:error]; 245 | }]; 246 | } 247 | } 248 | 249 | // 拍照、视频 250 | -(void)takePicture:(UIButton *)btn { 251 | if (self.type == 1) { 252 | if ([_delegate respondsToSelector:@selector(takePhotoAction:)]) { 253 | [_delegate takePhotoAction:self]; 254 | } 255 | } else { 256 | if (btn.selected == YES) { 257 | // 结束 258 | btn.selected = NO; 259 | [_photoBtn setTitle:@"开始" 
forState:UIControlStateNormal]; 260 | if ([_delegate respondsToSelector:@selector(stopRecordVideoAction:)]) { 261 | [_delegate stopRecordVideoAction:self]; 262 | } 263 | } else { 264 | // 开始 265 | btn.selected = YES; 266 | [_photoBtn setTitle:@"结束" forState:UIControlStateNormal]; 267 | if ([_delegate respondsToSelector:@selector(startRecordVideoAction:)]) { 268 | [_delegate startRecordVideoAction:self]; 269 | } 270 | } 271 | } 272 | } 273 | 274 | // 取消 275 | -(void)cancel:(UIButton *)btn { 276 | if ([_delegate respondsToSelector:@selector(cancelAction:)]) { 277 | [_delegate cancelAction:self]; 278 | } 279 | } 280 | 281 | // 转换拍摄类型 282 | -(void)changeType:(UIButton *)btn { 283 | btn.selected = !btn.selected; 284 | self.type = self.type == 1?2:1; 285 | if (self.type == 1) { 286 | [_photoBtn setTitle:@"拍照" forState:UIControlStateNormal]; 287 | } else { 288 | [_photoBtn setTitle:@"开始" forState:UIControlStateNormal]; 289 | } 290 | if ([_delegate respondsToSelector:@selector(didChangeTypeAction:type:)]) { 291 | [_delegate didChangeTypeAction:self type:self.type == 1?2:1]; 292 | } 293 | } 294 | 295 | // 转换摄像头 296 | -(void)switchCameraClick:(UIButton *)btn { 297 | if ([_delegate respondsToSelector:@selector(swicthCameraAction:handle:)]) { 298 | [_delegate swicthCameraAction:self handle:^(NSError *error) { 299 | if (error) [self showError:error]; 300 | }]; 301 | } 302 | } 303 | 304 | // 手电筒 305 | -(void)torchClick:(UIButton *)btn { 306 | if ([_delegate respondsToSelector:@selector(torchLightAction:handle:)]) { 307 | [_delegate torchLightAction:self handle:^(NSError *error) { 308 | if (error) { 309 | [self showError:error]; 310 | } else { 311 | self->_flashBtn.selected = NO; 312 | self->_torchBtn.selected = !self->_torchBtn.selected; 313 | } 314 | }]; 315 | } 316 | } 317 | 318 | // 闪光灯 319 | -(void)flashClick:(UIButton *)btn { 320 | if ([_delegate respondsToSelector:@selector(flashLightAction:handle:)]) { 321 | [_delegate flashLightAction:self handle:^(NSError *error) { 322 | if (error) { 323 | [self showError:error]; 324 | } else { 325 | self->_flashBtn.selected = !self->_flashBtn.selected; 326 | self->_torchBtn.selected = NO; 327 | } 328 | }]; 329 | } 330 | } 331 | 332 | #pragma mark - Private methods 333 | // 聚焦、曝光动画 334 | -(void)runFocusAnimation:(UIView *)view point:(CGPoint)point { 335 | view.center = point; 336 | view.hidden = NO; 337 | [UIView animateWithDuration:0.15f delay:0.0f options:UIViewAnimationOptionCurveEaseInOut animations:^{ 338 | view.layer.transform = CATransform3DMakeScale(0.5, 0.5, 1.0); 339 | } completion:^(BOOL complete) { 340 | double delayInSeconds = 0.5f; 341 | dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delayInSeconds * NSEC_PER_SEC)); 342 | dispatch_after(popTime, dispatch_get_main_queue(), ^(void){ 343 | view.hidden = YES; 344 | view.transform = CGAffineTransformIdentity; 345 | }); 346 | }]; 347 | } 348 | 349 | // 自动聚焦、曝光动画 350 | - (void)runResetAnimation { 351 | self.focusView.center = CGPointMake(self.previewView.width/2, self.previewView.height/2); 352 | self.exposureView.center = CGPointMake(self.previewView.width/2, self.previewView.height/2);; 353 | self.exposureView.transform = CGAffineTransformMakeScale(1.2f, 1.2f); 354 | self.focusView.hidden = NO; 355 | self.focusView.hidden = NO; 356 | [UIView animateWithDuration:0.15f delay:0.0f options:UIViewAnimationOptionCurveEaseInOut animations:^{ 357 | self.focusView.layer.transform = CATransform3DMakeScale(0.5, 0.5, 1.0); 358 | self.exposureView.layer.transform = CATransform3DMakeScale(0.7, 
0.7, 1.0); 359 | } completion:^(BOOL complete) { 360 | double delayInSeconds = 0.5f; 361 | dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delayInSeconds * NSEC_PER_SEC)); 362 | dispatch_after(popTime, dispatch_get_main_queue(), ^(void){ 363 | self.focusView.hidden = YES; 364 | self.exposureView.hidden = YES; 365 | self.focusView.transform = CGAffineTransformIdentity; 366 | self.exposureView.transform = CGAffineTransformIdentity; 367 | }); 368 | }]; 369 | } 370 | 371 | @end 372 | -------------------------------------------------------------------------------- /CCCamera/CCCameraViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // CCCameraViewController.h 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import "CCBaseViewController.h" 10 | 11 | @interface CCCameraViewController : CCBaseViewController 12 | 13 | @end 14 | -------------------------------------------------------------------------------- /CCCamera/CCCameraViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // CCCameraViewController.m 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import "CCCameraViewController.h" 10 | #import 11 | #import 12 | #import 13 | #import 14 | 15 | #import "CCImagePreviewController.h" 16 | #import "CCCameraView.h" 17 | 18 | #import "CCCameraManager.h" 19 | #import "CCMotionManager.h" 20 | #import "CCMovieManager.h" 21 | 22 | #define ISIOS9 __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0 23 | 24 | @interface CCCameraViewController () 25 | { 26 | // 会话 27 | AVCaptureSession *_session; 28 | 29 | // 输入 30 | AVCaptureDeviceInput *_deviceInput; 31 | 32 | // 输出 33 | AVCaptureConnection *_videoConnection; 34 | AVCaptureConnection *_audioConnection; 35 | AVCaptureVideoDataOutput *_videoOutput; 36 | AVCaptureStillImageOutput *_imageOutput; 37 | 38 | // 录制 39 | BOOL _recording; 40 | } 41 | 42 | @property(nonatomic, strong) CCCameraView *cameraView; // 界面布局 43 | @property(nonatomic, strong) CCMovieManager *movieManager; // 视频管理 44 | @property(nonatomic, strong) CCCameraManager *cameraManager; // 相机管理 45 | @property(nonatomic, strong) CCMotionManager *motionManager; // 陀螺仪管理 46 | @property(nonatomic, strong) AVCaptureDevice *activeCamera; // 当前输入设备 47 | @property(nonatomic, strong) AVCaptureDevice *inactiveCamera; // 不活跃的设备(这里指前摄像头或后摄像头,不包括外接输入设备) 48 | 49 | @end 50 | 51 | @implementation CCCameraViewController 52 | 53 | - (instancetype)init{ 54 | self = [super init]; 55 | if (self) { 56 | _movieManager = [[CCMovieManager alloc] init]; 57 | _motionManager = [[CCMotionManager alloc] init]; 58 | _cameraManager = [[CCCameraManager alloc] init]; 59 | } 60 | return self; 61 | } 62 | 63 | - (void)viewDidLoad{ 64 | [super viewDidLoad]; 65 | self.cameraView = [[CCCameraView alloc] initWithFrame:self.view.bounds]; 66 | self.cameraView.delegate = self; 67 | [self.view addSubview:self.cameraView]; 68 | 69 | NSError *error; 70 | [self setupSession:&error]; 71 | if (!error) { 72 | [self.cameraView.previewView setCaptureSessionsion:_session]; 73 | [self startCaptureSession]; 74 | }else{ 75 | [self.view showError:error]; 76 | } 77 | } 78 | 79 | - (void)viewWillAppear:(BOOL)animated{ 80 | [super viewWillAppear:animated]; 81 | self.navigationController.navigationBarHidden = YES; 82 | } 83 | 84 | - (void)viewWillDisappear:(BOOL)animated{ 85 | [super 
viewWillDisappear:animated]; 86 | self.navigationController.navigationBarHidden = NO; 87 | } 88 | 89 | - (void)dealloc{ 90 | NSLog(@"相机界面销毁了"); 91 | } 92 | 93 | #pragma mark - -输入设备 94 | - (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position{ 95 | NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; 96 | for (AVCaptureDevice *device in devices) { 97 | if (device.position == position) { 98 | return device; 99 | } 100 | } 101 | return nil; 102 | } 103 | 104 | - (AVCaptureDevice *)activeCamera{ 105 | return _deviceInput.device; 106 | } 107 | 108 | - (AVCaptureDevice *)inactiveCamera{ 109 | AVCaptureDevice *device = nil; 110 | if ([[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count] > 1) { 111 | if ([self activeCamera].position == AVCaptureDevicePositionBack) { 112 | device = [self cameraWithPosition:AVCaptureDevicePositionFront]; 113 | } else { 114 | device = [self cameraWithPosition:AVCaptureDevicePositionBack]; 115 | } 116 | } 117 | return device; 118 | } 119 | 120 | #pragma mark - -相关配置 121 | /// 会话 122 | - (void)setupSession:(NSError **)error{ 123 | _session = [[AVCaptureSession alloc]init]; 124 | _session.sessionPreset = AVCaptureSessionPresetHigh; 125 | 126 | [self setupSessionInputs:error]; 127 | [self setupSessionOutputs:error]; 128 | } 129 | 130 | /// 输入 131 | - (void)setupSessionInputs:(NSError **)error{ 132 | // 视频输入 133 | AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 134 | AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:error]; 135 | if (videoInput) { 136 | if ([_session canAddInput:videoInput]){ 137 | [_session addInput:videoInput]; 138 | } 139 | } 140 | _deviceInput = videoInput; 141 | 142 | // 音频输入 143 | AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; 144 | AVCaptureDeviceInput *audioIn = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:error]; 145 | if ([_session canAddInput:audioIn]){ 146 | [_session addInput:audioIn]; 147 | } 148 | } 149 | 150 | /// 输出 151 | - (void)setupSessionOutputs:(NSError **)error{ 152 | dispatch_queue_t captureQueue = dispatch_queue_create("com.cc.captureQueue", DISPATCH_QUEUE_SERIAL); 153 | 154 | // 视频输出 155 | AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init]; 156 | [videoOut setAlwaysDiscardsLateVideoFrames:YES]; 157 | [videoOut setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]}]; 158 | [videoOut setSampleBufferDelegate:self queue:captureQueue]; 159 | if ([_session canAddOutput:videoOut]){ 160 | [_session addOutput:videoOut]; 161 | } 162 | _videoOutput = videoOut; 163 | _videoConnection = [videoOut connectionWithMediaType:AVMediaTypeVideo]; 164 | 165 | // 音频输出 166 | AVCaptureAudioDataOutput *audioOut = [[AVCaptureAudioDataOutput alloc] init]; 167 | [audioOut setSampleBufferDelegate:self queue:captureQueue]; 168 | if ([_session canAddOutput:audioOut]){ 169 | [_session addOutput:audioOut]; 170 | } 171 | _audioConnection = [audioOut connectionWithMediaType:AVMediaTypeAudio]; 172 | 173 | // 静态图片输出 174 | AVCaptureStillImageOutput *imageOutput = [[AVCaptureStillImageOutput alloc] init]; 175 | imageOutput.outputSettings = @{AVVideoCodecKey:AVVideoCodecJPEG}; 176 | if ([_session canAddOutput:imageOutput]) { 177 | [_session addOutput:imageOutput]; 178 | } 179 | _imageOutput = imageOutput; 180 | } 181 | 182 | #pragma mark - -会话控制 183 | // 开启捕捉 184 | - 
(void)startCaptureSession{ 185 | if (!_session.isRunning){ 186 | [_session startRunning]; 187 | } 188 | } 189 | 190 | // 停止捕捉 191 | - (void)stopCaptureSession{ 192 | if (_session.isRunning){ 193 | [_session stopRunning]; 194 | } 195 | } 196 | 197 | #pragma mark - -操作相机 198 | // 缩放 199 | -(void)zoomAction:(CCCameraView *)cameraView factor:(CGFloat)factor { 200 | NSError *error = [_cameraManager zoom:[self activeCamera] factor:factor]; 201 | if (error) NSLog(@"%@", error); 202 | } 203 | 204 | // 聚焦 205 | -(void)focusAction:(CCCameraView *)cameraView point:(CGPoint)point handle:(void (^)(NSError *))handle { 206 | NSError *error = [_cameraManager focus:[self activeCamera] point:point]; 207 | handle(error); 208 | NSLog(@"%f", [self activeCamera].activeFormat.videoMaxZoomFactor); 209 | } 210 | 211 | // 曝光 212 | -(void)exposAction:(CCCameraView *)cameraView point:(CGPoint)point handle:(void (^)(NSError *))handle { 213 | NSError *error = [_cameraManager expose:[self activeCamera] point:point]; 214 | handle(error); 215 | } 216 | 217 | // 自动聚焦、曝光 218 | -(void)autoFocusAndExposureAction:(CCCameraView *)cameraView handle:(void (^)(NSError *))handle { 219 | NSError *error = [_cameraManager resetFocusAndExposure:[self activeCamera]]; 220 | handle(error); 221 | } 222 | 223 | // 闪光灯 224 | -(void)flashLightAction:(CCCameraView *)cameraView handle:(void (^)(NSError *))handle { 225 | BOOL on = [_cameraManager flashMode:[self activeCamera]] == AVCaptureFlashModeOn; 226 | AVCaptureFlashMode mode = on ? AVCaptureFlashModeOff : AVCaptureFlashModeOn; 227 | NSError *error = [_cameraManager changeFlash:[self activeCamera] mode: mode]; 228 | handle(error); 229 | } 230 | 231 | // 手电筒 232 | -(void)torchLightAction:(CCCameraView *)cameraView handle:(void (^)(NSError *))handle { 233 | BOOL on = [_cameraManager torchMode:[self activeCamera]] == AVCaptureTorchModeOn; 234 | AVCaptureTorchMode mode = on ? 
AVCaptureTorchModeOff : AVCaptureTorchModeOn; 235 | NSError *error = [_cameraManager changeTorch:[self activeCamera] model:mode]; 236 | handle(error); 237 | } 238 | 239 | // 转换摄像头 240 | - (void)swicthCameraAction:(CCCameraView *)cameraView handle:(void (^)(NSError *))handle { 241 | NSError *error; 242 | AVCaptureDevice *videoDevice = [self inactiveCamera]; 243 | AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error]; 244 | if (videoInput) { 245 | // 动画效果 246 | CATransition *animation = [CATransition animation]; 247 | animation.type = @"oglFlip"; 248 | animation.subtype = kCATransitionFromLeft; 249 | animation.duration = 0.5; 250 | [self.cameraView.previewView.layer addAnimation:animation forKey:@"flip"]; 251 | 252 | // 当前闪光灯状态 253 | AVCaptureFlashMode mode = [_cameraManager flashMode:[self activeCamera]]; 254 | 255 | // 转换摄像头 256 | _deviceInput = [_cameraManager switchCamera:_session old:_deviceInput new:videoInput]; 257 | 258 | // 重新设置视频输出链接 259 | _videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo]; 260 | 261 | // 如果后置转前置,系统会自动关闭手电筒(如果之前打开的,需要更新UI) 262 | if (videoDevice.position == AVCaptureDevicePositionFront) { 263 | [self.cameraView changeTorch:NO]; 264 | } 265 | 266 | // 前后摄像头的闪光灯不是同步的,所以在转换摄像头后需要重新设置闪光灯 267 | [_cameraManager changeFlash:[self activeCamera] mode:mode]; 268 | } 269 | handle(error); 270 | } 271 | 272 | #pragma mark - -拍摄照片 273 | // 拍照 274 | - (void)takePhotoAction:(CCCameraView *)cameraView{ 275 | AVCaptureConnection *connection = [_imageOutput connectionWithMediaType:AVMediaTypeVideo]; 276 | if (connection.isVideoOrientationSupported) { 277 | connection.videoOrientation = [self currentVideoOrientation]; 278 | } 279 | [_imageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef _Nullable imageDataSampleBuffer, NSError * _Nullable error) { 280 | if (error) { 281 | [self.view showError:error]; 282 | return; 283 | } 284 | NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; 285 | UIImage *image = [[UIImage alloc]initWithData:imageData]; 286 | CCImagePreviewController *vc = [[CCImagePreviewController alloc]initWithImage:image frame:self.cameraView.previewView.frame]; 287 | [self.navigationController pushViewController:vc animated:YES]; 288 | }]; 289 | } 290 | 291 | // 取消拍照 292 | - (void)cancelAction:(CCCameraView *)cameraView{ 293 | [self.navigationController popViewControllerAnimated:YES]; 294 | } 295 | 296 | #pragma mark - -录制视频 297 | // 开始录像 298 | -(void)startRecordVideoAction:(CCCameraView *)cameraView{ 299 | _recording = YES; 300 | _movieManager.currentDevice = [self activeCamera]; 301 | _movieManager.currentOrientation = [self currentVideoOrientation]; 302 | [_movieManager start:^(NSError * _Nonnull error) { 303 | if (error) [self.view showError:error]; 304 | }]; 305 | } 306 | 307 | // 停止录像 308 | -(void)stopRecordVideoAction:(CCCameraView *)cameraView{ 309 | _recording = NO; 310 | [_movieManager stop:^(NSURL * _Nonnull url, NSError * _Nonnull error) { 311 | if (error) { 312 | [self.view showError:error]; 313 | } else { 314 | [self.view showAlertView:@"是否保存到相册" ok:^(UIAlertAction *act) { 315 | [self saveMovieToCameraRoll: url]; 316 | } cancel:nil]; 317 | } 318 | }]; 319 | } 320 | 321 | // 保存视频 322 | - (void)saveMovieToCameraRoll:(NSURL *)url{ 323 | [self.view showLoadHUD:@"保存中..."]; 324 | if (ISIOS9) { 325 | [PHPhotoLibrary requestAuthorization:^( PHAuthorizationStatus status ) { 326 | if (status 
!= PHAuthorizationStatusAuthorized) return; 327 | [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{ 328 | PHAssetCreationRequest *videoRequest = [PHAssetCreationRequest creationRequestForAsset]; 329 | [videoRequest addResourceWithType:PHAssetResourceTypeVideo fileURL:url options:nil]; 330 | } completionHandler:^( BOOL success, NSError * _Nullable error ) { 331 | dispatch_sync(dispatch_get_main_queue(), ^{ 332 | [self.view hideHUD]; 333 | }); 334 | success?:[self.view showError:error]; 335 | }]; 336 | }]; 337 | } else { 338 | ALAssetsLibrary *lab = [[ALAssetsLibrary alloc]init]; 339 | [lab writeVideoAtPathToSavedPhotosAlbum:url completionBlock:^(NSURL *assetURL, NSError *error) { 340 | dispatch_sync(dispatch_get_main_queue(), ^{ 341 | [self.view hideHUD]; 342 | }); 343 | !error?:[self.view showError:error]; 344 | }]; 345 | } 346 | } 347 | 348 | #pragma mark - -输出代理 349 | - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{ 350 | if (_recording) { 351 | [_movieManager writeData:connection video:_videoConnection audio:_audioConnection buffer:sampleBuffer]; 352 | } 353 | } 354 | 355 | #pragma mark - -其它方法 356 | // 当前设备取向 357 | - (AVCaptureVideoOrientation)currentVideoOrientation{ 358 | AVCaptureVideoOrientation orientation; 359 | switch (self.motionManager.deviceOrientation) { 360 | case UIDeviceOrientationPortrait: 361 | orientation = AVCaptureVideoOrientationPortrait; 362 | break; 363 | case UIDeviceOrientationLandscapeLeft: 364 | orientation = AVCaptureVideoOrientationLandscapeRight; 365 | break; 366 | case UIDeviceOrientationLandscapeRight: 367 | orientation = AVCaptureVideoOrientationLandscapeLeft; 368 | break; 369 | case UIDeviceOrientationPortraitUpsideDown: 370 | orientation = AVCaptureVideoOrientationPortraitUpsideDown; 371 | break; 372 | default: 373 | orientation = AVCaptureVideoOrientationPortrait; 374 | break; 375 | } 376 | return orientation; 377 | } 378 | 379 | - (void)didReceiveMemoryWarning { 380 | [super didReceiveMemoryWarning]; 381 | // Dispose of any resources that can be recreated. 382 | } 383 | 384 | @end 385 | -------------------------------------------------------------------------------- /CCCamera/CCFilterRenderer.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CCFilterRenderer.swift 3 | // CCCamera 4 | // 5 | // Created by cyd on 2018/9/7. 6 | // Copyright © 2018 cyd. All rights reserved. 7 | // 8 | 9 | import CoreMedia 10 | 11 | func allocateOutputBufferPool(with inputFormatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) -> (outputBufferPool: CVPixelBufferPool?, outputColorSpace: CGColorSpace?, outputFormatDescription: CMFormatDescription?) 
{ 12 | 13 | let inputMediaSubType = CMFormatDescriptionGetMediaSubType(inputFormatDescription) 14 | if inputMediaSubType != kCVPixelFormatType_32BGRA { 15 | assertionFailure("Invalid input pixel buffer type \(inputMediaSubType)") 16 | return (nil, nil, nil) 17 | } 18 | 19 | let inputDimensions = CMVideoFormatDescriptionGetDimensions(inputFormatDescription) 20 | var pixelBufferAttributes: [String: Any] = [ 21 | kCVPixelBufferPixelFormatTypeKey as String: UInt(inputMediaSubType), 22 | kCVPixelBufferWidthKey as String: Int(inputDimensions.width), 23 | kCVPixelBufferHeightKey as String: Int(inputDimensions.height), 24 | kCVPixelBufferIOSurfacePropertiesKey as String: [:] 25 | ] 26 | 27 | var cgColorSpace = CGColorSpaceCreateDeviceRGB() 28 | if let inputFormatDescriptionExtension = CMFormatDescriptionGetExtensions(inputFormatDescription) as Dictionary? { 29 | let colorPrimaries = inputFormatDescriptionExtension[kCVImageBufferColorPrimariesKey] 30 | if let colorPrimaries = colorPrimaries { 31 | var colorSpaceProperties: [String: AnyObject] = [kCVImageBufferColorPrimariesKey as String: colorPrimaries] 32 | if let yCbCrMatrix = inputFormatDescriptionExtension[kCVImageBufferYCbCrMatrixKey] { 33 | colorSpaceProperties[kCVImageBufferYCbCrMatrixKey as String] = yCbCrMatrix 34 | } 35 | if let transferFunction = inputFormatDescriptionExtension[kCVImageBufferTransferFunctionKey] { 36 | colorSpaceProperties[kCVImageBufferTransferFunctionKey as String] = transferFunction 37 | } 38 | pixelBufferAttributes[kCVBufferPropagatedAttachmentsKey as String] = colorSpaceProperties 39 | } 40 | if let cvColorspace = inputFormatDescriptionExtension[kCVImageBufferCGColorSpaceKey] { 41 | cgColorSpace = cvColorspace as! CGColorSpace 42 | } else if (colorPrimaries as? String) == (kCVImageBufferColorPrimaries_P3_D65 as String) { 43 | cgColorSpace = CGColorSpace(name: CGColorSpace.displayP3)! 44 | } 45 | } 46 | 47 | let poolAttributes = [kCVPixelBufferPoolMinimumBufferCountKey as String: outputRetainedBufferCountHint] 48 | var cvPixelBufferPool: CVPixelBufferPool? 49 | CVPixelBufferPoolCreate(kCFAllocatorDefault, poolAttributes as NSDictionary?, pixelBufferAttributes as NSDictionary?, &cvPixelBufferPool) 50 | guard let pixelBufferPool = cvPixelBufferPool else { 51 | assertionFailure("Allocation failure: Could not allocate pixel buffer pool") 52 | return (nil, nil, nil) 53 | } 54 | preallocateBuffers(pool: pixelBufferPool, allocationThreshold: outputRetainedBufferCountHint) 55 | 56 | var pixelBuffer: CVPixelBuffer? 57 | var outputFormatDescription: CMFormatDescription? 58 | let auxAttributes = [kCVPixelBufferPoolAllocationThresholdKey as String: outputRetainedBufferCountHint] as NSDictionary 59 | CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pixelBufferPool, auxAttributes, &pixelBuffer) 60 | if let pixelBuffer = pixelBuffer { 61 | CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &outputFormatDescription) 62 | } 63 | pixelBuffer = nil 64 | 65 | return (pixelBufferPool, cgColorSpace, outputFormatDescription) 66 | } 67 | 68 | private func preallocateBuffers(pool: CVPixelBufferPool, allocationThreshold: Int) { 69 | var pixelBuffers = [CVPixelBuffer]() 70 | var error: CVReturn = kCVReturnSuccess 71 | let auxAttributes = [kCVPixelBufferPoolAllocationThresholdKey as String: allocationThreshold] as NSDictionary 72 | var pixelBuffer: CVPixelBuffer? 
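// The loop below "warms up" the pool: it keeps requesting pixel buffers with the
// allocation-threshold aux attribute until the create call stops returning
// kCVReturnSuccess, then drops the temporary array so the pre-allocated buffers
// go back to the pool and can be reused by the render path without new allocations.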
73 | while error == kCVReturnSuccess { 74 | error = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pool, auxAttributes, &pixelBuffer) 75 | if let pixelBuffer = pixelBuffer { 76 | pixelBuffers.append(pixelBuffer) 77 | } 78 | pixelBuffer = nil 79 | } 80 | pixelBuffers.removeAll() 81 | } 82 | 83 | protocol CCFilterRenderer: class { 84 | 85 | var isPrepared: Bool { get } 86 | 87 | var description: String { get } 88 | 89 | var onputFormatDescription: CMFormatDescription? { get } 90 | 91 | var inputFormatDescription: CMFormatDescription? { get } 92 | 93 | init(_ name: String) 94 | 95 | func render(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer? 96 | 97 | func prepare(with inputFormatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) 98 | 99 | func reset() 100 | 101 | } 102 | -------------------------------------------------------------------------------- /CCCamera/CCFilterViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CCFilterViewController.swift 3 | // CCCamera 4 | // 5 | // Created by cyd on 2018/9/7. 6 | // Copyright © 2018 cyd. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import AVFoundation 11 | 12 | class CCFilterViewController: UIViewController { 13 | 14 | lazy private var preview: CCPreviewView = { 15 | let rect = CGRect(x: 0, y: 0, width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height) 16 | let view = CCPreviewView(frame: rect) 17 | return view 18 | }() 19 | 20 | lazy private var filterLabel: UILabel = { 21 | let rect = CGRect(x: 0, y: self.view.bounds.height/2-80, width: self.view.bounds.width, height: 30) 22 | let label = UILabel() 23 | label.font = UIFont.systemFont(ofSize: 14) 24 | label.textColor = UIColor.red 25 | label.textAlignment = .center 26 | label.frame = rect 27 | label.text = "左右轻扫切换滤镜" 28 | return label 29 | }() 30 | 31 | lazy private var videoFilter: CCFilterRenderer = { 32 | let filter = self.filterRenderers[0] 33 | return filter 34 | }() 35 | 36 | lazy private var filterRenderers: [CCFilterRenderer] = { 37 | let lists = [CCPhotoRenderer("CIPhotoEffectChrome"), 38 | CCPhotoRenderer("CIPhotoEffectFade"), 39 | CCPhotoRenderer("CIPhotoEffectInstant"), 40 | CCPhotoRenderer("CIPhotoEffectMono"), 41 | CCPhotoRenderer("CIPhotoEffectNoir"), 42 | CCPhotoRenderer("CIPhotoEffectProcess"), 43 | CCPhotoRenderer("CIPhotoEffectTonal"), 44 | CCPhotoRenderer("CIPhotoEffectTransfer"), 45 | CCPhotoRenderer("CILinearToSRGBToneCurve"), 46 | CCPhotoRenderer("CISRGBToneCurveToLinear"), 47 | CCPhotoRenderer("CIColorInvert")] 48 | return lists 49 | }() 50 | 51 | private enum SessionSetupResult { 52 | case success 53 | case notAuthorized 54 | case configurationFailed 55 | } 56 | 57 | private let session = AVCaptureSession() 58 | 59 | private let sessionQueue = DispatchQueue(label: "session queue", attributes: [], autoreleaseFrequency: .workItem) 60 | 61 | private let dataOutputQueue = DispatchQueue(label: "video data queue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem) 62 | 63 | private let videoDataOutput = AVCaptureVideoDataOutput() 64 | 65 | private var filterIndex: Int = 0 66 | 67 | private var setupResult: SessionSetupResult = .success 68 | 69 | private var isSessionRunning = false 70 | 71 | private var renderingEnabled = true 72 | 73 | private var videoDeviceInput: AVCaptureDeviceInput! 
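// Threading note: session configuration and start/stop are confined to `sessionQueue`,
// frames are delivered and filtered on `dataOutputQueue` (see the
// AVCaptureVideoDataOutputSampleBufferDelegate extension at the end of this file),
// and only the filtered CVPixelBuffer is handed to `preview` for display.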
74 | 75 | override func viewDidLoad() { 76 | super.viewDidLoad() 77 | self.edgesForExtendedLayout = UIRectEdge.init(rawValue: 0) 78 | self.view.backgroundColor = UIColor.black 79 | self.view.addSubview(preview) 80 | self.view.addSubview(filterLabel) 81 | 82 | let leftSwipeGesture = UISwipeGestureRecognizer(target: self, action: #selector(changeFilterSwipe)) 83 | leftSwipeGesture.direction = .left 84 | preview.addGestureRecognizer(leftSwipeGesture) 85 | 86 | let rightSwipeGesture = UISwipeGestureRecognizer(target: self, action: #selector(changeFilterSwipe)) 87 | rightSwipeGesture.direction = .right 88 | preview.addGestureRecognizer(rightSwipeGesture) 89 | 90 | switch AVCaptureDevice.authorizationStatus(for: .video) { 91 | case .authorized: break 92 | case .notDetermined: 93 | sessionQueue.suspend() 94 | AVCaptureDevice.requestAccess(for: .video, completionHandler: { granted in 95 | if !granted { 96 | self.setupResult = .notAuthorized 97 | } 98 | self.sessionQueue.resume() 99 | }) 100 | default: 101 | setupResult = .notAuthorized 102 | } 103 | 104 | sessionQueue.async { 105 | self.configureSession() 106 | } 107 | } 108 | 109 | override func viewWillAppear(_ animated: Bool) { 110 | super.viewWillAppear(animated) 111 | let statusOrientation = UIApplication.shared.statusBarOrientation 112 | sessionQueue.async { 113 | switch self.setupResult { 114 | case .success: 115 | let devicePosition = self.videoDeviceInput.device.position 116 | let videoOrientation = self.videoDataOutput.connection(with: .video)!.videoOrientation 117 | self.preview.mirroring = (devicePosition == .front) 118 | let rotation = CCPreviewView.Rotation(with: statusOrientation, videoOrientation: videoOrientation, cameraPosition: devicePosition) 119 | if let rotation = rotation { 120 | self.preview.rotation = rotation 121 | } 122 | self.dataOutputQueue.async { 123 | self.renderingEnabled = true 124 | } 125 | self.session.startRunning() 126 | self.isSessionRunning = self.session.isRunning 127 | case .notAuthorized: 128 | print("没有权限") 129 | break 130 | case .configurationFailed: 131 | print("配置会话失败") 132 | break 133 | } 134 | } 135 | } 136 | 137 | override func viewWillDisappear(_ animated: Bool) { 138 | dataOutputQueue.async { 139 | self.renderingEnabled = false 140 | } 141 | sessionQueue.async { 142 | if self.setupResult == .success { 143 | self.session.stopRunning() 144 | self.isSessionRunning = self.session.isRunning 145 | } 146 | } 147 | super.viewWillDisappear(animated) 148 | } 149 | 150 | deinit { 151 | print("deinit: \(self)") 152 | } 153 | 154 | private func configureSession() { 155 | if setupResult != .success { 156 | return 157 | } 158 | 159 | let defaultVideoDevice: AVCaptureDevice? 
= AVCaptureDevice.default(for: .video) 160 | guard let videoDevice = defaultVideoDevice else { 161 | print("Could not find any video device") 162 | setupResult = .configurationFailed 163 | return 164 | } 165 | 166 | do { 167 | videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice) 168 | } catch { 169 | print("Could not create video device input: \(error)") 170 | setupResult = .configurationFailed 171 | return 172 | } 173 | 174 | if session.canAddOutput(videoDataOutput) { 175 | session.addOutput(videoDataOutput) 176 | videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)] 177 | videoDataOutput.setSampleBufferDelegate(self, queue: dataOutputQueue) 178 | } else { 179 | print("Could not add video data output to the session") 180 | setupResult = .configurationFailed 181 | session.commitConfiguration() 182 | return 183 | } 184 | 185 | session.beginConfiguration() 186 | session.sessionPreset = AVCaptureSession.Preset.hd1920x1080 187 | guard session.canAddInput(videoDeviceInput) else { 188 | print("Could not add video device input to the session") 189 | setupResult = .configurationFailed 190 | session.commitConfiguration() 191 | return 192 | } 193 | session.addInput(videoDeviceInput) 194 | session.commitConfiguration() 195 | } 196 | 197 | @objc private func changeFilterSwipe(_ gesture: UISwipeGestureRecognizer) { 198 | if gesture.direction == .left { 199 | filterIndex = (filterIndex + 1) % filterRenderers.count 200 | } else if gesture.direction == .right { 201 | filterIndex = (filterIndex + filterRenderers.count - 1) % filterRenderers.count 202 | } 203 | 204 | let newIndex = filterIndex 205 | let filterDescription = filterRenderers[newIndex].description 206 | self.showFilterLabel(description: filterDescription) 207 | 208 | dataOutputQueue.async { 209 | self.videoFilter.reset() 210 | self.videoFilter = self.filterRenderers[newIndex] 211 | } 212 | } 213 | 214 | private func showFilterLabel(description: String) { 215 | filterLabel.text = description 216 | filterLabel.alpha = 0.0 217 | 218 | UIView.animate(withDuration: 0.25) { 219 | self.filterLabel.alpha = 1.0 220 | } 221 | 222 | UIView.animate(withDuration: 2.0) { 223 | self.filterLabel.alpha = 0.0 224 | } 225 | } 226 | } 227 | 228 | extension CCFilterViewController: AVCaptureVideoDataOutputSampleBufferDelegate { 229 | 230 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 231 | self.processVideo(sampleBuffer: sampleBuffer) 232 | } 233 | 234 | private func processVideo(sampleBuffer: CMSampleBuffer) { 235 | if !renderingEnabled { 236 | return 237 | } 238 | guard let videoPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), 239 | let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) else { 240 | return 241 | } 242 | let filter = self.videoFilter 243 | if !filter.isPrepared { 244 | filter.prepare(with: formatDescription, outputRetainedBufferCountHint: 3) 245 | } 246 | guard let filteredBuffer = filter.render(pixelBuffer: videoPixelBuffer) else { 247 | print("Unable to filter video buffer") 248 | return 249 | } 250 | preview.pixelBuffer = filteredBuffer 251 | } 252 | } 253 | -------------------------------------------------------------------------------- /CCCamera/CCGLRenderCameraViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // CCGLRenderCameraViewController.h 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/29. 
6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "CCBaseViewController.h" 11 | 12 | @interface CCGLRenderCameraViewController : CCBaseViewController 13 | 14 | @end 15 | -------------------------------------------------------------------------------- /CCCamera/CCGLRenderCameraViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // CCGLRenderCameraViewController.m 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/29. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import "CCGLRenderCameraViewController.h" 10 | #import 11 | #import 12 | 13 | @interface CCGLRenderCameraViewController () 14 | { 15 | AVCaptureSession *_captureSession; 16 | } 17 | @property(nonatomic, strong) GLKView *glview; 18 | 19 | @property(nonatomic, strong) CIContext *cicontext; 20 | 21 | @end 22 | 23 | @implementation CCGLRenderCameraViewController 24 | 25 | - (void)viewDidLoad { 26 | [super viewDidLoad]; 27 | 28 | // 上下文和预览视图 29 | EAGLContext *context = [[EAGLContext alloc]initWithAPI:kEAGLRenderingAPIOpenGLES2]; 30 | GLKView *glView = [[GLKView alloc]initWithFrame:self.view.bounds context:context]; 31 | [EAGLContext setCurrentContext:context]; 32 | [self.view addSubview:glView]; 33 | glView.transform = CGAffineTransformMakeRotation(M_PI_2); 34 | glView.frame = [UIApplication sharedApplication].keyWindow.bounds; 35 | _cicontext = [CIContext contextWithEAGLContext:context]; 36 | _glview = glView; 37 | 38 | // 捕捉会话 39 | AVCaptureSession *session = [[AVCaptureSession alloc]init]; 40 | [session setSessionPreset:AVCaptureSessionPreset1920x1080]; 41 | _captureSession = session; 42 | 43 | // 输入 44 | AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 45 | AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil]; 46 | if (videoInput) { 47 | if ([_captureSession canAddInput:videoInput]){ 48 | [_captureSession addInput:videoInput]; 49 | } 50 | } 51 | 52 | // 输出 53 | AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init]; 54 | [videoOut setAlwaysDiscardsLateVideoFrames:YES]; 55 | [videoOut setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]}]; 56 | [videoOut setSampleBufferDelegate:self queue:dispatch_queue_create("video.buffer", DISPATCH_QUEUE_SERIAL)]; 57 | if ([_captureSession canAddOutput:videoOut]){ 58 | [_captureSession addOutput:videoOut]; 59 | } 60 | if (!_captureSession.isRunning){ 61 | [_captureSession startRunning]; 62 | } 63 | } 64 | 65 | - (void)viewWillDisappear:(BOOL)animated { 66 | [super viewWillDisappear:animated]; 67 | [_captureSession stopRunning]; 68 | } 69 | 70 | -(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{ 71 | if (self->_glview.context != [EAGLContext currentContext]) { 72 | [EAGLContext setCurrentContext:self->_glview.context]; 73 | } 74 | CVImageBufferRef imageRef = CMSampleBufferGetImageBuffer(sampleBuffer); 75 | CIImage *image = [CIImage imageWithCVImageBuffer:imageRef]; 76 | dispatch_async(dispatch_get_main_queue(), ^{ 77 | [self->_glview bindDrawable]; 78 | [self->_cicontext drawImage:image inRect:image.extent fromRect:image.extent]; 79 | [self->_glview display]; 80 | }); 81 | } 82 | 83 | - (void)didReceiveMemoryWarning { 84 | [super didReceiveMemoryWarning]; 85 | // Dispose of any resources that can be recreated. 
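    // Nothing to release here: the GLKView, EAGLContext and CIContext created in viewDidLoad are
    // kept for the lifetime of the controller and reused for every frame delivered to
    // captureOutput:didOutputSampleBuffer:fromConnection: above.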
86 | } 87 | 88 | @end 89 | -------------------------------------------------------------------------------- /CCCamera/CCImagePreviewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // CCImagePreviewController.h 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import "CCBaseViewController.h" 10 | 11 | @interface CCImagePreviewController : CCBaseViewController 12 | 13 | + (instancetype)new NS_UNAVAILABLE; 14 | 15 | - (instancetype)init NS_UNAVAILABLE; 16 | 17 | - (instancetype)initWithImage:(UIImage *)image frame:(CGRect)frame NS_DESIGNATED_INITIALIZER; 18 | 19 | @end 20 | -------------------------------------------------------------------------------- /CCCamera/CCImagePreviewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // CCImagePreviewController.m 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import "CCImagePreviewController.h" 10 | 11 | @interface CCImagePreviewController () 12 | { 13 | UIImage *_image; 14 | CGRect _frame; 15 | } 16 | @end 17 | 18 | @implementation CCImagePreviewController 19 | 20 | - (instancetype)initWithImage:(UIImage *)image frame:(CGRect)frame{ 21 | if (self = [super initWithNibName:nil bundle:nil]) { 22 | _image = image; 23 | _frame = frame; 24 | } 25 | return self; 26 | } 27 | 28 | - (instancetype)init{ 29 | @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"Use -initWithImage: frame:" userInfo:nil]; 30 | } 31 | 32 | + (instancetype)new{ 33 | @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"Use -initWithImage: frame:" userInfo:nil]; 34 | } 35 | 36 | -(instancetype)initWithCoder:(NSCoder *)aDecoder{ 37 | return [self initWithImage:nil frame:CGRectZero]; 38 | } 39 | 40 | -(instancetype)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil{ 41 | return [self initWithImage:nil frame:CGRectZero]; 42 | } 43 | 44 | - (void)viewDidLoad { 45 | [super viewDidLoad]; 46 | UIImageView *imageView = [[UIImageView alloc]initWithImage:_image]; 47 | imageView.layer.masksToBounds = YES; 48 | imageView.contentMode = UIViewContentModeScaleAspectFill; 49 | imageView.frame = CGRectMake(0, 0, _frame.size.width, _frame.size.height); 50 | [self.view addSubview:imageView]; 51 | NSLog(@"%ld--%ld", (long)_image.imageOrientation, UIImageOrientationUp); 52 | } 53 | 54 | - (void)didReceiveMemoryWarning { 55 | [super didReceiveMemoryWarning]; 56 | // Dispose of any resources that can be recreated. 57 | } 58 | 59 | @end 60 | -------------------------------------------------------------------------------- /CCCamera/CCMotionManager.h: -------------------------------------------------------------------------------- 1 | // 2 | // CCMotionManager.h 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/29. 6 | // Copyright © 2016年 cyd. All rights reserved. 
7 | // 8 | 9 | #import 10 | #import 11 | 12 | @interface CCMotionManager : NSObject 13 | 14 | @property(nonatomic, assign)UIDeviceOrientation deviceOrientation; 15 | 16 | @property(nonatomic, assign)AVCaptureVideoOrientation videoOrientation; 17 | 18 | @end 19 | -------------------------------------------------------------------------------- /CCCamera/CCMotionManager.m: -------------------------------------------------------------------------------- 1 | // 2 | // CCMotionManager.m 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/29. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import "CCMotionManager.h" 10 | #import 11 | 12 | @interface CCMotionManager() 13 | 14 | @property(nonatomic, strong) CMMotionManager * motionManager; 15 | 16 | @end 17 | 18 | @implementation CCMotionManager 19 | 20 | -(instancetype)init 21 | { 22 | self = [super init]; 23 | if (self) { 24 | _motionManager = [[CMMotionManager alloc] init]; 25 | _motionManager.deviceMotionUpdateInterval = 1/15.0; 26 | if (!_motionManager.deviceMotionAvailable) { 27 | _motionManager = nil; 28 | return self; 29 | } 30 | @weakify(self) 31 | [_motionManager startDeviceMotionUpdatesToQueue:[NSOperationQueue currentQueue] withHandler: ^(CMDeviceMotion *motion, NSError *error){ 32 | @strongify(self) 33 | [self performSelectorOnMainThread:@selector(handleDeviceMotion:) withObject:motion waitUntilDone:YES]; 34 | }]; 35 | } 36 | return self; 37 | } 38 | 39 | - (void)handleDeviceMotion:(CMDeviceMotion *)deviceMotion{ 40 | double x = deviceMotion.gravity.x; 41 | double y = deviceMotion.gravity.y; 42 | if (fabs(y) >= fabs(x)) { 43 | if (y >= 0) { 44 | _deviceOrientation = UIDeviceOrientationPortraitUpsideDown; 45 | _videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown; 46 | } else { 47 | _deviceOrientation = UIDeviceOrientationPortrait; 48 | _videoOrientation = AVCaptureVideoOrientationPortrait; 49 | } 50 | } else { 51 | if (x >= 0) { 52 | _deviceOrientation = UIDeviceOrientationLandscapeRight; 53 | _videoOrientation = AVCaptureVideoOrientationLandscapeRight; 54 | } else { 55 | _deviceOrientation = UIDeviceOrientationLandscapeLeft; 56 | _videoOrientation = AVCaptureVideoOrientationLandscapeLeft; 57 | } 58 | } 59 | } 60 | 61 | -(void)dealloc{ 62 | NSLog(@"陀螺仪对象销毁了"); 63 | [_motionManager stopDeviceMotionUpdates]; 64 | } 65 | 66 | @end 67 | -------------------------------------------------------------------------------- /CCCamera/CCMovieManager.h: -------------------------------------------------------------------------------- 1 | // 2 | // CCMovieManager.h 3 | // CCCamera 4 | // 5 | // Created by cyd on 2018/8/13. 6 | // Copyright © 2018 cyd. All rights reserved. 
7 | // 8 | 9 | #import 10 | #import 11 | 12 | NS_ASSUME_NONNULL_BEGIN 13 | 14 | @interface CCMovieManager : NSObject 15 | 16 | @property(nonatomic, assign) AVCaptureVideoOrientation referenceOrientation; // 视频播放方向 17 | 18 | @property(nonatomic, assign) AVCaptureVideoOrientation currentOrientation; 19 | 20 | @property(nonatomic, strong) AVCaptureDevice *currentDevice; 21 | 22 | - (void)start:(void(^)(NSError *error))handle; 23 | 24 | - (void)stop:(void(^)(NSURL *url, NSError *error))handle; 25 | 26 | - (void)writeData:(AVCaptureConnection *)connection 27 | video:(AVCaptureConnection*)video 28 | audio:(AVCaptureConnection *)audio 29 | buffer:(CMSampleBufferRef)buffer; 30 | 31 | @end 32 | 33 | NS_ASSUME_NONNULL_END 34 | -------------------------------------------------------------------------------- /CCCamera/CCMovieManager.m: -------------------------------------------------------------------------------- 1 | // 2 | // CCMovieManager.m 3 | // CCCamera 4 | // 5 | // Created by cyd on 2018/8/13. 6 | // Copyright © 2018 cyd. All rights reserved. 7 | // 8 | 9 | #import "CCMovieManager.h" 10 | 11 | @interface CCMovieManager() 12 | { 13 | BOOL _readyToRecordVideo; 14 | BOOL _readyToRecordAudio; 15 | dispatch_queue_t _movieWritingQueue; 16 | 17 | NSURL *_movieURL; 18 | AVAssetWriter *_movieWriter; 19 | AVAssetWriterInput *_movieAudioInput; 20 | AVAssetWriterInput *_movieVideoInput; 21 | } 22 | 23 | @end 24 | 25 | @implementation CCMovieManager 26 | 27 | - (instancetype)init { 28 | self = [super init]; 29 | if (self) { 30 | _movieWritingQueue = dispatch_queue_create("Movie.Writing.Queue", DISPATCH_QUEUE_SERIAL); 31 | _movieURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"movie.mov"]]; 32 | _referenceOrientation = AVCaptureVideoOrientationPortrait; 33 | } 34 | return self; 35 | } 36 | 37 | - (void)start:(void(^)(NSError *error))handle{ 38 | [self removeFile:_movieURL]; 39 | dispatch_async(_movieWritingQueue, ^{ 40 | NSError *error; 41 | if (!self->_movieWriter) { 42 | self->_movieWriter = [[AVAssetWriter alloc] initWithURL:self->_movieURL fileType:AVFileTypeQuickTimeMovie error:&error]; 43 | } 44 | handle(error); 45 | }); 46 | } 47 | 48 | - (void)stop:(void(^)(NSURL *url, NSError *error))handle{ 49 | _readyToRecordVideo = NO; 50 | _readyToRecordAudio = NO; 51 | dispatch_async(_movieWritingQueue, ^{ 52 | [self->_movieWriter finishWritingWithCompletionHandler:^(){ 53 | if (self->_movieWriter.status == AVAssetWriterStatusCompleted) { 54 | dispatch_sync(dispatch_get_main_queue(), ^{ 55 | handle(self->_movieURL, nil); 56 | }); 57 | } else { 58 | handle(nil, self->_movieWriter.error); 59 | } 60 | self->_movieWriter = nil; 61 | }]; 62 | }); 63 | } 64 | 65 | - (void)writeData:(AVCaptureConnection *)connection video:(AVCaptureConnection*)video audio:(AVCaptureConnection *)audio buffer:(CMSampleBufferRef)buffer { 66 | CFRetain(buffer); 67 | dispatch_async(_movieWritingQueue, ^{ 68 | if (connection == video){ 69 | if (!self->_readyToRecordVideo){ 70 | self->_readyToRecordVideo = [self setupAssetWriterVideoInput:CMSampleBufferGetFormatDescription(buffer)] == nil; 71 | } 72 | if ([self inputsReadyToRecord]){ 73 | [self writeSampleBuffer:buffer ofType:AVMediaTypeVideo]; 74 | } 75 | } else if (connection == audio){ 76 | if (!self->_readyToRecordAudio){ 77 | self->_readyToRecordAudio = [self setupAssetWriterAudioInput:CMSampleBufferGetFormatDescription(buffer)] == nil; 78 | } 79 | if ([self inputsReadyToRecord]){ 80 | [self writeSampleBuffer:buffer ofType:AVMediaTypeAudio]; 
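                // Note: the writer inputs are configured lazily from the first sample of each type
                // (setupAssetWriterVideoInput: / setupAssetWriterAudioInput: below), so samples that
                // arrive before *both* video and audio are ready are simply dropped.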
81 | } 82 | } 83 | CFRelease(buffer); 84 | }); 85 | } 86 | 87 | - (void)writeSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(NSString *)mediaType{ 88 | if (_movieWriter.status == AVAssetWriterStatusUnknown){ 89 | if ([_movieWriter startWriting]){ 90 | [_movieWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)]; 91 | } else { 92 | NSLog(@"%@", _movieWriter.error); 93 | } 94 | } 95 | if (_movieWriter.status == AVAssetWriterStatusWriting){ 96 | if (mediaType == AVMediaTypeVideo){ 97 | if (!_movieVideoInput.readyForMoreMediaData){ 98 | return; 99 | } 100 | if (![_movieVideoInput appendSampleBuffer:sampleBuffer]){ 101 | NSLog(@"%@", _movieWriter.error); 102 | } 103 | } else if (mediaType == AVMediaTypeAudio){ 104 | if (!_movieAudioInput.readyForMoreMediaData){ 105 | return; 106 | } 107 | if (![_movieAudioInput appendSampleBuffer:sampleBuffer]){ 108 | NSLog(@"%@", _movieWriter.error); 109 | } 110 | } 111 | } 112 | } 113 | 114 | - (BOOL)inputsReadyToRecord{ 115 | return _readyToRecordVideo && _readyToRecordAudio; 116 | } 117 | 118 | /// 音频源数据写入配置 119 | - (NSError *)setupAssetWriterAudioInput:(CMFormatDescriptionRef)currentFormatDescription { 120 | size_t aclSize = 0; 121 | const AudioStreamBasicDescription *currentASBD = CMAudioFormatDescriptionGetStreamBasicDescription(currentFormatDescription); 122 | const AudioChannelLayout *channelLayout = CMAudioFormatDescriptionGetChannelLayout(currentFormatDescription,&aclSize); 123 | NSData *dataLayout = aclSize > 0 ? [NSData dataWithBytes:channelLayout length:aclSize] : [NSData data]; 124 | NSDictionary *settings = @{AVFormatIDKey: [NSNumber numberWithInteger: kAudioFormatMPEG4AAC], 125 | AVSampleRateKey: [NSNumber numberWithFloat: currentASBD->mSampleRate], 126 | AVChannelLayoutKey: dataLayout, 127 | AVNumberOfChannelsKey: [NSNumber numberWithInteger: currentASBD->mChannelsPerFrame], 128 | AVEncoderBitRatePerChannelKey: [NSNumber numberWithInt: 64000]}; 129 | 130 | if ([_movieWriter canApplyOutputSettings:settings forMediaType: AVMediaTypeAudio]){ 131 | _movieAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType: AVMediaTypeAudio outputSettings:settings]; 132 | _movieAudioInput.expectsMediaDataInRealTime = YES; 133 | if ([_movieWriter canAddInput:_movieAudioInput]){ 134 | [_movieWriter addInput:_movieAudioInput]; 135 | } else { 136 | return _movieWriter.error; 137 | } 138 | } else { 139 | return _movieWriter.error; 140 | } 141 | return nil; 142 | } 143 | 144 | /// 视频源数据写入配置 145 | - (NSError *)setupAssetWriterVideoInput:(CMFormatDescriptionRef)currentFormatDescription { 146 | CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(currentFormatDescription); 147 | NSUInteger numPixels = dimensions.width * dimensions.height; 148 | CGFloat bitsPerPixel = numPixels < (640 * 480) ? 
4.05 : 11.0; 149 | NSDictionary *compression = @{AVVideoAverageBitRateKey: [NSNumber numberWithInteger: numPixels * bitsPerPixel], 150 | AVVideoMaxKeyFrameIntervalKey: [NSNumber numberWithInteger:30]}; 151 | NSDictionary *settings = @{AVVideoCodecKey: AVVideoCodecH264, 152 | AVVideoWidthKey: [NSNumber numberWithInteger:dimensions.width], 153 | AVVideoHeightKey: [NSNumber numberWithInteger:dimensions.height], 154 | AVVideoCompressionPropertiesKey: compression}; 155 | 156 | if ([_movieWriter canApplyOutputSettings:settings forMediaType:AVMediaTypeVideo]){ 157 | _movieVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings]; 158 | _movieVideoInput.expectsMediaDataInRealTime = YES; 159 | _movieVideoInput.transform = [self transformFromCurrentVideoOrientationToOrientation:self.referenceOrientation]; 160 | if ([_movieWriter canAddInput:_movieVideoInput]){ 161 | [_movieWriter addInput:_movieVideoInput]; 162 | } else { 163 | return _movieWriter.error; 164 | } 165 | } else { 166 | return _movieWriter.error; 167 | } 168 | return nil; 169 | } 170 | 171 | // 获取视频旋转矩阵 172 | - (CGAffineTransform)transformFromCurrentVideoOrientationToOrientation:(AVCaptureVideoOrientation)orientation{ 173 | CGFloat orientationAngleOffset = [self angleOffsetFromPortraitOrientationToOrientation:orientation]; 174 | CGFloat videoOrientationAngleOffset = [self angleOffsetFromPortraitOrientationToOrientation:self.currentOrientation]; 175 | CGFloat angleOffset; 176 | if (self.currentDevice.position == AVCaptureDevicePositionBack) { 177 | angleOffset = videoOrientationAngleOffset - orientationAngleOffset + M_PI_2; 178 | } else { 179 | angleOffset = orientationAngleOffset - videoOrientationAngleOffset + M_PI_2; 180 | } 181 | CGAffineTransform transform = CGAffineTransformMakeRotation(angleOffset); 182 | return transform; 183 | } 184 | 185 | // 获取视频旋转角度 186 | - (CGFloat)angleOffsetFromPortraitOrientationToOrientation:(AVCaptureVideoOrientation)orientation{ 187 | CGFloat angle = 0.0; 188 | switch (orientation){ 189 | case AVCaptureVideoOrientationPortrait: 190 | angle = 0.0; 191 | break; 192 | case AVCaptureVideoOrientationPortraitUpsideDown: 193 | angle = M_PI; 194 | break; 195 | case AVCaptureVideoOrientationLandscapeRight: 196 | angle = -M_PI_2; 197 | break; 198 | case AVCaptureVideoOrientationLandscapeLeft: 199 | angle = M_PI_2; 200 | break; 201 | } 202 | return angle; 203 | } 204 | 205 | // 移除文件 206 | - (void)removeFile:(NSURL *)fileURL{ 207 | NSFileManager *fileManager = [NSFileManager defaultManager]; 208 | NSString *filePath = fileURL.path; 209 | if ([fileManager fileExistsAtPath:filePath]){ 210 | NSError *error; 211 | BOOL success = [fileManager removeItemAtPath:filePath error:&error]; 212 | if (!success){ 213 | NSLog(@"删除视频文件失败:%@", error); 214 | } else { 215 | NSLog(@"删除视频文件成功"); 216 | } 217 | } 218 | } 219 | 220 | @end 221 | -------------------------------------------------------------------------------- /CCCamera/CCPhotoRenderer.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CCPhotoRenderer.swift 3 | // CCCamera 4 | // 5 | // Created by cyd on 2018/9/7. 6 | // Copyright © 2018 cyd. All rights reserved. 
7 | // 8 | 9 | import CoreMedia 10 | import CoreVideo 11 | import CoreImage 12 | 13 | class CCPhotoRenderer: CCFilterRenderer { 14 | 15 | var description: String { 16 | switch name { 17 | case "CIPhotoEffectChrome": 18 | return name + "(铬黄)" 19 | case "CIPhotoEffectFade": 20 | return name + "(褪色)" 21 | case "CIPhotoEffectInstant": 22 | return name + "(怀旧)" 23 | case "CIPhotoEffectMono": 24 | return name + "(单色)" 25 | case "CIPhotoEffectNoir": 26 | return name + "(黑白)" 27 | case "CIPhotoEffectProcess": 28 | return name + "(冲印)" 29 | case "CIPhotoEffectTonal": 30 | return name + "(色调)" 31 | case "CIPhotoEffectTransfer": 32 | return name + "(岁月)" 33 | default: 34 | return name 35 | } 36 | } 37 | 38 | var isPrepared = false 39 | 40 | private var name: String = "" 41 | 42 | private var ciContext: CIContext? 43 | 44 | private var rosyFilter: CIFilter? 45 | 46 | private var outputColorSpace: CGColorSpace? 47 | 48 | private var outputPixelBufferPool: CVPixelBufferPool? 49 | 50 | private(set) var onputFormatDescription: CMFormatDescription? 51 | 52 | private(set) var inputFormatDescription: CMFormatDescription? 53 | 54 | required init(_ name: String) { 55 | self.name = name 56 | } 57 | 58 | func prepare(with formatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) { 59 | reset() 60 | 61 | (outputPixelBufferPool, outputColorSpace, onputFormatDescription) = allocateOutputBufferPool(with: formatDescription, 62 | outputRetainedBufferCountHint: outputRetainedBufferCountHint) 63 | if outputPixelBufferPool == nil { 64 | return 65 | } 66 | inputFormatDescription = formatDescription 67 | 68 | ciContext = CIContext() 69 | rosyFilter = CIFilter(name: self.name) 70 | isPrepared = true 71 | } 72 | 73 | func reset() { 74 | ciContext = nil 75 | rosyFilter = nil 76 | isPrepared = false 77 | outputColorSpace = nil 78 | outputPixelBufferPool = nil 79 | onputFormatDescription = nil 80 | inputFormatDescription = nil 81 | } 82 | 83 | func render(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer? { 84 | guard let rosyFilter = rosyFilter, let ciContext = ciContext, isPrepared else { 85 | assertionFailure("Invalid state: Not prepared") 86 | return nil 87 | } 88 | 89 | let sourceImage = CIImage(cvImageBuffer: pixelBuffer) 90 | rosyFilter.setValue(sourceImage, forKey: kCIInputImageKey) 91 | guard let filteredImage = rosyFilter.value(forKey: kCIOutputImageKey) as? CIImage else { 92 | print("CIFilter failed to render image") 93 | return nil 94 | } 95 | 96 | var pbuf: CVPixelBuffer? 97 | CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &pbuf) 98 | guard let outputPixelBuffer = pbuf else { 99 | print("Allocation failure") 100 | return nil 101 | } 102 | 103 | ciContext.render(filteredImage, to: outputPixelBuffer, bounds: filteredImage.extent, colorSpace: outputColorSpace) 104 | return outputPixelBuffer 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /CCCamera/CCPreviewView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CCPreviewView.swift 3 | // CCCamera 4 | // 5 | // Created by cyd on 2018/9/10. 6 | // Copyright © 2018 cyd. All rights reserved. 
7 | // 8 | 9 | import Metal 10 | import MetalKit 11 | import CoreMedia 12 | import AVFoundation 13 | 14 | class CCPreviewView: MTKView { 15 | 16 | enum Rotation: Int { 17 | case rotate0Degrees 18 | case rotate90Degrees 19 | case rotate180Degrees 20 | case rotate270Degrees 21 | } 22 | 23 | var mirroring = false { 24 | didSet { 25 | syncQueue.sync { 26 | internalMirroring = mirroring 27 | } 28 | } 29 | } 30 | 31 | var rotation: Rotation = .rotate0Degrees { 32 | didSet { 33 | syncQueue.sync { 34 | internalRotation = rotation 35 | } 36 | } 37 | } 38 | 39 | var pixelBuffer: CVPixelBuffer? { 40 | didSet { 41 | syncQueue.sync { 42 | internalPixelBuffer = pixelBuffer 43 | } 44 | } 45 | } 46 | 47 | private let syncQueue = DispatchQueue(label: "Preview.View.Sync.Queue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem) 48 | 49 | private var commandQueue: MTLCommandQueue? 50 | 51 | private var internalRotation: Rotation = .rotate0Degrees 52 | 53 | private var internalMirroring: Bool = false 54 | 55 | private var internalPixelBuffer: CVPixelBuffer? 56 | 57 | 58 | private var textureCache: CVMetalTextureCache? 59 | 60 | private var textureWidth: Int = 0 61 | 62 | private var textureHeight: Int = 0 63 | 64 | private var textureMirroring = false 65 | 66 | private var textureRotation: Rotation = .rotate0Degrees 67 | 68 | private var textureTranform: CGAffineTransform? 69 | 70 | 71 | private var sampler: MTLSamplerState! 72 | 73 | private var internalBounds: CGRect! 74 | 75 | private var textCoordBuffer: MTLBuffer! 76 | 77 | private var vertexCoordBuffer: MTLBuffer! 78 | 79 | private var renderPipelineState: MTLRenderPipelineState! 80 | 81 | override init(frame frameRect: CGRect, device: MTLDevice? = MTLCreateSystemDefaultDevice()) { 82 | super.init(frame: frameRect, device: device) 83 | self.device = MTLCreateSystemDefaultDevice() 84 | self.colorPixelFormat = .bgra8Unorm 85 | self.configureMetal() 86 | self.createTextureCache() 87 | } 88 | 89 | required init(coder: NSCoder) { 90 | fatalError("init(coder:) has not been implemented") 91 | } 92 | 93 | private func configureMetal() { 94 | let defaultLibrary = device!.makeDefaultLibrary()! 95 | let pipelineDescriptor = MTLRenderPipelineDescriptor() 96 | pipelineDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm 97 | pipelineDescriptor.vertexFunction = defaultLibrary.makeFunction(name: "vertexPassThrough") 98 | pipelineDescriptor.fragmentFunction = defaultLibrary.makeFunction(name: "fragmentPassThrough") 99 | 100 | let samplerDescriptor = MTLSamplerDescriptor() 101 | samplerDescriptor.sAddressMode = .clampToEdge 102 | samplerDescriptor.tAddressMode = .clampToEdge 103 | samplerDescriptor.minFilter = .linear 104 | samplerDescriptor.magFilter = .linear 105 | sampler = device!.makeSamplerState(descriptor: samplerDescriptor) 106 | 107 | do { 108 | renderPipelineState = try device!.makeRenderPipelineState(descriptor: pipelineDescriptor) 109 | } catch { 110 | fatalError("Unable to create preview Metal view pipeline state. (\(error))") 111 | } 112 | 113 | commandQueue = device!.makeCommandQueue() 114 | } 115 | 116 | private func createTextureCache() { 117 | var newTextureCache: CVMetalTextureCache? 
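        // The texture cache lets draw(_:) wrap incoming CVPixelBuffers as Metal textures without
        // copying (see CVMetalTextureCacheCreateTextureFromImage below); it is recreated on demand
        // after flushTextureCache() sets it to nil.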
118 | if CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device!, nil, &newTextureCache) == kCVReturnSuccess { 119 | textureCache = newTextureCache 120 | } else { 121 | assertionFailure("Unable to allocate texture cache") 122 | } 123 | } 124 | 125 | private func setupTransform(width: Int, height: Int, mirroring: Bool, rotation: Rotation) { 126 | var scaleX: Float = 1.0 127 | var scaleY: Float = 1.0 128 | var resizeAspect: Float = 1.0 129 | 130 | internalBounds = self.bounds 131 | textureWidth = width 132 | textureHeight = height 133 | textureMirroring = mirroring 134 | textureRotation = rotation 135 | 136 | if textureWidth > 0 && textureHeight > 0 { 137 | switch textureRotation { 138 | case .rotate0Degrees, .rotate180Degrees: 139 | scaleX = Float(internalBounds.width / CGFloat(textureWidth)) 140 | scaleY = Float(internalBounds.height / CGFloat(textureHeight)) 141 | case .rotate90Degrees, .rotate270Degrees: 142 | scaleX = Float(internalBounds.width / CGFloat(textureHeight)) 143 | scaleY = Float(internalBounds.height / CGFloat(textureWidth)) 144 | } 145 | } 146 | 147 | resizeAspect = min(scaleX, scaleY) 148 | if scaleX < scaleY { 149 | scaleY = scaleX / scaleY 150 | scaleX = 1.0 151 | } else { 152 | scaleX = scaleY / scaleX 153 | scaleY = 1.0 154 | } 155 | 156 | if textureMirroring { 157 | scaleX *= -1.0 158 | } 159 | 160 | let vertexData: [Float] = [-scaleX, -scaleY, 0.0, 1.0, 161 | scaleX, -scaleY, 0.0, 1.0, 162 | -scaleX, scaleY, 0.0, 1.0, 163 | scaleX, scaleY, 0.0, 1.0] 164 | vertexCoordBuffer = device!.makeBuffer(bytes: vertexData, length: vertexData.count * MemoryLayout.size, options: []) 165 | 166 | var textData: [Float] 167 | switch textureRotation { 168 | case .rotate0Degrees: 169 | textData = [0.0, 1.0, 170 | 1.0, 1.0, 171 | 0.0, 0.0, 172 | 1.0, 0.0] 173 | 174 | case .rotate180Degrees: 175 | textData = [1.0, 0.0, 176 | 0.0, 0.0, 177 | 1.0, 1.0, 178 | 0.0, 1.0] 179 | 180 | case .rotate90Degrees: 181 | textData = [1.0, 1.0, 182 | 1.0, 0.0, 183 | 0.0, 1.0, 184 | 0.0, 0.0] 185 | 186 | case .rotate270Degrees: 187 | textData = [0.0, 0.0, 188 | 0.0, 1.0, 189 | 1.0, 0.0, 190 | 1.0, 1.0] 191 | } 192 | textCoordBuffer = device?.makeBuffer(bytes: textData, length: textData.count * MemoryLayout.size, options: []) 193 | 194 | var transform = CGAffineTransform.identity 195 | if textureMirroring { 196 | transform = transform.concatenating(CGAffineTransform(scaleX: -1, y: 1)) 197 | transform = transform.concatenating(CGAffineTransform(translationX: CGFloat(textureWidth), y: 0)) 198 | } 199 | 200 | switch textureRotation { 201 | case .rotate0Degrees: 202 | transform = transform.concatenating(CGAffineTransform(rotationAngle: CGFloat(0))) 203 | 204 | case .rotate180Degrees: 205 | transform = transform.concatenating(CGAffineTransform(rotationAngle: CGFloat(Double.pi))) 206 | transform = transform.concatenating(CGAffineTransform(translationX: CGFloat(textureWidth), y: CGFloat(textureHeight))) 207 | 208 | case .rotate90Degrees: 209 | transform = transform.concatenating(CGAffineTransform(rotationAngle: CGFloat(Double.pi) / 2)) 210 | transform = transform.concatenating(CGAffineTransform(translationX: CGFloat(textureHeight), y: 0)) 211 | 212 | case .rotate270Degrees: 213 | transform = transform.concatenating(CGAffineTransform(rotationAngle: 3 * CGFloat(Double.pi) / 2)) 214 | transform = transform.concatenating(CGAffineTransform(translationX: 0, y: CGFloat(textureWidth))) 215 | } 216 | 217 | transform = transform.concatenating(CGAffineTransform(scaleX: CGFloat(resizeAspect), y: CGFloat(resizeAspect))) 
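        // Finally, centre the scaled texture rect inside the view bounds and store the *inverse*
        // transform, so texturePointForView(point:) can map a point in view coordinates back to a
        // pixel position in the camera texture.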
218 | let tranformRect = CGRect(origin: .zero, size: CGSize(width: textureWidth, height: textureHeight)).applying(transform) 219 | let tx = (internalBounds.size.width - tranformRect.size.width) / 2 220 | let ty = (internalBounds.size.height - tranformRect.size.height) / 2 221 | transform = transform.concatenating(CGAffineTransform(translationX: tx, y: ty)) 222 | textureTranform = transform.inverted() 223 | } 224 | 225 | override func draw(_ rect: CGRect) { 226 | var pixelBuffer: CVPixelBuffer? 227 | var mirroring = false 228 | var rotation: Rotation = .rotate0Degrees 229 | 230 | syncQueue.sync { 231 | pixelBuffer = internalPixelBuffer 232 | mirroring = internalMirroring 233 | rotation = internalRotation 234 | } 235 | 236 | guard let drawable = currentDrawable, 237 | let currentRenderPassDescriptor = currentRenderPassDescriptor, 238 | let previewPixelBuffer = pixelBuffer else { 239 | return 240 | } 241 | 242 | let width = CVPixelBufferGetWidth(previewPixelBuffer) 243 | let height = CVPixelBufferGetHeight(previewPixelBuffer) 244 | 245 | if textureCache == nil { 246 | createTextureCache() 247 | } 248 | var cvTextureOut: CVMetalTexture? 249 | CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, 250 | textureCache!, 251 | previewPixelBuffer, 252 | nil, 253 | .bgra8Unorm, 254 | width, 255 | height, 256 | 0, 257 | &cvTextureOut) 258 | guard let cvTexture = cvTextureOut, let texture = CVMetalTextureGetTexture(cvTexture) else { 259 | print("Failed to create preview texture") 260 | CVMetalTextureCacheFlush(textureCache!, 0) 261 | return 262 | } 263 | 264 | if texture.width != textureWidth || 265 | texture.height != textureHeight || 266 | mirroring != textureMirroring || 267 | rotation != textureRotation || 268 | self.bounds != internalBounds { 269 | setupTransform(width: texture.width, height: texture.height, mirroring: mirroring, rotation: rotation) 270 | } 271 | 272 | guard let commandQueue = commandQueue else { 273 | print("Failed to create Metal command queue") 274 | CVMetalTextureCacheFlush(textureCache!, 0) 275 | return 276 | } 277 | 278 | guard let commandBuffer = commandQueue.makeCommandBuffer() else { 279 | print("Failed to create Metal command buffer") 280 | CVMetalTextureCacheFlush(textureCache!, 0) 281 | return 282 | } 283 | 284 | guard let commandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: currentRenderPassDescriptor) else { 285 | print("Failed to create Metal command encoder") 286 | CVMetalTextureCacheFlush(textureCache!, 0) 287 | return 288 | } 289 | 290 | commandEncoder.label = "Preview display" 291 | commandEncoder.setRenderPipelineState(renderPipelineState!) 292 | commandEncoder.setVertexBuffer(vertexCoordBuffer, offset: 0, index: 0) 293 | commandEncoder.setVertexBuffer(textCoordBuffer, offset: 0, index: 1) 294 | commandEncoder.setFragmentTexture(texture, index: 0) 295 | commandEncoder.setFragmentSamplerState(sampler, index: 0) 296 | commandEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4) 297 | commandEncoder.endEncoding() 298 | commandBuffer.present(drawable) 299 | commandBuffer.commit() 300 | } 301 | 302 | func flushTextureCache() { 303 | textureCache = nil 304 | } 305 | 306 | func texturePointForView(point: CGPoint) -> CGPoint? { 307 | var result: CGPoint? 
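        // textureTranform is only set once setupTransform(width:height:mirroring:rotation:) has run
        // (i.e. after the first frame has been drawn), so this returns nil until then.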
308 | guard let transform = textureTranform else { 309 | return result 310 | } 311 | let transformPoint = point.applying(transform) 312 | 313 | if CGRect(origin: .zero, size: CGSize(width: textureWidth, height: textureHeight)).contains(transformPoint) { 314 | result = transformPoint 315 | } else { 316 | print("Invalid point \(point) result point \(transformPoint)") 317 | } 318 | return result 319 | } 320 | 321 | func viewPointForTexture(point: CGPoint) -> CGPoint? { 322 | var result: CGPoint? 323 | guard let transform = textureTranform?.inverted() else { 324 | return result 325 | } 326 | let transformPoint = point.applying(transform) 327 | 328 | if internalBounds.contains(transformPoint) { 329 | result = transformPoint 330 | } else { 331 | print("Invalid point \(point) result point \(transformPoint)") 332 | } 333 | return result 334 | } 335 | } 336 | 337 | extension CCPreviewView.Rotation { 338 | init?(with interfaceOrientation: UIInterfaceOrientation, videoOrientation: AVCaptureVideoOrientation, cameraPosition: AVCaptureDevice.Position) { 339 | switch videoOrientation { 340 | case .portrait: 341 | switch interfaceOrientation { 342 | case .landscapeRight: 343 | if cameraPosition == .front { 344 | self = .rotate90Degrees 345 | } else { 346 | self = .rotate270Degrees 347 | } 348 | case .landscapeLeft: 349 | if cameraPosition == .front { 350 | self = .rotate270Degrees 351 | } else { 352 | self = .rotate90Degrees 353 | } 354 | case .portrait: 355 | self = .rotate0Degrees 356 | case .portraitUpsideDown: 357 | self = .rotate180Degrees 358 | default: return nil 359 | } 360 | case .portraitUpsideDown: 361 | switch interfaceOrientation { 362 | case .landscapeRight: 363 | if cameraPosition == .front { 364 | self = .rotate270Degrees 365 | } else { 366 | self = .rotate90Degrees 367 | } 368 | case .landscapeLeft: 369 | if cameraPosition == .front { 370 | self = .rotate90Degrees 371 | } else { 372 | self = .rotate270Degrees 373 | } 374 | case .portrait: 375 | self = .rotate180Degrees 376 | case .portraitUpsideDown: 377 | self = .rotate0Degrees 378 | default: return nil 379 | } 380 | case .landscapeRight: 381 | switch interfaceOrientation { 382 | case .landscapeRight: 383 | self = .rotate0Degrees 384 | case .landscapeLeft: 385 | self = .rotate180Degrees 386 | case .portrait: 387 | if cameraPosition == .front { 388 | self = .rotate270Degrees 389 | } else { 390 | self = .rotate90Degrees 391 | } 392 | case .portraitUpsideDown: 393 | if cameraPosition == .front { 394 | self = .rotate90Degrees 395 | } else { 396 | self = .rotate270Degrees 397 | } 398 | default: return nil 399 | } 400 | case .landscapeLeft: 401 | switch interfaceOrientation { 402 | case .landscapeLeft: 403 | self = .rotate0Degrees 404 | case .landscapeRight: 405 | self = .rotate180Degrees 406 | case .portrait: 407 | if cameraPosition == .front { 408 | self = .rotate90Degrees 409 | } else { 410 | self = .rotate270Degrees 411 | } 412 | case .portraitUpsideDown: 413 | if cameraPosition == .front { 414 | self = .rotate270Degrees 415 | } else { 416 | self = .rotate90Degrees 417 | } 418 | default: return nil 419 | } 420 | } 421 | } 422 | } 423 | 424 | 425 | -------------------------------------------------------------------------------- /CCCamera/CCTools.h: -------------------------------------------------------------------------------- 1 | // 2 | // CCTools.h 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 
7 | // 8 | 9 | #import 10 | 11 | @interface CCTools : NSObject 12 | 13 | + (void)createGIFfromURL:(NSURL*)videoURL loopCount:(int)loopCount completion:(void(^)(NSURL *GifURL))completionBlock; 14 | 15 | + (void)createGIFfromURL:(NSURL*)videoURL frameCount:(int)frameCount delayTime:(float)delayTime loopCount:(int)loopCount completion:(void(^)(NSURL *GifURL))completionBlock; 16 | 17 | @end 18 | -------------------------------------------------------------------------------- /CCCamera/CCTools.m: -------------------------------------------------------------------------------- 1 | // 2 | // CCTools.m 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import "CCTools.h" 10 | #import 11 | #import 12 | #import 13 | #import 14 | 15 | #define gifFileName @"gifName.gif" 16 | #define timeInterval @(600) 17 | #define tolerance @(0.01) 18 | 19 | typedef NS_ENUM(NSInteger, GIFSize) { 20 | GIFSizeVeryLow = 2, 21 | GIFSizeLow = 3, 22 | GIFSizeMedium = 5, 23 | GIFSizeHigh = 7, 24 | GIFSizeOriginal = 10 25 | }; 26 | 27 | @implementation CCTools 28 | 29 | + (void)createGIFfromURL:(NSURL*)videoURL loopCount:(int)loopCount completion:(void(^)(NSURL *GifURL))completionBlock{ 30 | // 大小 31 | AVURLAsset *asset = [AVURLAsset assetWithURL:videoURL]; 32 | float videoWidth = [[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize].width; 33 | float videoHeight = [[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize].height; 34 | GIFSize optimalSize = GIFSizeMedium; 35 | if (videoWidth >= 1200 || videoHeight >= 1200){ 36 | optimalSize = GIFSizeVeryLow; 37 | } 38 | else if (videoWidth >= 800 || videoHeight >= 800){ 39 | optimalSize = GIFSizeLow; 40 | } 41 | else if (videoWidth >= 400 || videoHeight >= 400){ 42 | optimalSize = GIFSizeMedium; 43 | } 44 | else if (videoWidth < 400|| videoHeight < 400){ 45 | optimalSize = GIFSizeHigh; 46 | } 47 | 48 | // 每秒取贞的时间点 49 | float videoLength = (float)asset.duration.value/asset.duration.timescale; 50 | int framesPerSecond = 4; 51 | int frameCount = videoLength * framesPerSecond; 52 | float increment = (float)videoLength / frameCount; 53 | NSMutableArray *timePoints = [NSMutableArray array]; 54 | for (int currentFrame = 0; currentFrame < frameCount; ++currentFrame) { 55 | float seconds = (float)increment * currentFrame; 56 | CMTime time = CMTimeMakeWithSeconds(seconds, [timeInterval intValue]); 57 | [timePoints addObject:[NSValue valueWithCMTime:time]]; 58 | } 59 | 60 | // 循环属性 61 | NSDictionary *fileProperties = [self filePropertiesWithLoopCount:loopCount]; 62 | 63 | // 延迟属性 64 | float delayTime = 0.1f; 65 | NSDictionary *frameProperties = [self framePropertiesWithDelayTime:delayTime]; 66 | 67 | dispatch_group_t gifQueue = dispatch_group_create(); 68 | dispatch_group_enter(gifQueue); 69 | __block NSURL *gifURL; 70 | dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ 71 | gifURL = [self createGIFforTimePoints:timePoints fromURL:videoURL fileProperties:fileProperties frameProperties:frameProperties frameCount:frameCount gifSize:optimalSize]; 72 | dispatch_group_leave(gifQueue); 73 | }); 74 | 75 | dispatch_group_notify(gifQueue, dispatch_get_main_queue(), ^{ 76 | completionBlock(gifURL); 77 | }); 78 | } 79 | 80 | + (void)createGIFfromURL:(NSURL*)videoURL frameCount:(int)frameCount delayTime:(float)delayTime loopCount:(int)loopCount completion:(void(^)(NSURL *GifURL))completionBlock{ 81 | // 循环属性 82 | NSDictionary *fileProperties = [self 
filePropertiesWithLoopCount:loopCount]; 83 | 84 | // 延迟属性 85 | NSDictionary *frameProperties = [self framePropertiesWithDelayTime:delayTime]; 86 | 87 | // 大小 88 | AVURLAsset *asset = [AVURLAsset assetWithURL:videoURL]; 89 | float videoLength = (float)asset.duration.value/asset.duration.timescale; 90 | float increment = (float)videoLength/frameCount; 91 | NSMutableArray *timePoints = [NSMutableArray array]; 92 | for (int currentFrame = 0; currentFrame 10 | #import 11 | 12 | @interface CCVideoPreview : UIView 13 | 14 | @property (strong, nonatomic) AVCaptureSession *captureSessionsion; 15 | 16 | - (CGPoint)captureDevicePointForPoint:(CGPoint)point; 17 | 18 | @end 19 | -------------------------------------------------------------------------------- /CCCamera/CCVideoPreview.m: -------------------------------------------------------------------------------- 1 | // 2 | // CCVideoPreview.m 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import "CCVideoPreview.h" 10 | 11 | @implementation CCVideoPreview 12 | 13 | - (instancetype)initWithFrame:(CGRect)frame 14 | { 15 | self = [super initWithFrame:frame]; 16 | if (self) { 17 | [(AVCaptureVideoPreviewLayer *)self.layer setVideoGravity:AVLayerVideoGravityResizeAspectFill]; 18 | } 19 | return self; 20 | } 21 | 22 | - (AVCaptureSession*)captureSessionsion { 23 | return [(AVCaptureVideoPreviewLayer*)self.layer session]; 24 | } 25 | 26 | - (void)setCaptureSessionsion:(AVCaptureSession *)session { 27 | [(AVCaptureVideoPreviewLayer*)self.layer setSession:session]; 28 | } 29 | 30 | - (CGPoint)captureDevicePointForPoint:(CGPoint)point { 31 | AVCaptureVideoPreviewLayer *layer = (AVCaptureVideoPreviewLayer *)self.layer; 32 | return [layer captureDevicePointOfInterestForPoint:point]; 33 | } 34 | 35 | // 使该view的layer方法返回AVCaptureVideoPreviewLayer对象 36 | + (Class)layerClass { 37 | return [AVCaptureVideoPreviewLayer class]; 38 | } 39 | 40 | @end 41 | -------------------------------------------------------------------------------- /CCCamera/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleSignature 20 | ???? 21 | CFBundleVersion 22 | 1 23 | LSRequiresIPhoneOS 24 | 25 | NSCameraUsageDescription 26 | CCCamera将访问你的相机 27 | NSMicrophoneUsageDescription 28 | CCCamera将访问你的麦克风 29 | NSPhotoLibraryUsageDescription 30 | CCCamera将访问你的相册 31 | UILaunchStoryboardName 32 | LaunchScreen 33 | UIMainStoryboardFile 34 | Main 35 | UIRequiredDeviceCapabilities 36 | 37 | armv7 38 | 39 | UISupportedInterfaceOrientations 40 | 41 | UIInterfaceOrientationPortrait 42 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /CCCamera/PassThrough.metal: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE.txt for this sample’s licensing information. 3 | 4 | Abstract: 5 | Pass-through shader (used for preview). 
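 The vertex stage forwards the full-screen quad positions and texture coordinates supplied by
 CCPreviewView (vertex buffers 0 and 1); the fragment stage samples the camera texture unchanged.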
6 | */ 7 | 8 | #include 9 | using namespace metal; 10 | 11 | struct VertexIO 12 | { 13 | float4 position [[position]]; 14 | float2 textureCoord [[user(texturecoord)]]; 15 | }; 16 | 17 | vertex VertexIO vertexPassThrough(device packed_float4 *pPosition [[ buffer(0) ]], 18 | device packed_float2 *pTexCoords [[ buffer(1) ]], 19 | uint vid [[ vertex_id ]]) 20 | { 21 | VertexIO outVertex; 22 | outVertex.position = pPosition[vid]; 23 | outVertex.textureCoord = pTexCoords[vid]; 24 | return outVertex; 25 | } 26 | 27 | fragment half4 fragmentPassThrough(VertexIO inputFragment [[ stage_in ]], 28 | texture2d inputTexture [[ texture(0) ]], 29 | sampler samplr [[ sampler(0) ]]) 30 | { 31 | return inputTexture.sample(samplr, inputFragment.textureCoord); 32 | } 33 | -------------------------------------------------------------------------------- /CCCamera/UIView+CCAdditions.h: -------------------------------------------------------------------------------- 1 | // 2 | // UIView+CCAdditions.h 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface UIView (CCAdditions) 12 | 13 | @property (nonatomic) CGFloat top; 14 | @property (nonatomic) CGFloat left; 15 | @property (nonatomic) CGFloat right; 16 | @property (nonatomic) CGFloat bottom; 17 | 18 | @property (nonatomic) CGFloat width; 19 | @property (nonatomic) CGFloat height; 20 | 21 | @property (nonatomic) CGFloat centerX; 22 | @property (nonatomic) CGFloat centerY; 23 | 24 | @property (nonatomic) CGPoint origin; 25 | @property (nonatomic) CGSize size; 26 | 27 | - (UIViewController *)viewController; 28 | 29 | @end 30 | -------------------------------------------------------------------------------- /CCCamera/UIView+CCAdditions.m: -------------------------------------------------------------------------------- 1 | // 2 | // UIView+CCAdditions.m 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 
7 | // 8 | 9 | #import "UIView+CCAdditions.h" 10 | 11 | @implementation UIView (CCAdditions) 12 | 13 | - (CGFloat)top { 14 | return self.frame.origin.y; 15 | } 16 | 17 | - (void)setTop:(CGFloat)y { 18 | CGRect frame = self.frame; 19 | frame.origin.y = y; 20 | self.frame = frame; 21 | } 22 | 23 | - (CGFloat)left { 24 | return self.frame.origin.x; 25 | } 26 | 27 | - (void)setLeft:(CGFloat)x { 28 | CGRect frame = self.frame; 29 | frame.origin.x = x; 30 | self.frame = frame; 31 | } 32 | 33 | - (CGFloat)right { 34 | return self.frame.origin.x + self.frame.size.width; 35 | } 36 | 37 | - (void)setRight:(CGFloat)right { 38 | CGRect frame = self.frame; 39 | frame.origin.x = right - frame.size.width; 40 | self.frame = frame; 41 | } 42 | 43 | - (CGFloat)bottom { 44 | return self.frame.origin.y + self.frame.size.height; 45 | } 46 | 47 | - (void)setBottom:(CGFloat)bottom { 48 | CGRect frame = self.frame; 49 | frame.origin.y = bottom - frame.size.height; 50 | self.frame = frame; 51 | } 52 | 53 | - (CGFloat)centerX { 54 | return self.center.x; 55 | } 56 | 57 | - (void)setCenterX:(CGFloat)centerX { 58 | self.center = CGPointMake(centerX, self.center.y); 59 | } 60 | 61 | - (CGFloat)centerY { 62 | return self.center.y; 63 | } 64 | 65 | - (void)setCenterY:(CGFloat)centerY { 66 | self.center = CGPointMake(self.center.x, centerY); 67 | } 68 | 69 | - (CGFloat)width { 70 | return self.frame.size.width; 71 | } 72 | 73 | - (void)setWidth:(CGFloat)width { 74 | CGRect frame = self.frame; 75 | frame.size.width = width; 76 | self.frame = frame; 77 | } 78 | 79 | - (CGFloat)height { 80 | return self.frame.size.height; 81 | } 82 | 83 | - (void)setHeight:(CGFloat)height { 84 | CGRect frame = self.frame; 85 | frame.size.height = height; 86 | self.frame = frame; 87 | } 88 | 89 | - (CGPoint)origin { 90 | return self.frame.origin; 91 | } 92 | 93 | - (void)setOrigin:(CGPoint)origin { 94 | CGRect frame = self.frame; 95 | frame.origin = origin; 96 | self.frame = frame; 97 | } 98 | 99 | - (CGSize)size { 100 | return self.frame.size; 101 | } 102 | 103 | - (void)setSize:(CGSize)size { 104 | CGRect frame = self.frame; 105 | frame.size = size; 106 | self.frame = frame; 107 | } 108 | 109 | - (UIViewController *)viewController 110 | { 111 | if ([[self nextResponder] isKindOfClass:[UIViewController class]]) { 112 | return (UIViewController *)[self nextResponder]; 113 | } 114 | 115 | for (UIView* next = [self superview]; next; next = next.superview) 116 | { 117 | UIResponder *nextResponder = [next nextResponder]; 118 | if ([nextResponder isKindOfClass:[UIViewController class]]) 119 | { 120 | return (UIViewController *)nextResponder; 121 | } 122 | } 123 | return nil; 124 | } 125 | 126 | @end 127 | -------------------------------------------------------------------------------- /CCCamera/UIView+CCHUD.h: -------------------------------------------------------------------------------- 1 | // 2 | // UIView+CCHUD.h 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/24. 6 | // Copyright © 2016年 cyd. All rights reserved. 
7 | // 8 | 9 | #import 10 | 11 | @interface UIView (CCHUD) 12 | 13 | @property(nonatomic, strong ,readonly)UIAlertController *alertController; 14 | 15 | // 加载框 16 | -(void)showHUD:(NSString *)message; // 没有菊花 17 | 18 | -(void)showLoadHUD:(NSString *)message; // 有菊花 19 | 20 | -(void)hideHUD; 21 | 22 | // 提示框 23 | -(void)showAutoDismissHUD:(NSString *)message; 24 | 25 | -(void)showAutoDismissHUD:(NSString *)message delay:(NSTimeInterval)delay; 26 | 27 | // 弹出框 28 | -(void)showError:(NSError *)error; 29 | 30 | -(void)showAlertView:(NSString *)message ok:(void(^)(UIAlertAction * action))ok cancel:(void(^)(UIAlertAction * action))cancel; 31 | 32 | @end 33 | -------------------------------------------------------------------------------- /CCCamera/UIView+CCHUD.m: -------------------------------------------------------------------------------- 1 | // 2 | // UIView+CCHUD.m 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/24. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import "UIView+CCHUD.h" 10 | #import 11 | 12 | #define KEY_CC_ALERT_VIEW "UIView.AlertController" 13 | 14 | @implementation UIView (CCHUD) 15 | @dynamic alertController; 16 | 17 | -(UIAlertController *)alertController{ 18 | NSObject * obj = objc_getAssociatedObject(self, KEY_CC_ALERT_VIEW); 19 | if (obj && [obj isKindOfClass:[UIAlertController class]]){ 20 | return (UIAlertController *)obj; 21 | } 22 | return nil; 23 | } 24 | 25 | -(void)setAlertController:(UIAlertController *)alertController 26 | { 27 | if (nil == alertController){ return; } 28 | objc_setAssociatedObject(self, KEY_CC_ALERT_VIEW, alertController, OBJC_ASSOCIATION_RETAIN_NONATOMIC); 29 | } 30 | 31 | #pragma mark - 加载框 32 | -(void)showHUD:(NSString *)message{ 33 | [self showHUD:message isLoad:NO]; 34 | } 35 | 36 | -(void)showLoadHUD:(NSString *)message{ 37 | [self showHUD:message isLoad:YES]; 38 | } 39 | 40 | -(void)showHUD:(NSString *)message isLoad:(BOOL)isLoad{ 41 | UIAlertController *alertController = [self getAVC]; 42 | alertController.message = [NSString stringWithFormat:@"\n\n\n%@", message]; 43 | if (isLoad) { 44 | [self findLabel:alertController.view succ:^(UIView *label) { 45 | dispatch_async(dispatch_get_main_queue(), ^{ 46 | UIActivityIndicatorView *activityView = [[UIActivityIndicatorView alloc]initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhiteLarge]; 47 | activityView.color = [UIColor lightGrayColor]; 48 | activityView.center = CGPointMake(label.width/2, 25); 49 | [label addSubview:activityView]; 50 | [activityView startAnimating]; 51 | }); 52 | }]; 53 | } 54 | [self.viewController presentViewController:alertController animated:YES completion:nil]; 55 | } 56 | 57 | #pragma mark - 提示框 58 | -(void)showAutoDismissHUD:(NSString *)message{ 59 | [self showAutoDismissHUD:message delay:0.3]; 60 | } 61 | 62 | -(void)showAutoDismissHUD:(NSString *)message delay:(NSTimeInterval)delay{ 63 | UIAlertController *alertController = [self getAVC]; 64 | alertController.message = message; 65 | [self.viewController presentViewController:alertController animated:YES completion:nil]; 66 | [NSTimer scheduledTimerWithTimeInterval:delay 67 | target:self 68 | selector:@selector(hideHUD) 69 | userInfo:alertController 70 | repeats:NO]; 71 | } 72 | 73 | -(void)hideHUD{ 74 | [[self getAVC] dismissViewControllerAnimated:YES completion:nil]; 75 | } 76 | 77 | #pragma mark - 弹出框 78 | - (void)showError:(NSError *)error{ 79 | dispatch_async(dispatch_get_main_queue(), ^{ 80 | [self showAlertView:error.localizedDescription ok:^(UIAlertAction *action) { 
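            // Left empty: passing a non-nil ok block only makes showAlertView:ok:cancel: below add a
            // confirm button; tapping it dismisses the alert with no further handling.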
81 | 82 | } cancel:nil]; 83 | }); 84 | } 85 | 86 | -(void)showAlertView:(NSString *)message ok:(void(^)(UIAlertAction * action))ok cancel:(void(^)(UIAlertAction * action))cancel{ 87 | UIAlertController *alertController = [UIAlertController alertControllerWithTitle:nil 88 | message:message 89 | preferredStyle:UIAlertControllerStyleAlert]; 90 | if (cancel) { 91 | UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:@"取消" style:UIAlertActionStyleCancel handler:^(UIAlertAction * _Nonnull action) { 92 | !cancel ? : cancel(action) ; 93 | }]; 94 | [alertController addAction:cancelAction]; 95 | } 96 | if (ok) { 97 | UIAlertAction *okAction = [UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) { 98 | !ok ? : ok(action) ; 99 | }]; 100 | [alertController addAction:okAction]; 101 | } 102 | [self.viewController presentViewController:alertController animated:YES completion:nil]; 103 | } 104 | 105 | #pragma mark - Private methods 106 | -(void)findLabel:(UIView*)view succ:(void(^)(UIView *label))succ{ 107 | for (UIView* subView in view.subviews) 108 | { 109 | if ([subView isKindOfClass:[UILabel class]]) { 110 | if (succ) { 111 | succ(subView); 112 | } 113 | } 114 | [self findLabel:subView succ:succ]; 115 | } 116 | } 117 | 118 | -(UIAlertController *)getAVC{ 119 | if (!self.alertController) { 120 | self.alertController = [UIAlertController alertControllerWithTitle:nil 121 | message:@"" 122 | preferredStyle:UIAlertControllerStyleAlert]; 123 | } 124 | return self.alertController; 125 | } 126 | 127 | @end 128 | -------------------------------------------------------------------------------- /CCCamera/ViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.h 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 7 | // 8 | 9 | #import "CCBaseViewController.h" 10 | 11 | @interface ViewController : CCBaseViewController 12 | 13 | 14 | @end 15 | 16 | -------------------------------------------------------------------------------- /CCCamera/ViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.m 3 | // CCCamera 4 | // 5 | // Created by wsk on 16/8/22. 6 | // Copyright © 2016年 cyd. All rights reserved. 
//

#import "ViewController.h"
#import <objc/runtime.h>

@interface ViewController () <UITableViewDelegate, UITableViewDataSource>

@property (nonatomic, strong) UITableView *tableView;
@property (nonatomic, strong) NSArray *dataSource;

@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    self.title = @"CCCamera";
    // Each entry is "title.class[.swift]": 相机 = camera, 滤镜 = filter.
    self.dataSource = @[@"相机.CCCameraViewController",
                        @"滤镜.CCFilterViewController.swift",
                        @"OpenGL ES.CCGLRenderCameraViewController"];
    [self.view addSubview:self.tableView];
    // UIColor(hex, alpha) is a project-wide convenience macro.
    [[UITableViewHeaderFooterView appearance] setTintColor:UIColor(0xebf5ff, 1)];
}

- (UITableView *)tableView {
    if (_tableView == nil) {
        _tableView = [[UITableView alloc] initWithFrame:self.view.bounds];
        _tableView.delegate = self;
        _tableView.dataSource = self;
        _tableView.tableFooterView = [[UIView alloc] initWithFrame:CGRectZero];
        _tableView.backgroundColor = [UIColor clearColor];
    }
    return _tableView;
}

#pragma mark - UITableViewDataSource / UITableViewDelegate
- (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView {
    return _dataSource.count;
}

- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
    return 1;
}

- (CGFloat)tableView:(UITableView *)tableView heightForHeaderInSection:(NSInteger)section {
    return 15;
}

- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath {
    static NSString *identifier = @"cameraCell";
    UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:identifier];
    if (cell == nil) {
        cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:identifier];
        cell.backgroundColor = [UIColor whiteColor];
        cell.textLabel.font = [UIFont systemFontOfSize:20];
    }
    cell.textLabel.text = [[_dataSource[indexPath.section] componentsSeparatedByString:@"."] firstObject];
    return cell;
}

- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath {
    [tableView deselectRowAtIndexPath:indexPath animated:YES];
    NSArray *names = [_dataSource[indexPath.section] componentsSeparatedByString:@"."];
    // Swift classes must be resolved with their module prefix ("CCCamera.ClassName").
    NSString *name = [names.lastObject isEqualToString:@"swift"] ? [NSString stringWithFormat:@"CCCamera.%@", names[1]] : names.lastObject;
    const char *className = [name UTF8String];
    Class pushClass = objc_getClass(className);
    if (object_isClass(pushClass)) {
        id vc = [[pushClass alloc] init];
        [self.navigationController pushViewController:vc animated:YES];
    }
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end

--------------------------------------------------------------------------------
/CCCamera/main.m:
--------------------------------------------------------------------------------
//
//  main.m
//  CCCamera
//
//  Created by wsk on 16/8/22.
//  Copyright © 2016 cyd. All rights reserved.
//

#import <UIKit/UIKit.h>
#import "AppDelegate.h"

int main(int argc, char * argv[]) {
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}

--------------------------------------------------------------------------------
/CCCameraTests/CCCameraTests.m:
--------------------------------------------------------------------------------
//
//  CCCameraTests.m
//  CCCameraTests
//
//  Created by wsk on 16/8/22.
//  Copyright © 2016 cyd. All rights reserved.
//

#import <XCTest/XCTest.h>

@interface CCCameraTests : XCTestCase

@end

@implementation CCCameraTests

- (void)setUp {
    [super setUp];
    // Put setup code here. This method is called before the invocation of each test method in the class.
}

- (void)tearDown {
    // Put teardown code here. This method is called after the invocation of each test method in the class.
    [super tearDown];
}

- (void)testExample {
    // This is an example of a functional test case.
    // Use XCTAssert and related functions to verify your tests produce the correct results.
}

- (void)testPerformanceExample {
    // This is an example of a performance test case.
    [self measureBlock:^{
        // Put the code you want to measure the time of here.
    }];
}

@end

--------------------------------------------------------------------------------
/CCCameraTests/Info.plist:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>CFBundleDevelopmentRegion</key>
	<string>en</string>
	<key>CFBundleExecutable</key>
	<string>$(EXECUTABLE_NAME)</string>
	<key>CFBundleIdentifier</key>
	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
	<key>CFBundleInfoDictionaryVersion</key>
	<string>6.0</string>
	<key>CFBundleName</key>
	<string>$(PRODUCT_NAME)</string>
	<key>CFBundlePackageType</key>
	<string>BNDL</string>
	<key>CFBundleShortVersionString</key>
	<string>1.0</string>
	<key>CFBundleSignature</key>
	<string>????</string>
	<key>CFBundleVersion</key>
	<string>1</string>
</dict>
</plist>

--------------------------------------------------------------------------------
/CCCameraUITests/CCCameraUITests.m:
--------------------------------------------------------------------------------
//
//  CCCameraUITests.m
//  CCCameraUITests
//
//  Created by wsk on 16/8/22.
//  Copyright © 2016 cyd. All rights reserved.
//

#import <XCTest/XCTest.h>

@interface CCCameraUITests : XCTestCase

@end

@implementation CCCameraUITests

- (void)setUp {
    [super setUp];

    // Put setup code here. This method is called before the invocation of each test method in the class.

    // In UI tests it is usually best to stop immediately when a failure occurs.
    self.continueAfterFailure = NO;
    // UI tests must launch the application that they test. Doing this in setup will make sure it happens for each test method.
    [[[XCUIApplication alloc] init] launch];

    // In UI tests it's important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this.
}

- (void)tearDown {
    // Put teardown code here. This method is called after the invocation of each test method in the class.
    [super tearDown];
}

- (void)testExample {
    // Use recording to get started writing UI tests.
    // Use XCTAssert and related functions to verify your tests produce the correct results.
}

@end

--------------------------------------------------------------------------------
/CCCameraUITests/Info.plist:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>CFBundleDevelopmentRegion</key>
	<string>en</string>
	<key>CFBundleExecutable</key>
	<string>$(EXECUTABLE_NAME)</string>
	<key>CFBundleIdentifier</key>
	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
	<key>CFBundleInfoDictionaryVersion</key>
	<string>6.0</string>
	<key>CFBundleName</key>
	<string>$(PRODUCT_NAME)</string>
	<key>CFBundlePackageType</key>
	<string>BNDL</string>
	<key>CFBundleShortVersionString</key>
	<string>1.0</string>
	<key>CFBundleSignature</key>
	<string>????</string>
	<key>CFBundleVersion</key>
	<string>1</string>
</dict>
</plist>

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# CCCamera
--------------------------------------------------------------------------------
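
## Usage sketch: UIView+CCHUD

A minimal sketch of the HUD category declared in `UIView+CCHUD.h`, assuming it is called from a view controller whose view is already attached to a window. The method names come from the header above; the message strings and delays are illustrative only.

```objc
#import "UIView+CCHUD.h"

// Present a loading HUD with a spinner while some work runs,
// then hide it two seconds later.
[self.view showLoadHUD:@"Saving…"];
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
    [self.view hideHUD];
});

// Or flash a toast that dismisses itself after one second:
// [self.view showAutoDismissHUD:@"Saved" delay:1.0];
```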