├── WeChatSightDemo.xcodeproj
│   ├── project.pbxproj
│   ├── project.xcworkspace
│   │   └── contents.xcworkspacedata
│   └── xcuserdata
│       └── wukong.xcuserdatad
│           ├── xcdebugger
│           │   └── Breakpoints_v2.xcbkptlist
│           └── xcschemes
│               ├── WeChatSightDemo.xcscheme
│               └── xcschememanagement.plist
├── WeChatSightDemo
│   ├── AppDelegate.h
│   ├── AppDelegate.m
│   ├── Assets.xcassets
│   │   ├── AppIcon.appiconset
│   │   │   └── Contents.json
│   │   ├── Contents.json
│   │   └── sight_video_focus.imageset
│   │       ├── Contents.json
│   │       ├── sight_video_focus@2x.png
│   │       └── sight_video_focus@3x.png
│   ├── Base.lproj
│   │   ├── LaunchScreen.storyboard
│   │   └── Main.storyboard
│   ├── Category
│   │   ├── UIImageView+PlayGIF.h
│   │   └── UIImageView+PlayGIF.m
│   ├── Controller
│   │   ├── WCSCaptureViewController.h
│   │   ├── WCSCaptureViewController.m
│   │   ├── WCSPlayMovieController.h
│   │   ├── WCSPlayMovieController.m
│   │   ├── WCSPreviewViewController.h
│   │   └── WCSPreviewViewController.m
│   ├── Info.plist
│   ├── View
│   │   ├── WKScaleButton.h
│   │   └── WKScaleButton.m
│   ├── ViewController.h
│   ├── ViewController.m
│   ├── WKMovieRecorder
│   │   ├── WKMovieRecorder.h
│   │   ├── WKMovieRecorder.m
│   │   ├── WKMovieWriter.h
│   │   ├── WKMovieWriter.m
│   │   ├── WKProgressView.h
│   │   ├── WKProgressView.m
│   │   ├── WKTestAudioOutput.h
│   │   ├── WKTestAudioOutput.m
│   │   ├── WKVideoConverter.h
│   │   └── WKVideoConverter.m
│   └── main.m
├── WeChatSightDemoTests
│   ├── Info.plist
│   └── WeChatSightDemoTests.m
└── WeChatSightDemoUITests
    ├── Info.plist
    └── WeChatSightDemoUITests.m
/WeChatSightDemo.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 46; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 679CA8A91D6FE3F300CA0EE4 /* WCSPreviewViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 679CA8A81D6FE3F300CA0EE4 /* WCSPreviewViewController.m */; }; 11 | 679CA8AD1D7019D400CA0EE4 /* UIImageView+PlayGIF.m in Sources */ = {isa = PBXBuildFile; fileRef = 679CA8AC1D7019D400CA0EE4 /* UIImageView+PlayGIF.m */; }; 12 | 679CA8B01D7023BA00CA0EE4 /* WCSPlayMovieController.m in Sources */ = {isa = PBXBuildFile; fileRef = 679CA8AF1D7023BA00CA0EE4 /* WCSPlayMovieController.m */; }; 13 | 67C63BC11D66FB8F001A53D4 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 67C63BC01D66FB8F001A53D4 /* main.m */; }; 14 | 67C63BC41D66FB8F001A53D4 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 67C63BC31D66FB8F001A53D4 /* AppDelegate.m */; }; 15 | 67C63BC71D66FB8F001A53D4 /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 67C63BC61D66FB8F001A53D4 /* ViewController.m */; }; 16 | 67C63BCA1D66FB8F001A53D4 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 67C63BC81D66FB8F001A53D4 /* Main.storyboard */; }; 17 | 67C63BCC1D66FB8F001A53D4 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 67C63BCB1D66FB8F001A53D4 /* Assets.xcassets */; }; 18 | 67C63BCF1D66FB8F001A53D4 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 67C63BCD1D66FB8F001A53D4 /* LaunchScreen.storyboard */; }; 19 | 67C63BDA1D66FB8F001A53D4 /* WeChatSightDemoTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 67C63BD91D66FB8F001A53D4 /* WeChatSightDemoTests.m */; }; 20 | 67C63BE51D66FB8F001A53D4 /* WeChatSightDemoUITests.m in Sources */ = {isa = PBXBuildFile; fileRef = 67C63BE41D66FB8F001A53D4 /* WeChatSightDemoUITests.m */; }; 21 | 67C63BF91D66FC76001A53D4 /* WCSCaptureViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 67C63BF81D66FC76001A53D4 /* WCSCaptureViewController.m */; }; 22 |
67C63C051D66FCA6001A53D4 /* WKMovieRecorder.m in Sources */ = {isa = PBXBuildFile; fileRef = 67C63BFC1D66FCA6001A53D4 /* WKMovieRecorder.m */; }; 23 | 67C63C061D66FCA6001A53D4 /* WKMovieWriter.m in Sources */ = {isa = PBXBuildFile; fileRef = 67C63BFE1D66FCA6001A53D4 /* WKMovieWriter.m */; }; 24 | 67C63C071D66FCA6001A53D4 /* WKProgressView.m in Sources */ = {isa = PBXBuildFile; fileRef = 67C63C001D66FCA6001A53D4 /* WKProgressView.m */; }; 25 | 67C63C081D66FCA6001A53D4 /* WKTestAudioOutput.m in Sources */ = {isa = PBXBuildFile; fileRef = 67C63C021D66FCA6001A53D4 /* WKTestAudioOutput.m */; }; 26 | 67C63C091D66FCA6001A53D4 /* WKVideoConverter.m in Sources */ = {isa = PBXBuildFile; fileRef = 67C63C041D66FCA6001A53D4 /* WKVideoConverter.m */; }; 27 | 67C63C0C1D6704D7001A53D4 /* WKScaleButton.m in Sources */ = {isa = PBXBuildFile; fileRef = 67C63C0B1D6704D7001A53D4 /* WKScaleButton.m */; }; 28 | /* End PBXBuildFile section */ 29 | 30 | /* Begin PBXContainerItemProxy section */ 31 | 67C63BD61D66FB8F001A53D4 /* PBXContainerItemProxy */ = { 32 | isa = PBXContainerItemProxy; 33 | containerPortal = 67C63BB41D66FB8E001A53D4 /* Project object */; 34 | proxyType = 1; 35 | remoteGlobalIDString = 67C63BBB1D66FB8E001A53D4; 36 | remoteInfo = WeChatSightDemo; 37 | }; 38 | 67C63BE11D66FB8F001A53D4 /* PBXContainerItemProxy */ = { 39 | isa = PBXContainerItemProxy; 40 | containerPortal = 67C63BB41D66FB8E001A53D4 /* Project object */; 41 | proxyType = 1; 42 | remoteGlobalIDString = 67C63BBB1D66FB8E001A53D4; 43 | remoteInfo = WeChatSightDemo; 44 | }; 45 | /* End PBXContainerItemProxy section */ 46 | 47 | /* Begin PBXFileReference section */ 48 | 679CA8A71D6FE3F300CA0EE4 /* WCSPreviewViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WCSPreviewViewController.h; sourceTree = ""; }; 49 | 679CA8A81D6FE3F300CA0EE4 /* WCSPreviewViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = WCSPreviewViewController.m; sourceTree = ""; }; 50 | 679CA8AB1D7019D400CA0EE4 /* UIImageView+PlayGIF.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "UIImageView+PlayGIF.h"; sourceTree = ""; }; 51 | 679CA8AC1D7019D400CA0EE4 /* UIImageView+PlayGIF.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "UIImageView+PlayGIF.m"; sourceTree = ""; }; 52 | 679CA8AE1D7023BA00CA0EE4 /* WCSPlayMovieController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WCSPlayMovieController.h; sourceTree = ""; }; 53 | 679CA8AF1D7023BA00CA0EE4 /* WCSPlayMovieController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = WCSPlayMovieController.m; sourceTree = ""; }; 54 | 67C63BBC1D66FB8E001A53D4 /* WeChatSightDemo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = WeChatSightDemo.app; sourceTree = BUILT_PRODUCTS_DIR; }; 55 | 67C63BC01D66FB8F001A53D4 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 56 | 67C63BC21D66FB8F001A53D4 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 57 | 67C63BC31D66FB8F001A53D4 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 58 | 67C63BC51D66FB8F001A53D4 /* ViewController.h */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; }; 59 | 67C63BC61D66FB8F001A53D4 /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = ""; }; 60 | 67C63BC91D66FB8F001A53D4 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 61 | 67C63BCB1D66FB8F001A53D4 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 62 | 67C63BCE1D66FB8F001A53D4 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 63 | 67C63BD01D66FB8F001A53D4 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 64 | 67C63BD51D66FB8F001A53D4 /* WeChatSightDemoTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = WeChatSightDemoTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 65 | 67C63BD91D66FB8F001A53D4 /* WeChatSightDemoTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = WeChatSightDemoTests.m; sourceTree = ""; }; 66 | 67C63BDB1D66FB8F001A53D4 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 67 | 67C63BE01D66FB8F001A53D4 /* WeChatSightDemoUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = WeChatSightDemoUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 68 | 67C63BE41D66FB8F001A53D4 /* WeChatSightDemoUITests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = WeChatSightDemoUITests.m; sourceTree = ""; }; 69 | 67C63BE61D66FB8F001A53D4 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 70 | 67C63BF71D66FC76001A53D4 /* WCSCaptureViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WCSCaptureViewController.h; sourceTree = ""; }; 71 | 67C63BF81D66FC76001A53D4 /* WCSCaptureViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = WCSCaptureViewController.m; sourceTree = ""; }; 72 | 67C63BFB1D66FCA6001A53D4 /* WKMovieRecorder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WKMovieRecorder.h; sourceTree = ""; }; 73 | 67C63BFC1D66FCA6001A53D4 /* WKMovieRecorder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = WKMovieRecorder.m; sourceTree = ""; }; 74 | 67C63BFD1D66FCA6001A53D4 /* WKMovieWriter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WKMovieWriter.h; sourceTree = ""; }; 75 | 67C63BFE1D66FCA6001A53D4 /* WKMovieWriter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = WKMovieWriter.m; sourceTree = ""; }; 76 | 67C63BFF1D66FCA6001A53D4 /* WKProgressView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WKProgressView.h; sourceTree = ""; }; 77 | 67C63C001D66FCA6001A53D4 /* WKProgressView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = WKProgressView.m; sourceTree = ""; }; 78 | 67C63C011D66FCA6001A53D4 /* 
WKTestAudioOutput.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WKTestAudioOutput.h; sourceTree = ""; }; 79 | 67C63C021D66FCA6001A53D4 /* WKTestAudioOutput.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = WKTestAudioOutput.m; sourceTree = ""; }; 80 | 67C63C031D66FCA6001A53D4 /* WKVideoConverter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WKVideoConverter.h; sourceTree = ""; }; 81 | 67C63C041D66FCA6001A53D4 /* WKVideoConverter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = WKVideoConverter.m; sourceTree = ""; }; 82 | 67C63C0A1D6704D7001A53D4 /* WKScaleButton.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WKScaleButton.h; sourceTree = ""; }; 83 | 67C63C0B1D6704D7001A53D4 /* WKScaleButton.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = WKScaleButton.m; sourceTree = ""; }; 84 | /* End PBXFileReference section */ 85 | 86 | /* Begin PBXFrameworksBuildPhase section */ 87 | 67C63BB91D66FB8E001A53D4 /* Frameworks */ = { 88 | isa = PBXFrameworksBuildPhase; 89 | buildActionMask = 2147483647; 90 | files = ( 91 | ); 92 | runOnlyForDeploymentPostprocessing = 0; 93 | }; 94 | 67C63BD21D66FB8F001A53D4 /* Frameworks */ = { 95 | isa = PBXFrameworksBuildPhase; 96 | buildActionMask = 2147483647; 97 | files = ( 98 | ); 99 | runOnlyForDeploymentPostprocessing = 0; 100 | }; 101 | 67C63BDD1D66FB8F001A53D4 /* Frameworks */ = { 102 | isa = PBXFrameworksBuildPhase; 103 | buildActionMask = 2147483647; 104 | files = ( 105 | ); 106 | runOnlyForDeploymentPostprocessing = 0; 107 | }; 108 | /* End PBXFrameworksBuildPhase section */ 109 | 110 | /* Begin PBXGroup section */ 111 | 679CA8AA1D7019D400CA0EE4 /* Category */ = { 112 | isa = PBXGroup; 113 | children = ( 114 | 679CA8AB1D7019D400CA0EE4 /* UIImageView+PlayGIF.h */, 115 | 679CA8AC1D7019D400CA0EE4 /* UIImageView+PlayGIF.m */, 116 | ); 117 | path = Category; 118 | sourceTree = ""; 119 | }; 120 | 67C63BB31D66FB8E001A53D4 = { 121 | isa = PBXGroup; 122 | children = ( 123 | 67C63BBE1D66FB8F001A53D4 /* WeChatSightDemo */, 124 | 67C63BD81D66FB8F001A53D4 /* WeChatSightDemoTests */, 125 | 67C63BE31D66FB8F001A53D4 /* WeChatSightDemoUITests */, 126 | 67C63BBD1D66FB8E001A53D4 /* Products */, 127 | ); 128 | sourceTree = ""; 129 | }; 130 | 67C63BBD1D66FB8E001A53D4 /* Products */ = { 131 | isa = PBXGroup; 132 | children = ( 133 | 67C63BBC1D66FB8E001A53D4 /* WeChatSightDemo.app */, 134 | 67C63BD51D66FB8F001A53D4 /* WeChatSightDemoTests.xctest */, 135 | 67C63BE01D66FB8F001A53D4 /* WeChatSightDemoUITests.xctest */, 136 | ); 137 | name = Products; 138 | sourceTree = ""; 139 | }; 140 | 67C63BBE1D66FB8F001A53D4 /* WeChatSightDemo */ = { 141 | isa = PBXGroup; 142 | children = ( 143 | 679CA8AA1D7019D400CA0EE4 /* Category */, 144 | 67C63BFA1D66FCA6001A53D4 /* WKMovieRecorder */, 145 | 67C63BF51D66FC64001A53D4 /* Controller */, 146 | 67C63BF61D66FC64001A53D4 /* View */, 147 | 67C63BC21D66FB8F001A53D4 /* AppDelegate.h */, 148 | 67C63BC31D66FB8F001A53D4 /* AppDelegate.m */, 149 | 67C63BC51D66FB8F001A53D4 /* ViewController.h */, 150 | 67C63BC61D66FB8F001A53D4 /* ViewController.m */, 151 | 67C63BC81D66FB8F001A53D4 /* Main.storyboard */, 152 | 67C63BCB1D66FB8F001A53D4 /* Assets.xcassets */, 153 | 67C63BCD1D66FB8F001A53D4 /* LaunchScreen.storyboard */, 154 | 67C63BD01D66FB8F001A53D4 /* Info.plist */, 155 | 
67C63BBF1D66FB8F001A53D4 /* Supporting Files */, 156 | ); 157 | path = WeChatSightDemo; 158 | sourceTree = ""; 159 | }; 160 | 67C63BBF1D66FB8F001A53D4 /* Supporting Files */ = { 161 | isa = PBXGroup; 162 | children = ( 163 | 67C63BC01D66FB8F001A53D4 /* main.m */, 164 | ); 165 | name = "Supporting Files"; 166 | sourceTree = ""; 167 | }; 168 | 67C63BD81D66FB8F001A53D4 /* WeChatSightDemoTests */ = { 169 | isa = PBXGroup; 170 | children = ( 171 | 67C63BD91D66FB8F001A53D4 /* WeChatSightDemoTests.m */, 172 | 67C63BDB1D66FB8F001A53D4 /* Info.plist */, 173 | ); 174 | path = WeChatSightDemoTests; 175 | sourceTree = ""; 176 | }; 177 | 67C63BE31D66FB8F001A53D4 /* WeChatSightDemoUITests */ = { 178 | isa = PBXGroup; 179 | children = ( 180 | 67C63BE41D66FB8F001A53D4 /* WeChatSightDemoUITests.m */, 181 | 67C63BE61D66FB8F001A53D4 /* Info.plist */, 182 | ); 183 | path = WeChatSightDemoUITests; 184 | sourceTree = ""; 185 | }; 186 | 67C63BF51D66FC64001A53D4 /* Controller */ = { 187 | isa = PBXGroup; 188 | children = ( 189 | 67C63BF71D66FC76001A53D4 /* WCSCaptureViewController.h */, 190 | 67C63BF81D66FC76001A53D4 /* WCSCaptureViewController.m */, 191 | 679CA8A71D6FE3F300CA0EE4 /* WCSPreviewViewController.h */, 192 | 679CA8A81D6FE3F300CA0EE4 /* WCSPreviewViewController.m */, 193 | 679CA8AE1D7023BA00CA0EE4 /* WCSPlayMovieController.h */, 194 | 679CA8AF1D7023BA00CA0EE4 /* WCSPlayMovieController.m */, 195 | ); 196 | path = Controller; 197 | sourceTree = ""; 198 | }; 199 | 67C63BF61D66FC64001A53D4 /* View */ = { 200 | isa = PBXGroup; 201 | children = ( 202 | 67C63C0A1D6704D7001A53D4 /* WKScaleButton.h */, 203 | 67C63C0B1D6704D7001A53D4 /* WKScaleButton.m */, 204 | ); 205 | path = View; 206 | sourceTree = ""; 207 | }; 208 | 67C63BFA1D66FCA6001A53D4 /* WKMovieRecorder */ = { 209 | isa = PBXGroup; 210 | children = ( 211 | 67C63BFB1D66FCA6001A53D4 /* WKMovieRecorder.h */, 212 | 67C63BFC1D66FCA6001A53D4 /* WKMovieRecorder.m */, 213 | 67C63BFD1D66FCA6001A53D4 /* WKMovieWriter.h */, 214 | 67C63BFE1D66FCA6001A53D4 /* WKMovieWriter.m */, 215 | 67C63BFF1D66FCA6001A53D4 /* WKProgressView.h */, 216 | 67C63C001D66FCA6001A53D4 /* WKProgressView.m */, 217 | 67C63C011D66FCA6001A53D4 /* WKTestAudioOutput.h */, 218 | 67C63C021D66FCA6001A53D4 /* WKTestAudioOutput.m */, 219 | 67C63C031D66FCA6001A53D4 /* WKVideoConverter.h */, 220 | 67C63C041D66FCA6001A53D4 /* WKVideoConverter.m */, 221 | ); 222 | path = WKMovieRecorder; 223 | sourceTree = ""; 224 | }; 225 | /* End PBXGroup section */ 226 | 227 | /* Begin PBXNativeTarget section */ 228 | 67C63BBB1D66FB8E001A53D4 /* WeChatSightDemo */ = { 229 | isa = PBXNativeTarget; 230 | buildConfigurationList = 67C63BE91D66FB8F001A53D4 /* Build configuration list for PBXNativeTarget "WeChatSightDemo" */; 231 | buildPhases = ( 232 | 67C63BB81D66FB8E001A53D4 /* Sources */, 233 | 67C63BB91D66FB8E001A53D4 /* Frameworks */, 234 | 67C63BBA1D66FB8E001A53D4 /* Resources */, 235 | ); 236 | buildRules = ( 237 | ); 238 | dependencies = ( 239 | ); 240 | name = WeChatSightDemo; 241 | productName = WeChatSightDemo; 242 | productReference = 67C63BBC1D66FB8E001A53D4 /* WeChatSightDemo.app */; 243 | productType = "com.apple.product-type.application"; 244 | }; 245 | 67C63BD41D66FB8F001A53D4 /* WeChatSightDemoTests */ = { 246 | isa = PBXNativeTarget; 247 | buildConfigurationList = 67C63BEC1D66FB8F001A53D4 /* Build configuration list for PBXNativeTarget "WeChatSightDemoTests" */; 248 | buildPhases = ( 249 | 67C63BD11D66FB8F001A53D4 /* Sources */, 250 | 67C63BD21D66FB8F001A53D4 /* Frameworks */, 251 | 
67C63BD31D66FB8F001A53D4 /* Resources */, 252 | ); 253 | buildRules = ( 254 | ); 255 | dependencies = ( 256 | 67C63BD71D66FB8F001A53D4 /* PBXTargetDependency */, 257 | ); 258 | name = WeChatSightDemoTests; 259 | productName = WeChatSightDemoTests; 260 | productReference = 67C63BD51D66FB8F001A53D4 /* WeChatSightDemoTests.xctest */; 261 | productType = "com.apple.product-type.bundle.unit-test"; 262 | }; 263 | 67C63BDF1D66FB8F001A53D4 /* WeChatSightDemoUITests */ = { 264 | isa = PBXNativeTarget; 265 | buildConfigurationList = 67C63BEF1D66FB8F001A53D4 /* Build configuration list for PBXNativeTarget "WeChatSightDemoUITests" */; 266 | buildPhases = ( 267 | 67C63BDC1D66FB8F001A53D4 /* Sources */, 268 | 67C63BDD1D66FB8F001A53D4 /* Frameworks */, 269 | 67C63BDE1D66FB8F001A53D4 /* Resources */, 270 | ); 271 | buildRules = ( 272 | ); 273 | dependencies = ( 274 | 67C63BE21D66FB8F001A53D4 /* PBXTargetDependency */, 275 | ); 276 | name = WeChatSightDemoUITests; 277 | productName = WeChatSightDemoUITests; 278 | productReference = 67C63BE01D66FB8F001A53D4 /* WeChatSightDemoUITests.xctest */; 279 | productType = "com.apple.product-type.bundle.ui-testing"; 280 | }; 281 | /* End PBXNativeTarget section */ 282 | 283 | /* Begin PBXProject section */ 284 | 67C63BB41D66FB8E001A53D4 /* Project object */ = { 285 | isa = PBXProject; 286 | attributes = { 287 | CLASSPREFIX = WCS; 288 | LastUpgradeCheck = 0730; 289 | ORGANIZATIONNAME = "吴珂"; 290 | TargetAttributes = { 291 | 67C63BBB1D66FB8E001A53D4 = { 292 | CreatedOnToolsVersion = 7.3; 293 | DevelopmentTeam = B4JPFKWLRH; 294 | }; 295 | 67C63BD41D66FB8F001A53D4 = { 296 | CreatedOnToolsVersion = 7.3; 297 | TestTargetID = 67C63BBB1D66FB8E001A53D4; 298 | }; 299 | 67C63BDF1D66FB8F001A53D4 = { 300 | CreatedOnToolsVersion = 7.3; 301 | TestTargetID = 67C63BBB1D66FB8E001A53D4; 302 | }; 303 | }; 304 | }; 305 | buildConfigurationList = 67C63BB71D66FB8E001A53D4 /* Build configuration list for PBXProject "WeChatSightDemo" */; 306 | compatibilityVersion = "Xcode 3.2"; 307 | developmentRegion = English; 308 | hasScannedForEncodings = 0; 309 | knownRegions = ( 310 | en, 311 | Base, 312 | ); 313 | mainGroup = 67C63BB31D66FB8E001A53D4; 314 | productRefGroup = 67C63BBD1D66FB8E001A53D4 /* Products */; 315 | projectDirPath = ""; 316 | projectRoot = ""; 317 | targets = ( 318 | 67C63BBB1D66FB8E001A53D4 /* WeChatSightDemo */, 319 | 67C63BD41D66FB8F001A53D4 /* WeChatSightDemoTests */, 320 | 67C63BDF1D66FB8F001A53D4 /* WeChatSightDemoUITests */, 321 | ); 322 | }; 323 | /* End PBXProject section */ 324 | 325 | /* Begin PBXResourcesBuildPhase section */ 326 | 67C63BBA1D66FB8E001A53D4 /* Resources */ = { 327 | isa = PBXResourcesBuildPhase; 328 | buildActionMask = 2147483647; 329 | files = ( 330 | 67C63BCF1D66FB8F001A53D4 /* LaunchScreen.storyboard in Resources */, 331 | 67C63BCC1D66FB8F001A53D4 /* Assets.xcassets in Resources */, 332 | 67C63BCA1D66FB8F001A53D4 /* Main.storyboard in Resources */, 333 | ); 334 | runOnlyForDeploymentPostprocessing = 0; 335 | }; 336 | 67C63BD31D66FB8F001A53D4 /* Resources */ = { 337 | isa = PBXResourcesBuildPhase; 338 | buildActionMask = 2147483647; 339 | files = ( 340 | ); 341 | runOnlyForDeploymentPostprocessing = 0; 342 | }; 343 | 67C63BDE1D66FB8F001A53D4 /* Resources */ = { 344 | isa = PBXResourcesBuildPhase; 345 | buildActionMask = 2147483647; 346 | files = ( 347 | ); 348 | runOnlyForDeploymentPostprocessing = 0; 349 | }; 350 | /* End PBXResourcesBuildPhase section */ 351 | 352 | /* Begin PBXSourcesBuildPhase section */ 353 | 67C63BB81D66FB8E001A53D4 /* Sources 
*/ = { 354 | isa = PBXSourcesBuildPhase; 355 | buildActionMask = 2147483647; 356 | files = ( 357 | 67C63BC71D66FB8F001A53D4 /* ViewController.m in Sources */, 358 | 67C63BF91D66FC76001A53D4 /* WCSCaptureViewController.m in Sources */, 359 | 67C63BC41D66FB8F001A53D4 /* AppDelegate.m in Sources */, 360 | 67C63C081D66FCA6001A53D4 /* WKTestAudioOutput.m in Sources */, 361 | 679CA8A91D6FE3F300CA0EE4 /* WCSPreviewViewController.m in Sources */, 362 | 67C63C061D66FCA6001A53D4 /* WKMovieWriter.m in Sources */, 363 | 67C63C091D66FCA6001A53D4 /* WKVideoConverter.m in Sources */, 364 | 67C63C0C1D6704D7001A53D4 /* WKScaleButton.m in Sources */, 365 | 67C63BC11D66FB8F001A53D4 /* main.m in Sources */, 366 | 67C63C071D66FCA6001A53D4 /* WKProgressView.m in Sources */, 367 | 67C63C051D66FCA6001A53D4 /* WKMovieRecorder.m in Sources */, 368 | 679CA8B01D7023BA00CA0EE4 /* WCSPlayMovieController.m in Sources */, 369 | 679CA8AD1D7019D400CA0EE4 /* UIImageView+PlayGIF.m in Sources */, 370 | ); 371 | runOnlyForDeploymentPostprocessing = 0; 372 | }; 373 | 67C63BD11D66FB8F001A53D4 /* Sources */ = { 374 | isa = PBXSourcesBuildPhase; 375 | buildActionMask = 2147483647; 376 | files = ( 377 | 67C63BDA1D66FB8F001A53D4 /* WeChatSightDemoTests.m in Sources */, 378 | ); 379 | runOnlyForDeploymentPostprocessing = 0; 380 | }; 381 | 67C63BDC1D66FB8F001A53D4 /* Sources */ = { 382 | isa = PBXSourcesBuildPhase; 383 | buildActionMask = 2147483647; 384 | files = ( 385 | 67C63BE51D66FB8F001A53D4 /* WeChatSightDemoUITests.m in Sources */, 386 | ); 387 | runOnlyForDeploymentPostprocessing = 0; 388 | }; 389 | /* End PBXSourcesBuildPhase section */ 390 | 391 | /* Begin PBXTargetDependency section */ 392 | 67C63BD71D66FB8F001A53D4 /* PBXTargetDependency */ = { 393 | isa = PBXTargetDependency; 394 | target = 67C63BBB1D66FB8E001A53D4 /* WeChatSightDemo */; 395 | targetProxy = 67C63BD61D66FB8F001A53D4 /* PBXContainerItemProxy */; 396 | }; 397 | 67C63BE21D66FB8F001A53D4 /* PBXTargetDependency */ = { 398 | isa = PBXTargetDependency; 399 | target = 67C63BBB1D66FB8E001A53D4 /* WeChatSightDemo */; 400 | targetProxy = 67C63BE11D66FB8F001A53D4 /* PBXContainerItemProxy */; 401 | }; 402 | /* End PBXTargetDependency section */ 403 | 404 | /* Begin PBXVariantGroup section */ 405 | 67C63BC81D66FB8F001A53D4 /* Main.storyboard */ = { 406 | isa = PBXVariantGroup; 407 | children = ( 408 | 67C63BC91D66FB8F001A53D4 /* Base */, 409 | ); 410 | name = Main.storyboard; 411 | sourceTree = ""; 412 | }; 413 | 67C63BCD1D66FB8F001A53D4 /* LaunchScreen.storyboard */ = { 414 | isa = PBXVariantGroup; 415 | children = ( 416 | 67C63BCE1D66FB8F001A53D4 /* Base */, 417 | ); 418 | name = LaunchScreen.storyboard; 419 | sourceTree = ""; 420 | }; 421 | /* End PBXVariantGroup section */ 422 | 423 | /* Begin XCBuildConfiguration section */ 424 | 67C63BE71D66FB8F001A53D4 /* Debug */ = { 425 | isa = XCBuildConfiguration; 426 | buildSettings = { 427 | ALWAYS_SEARCH_USER_PATHS = NO; 428 | CLANG_ANALYZER_NONNULL = YES; 429 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 430 | CLANG_CXX_LIBRARY = "libc++"; 431 | CLANG_ENABLE_MODULES = YES; 432 | CLANG_ENABLE_OBJC_ARC = YES; 433 | CLANG_WARN_BOOL_CONVERSION = YES; 434 | CLANG_WARN_CONSTANT_CONVERSION = YES; 435 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 436 | CLANG_WARN_EMPTY_BODY = YES; 437 | CLANG_WARN_ENUM_CONVERSION = YES; 438 | CLANG_WARN_INT_CONVERSION = YES; 439 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 440 | CLANG_WARN_UNREACHABLE_CODE = YES; 441 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 442 | 
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 443 | COPY_PHASE_STRIP = NO; 444 | DEBUG_INFORMATION_FORMAT = dwarf; 445 | ENABLE_STRICT_OBJC_MSGSEND = YES; 446 | ENABLE_TESTABILITY = YES; 447 | GCC_C_LANGUAGE_STANDARD = gnu99; 448 | GCC_DYNAMIC_NO_PIC = NO; 449 | GCC_NO_COMMON_BLOCKS = YES; 450 | GCC_OPTIMIZATION_LEVEL = 0; 451 | GCC_PREPROCESSOR_DEFINITIONS = ( 452 | "DEBUG=1", 453 | "$(inherited)", 454 | ); 455 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 456 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 457 | GCC_WARN_UNDECLARED_SELECTOR = YES; 458 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 459 | GCC_WARN_UNUSED_FUNCTION = YES; 460 | GCC_WARN_UNUSED_VARIABLE = YES; 461 | IPHONEOS_DEPLOYMENT_TARGET = 9.3; 462 | MTL_ENABLE_DEBUG_INFO = YES; 463 | ONLY_ACTIVE_ARCH = YES; 464 | SDKROOT = iphoneos; 465 | TARGETED_DEVICE_FAMILY = "1,2"; 466 | }; 467 | name = Debug; 468 | }; 469 | 67C63BE81D66FB8F001A53D4 /* Release */ = { 470 | isa = XCBuildConfiguration; 471 | buildSettings = { 472 | ALWAYS_SEARCH_USER_PATHS = NO; 473 | CLANG_ANALYZER_NONNULL = YES; 474 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 475 | CLANG_CXX_LIBRARY = "libc++"; 476 | CLANG_ENABLE_MODULES = YES; 477 | CLANG_ENABLE_OBJC_ARC = YES; 478 | CLANG_WARN_BOOL_CONVERSION = YES; 479 | CLANG_WARN_CONSTANT_CONVERSION = YES; 480 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 481 | CLANG_WARN_EMPTY_BODY = YES; 482 | CLANG_WARN_ENUM_CONVERSION = YES; 483 | CLANG_WARN_INT_CONVERSION = YES; 484 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 485 | CLANG_WARN_UNREACHABLE_CODE = YES; 486 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 487 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 488 | COPY_PHASE_STRIP = NO; 489 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 490 | ENABLE_NS_ASSERTIONS = NO; 491 | ENABLE_STRICT_OBJC_MSGSEND = YES; 492 | GCC_C_LANGUAGE_STANDARD = gnu99; 493 | GCC_NO_COMMON_BLOCKS = YES; 494 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 495 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 496 | GCC_WARN_UNDECLARED_SELECTOR = YES; 497 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 498 | GCC_WARN_UNUSED_FUNCTION = YES; 499 | GCC_WARN_UNUSED_VARIABLE = YES; 500 | IPHONEOS_DEPLOYMENT_TARGET = 9.3; 501 | MTL_ENABLE_DEBUG_INFO = NO; 502 | SDKROOT = iphoneos; 503 | TARGETED_DEVICE_FAMILY = "1,2"; 504 | VALIDATE_PRODUCT = YES; 505 | }; 506 | name = Release; 507 | }; 508 | 67C63BEA1D66FB8F001A53D4 /* Debug */ = { 509 | isa = XCBuildConfiguration; 510 | buildSettings = { 511 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 512 | CODE_SIGN_IDENTITY = "iPhone Developer"; 513 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 514 | DEVELOPMENT_TEAM = B4JPFKWLRH; 515 | INFOPLIST_FILE = WeChatSightDemo/Info.plist; 516 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 517 | PRODUCT_BUNDLE_IDENTIFIER = com.WuKong.WeChatSightDemo; 518 | PRODUCT_NAME = "$(TARGET_NAME)"; 519 | PROVISIONING_PROFILE = ""; 520 | }; 521 | name = Debug; 522 | }; 523 | 67C63BEB1D66FB8F001A53D4 /* Release */ = { 524 | isa = XCBuildConfiguration; 525 | buildSettings = { 526 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 527 | CODE_SIGN_IDENTITY = "iPhone Developer"; 528 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 529 | DEVELOPMENT_TEAM = B4JPFKWLRH; 530 | INFOPLIST_FILE = WeChatSightDemo/Info.plist; 531 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 532 | PRODUCT_BUNDLE_IDENTIFIER = com.WuKong.WeChatSightDemo; 533 | PRODUCT_NAME = "$(TARGET_NAME)"; 534 | PROVISIONING_PROFILE = ""; 535 | }; 
536 | name = Release; 537 | }; 538 | 67C63BED1D66FB8F001A53D4 /* Debug */ = { 539 | isa = XCBuildConfiguration; 540 | buildSettings = { 541 | BUNDLE_LOADER = "$(TEST_HOST)"; 542 | INFOPLIST_FILE = WeChatSightDemoTests/Info.plist; 543 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 544 | PRODUCT_BUNDLE_IDENTIFIER = WuKong.WeChatSightDemoTests; 545 | PRODUCT_NAME = "$(TARGET_NAME)"; 546 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/WeChatSightDemo.app/WeChatSightDemo"; 547 | }; 548 | name = Debug; 549 | }; 550 | 67C63BEE1D66FB8F001A53D4 /* Release */ = { 551 | isa = XCBuildConfiguration; 552 | buildSettings = { 553 | BUNDLE_LOADER = "$(TEST_HOST)"; 554 | INFOPLIST_FILE = WeChatSightDemoTests/Info.plist; 555 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 556 | PRODUCT_BUNDLE_IDENTIFIER = WuKong.WeChatSightDemoTests; 557 | PRODUCT_NAME = "$(TARGET_NAME)"; 558 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/WeChatSightDemo.app/WeChatSightDemo"; 559 | }; 560 | name = Release; 561 | }; 562 | 67C63BF01D66FB8F001A53D4 /* Debug */ = { 563 | isa = XCBuildConfiguration; 564 | buildSettings = { 565 | INFOPLIST_FILE = WeChatSightDemoUITests/Info.plist; 566 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 567 | PRODUCT_BUNDLE_IDENTIFIER = WuKong.WeChatSightDemoUITests; 568 | PRODUCT_NAME = "$(TARGET_NAME)"; 569 | TEST_TARGET_NAME = WeChatSightDemo; 570 | }; 571 | name = Debug; 572 | }; 573 | 67C63BF11D66FB8F001A53D4 /* Release */ = { 574 | isa = XCBuildConfiguration; 575 | buildSettings = { 576 | INFOPLIST_FILE = WeChatSightDemoUITests/Info.plist; 577 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 578 | PRODUCT_BUNDLE_IDENTIFIER = WuKong.WeChatSightDemoUITests; 579 | PRODUCT_NAME = "$(TARGET_NAME)"; 580 | TEST_TARGET_NAME = WeChatSightDemo; 581 | }; 582 | name = Release; 583 | }; 584 | /* End XCBuildConfiguration section */ 585 | 586 | /* Begin XCConfigurationList section */ 587 | 67C63BB71D66FB8E001A53D4 /* Build configuration list for PBXProject "WeChatSightDemo" */ = { 588 | isa = XCConfigurationList; 589 | buildConfigurations = ( 590 | 67C63BE71D66FB8F001A53D4 /* Debug */, 591 | 67C63BE81D66FB8F001A53D4 /* Release */, 592 | ); 593 | defaultConfigurationIsVisible = 0; 594 | defaultConfigurationName = Release; 595 | }; 596 | 67C63BE91D66FB8F001A53D4 /* Build configuration list for PBXNativeTarget "WeChatSightDemo" */ = { 597 | isa = XCConfigurationList; 598 | buildConfigurations = ( 599 | 67C63BEA1D66FB8F001A53D4 /* Debug */, 600 | 67C63BEB1D66FB8F001A53D4 /* Release */, 601 | ); 602 | defaultConfigurationIsVisible = 0; 603 | defaultConfigurationName = Release; 604 | }; 605 | 67C63BEC1D66FB8F001A53D4 /* Build configuration list for PBXNativeTarget "WeChatSightDemoTests" */ = { 606 | isa = XCConfigurationList; 607 | buildConfigurations = ( 608 | 67C63BED1D66FB8F001A53D4 /* Debug */, 609 | 67C63BEE1D66FB8F001A53D4 /* Release */, 610 | ); 611 | defaultConfigurationIsVisible = 0; 612 | defaultConfigurationName = Release; 613 | }; 614 | 67C63BEF1D66FB8F001A53D4 /* Build configuration list for PBXNativeTarget "WeChatSightDemoUITests" */ = { 615 | isa = XCConfigurationList; 616 | buildConfigurations = ( 617 | 67C63BF01D66FB8F001A53D4 /* Debug */, 618 | 67C63BF11D66FB8F001A53D4 /* Release */, 619 | ); 620 | defaultConfigurationIsVisible = 0; 621 | defaultConfigurationName = Release; 622 | }; 623 | /* End XCConfigurationList section */ 624 | 
}; 625 | rootObject = 67C63BB41D66FB8E001A53D4 /* Project object */; 626 | } 627 |
-------------------------------------------------------------------------------- /WeChatSightDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8"?> 2 | <Workspace 3 | version = "1.0"> 4 | <FileRef 5 | location = "self:WeChatSightDemo.xcodeproj"> 6 | </FileRef> 7 | </Workspace> 8 |
-------------------------------------------------------------------------------- /WeChatSightDemo.xcodeproj/xcuserdata/wukong.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist: --------------------------------------------------------------------------------
-------------------------------------------------------------------------------- /WeChatSightDemo.xcodeproj/xcuserdata/wukong.xcuserdatad/xcschemes/WeChatSightDemo.xcscheme: --------------------------------------------------------------------------------
-------------------------------------------------------------------------------- /WeChatSightDemo.xcodeproj/xcuserdata/wukong.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8"?> 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> 3 | <plist version="1.0"> 4 | <dict> 5 | <key>SchemeUserState</key> 6 | <dict> 7 | <key>WeChatSightDemo.xcscheme</key> 8 | <dict> 9 | <key>orderHint</key> 10 | <integer>0</integer> 11 | </dict> 12 | </dict> 13 | <key>SuppressBuildableAutocreation</key> 14 | <dict> 15 | <key>67C63BBB1D66FB8E001A53D4</key> 16 | <dict> 17 | <key>primary</key> 18 | <true/> 19 | </dict> 20 | <key>67C63BD41D66FB8F001A53D4</key> 21 | <dict> 22 | <key>primary</key> 23 | <true/> 24 | </dict> 25 | <key>67C63BDF1D66FB8F001A53D4</key> 26 | <dict> 27 | <key>primary</key> 28 | <true/> 29 | </dict> 30 | </dict> 31 | </dict> 32 | </plist> 33 |
-------------------------------------------------------------------------------- /WeChatSightDemo/AppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.h 3 | // WeChatSightDemo 4 | // 5 | // Created by 吴珂 on 16/8/19. 6 | // Copyright © 2016年 吴珂. All rights reserved. 7 | // 8 | 9 | #import <UIKit/UIKit.h> 10 | 11 | @interface AppDelegate : UIResponder <UIApplicationDelegate> 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | 16 | @end 17 | 18 |
-------------------------------------------------------------------------------- /WeChatSightDemo/AppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.m 3 | // WeChatSightDemo 4 | // 5 | // Created by 吴珂 on 16/8/19. 6 | // Copyright © 2016年 吴珂. All rights reserved. 7 | // 8 | 9 | #import "AppDelegate.h" 10 | 11 | @interface AppDelegate () 12 | 13 | @end 14 | 15 | @implementation AppDelegate 16 | 17 | 18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 19 | // Override point for customization after application launch. 20 | return YES; 21 | } 22 | 23 | - (void)applicationWillResignActive:(UIApplication *)application { 24 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 25 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
26 | } 27 | 28 | - (void)applicationDidEnterBackground:(UIApplication *)application { 29 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 30 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 31 | } 32 | 33 | - (void)applicationWillEnterForeground:(UIApplication *)application { 34 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background. 35 | } 36 | 37 | - (void)applicationDidBecomeActive:(UIApplication *)application { 38 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 39 | } 40 | 41 | - (void)applicationWillTerminate:(UIApplication *)application { 42 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 43 | } 44 | 45 | @end 46 | -------------------------------------------------------------------------------- /WeChatSightDemo/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "29x29", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "29x29", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "40x40", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "40x40", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "60x60", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "60x60", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "ipad", 35 | "size" : "29x29", 36 | "scale" : "1x" 37 | }, 38 | { 39 | "idiom" : "ipad", 40 | "size" : "29x29", 41 | "scale" : "2x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "40x40", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "40x40", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "76x76", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "76x76", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "83.5x83.5", 66 | "scale" : "2x" 67 | } 68 | ], 69 | "info" : { 70 | "version" : 1, 71 | "author" : "xcode" 72 | } 73 | } -------------------------------------------------------------------------------- /WeChatSightDemo/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /WeChatSightDemo/Assets.xcassets/sight_video_focus.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "scale" : "1x" 6 | }, 7 | { 8 | "idiom" : "universal", 9 | "filename" : "sight_video_focus@2x.png", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "filename" : "sight_video_focus@3x.png", 15 | "scale" : "3x" 16 | } 17 | ], 18 | "info" : { 19 | "version" : 1, 20 | "author" : "xcode" 21 | } 22 | } 
-------------------------------------------------------------------------------- /WeChatSightDemo/Assets.xcassets/sight_video_focus.imageset/sight_video_focus@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WuKongCoo1/WeChatSightDemo/99b34616d7339e4c1b692f3ed9100c0c5e88aec9/WeChatSightDemo/Assets.xcassets/sight_video_focus.imageset/sight_video_focus@2x.png
-------------------------------------------------------------------------------- /WeChatSightDemo/Assets.xcassets/sight_video_focus.imageset/sight_video_focus@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WuKongCoo1/WeChatSightDemo/99b34616d7339e4c1b692f3ed9100c0c5e88aec9/WeChatSightDemo/Assets.xcassets/sight_video_focus.imageset/sight_video_focus@3x.png
-------------------------------------------------------------------------------- /WeChatSightDemo/Base.lproj/LaunchScreen.storyboard: --------------------------------------------------------------------------------
-------------------------------------------------------------------------------- /WeChatSightDemo/Base.lproj/Main.storyboard: --------------------------------------------------------------------------------
-------------------------------------------------------------------------------- /WeChatSightDemo/Category/UIImageView+PlayGIF.h: -------------------------------------------------------------------------------- 1 | // 2 | // UIImageView+PlayGIF.h 3 | // UIImageView-PlayGIF 4 | // 5 | // Created by Yang Fei on 14-3-25. 6 | // Copyright (c) 2014年 yangfei.me. All rights reserved. 7 | // 8 | 9 | /******************************************************* 10 | * Dependencies: 11 | * - QuartzCore.framework 12 | * - ImageIO.framework 13 | * Parameters: 14 | * Pass value to one of them: 15 | * - gifData NSData from a GIF 16 | * - gifPath local path of a GIF 17 | * Usage: 18 | * - startGIF 19 | * - stopGIF 20 | * - isGIFPlaying 21 | * P.S.: 22 | * Don't like category?
Use YFGIFImageView.h/m 23 | *******************************************************/ 24 | 25 | /******************************************************* 26 | * Dependencies: 27 | * - QuartzCore.framework 28 | * - ImageIO.framework 29 | * Parameters: 30 | * Pass a value to one of these: 31 | * - gifData NSData of a GIF 32 | * - gifPath local path of a GIF 33 | * Calls: 34 | * - startGIF starts playback 35 | * - stopGIF stops playback 36 | * - isGIFPlaying returns whether playback is in progress 37 | * Also: 38 | * Don't want to use a category? Use YFGIFImageView.h/m 39 | *******************************************************/ 40 | 41 | #import <UIKit/UIKit.h> 42 | 43 | @interface UIImageView (PlayGIF) 44 | @property (nonatomic, strong) NSString *gifPath; 45 | @property (nonatomic, strong) NSData *gifData; 46 | @property (nonatomic, strong) NSNumber *index,*frameCount,*timestamp; 47 | @property (nonatomic, strong) NSDictionary *indexDurations; 48 | - (void)startGIF; 49 | - (void)startGIFWithRunLoopMode:(NSString * const)runLoopMode; 50 | - (void)stopGIF; 51 | - (BOOL)isGIFPlaying; 52 | - (CGSize) gifPixelSize; 53 | - (CGImageRef) gifCreateImageForFrameAtIndex:(NSInteger)index; 54 | - (float)gifFrameDurationAtIndex:(size_t)index; 55 | - (NSArray*)frames; 56 | @end 57 |
-------------------------------------------------------------------------------- /WeChatSightDemo/Category/UIImageView+PlayGIF.m: -------------------------------------------------------------------------------- 1 | // 2 | // UIImageView+PlayGIF.m 3 | // UIImageView-PlayGIF 4 | // 5 | // Created by Yang Fei on 14-3-25. 6 | // Copyright (c) 2014年 yangfei.me. All rights reserved. 7 | // 8 | 9 | #import <UIKit/UIKit.h> 10 | #import <Foundation/Foundation.h> 11 | #import <QuartzCore/QuartzCore.h> 12 | #import <ImageIO/ImageIO.h> 13 | #import <objc/runtime.h> 14 | #import "UIImageView+PlayGIF.h" 15 | 16 | /**********************************************************************/ 17 | 18 | @interface PlayGIFManager : NSObject 19 | @property (nonatomic, strong) CADisplayLink *displayLink; 20 | @property (nonatomic, strong) NSHashTable *gifViewHashTable; 21 | @property (nonatomic, strong) NSMapTable *gifSourceRefMapTable; 22 | + (PlayGIFManager *)shared; 23 | - (void)stopGIFView:(UIImageView *)view; 24 | @end 25 | @implementation PlayGIFManager 26 | + (PlayGIFManager *)shared{ 27 | static PlayGIFManager *_sharedInstance = nil; 28 | static dispatch_once_t onceToken; 29 | dispatch_once(&onceToken, ^{ 30 | _sharedInstance = [[PlayGIFManager alloc] init]; 31 | }); 32 | return _sharedInstance; 33 | } 34 | - (id)init{ 35 | self = [super init]; 36 | if (self) { 37 | _gifViewHashTable = [NSHashTable hashTableWithOptions:NSHashTableWeakMemory]; 38 | _gifSourceRefMapTable = [NSMapTable mapTableWithKeyOptions:NSMapTableWeakMemory valueOptions:NSMapTableWeakMemory]; 39 | } 40 | return self; 41 | } 42 | - (void)play{ 43 | for (UIImageView *imageView in _gifViewHashTable) { 44 | [imageView performSelector:@selector(play)]; 45 | } 46 | } 47 | - (void)stopDisplayLink{ 48 | if (self.displayLink) { 49 | [self.displayLink invalidate]; 50 | self.displayLink = nil; 51 | } 52 | } 53 | - (void)stopGIFView:(UIImageView *)view{ 54 | CGImageSourceRef ref = (__bridge CGImageSourceRef)([[PlayGIFManager shared].gifSourceRefMapTable objectForKey:view]); 55 | if (ref) { 56 | [_gifSourceRefMapTable removeObjectForKey:view]; 57 | CFRelease(ref); 58 | } 59 | [_gifViewHashTable removeObject:view]; 60 | if (_gifViewHashTable.count < 1 && _displayLink) { // tear down the shared display link once no GIF views remain 61 | [self stopDisplayLink]; 62 | } 63 | } 64 | @end 65 | 66 | /**********************************************************************/ 67 | 68 | static const char * kGifPathKey = "kGifPathKey"; 69 | static const char * kGifDataKey = "kGifDataKey"; 70 | static const
char * kIndexKey = "kIndexKey"; 71 | static const char * kFrameCountKey = "kFrameCountKey"; 72 | static const char * kTimestampKey = "kTimestampKey"; 73 | static const char * kPxSize = "kPxSize"; 74 | static const char * kGifLength = "kGifLength"; 75 | static const char * kIndexDurationKey = "kIndexDurationKey"; 76 | 77 | @implementation UIImageView (PlayGIF) 78 | @dynamic gifPath; 79 | @dynamic gifData; 80 | @dynamic index; 81 | @dynamic frameCount; 82 | @dynamic timestamp; 83 | @dynamic indexDurations; 84 | 85 | +(void)load{ 86 | static dispatch_once_t onceToken; 87 | dispatch_once(&onceToken, ^{ 88 | Class class = [self class]; 89 | 90 | SEL originalSelector = @selector(removeFromSuperview); 91 | SEL swizzledSelector = @selector(yfgif_removeFromSuperview); 92 | 93 | Method originalMethod = class_getInstanceMethod(class, originalSelector); 94 | Method swizzledMethod = class_getInstanceMethod(class, swizzledSelector); 95 | 96 | BOOL didAddMethod = class_addMethod(class, originalSelector, method_getImplementation(swizzledMethod), method_getTypeEncoding(swizzledMethod)); 97 | if (didAddMethod) { 98 | class_replaceMethod(class, swizzledSelector, method_getImplementation(originalMethod), method_getTypeEncoding(originalMethod)); 99 | } else { 100 | method_exchangeImplementations(originalMethod, swizzledMethod); 101 | } 102 | }); 103 | } 104 | -(void)yfgif_removeFromSuperview{ 105 | [self stopGIF]; 106 | [self yfgif_removeFromSuperview]; 107 | } 108 | 109 | #pragma mark - ASSOCIATION 110 | 111 | -(NSString *)gifPath{ 112 | return objc_getAssociatedObject(self, kGifPathKey); 113 | } 114 | - (void)setGifPath:(NSString *)gifPath{ 115 | objc_setAssociatedObject(self, kGifPathKey, gifPath, OBJC_ASSOCIATION_RETAIN_NONATOMIC); 116 | } 117 | -(NSData *)gifData{ 118 | return objc_getAssociatedObject(self, kGifDataKey); 119 | } 120 | - (void)setGifData:(NSData *)gifData{ 121 | objc_setAssociatedObject(self, kGifDataKey, gifData, OBJC_ASSOCIATION_RETAIN_NONATOMIC); 122 | } 123 | -(NSNumber *)index{ 124 | return objc_getAssociatedObject(self, kIndexKey); 125 | } 126 | - (void)setIndex:(NSNumber *)index{ 127 | objc_setAssociatedObject(self, kIndexKey, index, OBJC_ASSOCIATION_RETAIN_NONATOMIC); 128 | } 129 | -(NSNumber *)frameCount{ 130 | return objc_getAssociatedObject(self, kFrameCountKey); 131 | } 132 | - (void)setFrameCount:(NSNumber *)frameCount{ 133 | objc_setAssociatedObject(self, kFrameCountKey, frameCount, OBJC_ASSOCIATION_RETAIN_NONATOMIC); 134 | } 135 | -(NSNumber *)timestamp{ 136 | return objc_getAssociatedObject(self, kTimestampKey); 137 | } 138 | - (void)setTimestamp:(NSNumber *)timestamp{ 139 | objc_setAssociatedObject(self, kTimestampKey, timestamp, OBJC_ASSOCIATION_RETAIN_NONATOMIC); 140 | } 141 | -(NSDictionary*)indexDurations{ 142 | return objc_getAssociatedObject(self, kIndexDurationKey); 143 | } 144 | -(void)setIndexDurations:(NSDictionary*)durations{ 145 | objc_setAssociatedObject(self, kIndexDurationKey, durations, OBJC_ASSOCIATION_RETAIN_NONATOMIC); 146 | } 147 | 148 | #pragma mark - ACTIONS 149 | 150 | - (void)startGIF 151 | { 152 | self.timestamp = 0; 153 | [self startGIFWithRunLoopMode:NSDefaultRunLoopMode]; 154 | } 155 | 156 | - (void)startGIFWithRunLoopMode:(NSString * const)runLoopMode 157 | { 158 | dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{ 159 | if (![[PlayGIFManager shared].gifViewHashTable containsObject:self] && (self.gifData || self.gifPath)) { 160 | CGImageSourceRef gifSourceRef; 161 | if (self.gifData) { 162 | gifSourceRef = 
CGImageSourceCreateWithData((__bridge CFDataRef)(self.gifData), NULL); 163 | }else{ 164 | gifSourceRef = CGImageSourceCreateWithURL((__bridge CFURLRef)[NSURL fileURLWithPath:self.gifPath], NULL); 165 | } 166 | if (!gifSourceRef) { 167 | return; 168 | } 169 | dispatch_async(dispatch_get_main_queue(), ^{ 170 | [[PlayGIFManager shared].gifViewHashTable addObject:self]; 171 | [[PlayGIFManager shared].gifSourceRefMapTable setObject:(__bridge id)(gifSourceRef) forKey:self]; 172 | self.frameCount = [NSNumber numberWithInteger:CGImageSourceGetCount(gifSourceRef)]; 173 | CGSize pxSize = [self GIFDimensionalSize]; 174 | objc_setAssociatedObject(self, kPxSize, [NSValue valueWithCGSize:pxSize], OBJC_ASSOCIATION_RETAIN_NONATOMIC); 175 | objc_setAssociatedObject(self, kGifLength, [self buildIndexAndReturnLength], OBJC_ASSOCIATION_RETAIN_NONATOMIC); 176 | }); 177 | } 178 | }); 179 | if (![PlayGIFManager shared].displayLink) { 180 | [PlayGIFManager shared].displayLink = [CADisplayLink displayLinkWithTarget:[PlayGIFManager shared] selector:@selector(play)]; 181 | [[PlayGIFManager shared].displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:runLoopMode]; 182 | } 183 | } 184 | 185 | -(NSNumber*)buildIndexAndReturnLength{ 186 | 187 | NSMutableDictionary* d = [[NSMutableDictionary alloc] initWithCapacity:[self.frameCount integerValue]]; 188 | float l = 0; 189 | for(int i = 0; i < [self.frameCount intValue]; i++){ 190 | float durationAtIndex = [self frameDurationAtIndex:i]; 191 | [d setObject:@(durationAtIndex) forKey:@(i)]; 192 | l += durationAtIndex; 193 | } 194 | self.indexDurations = d; 195 | return @(l); 196 | } 197 | 198 | -(NSNumber*)gifLength{ 199 | return objc_getAssociatedObject(self, kGifLength); 200 | } 201 | 202 | - (void)stopGIF{ 203 | [[PlayGIFManager shared] stopGIFView:self]; 204 | } 205 | 206 | - (void)play{ 207 | self.timestamp = [NSNumber numberWithFloat:self.timestamp.floatValue+[PlayGIFManager shared].displayLink.duration]; 208 | 209 | float loopT = fmodf([self.timestamp floatValue], [[self gifLength] floatValue]); 210 | self.index = @([self indexForDuration:loopT]); 211 | CGImageSourceRef ref = (__bridge CGImageSourceRef)([[PlayGIFManager shared].gifSourceRefMapTable objectForKey:self]); 212 | CGImageRef imageRef = CGImageSourceCreateImageAtIndex(ref, self.index.integerValue, NULL); 213 | self.layer.contents = (__bridge id)(imageRef); 214 | CGImageRelease(imageRef); 215 | } 216 | 217 | - (int) indexForDuration:(float)duration{ 218 | 219 | float sum = 0; 220 | 221 | for(int i = 0; i < self.frameCount.intValue; i++){ 222 | NSNumber* singleFrameDuration = [self.indexDurations objectForKey:@(i)]; 223 | sum += [singleFrameDuration floatValue]; 224 | 225 | if(sum >= duration) { 226 | return i; 227 | } 228 | } 229 | 230 | return [self.frameCount intValue] - 1; 231 | } 232 | 233 | - (BOOL)isGIFPlaying{ 234 | return [[PlayGIFManager shared].gifViewHashTable containsObject:self]; 235 | } 236 | 237 | - (CGSize) gifPixelSize{ 238 | return [objc_getAssociatedObject(self, kPxSize) CGSizeValue]; 239 | } 240 | 241 | - (CGImageRef) gifCreateImageForFrameAtIndex:(NSInteger)index{ 242 | if(![self isGIFPlaying]){ 243 | return nil; 244 | } 245 | 246 | CGImageSourceRef ref = (__bridge CGImageSourceRef)([[PlayGIFManager shared].gifSourceRefMapTable objectForKey:self]); 247 | return CGImageSourceCreateImageAtIndex(ref, index, NULL); 248 | } 249 | 250 | - (float)gifFrameDurationAtIndex:(size_t)index{ 251 | return [self frameDurationAtIndex:index]; 252 | } 253 | 254 | - (CGSize)GIFDimensionalSize{ 255 | 
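// Reads the GIF's pixel dimensions from frame 0's ImageIO properties (kCGImagePropertyPixelWidth / kCGImagePropertyPixelHeight); returns CGSizeZero when no image source is cached for this view.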
if(![[PlayGIFManager shared].gifSourceRefMapTable objectForKey:self]){ 256 | return CGSizeZero; 257 | } 258 | 259 | CGImageSourceRef ref = (__bridge CGImageSourceRef)([[PlayGIFManager shared].gifSourceRefMapTable objectForKey:self]); 260 | CFDictionaryRef dictRef = CGImageSourceCopyPropertiesAtIndex(ref, 0, NULL); 261 | NSDictionary *dict = (__bridge NSDictionary *)dictRef; 262 | 263 | NSNumber* pixelWidth = (dict[(NSString*)kCGImagePropertyPixelWidth]); 264 | NSNumber* pixelHeight = (dict[(NSString*)kCGImagePropertyPixelHeight]); 265 | 266 | CGSize sizeAsInProperties = CGSizeMake([pixelWidth floatValue], [pixelHeight floatValue]); 267 | 268 | CFRelease(dictRef); 269 | 270 | return sizeAsInProperties; 271 | } 272 | 273 | - (float)frameDurationAtIndex:(size_t)index{ 274 | CGImageSourceRef ref = (__bridge CGImageSourceRef)([[PlayGIFManager shared].gifSourceRefMapTable objectForKey:self]); 275 | CFDictionaryRef dictRef = CGImageSourceCopyPropertiesAtIndex(ref, index, NULL); 276 | NSDictionary *dict = (__bridge NSDictionary *)dictRef; 277 | NSDictionary *gifDict = (dict[(NSString *)kCGImagePropertyGIFDictionary]); 278 | NSNumber *unclampedDelayTime = gifDict[(NSString *)kCGImagePropertyGIFUnclampedDelayTime]; 279 | NSNumber *delayTime = gifDict[(NSString *)kCGImagePropertyGIFDelayTime]; 280 | CFRelease(dictRef); 281 | if (unclampedDelayTime.floatValue) { 282 | return unclampedDelayTime.floatValue; 283 | }else if (delayTime.floatValue) { 284 | return delayTime.floatValue; 285 | }else{ 286 | return 1/24.0; 287 | } 288 | } 289 | 290 | -(NSArray*)frames{ 291 | 292 | NSMutableArray* images = [NSMutableArray new]; 293 | 294 | CGImageSourceRef ref = (__bridge CGImageSourceRef)([[PlayGIFManager shared].gifSourceRefMapTable objectForKey:self]); 295 | 296 | if(!ref){ 297 | return nil; 298 | } 299 | 300 | NSInteger cnt = CGImageSourceGetCount(ref); 301 | for(NSInteger i = 0; i < cnt; i++){ 302 | CGImageRef imageRef = CGImageSourceCreateImageAtIndex(ref, i, NULL); 303 | [images addObject:[UIImage imageWithCGImage:imageRef]]; 304 | CGImageRelease(imageRef); 305 | } 306 | 307 | return images; 308 | } 309 | 310 | @end 311 |
-------------------------------------------------------------------------------- /WeChatSightDemo/Controller/WCSCaptureViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // WCSCaptureViewController.h 3 | // WeChatSightDemo 4 | // 5 | // Created by 吴珂 on 16/8/19. 6 | // Copyright © 2016年 吴珂. All rights reserved. 7 | // 8 | 9 | #import <UIKit/UIKit.h> 10 | 11 | @interface WCSCaptureViewController : UIViewController 12 | 13 | @end 14 |
-------------------------------------------------------------------------------- /WeChatSightDemo/Controller/WCSCaptureViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // WCSCaptureViewController.m 3 | // WeChatSightDemo 4 | // 5 | // Created by 吴珂 on 16/8/19. 6 | // Copyright © 2016年 吴珂. All rights reserved.
7 | // 8 | 9 | #import "WCSCaptureViewController.h" 10 | #import "WKMovieRecorder.h" 11 | #import "WKScaleButton.h" 12 | #import "WCSPreviewViewController.h" 13 | 14 | #define kScreenHeight [UIScreen mainScreen].bounds.size.height 15 | #define kScreenWidth [UIScreen mainScreen].bounds.size.width 16 | 17 | const NSTimeInterval MinDuration = 3.f; 18 | const CGFloat ProgressLayerHeigth = 2.f; 19 | const NSInteger CaptureScaleFactor = 2; 20 | static void * DurationContext = &DurationContext; 21 | 22 | @interface WCSCaptureViewController () 23 | 24 | //StoryBoard 25 | @property (weak, nonatomic) IBOutlet UIView *preview; 26 | @property (weak, nonatomic) IBOutlet UIImageView *focusImageView; 27 | @property (weak, nonatomic) IBOutlet UILabel *statusLabel; 28 | @property (weak, nonatomic) IBOutlet WKScaleButton *longPressButton; 29 | //indicator 30 | @property (nonatomic, strong) CALayer *processLayer; 31 | 32 | @property (nonatomic, assign, getter=isScale) BOOL scale; 33 | 34 | @property (nonatomic, strong) WKMovieRecorder *recorder; 35 | 36 | 37 | 38 | @end 39 | 40 | @implementation WCSCaptureViewController 41 | 42 | #pragma mark - LifeCycle 43 | 44 | - (void)viewDidLoad { 45 | [super viewDidLoad]; 46 | 47 | [self setupUI]; 48 | 49 | [self setupRecorder]; 50 | 51 | } 52 | 53 | - (void)viewWillAppear:(BOOL)animated 54 | { 55 | [super viewWillAppear:animated]; 56 | 57 | [self.navigationController setNavigationBarHidden:YES animated:YES]; 58 | 59 | [_recorder startSession]; 60 | } 61 | 62 | - (void)viewWillDisappear:(BOOL)animated 63 | { 64 | [super viewWillDisappear:animated]; 65 | [self.navigationController setNavigationBarHidden:NO animated:YES]; 66 | [self.recorder finishCapture]; 67 | } 68 | 69 | 70 | - (void)didReceiveMemoryWarning { 71 | [super didReceiveMemoryWarning]; 72 | 73 | } 74 | 75 | - (void)setupUI 76 | { 77 | _statusLabel.text = @"Swipe up to cancel"; 78 | _statusLabel.textColor = [UIColor greenColor]; 79 | } 80 | 81 | #pragma mark - setupRecorder 82 | - (void)setupRecorder 83 | { 84 | _recorder = [[WKMovieRecorder alloc] initWithMaxDuration:10.f]; 85 | 86 | CGFloat width = 320.f; 87 | CGFloat height = width / 4 * 3; 88 | _recorder.cropSize = CGSizeMake(width, height); 89 | __weak typeof(self)weakSelf = self; 90 | 91 | [_recorder setAuthorizationResultBlock:^(BOOL success){ 92 | if (!success) { 93 | dispatch_async(dispatch_get_main_queue(), ^{ 94 | 95 | NSLog(@"Handling of missing capture permissions is omitted here"); 96 | }); 97 | } 98 | }]; 99 | 100 | [_recorder prepareCaptureWithBlock:^{ 101 | 102 | //1.video preview (go through weakSelf so the block the recorder retains does not retain self) 103 | AVCaptureVideoPreviewLayer* preview = [weakSelf.recorder getPreviewLayer]; 104 | preview.backgroundColor = [UIColor blackColor].CGColor; 105 | preview.videoGravity = AVLayerVideoGravityResizeAspectFill; 106 | [preview removeFromSuperlayer]; 107 | preview.frame = CGRectInset(weakSelf.preview.bounds, 0, (CGRectGetHeight(weakSelf.preview.bounds) - kScreenWidth / 4 * 3) / 2); 108 | 109 | [weakSelf.preview.layer addSublayer:preview]; 110 | 111 | //2.doubleTap 112 | UITapGestureRecognizer *tapGR = [[UITapGestureRecognizer alloc] initWithTarget:weakSelf action:@selector(tapGR:)]; 113 | tapGR.numberOfTapsRequired = 2; 114 | 115 | [weakSelf.preview addGestureRecognizer:tapGR]; 116 | }]; 117 | 118 | [_recorder setFinishBlock:^(NSDictionary *info, WKRecorderFinishedReason reason){ 119 | switch (reason) { 120 | case WKRecorderFinishedReasonNormal: 121 | case WKRecorderFinishedReasonBeyondMaxDuration:{//finished normally 122 | 123 | UIStoryboard *sb = [UIStoryboard storyboardWithName:@"Main" bundle:nil]; 124 | WCSPreviewViewController *previewVC =
[sb instantiateViewControllerWithIdentifier:@"WCSPreviewViewController"]; 125 | previewVC.movieInfo = info; 126 | 127 | [weakSelf.navigationController pushViewController:previewVC animated:YES]; 128 | 129 | break; 130 | 131 | } 132 | case WKRecorderFinishedReasonCancle:{//重置 133 | 134 | 135 | break; 136 | } 137 | 138 | default: 139 | break; 140 | } 141 | NSLog(@"随便你要干什么"); 142 | }]; 143 | 144 | [_recorder setFocusAreaDidChangedBlock:^{//焦点改变 145 | 146 | }]; 147 | 148 | [_longPressButton setStateChangeBlock:^(WKState state){ 149 | __strong typeof(weakSelf) strongSelf = weakSelf; 150 | switch (state) { 151 | case WKStateBegin: { 152 | 153 | [strongSelf.recorder startCapture]; 154 | 155 | [strongSelf.statusLabel.superview bringSubviewToFront:strongSelf.statusLabel]; 156 | 157 | [strongSelf showStatusLabelWithBackgroundColor:[UIColor clearColor] textColor:[UIColor greenColor] state:YES]; 158 | 159 | if (!strongSelf.processLayer) { 160 | strongSelf.processLayer = [CALayer layer]; 161 | strongSelf.processLayer.bounds = CGRectMake(0, 0, CGRectGetWidth(strongSelf.preview.bounds), 5); 162 | strongSelf.processLayer.position = CGPointMake(CGRectGetMidX(strongSelf.preview.bounds), CGRectGetHeight(strongSelf.preview.bounds) - 2.5); 163 | strongSelf.processLayer.backgroundColor = [UIColor greenColor].CGColor; 164 | } 165 | [strongSelf addAnimation]; 166 | 167 | [strongSelf.preview.layer addSublayer:strongSelf.processLayer]; 168 | 169 | 170 | [strongSelf.longPressButton disappearAnimation]; 171 | 172 | break; 173 | } 174 | case WKStateIn: { 175 | [strongSelf showStatusLabelWithBackgroundColor:[UIColor clearColor] textColor:[UIColor greenColor] state:YES]; 176 | 177 | break; 178 | } 179 | case WKStateOut: { 180 | 181 | [strongSelf showStatusLabelWithBackgroundColor:[UIColor redColor] textColor:[UIColor whiteColor] state:NO]; 182 | break; 183 | } 184 | case WKStateCancle: { 185 | [strongSelf.recorder cancleCaputre]; 186 | [strongSelf endRecord]; 187 | break; 188 | } 189 | case WKStateFinish: { 190 | [strongSelf.recorder stopCapture]; 191 | [strongSelf endRecord]; 192 | break; 193 | } 194 | } 195 | }]; 196 | } 197 | 198 | #pragma mark - Orientation 199 | - (void)viewWillTransitionToSize:(CGSize)size withTransitionCoordinator:(id)coordinator 200 | { 201 | [super viewWillTransitionToSize:size withTransitionCoordinator:coordinator]; 202 | 203 | // Note that the app delegate controls the device orientation notifications required to use the device orientation. 
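// A sketch of the wiring the comment above assumes (illustrative, not code from this
// project): [UIDevice currentDevice].orientation only reports real values once
// orientation reporting is on, e.g. via
//     [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
// in the app delegate, or implicitly once some object observes
// UIDeviceOrientationDidChangeNotification.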
204 | UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation; 205 | if ( UIDeviceOrientationIsPortrait( deviceOrientation ) || UIDeviceOrientationIsLandscape( deviceOrientation ) ) { 206 | AVCaptureVideoPreviewLayer *previewLayer = [_recorder getPreviewLayer]; 207 | previewLayer.connection.videoOrientation = (AVCaptureVideoOrientation)deviceOrientation; 208 | 209 | UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation; 210 | AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientationPortrait; 211 | if ( statusBarOrientation != UIInterfaceOrientationUnknown ) { 212 | initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation; 213 | } 214 | 215 | [_recorder videoConnection].videoOrientation = initialVideoOrientation; 216 | } 217 | } 218 | 219 | - (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration NS_DEPRECATED_IOS(2_0,8_0, "Implement viewWillTransitionToSize:withTransitionCoordinator: instead") __TVOS_PROHIBITED 220 | { 221 | [super willRotateToInterfaceOrientation:toInterfaceOrientation duration:duration]; 222 | UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation; 223 | if ( UIDeviceOrientationIsPortrait( deviceOrientation ) || UIDeviceOrientationIsLandscape( deviceOrientation ) ) { 224 | AVCaptureVideoPreviewLayer *previewLayer = [_recorder getPreviewLayer]; 225 | previewLayer.connection.videoOrientation = (AVCaptureVideoOrientation)deviceOrientation; 226 | 227 | UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation; 228 | AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientationPortrait; 229 | if ( statusBarOrientation != UIInterfaceOrientationUnknown ) { 230 | initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation; 231 | } 232 | 233 | [_recorder videoConnection].videoOrientation = initialVideoOrientation; 234 | } 235 | } 236 | 237 | //双击 焦距调整 238 | - (void)tapGR:(UITapGestureRecognizer *)tapGes 239 | { 240 | 241 | 242 | CGFloat scaleFactor = self.isScale ? 1 : 2.f; 243 | 244 | self.scale = !self.isScale; 245 | 246 | [_recorder setScaleFactor:scaleFactor]; 247 | 248 | } 249 | 250 | - (void)addAnimation 251 | { 252 | _processLayer.hidden = NO; 253 | _processLayer.backgroundColor = [UIColor cyanColor].CGColor; 254 | 255 | CABasicAnimation *scaleXAnimation = [CABasicAnimation animationWithKeyPath:@"transform.scale.x"]; 256 | scaleXAnimation.duration = 10.f; 257 | scaleXAnimation.fromValue = @(1.f); 258 | scaleXAnimation.toValue = @(0.f); 259 | 260 | [_processLayer addAnimation:scaleXAnimation forKey:@"scaleXAnimation"]; 261 | } 262 | 263 | - (void)showStatusLabelWithBackgroundColor:(UIColor *)color textColor:(UIColor *)textColor state:(BOOL)isIn 264 | { 265 | _statusLabel.backgroundColor = color; 266 | _statusLabel.textColor = textColor; 267 | _statusLabel.hidden = NO; 268 | 269 | _statusLabel.text = isIn ? 
@"上移取消" : @"松手取消"; 270 | } 271 | 272 | - (void)endRecord 273 | { 274 | [_processLayer removeAllAnimations]; 275 | _processLayer.hidden = YES; 276 | _statusLabel.hidden = YES; 277 | [self.longPressButton appearAnimation]; 278 | } 279 | 280 | 281 | 282 | 283 | @end 284 | -------------------------------------------------------------------------------- /WeChatSightDemo/Controller/WCSPlayMovieController.h: -------------------------------------------------------------------------------- 1 | // 2 | // WCSPlayMovieController.h 3 | // WeChatSightDemo 4 | // 5 | // Created by 吴珂 on 16/8/26. 6 | // Copyright © 2016年 吴珂. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface WCSPlayMovieController : UIViewController 12 | 13 | @property (nonatomic, strong) NSURL *movieURL; 14 | 15 | @end 16 | -------------------------------------------------------------------------------- /WeChatSightDemo/Controller/WCSPlayMovieController.m: -------------------------------------------------------------------------------- 1 | // 2 | // WCSPlayMovieController.m 3 | // WeChatSightDemo 4 | // 5 | // Created by 吴珂 on 16/8/26. 6 | // Copyright © 2016年 吴珂. All rights reserved. 7 | // 8 | 9 | #import "WCSPlayMovieController.h" 10 | #import "WKMovieRecorder.h" 11 | #import "WKVideoConverter.h" 12 | 13 | #define kScreenWidth [UIScreen mainScreen].bounds.size.width 14 | #define kScreenHeight [UIScreen mainScreen].bounds.size.height 15 | 16 | @interface WCSPlayMovieController () 17 | @property (nonatomic, strong) AVPlayer *player; 18 | 19 | @property (nonatomic, strong) AVPlayerLayer *playerLayer; 20 | @end 21 | 22 | @implementation WCSPlayMovieController 23 | 24 | - (void)viewDidLoad { 25 | [super viewDidLoad]; 26 | 27 | CGFloat width = kScreenWidth; 28 | CGFloat Height = width / 4 * 3; 29 | AVPlayerItem *item = [AVPlayerItem playerItemWithURL:self.movieURL]; 30 | AVPlayer *player = [[AVPlayer alloc] initWithPlayerItem:item]; 31 | AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer: player]; 32 | playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; 33 | playerLayer.frame = CGRectMake(0, 0, kScreenWidth, Height); 34 | playerLayer.position = self.view.center; 35 | [self.view.layer addSublayer: playerLayer]; 36 | [playerLayer setNeedsDisplay]; 37 | [player play]; 38 | self.player = player; 39 | _playerLayer = playerLayer; 40 | 41 | self.view.backgroundColor = [UIColor blackColor]; 42 | __weak typeof(self) weakSelf = self; 43 | NSNotificationCenter *noteCenter = [NSNotificationCenter defaultCenter]; 44 | [noteCenter addObserverForName:AVPlayerItemDidPlayToEndTimeNotification 45 | object:nil 46 | queue:nil 47 | usingBlock:^(NSNotification *note) { 48 | [weakSelf.player seekToTime:kCMTimeZero]; 49 | [weakSelf.player play]; 50 | }]; 51 | 52 | } 53 | 54 | - (void)didReceiveMemoryWarning { 55 | [super didReceiveMemoryWarning]; 56 | 57 | } 58 | 59 | 60 | @end 61 | -------------------------------------------------------------------------------- /WeChatSightDemo/Controller/WCSPreviewViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // WCSPreviewViewController.h 3 | // WeChatSightDemo 4 | // 5 | // Created by 吴珂 on 16/8/26. 6 | // Copyright © 2016年 吴珂. All rights reserved. 
7 | // 8 | 9 | #import <UIKit/UIKit.h> 10 | 11 | @interface WCSPreviewViewController : UIViewController 12 | 13 | @property (nonatomic, copy) NSDictionary *movieInfo; 14 | 15 | @end 16 | -------------------------------------------------------------------------------- /WeChatSightDemo/Controller/WCSPreviewViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // WCSPreviewViewController.m 3 | // WeChatSightDemo 4 | // 5 | // Created by 吴珂 on 16/8/26. 6 | // Copyright © 2016年 吴珂. All rights reserved. 7 | // 8 | 9 | #import "WCSPreviewViewController.h" 10 | #import "WKMovieRecorder.h" 11 | #import "WKVideoConverter.h" 12 | #import "UIImageView+PlayGIF.h" 13 | #import "WCSPlayMovieController.h" 14 | 15 | #define kScreenWidth [UIScreen mainScreen].bounds.size.width 16 | #define kScreenHeight [UIScreen mainScreen].bounds.size.height 17 | @interface WCSPreviewViewController () 18 | 19 | @property (weak, nonatomic) IBOutlet UIButton *previewButton;// preview button 20 | @property (weak, nonatomic) IBOutlet UIImageView *preImageView;// plays the gif 21 | 22 | @property (nonatomic, strong) AVPlayer *player; 23 | 24 | @property (nonatomic, strong) WKVideoConverter *converter; 25 | 26 | @property (nonatomic, strong) AVPlayerLayer *playerLayer; 27 | 28 | @property (nonatomic, strong) NSURL *videoURL; 29 | 30 | @property (nonatomic, strong) NSURL *gifURL; 31 | 32 | @property (nonatomic, strong) WCSPlayMovieController *playVC; 33 | 34 | 35 | 36 | - (IBAction)showMovieAction:(id)sender; 37 | 38 | @end 39 | 40 | @implementation WCSPreviewViewController 41 | 42 | #pragma mark - life cycle 43 | - (void)viewDidLoad { 44 | [super viewDidLoad]; 45 | 46 | [self setupUI]; 47 | 48 | 49 | } 50 | 51 | - (void)didReceiveMemoryWarning { 52 | [super didReceiveMemoryWarning]; 53 | 54 | } 55 | 56 | - (void)dealloc 57 | { 58 | NSLog(@"%s", __FUNCTION__); 59 | } 60 | 61 | #pragma mark - setup 62 | - (void)setupUI 63 | { 64 | _previewButton.userInteractionEnabled = NO; 65 | 66 | // 1. build the file names (dashes instead of colons keep the timestamp filename-friendly) 67 | NSDateFormatter *df = [NSDateFormatter new]; 68 | df.dateFormat = @"yyyy-MM-dd'T'HH-mm-ss.SSS"; 69 | NSString *name = [df stringFromDate:[NSDate date]]; 70 | NSString *gifName = [name stringByAppendingPathExtension:@"gif"];// extension without the leading dot, otherwise the result is "name..gif" 71 | NSString *videoName = [name stringByAppendingPathExtension:@"mp4"]; 72 | 73 | // 2. copy the video 74 | [self copyVideoWithMovieName:videoName]; 75 | 76 | // 3. generate the gif 77 | _preImageView.contentMode = UIViewContentModeScaleAspectFill; 78 | _preImageView.layer.masksToBounds = YES; 79 | _preImageView.image = self.movieInfo[WKRecorderFirstFrame]; 80 | [self generateAndShowGifWithName:gifName]; 81 | 82 | 83 | 84 | } 85 | 86 | - (NSString *)generateMoviePathWithFileName:(NSString *)name 87 | { 88 | NSString *documetPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject]; 89 | 90 | NSString *moviePath = [documetPath stringByAppendingPathComponent:name]; 91 | 92 | return moviePath; 93 | } 94 | 95 | - (void)copyVideoWithMovieName:(NSString *)movieName 96 | { 97 | // 1. build the destination URL 98 | NSMutableString *videoName = [movieName mutableCopy]; 99 | NSURL *videoURL = _movieInfo[WKRecorderMovieURL]; 100 | 101 | // videoName already carries its ".mp4" extension, so nothing to append here 102 | 103 | [videoName replaceOccurrencesOfString:@" " withString:@"" options:NSCaseInsensitiveSearch range:NSMakeRange(0, videoName.length)]; 104 | 105 | NSString *videoPath = [self generateMoviePathWithFileName:videoName]; 106 | NSURL *newVideoURL = [NSURL fileURLWithPath:videoPath]; 107 | NSError *error = nil; 108 | 109
| [[NSFileManager defaultManager] copyItemAtURL:videoURL toURL:newVideoURL error:&error]; 110 | 111 | 112 | if (error) { 113 | 114 | NSLog(@"%@", [error localizedDescription]); 115 | 116 | }else{ 117 | self.videoURL = newVideoURL; 118 | } 119 | 120 | } 121 | 122 | - (void)generateAndShowGifWithName:(NSString *)gifName 123 | { 124 | NSString *gifPath = [self generateMoviePathWithFileName:gifName]; 125 | NSURL *newVideoURL = [NSURL fileURLWithPath:gifPath]; 126 | 127 | WKVideoConverter *converter = [[WKVideoConverter alloc] init]; 128 | 129 | __weak typeof(self) weakSelf = self;// the retained converter holds this block, so capture self weakly to avoid a retain cycle 130 | [converter convertVideoToGifImageWithURL:self.videoURL destinationUrl:newVideoURL finishBlock:^{// play the gif once conversion finishes 131 | weakSelf.previewButton.userInteractionEnabled = YES; 132 | weakSelf.preImageView.gifPath = gifPath; 133 | [weakSelf.preImageView startGIF]; 134 | }]; 135 | 136 | _converter = converter; 137 | } 138 | 139 | - (IBAction)showMovieAction:(id)sender { 140 | WCSPlayMovieController *playVC = [[WCSPlayMovieController alloc] init]; 141 | playVC.movieURL = self.videoURL; 142 | 143 | [self displayChildController:playVC]; 144 | 145 | _playVC = playVC; 146 | } 147 | 148 | #pragma mark - displayChildController 149 | - (void) displayChildController: (UIViewController*) child { 150 | [self addChildViewController:child]; 151 | [self.view addSubview:child.view]; 152 | child.view.frame = self.view.bounds;// bounds, not frame: the child view is laid out in this view's own coordinate space 153 | [child didMoveToParentViewController:self]; 154 | } 155 | 156 | - (void) hideContentController: (UIViewController*) child { 157 | [child willMoveToParentViewController:nil]; 158 | [child.view removeFromSuperview]; 159 | [child removeFromParentViewController]; 160 | } 161 | 162 | - (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event 163 | { 164 | [self hideContentController:self.playVC]; 165 | self.playVC = nil; 166 | } 167 | 168 | 169 | @end 170 | -------------------------------------------------------------------------------- /WeChatSightDemo/Info.plist: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8"?> 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> 3 | <plist version="1.0"> 4 | <dict> 5 | <key>NSMicrophoneUsageDescription</key> 6 | <string>使用麦克风</string> 7 | <key>NSCameraUsageDescription</key> 8 | <string>使用摄像头</string> 9 | <key>CFBundleDevelopmentRegion</key> 10 | <string>en</string> 11 | <key>CFBundleExecutable</key> 12 | <string>$(EXECUTABLE_NAME)</string> 13 | <key>CFBundleIdentifier</key> 14 | <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string> 15 | <key>CFBundleInfoDictionaryVersion</key> 16 | <string>6.0</string> 17 | <key>CFBundleName</key> 18 | <string>$(PRODUCT_NAME)</string> 19 | <key>CFBundlePackageType</key> 20 | <string>APPL</string> 21 | <key>CFBundleShortVersionString</key> 22 | <string>1.0</string> 23 | <key>CFBundleSignature</key> 24 | <string>????</string> 25 | <key>CFBundleVersion</key> 26 | <string>1</string> 27 | <key>LSRequiresIPhoneOS</key> 28 | <true/> 29 | <key>UILaunchStoryboardName</key> 30 | <string>LaunchScreen</string> 31 | <key>UIMainStoryboardFile</key> 32 | <string>Main</string> 33 | <key>UIRequiredDeviceCapabilities</key> 34 | <array> 35 | <string>armv7</string> 36 | </array> 37 | <key>UISupportedInterfaceOrientations</key> 38 | <array> 39 | <string>UIInterfaceOrientationPortrait</string> 40 | </array> 41 | <key>UISupportedInterfaceOrientations~ipad</key> 42 | <array> 43 | <string>UIInterfaceOrientationPortrait</string> 44 | <string>UIInterfaceOrientationPortraitUpsideDown</string> 45 | <string>UIInterfaceOrientationLandscapeLeft</string> 46 | <string>UIInterfaceOrientationLandscapeRight</string> 47 | </array> 48 | </dict> 49 | </plist> 50 | -------------------------------------------------------------------------------- /WeChatSightDemo/View/WKScaleButton.h: -------------------------------------------------------------------------------- 1 | // 2 | // WKScaleButton.h 3 | // VideoCaptureDemo 4 | // 5 | // Created by 吴珂 on 16/5/19. 6 | // Copyright © 2016年 吴珂. All rights reserved.
7 | // 8 | 9 | #import 10 | 11 | typedef NS_ENUM(NSInteger, WKState) { 12 | WKStateBegin, 13 | WKStateIn, 14 | WKStateOut, 15 | WKStateCancle, 16 | WKStateFinish 17 | }; 18 | 19 | typedef void (^WKStateChangeBlock)(WKState state); 20 | 21 | @interface WKScaleButton : UIView 22 | 23 | @property (nonatomic,strong) CAShapeLayer *circleLayer; 24 | @property (nonatomic,strong) UILabel *label; 25 | @property (nonatomic, readonly) CGFloat radius; 26 | @property (nonatomic, copy) WKStateChangeBlock stateChangeBlock; 27 | 28 | -(void)disappearAnimation; 29 | -(void)appearAnimation; 30 | 31 | - (BOOL)circleContainsPoint:(CGPoint)point; 32 | - (void)setTitle:(NSString *)title; 33 | 34 | @end 35 | -------------------------------------------------------------------------------- /WeChatSightDemo/View/WKScaleButton.m: -------------------------------------------------------------------------------- 1 | // 2 | // WKScaleButton.m 3 | // VideoCaptureDemo 4 | // 5 | // Created by 吴珂 on 16/5/19. 6 | // Copyright © 2016年 吴珂. All rights reserved. 7 | // 8 | 9 | #import "WKScaleButton.h" 10 | 11 | const CGFloat ScaleButtonCircleRadius = 120.f; 12 | 13 | #define UIColorFromRGB(rgbValue) [UIColor colorWithRed:((float)((rgbValue & 0xFF0000) >> 16))/255.0 green:((float)((rgbValue & 0xFF00) >> 8))/255.0 blue:((float)(rgbValue & 0xFF))/255.0 alpha:1.0] 14 | 15 | @implementation WKScaleButton 16 | { 17 | CALayer *_effectiveLayer; 18 | WKState _state; 19 | CGFloat _scaleButtonCircleRadius; 20 | } 21 | - (void)awakeFromNib 22 | { 23 | self.backgroundColor = [UIColor clearColor]; 24 | 25 | _scaleButtonCircleRadius = (self.bounds.size.width - 10) / 2; 26 | 27 | _label = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, self.bounds.size.width, 40)]; 28 | _label.center = CGPointMake(CGRectGetMidX(self.bounds), CGRectGetMidY(self.bounds)); 29 | _label.backgroundColor = [UIColor clearColor]; 30 | _label.textAlignment = NSTextAlignmentCenter; 31 | _label.text = @"按住拍"; 32 | _label.textColor = [UIColor whiteColor]; 33 | [self addSubview:_label]; 34 | 35 | _circleLayer = [CAShapeLayer layer]; 36 | _circleLayer.frame = self.bounds; 37 | UIBezierPath *path = [UIBezierPath bezierPathWithArcCenter:_circleLayer.position radius:_scaleButtonCircleRadius startAngle:-M_PI endAngle:M_PI clockwise:YES]; 38 | _circleLayer.path = path.CGPath; 39 | _circleLayer.fillColor = [UIColor clearColor].CGColor; 40 | 41 | _circleLayer.lineWidth = 3; 42 | _circleLayer.strokeColor = [UIColor cyanColor].CGColor; 43 | 44 | CALayer *gradientLayer = [CALayer layer]; 45 | CAGradientLayer *gradientLayer1 = [CAGradientLayer layer]; 46 | gradientLayer1.frame = CGRectMake(0, 0, self.bounds.size.width, self.bounds.size.height); 47 | [gradientLayer1 setColors:[NSArray arrayWithObjects:(id)[[UIColor cyanColor] CGColor],(id)[UIColorFromRGB(0xfde802) CGColor], [UIColor redColor].CGColor, nil]]; 48 | [gradientLayer1 setLocations:@[@0.25, @0.5, @0.75, @1]]; 49 | [gradientLayer1 setStartPoint:CGPointMake(0.5, 1)]; 50 | [gradientLayer1 setEndPoint:CGPointMake(0.5, 0)]; 51 | [gradientLayer addSublayer:gradientLayer1]; 52 | 53 | [gradientLayer setMask:_circleLayer]; 54 | [self.layer addSublayer:gradientLayer]; 55 | 56 | UILongPressGestureRecognizer *panGesture = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(panAction:)]; 57 | [self addGestureRecognizer:panGesture]; 58 | panGesture.minimumPressDuration = 0.5; 59 | 60 | } 61 | 62 | - (void)setTitle:(NSString *)title 63 | { 64 | _label.text = title; 65 | } 66 | 67 | - 
(void)panAction:(UILongPressGestureRecognizer *)ges 68 | { 69 | 70 | switch (ges.state) { 71 | 72 | case UIGestureRecognizerStateBegan: { 73 | _state = WKStateIn; 74 | if (self.stateChangeBlock) self.stateChangeBlock(WKStateBegin);// the block is optional, so guard every call 75 | 76 | break; 77 | } 78 | case UIGestureRecognizerStateChanged: { 79 | CGPoint point = [ges locationInView:self]; 80 | 81 | if (![self circleContainsPoint:point]) { 82 | NSLog(@"out"); 83 | _state = WKStateOut; 84 | if (self.stateChangeBlock) self.stateChangeBlock(WKStateOut); 85 | 86 | 87 | 88 | } else { 89 | 90 | NSLog(@"in"); 91 | if (self.stateChangeBlock) self.stateChangeBlock(WKStateIn); 92 | _state = WKStateIn; 93 | } 94 | break; 95 | } 96 | case UIGestureRecognizerStateEnded: 97 | case UIGestureRecognizerStateCancelled: { 98 | NSLog(@"cancel, end"); 99 | if(_state == WKStateIn){ 100 | if (self.stateChangeBlock) self.stateChangeBlock(WKStateFinish); 101 | }else{ 102 | if (self.stateChangeBlock) self.stateChangeBlock(WKStateCancle); 103 | } 104 | break; 105 | } 106 | case UIGestureRecognizerStateFailed: { 107 | NSLog(@"failed"); 108 | if (self.stateChangeBlock) self.stateChangeBlock(WKStateCancle); 109 | break; 110 | } 111 | default: 112 | break; 113 | } 114 | } 115 | 116 | 117 | -(void)disappearAnimation{ 118 | CABasicAnimation *animation_scale = [CABasicAnimation animationWithKeyPath:@"transform.scale"]; 119 | animation_scale.toValue = @1.5; 120 | CABasicAnimation *animation_opacity = [CABasicAnimation animationWithKeyPath:@"opacity"]; 121 | animation_opacity.toValue = @0; 122 | CAAnimationGroup *aniGroup = [CAAnimationGroup animation]; 123 | aniGroup.duration = 0.2; 124 | aniGroup.animations = @[animation_scale, animation_opacity]; 125 | aniGroup.fillMode = kCAFillModeForwards; 126 | aniGroup.removedOnCompletion = NO; 127 | [_circleLayer addAnimation:aniGroup forKey:@"start"]; 128 | [_label.layer addAnimation:aniGroup forKey:@"start1"]; 129 | } 130 | 131 | -(void)appearAnimation{ 132 | CABasicAnimation *animation_scale = [CABasicAnimation animationWithKeyPath:@"transform.scale"]; 133 | animation_scale.toValue = @1; 134 | CABasicAnimation *animation_opacity = [CABasicAnimation animationWithKeyPath:@"opacity"]; 135 | animation_opacity.toValue = @1; 136 | CAAnimationGroup *aniGroup = [CAAnimationGroup animation]; 137 | aniGroup.duration = 0.2; 138 | aniGroup.animations = @[animation_scale, animation_opacity]; 139 | aniGroup.fillMode = kCAFillModeForwards; 140 | aniGroup.removedOnCompletion = NO; 141 | [_circleLayer addAnimation:aniGroup forKey:@"reset"]; 142 | [_label.layer addAnimation:aniGroup forKey:@"reset1"]; 143 | } 144 | 145 | - (CGFloat)radius 146 | { 147 | return _scaleButtonCircleRadius; 148 | } 149 | 150 | - (BOOL)circleContainsPoint:(CGPoint)point 151 | { 152 | CGPoint center = CGPointMake(CGRectGetMidX(self.bounds), CGRectGetMidY(self.bounds)); 153 | CGFloat dx = point.x - center.x, dy = point.y - center.y; 154 | return (dx * dx + dy * dy) <= _scaleButtonCircleRadius * _scaleButtonCircleRadius;// a true circular hit test, not a bounding-box check 155 | } 156 | 157 | @end 158 | -------------------------------------------------------------------------------- /WeChatSightDemo/ViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.h 3 | // WeChatSightDemo 4 | // 5 | // Created by 吴珂 on 16/8/19. 6 | // Copyright © 2016年 吴珂. All rights reserved.
7 | // 8 | 9 | #import <UIKit/UIKit.h> 10 | 11 | @interface ViewController : UIViewController 12 | 13 | 14 | 15 | @end 16 | 17 | -------------------------------------------------------------------------------- /WeChatSightDemo/ViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.m 3 | // WeChatSightDemo 4 | // 5 | // Created by 吴珂 on 16/8/19. 6 | // Copyright © 2016年 吴珂. All rights reserved. 7 | // 8 | 9 | #import "ViewController.h" 10 | 11 | @interface ViewController () 12 | 13 | @end 14 | 15 | @implementation ViewController 16 | 17 | - (void)viewDidLoad { 18 | [super viewDidLoad]; 19 | // Do any additional setup after loading the view, typically from a nib. 20 | } 21 | 22 | - (void)didReceiveMemoryWarning { 23 | [super didReceiveMemoryWarning]; 24 | // Dispose of any resources that can be recreated. 25 | } 26 | 27 | @end 28 | -------------------------------------------------------------------------------- /WeChatSightDemo/WKMovieRecorder/WKMovieRecorder.h: -------------------------------------------------------------------------------- 1 | // 2 | // WKMovieRecorder.h 3 | // CapturePause 4 | // 5 | // Created by 吴珂 on 16/7/7. 6 | // Copyright © 2016年 Geraint Davies. All rights reserved. 7 | // 8 | 9 | #import <Foundation/Foundation.h> 10 | #import <AVFoundation/AVFoundation.h> 11 | 12 | extern const NSString * const WKRecorderLastFrame; 13 | extern const NSString * const WKRecorderMovieURL; 14 | extern const NSString * const WKRecorderDuration; 15 | extern const NSString * const WKRecorderAllFrames; 16 | extern const NSString * const WKRecorderFirstFrame; 17 | 18 | typedef NS_ENUM(NSInteger, WKRecorderFinishedReason){ 19 | WKRecorderFinishedReasonNormal,// finished by the caller 20 | WKRecorderFinishedReasonCancle,// cancelled 21 | WKRecorderFinishedReasonBeyondMaxDuration// hit the max duration 22 | }; 23 | 24 | /** 25 | * Invoked when recording finishes. 26 | * 27 | * @param info recording info (movie URL, duration, frames) 28 | * @param finishReason why the recording ended 29 | */ 30 | typedef void(^FinishRecordingBlock)(NSDictionary *info, WKRecorderFinishedReason finishReason); 31 | 32 | typedef void(^FocusAreaDidChanged)(void); 33 | 34 | typedef void(^AuthorizationResult)(BOOL success); 35 | 36 | @interface WKMovieRecorder : NSObject 37 | 38 | + (WKMovieRecorder*) sharedRecorder; 39 | - (void) setup; 40 | - (void) shutdown; 41 | - (AVCaptureVideoPreviewLayer*) getPreviewLayer; 42 | - (void)prepareCaptureWithBlock:(void (^)())block; 43 | - (void) startCapture; 44 | - (void) pauseCapture; 45 | - (void) stopCapture; 46 | - (void) cancleCaputre; 47 | - (void) resumeCapture; 48 | - (void) startSession;// start the session 49 | - (BOOL) setScaleFactor:(CGFloat)factor;// set the zoom factor 50 | - (void) changeCamera; 51 | - (void) finishCapture; 52 | 53 | // callbacks 54 | @property (nonatomic, copy) FinishRecordingBlock finishBlock;// invoked when recording ends 55 | @property (nonatomic, copy) FocusAreaDidChanged focusAreaDidChangedBlock; 56 | @property (nonatomic, copy) AuthorizationResult authorizationResultBlock; 57 | 58 | - (instancetype)initWithMaxDuration:(NSTimeInterval)duration; 59 | 60 | @property (nonatomic, assign) CGSize cropSize; 61 | 62 | @property (nonatomic, strong, readonly) AVCaptureConnection *videoConnection; 63 | @property (nonatomic, strong, readonly) AVCaptureConnection *audioConnection; 64 | 65 | 66 | @property (nonatomic, strong, readonly) AVCaptureDeviceInput *videoDeviceInput; 67 | 68 | @property (nonatomic, assign, readonly) NSTimeInterval duration; 69 | 70 | //@property (nonatomic, strong, readonly) UIImage *lastFrame;// last captured frame 71 | 72 | @property (nonatomic, strong, readonly) NSURL *recordURL;// temporary movie URL 73 | 74 | -
(BOOL)isCapturing; 75 | @end 76 | -------------------------------------------------------------------------------- /WeChatSightDemo/WKMovieRecorder/WKMovieRecorder.m: -------------------------------------------------------------------------------- 1 | // 2 | // WKMovieRecorder.m 3 | // CapturePause 4 | // 5 | // Created by 吴珂 on 16/7/7. 6 | // Copyright © 2016年 Geraint Davies. All rights reserved. 7 | // 8 | 9 | #import "WKMovieRecorder.h" 10 | #import "WKMovieWriter.h" 11 | #import "AssetsLibrary/ALAssetsLibrary.h" 12 | #import 13 | #import "WKVideoConverter.h" 14 | 15 | #define DebugMovie 0 16 | 17 | const NSString * const WKRecorderLastFrame = @"WKRecorderLastFrame"; 18 | const NSString * const WKRecorderMovieURL = @"WKRecorderMovieURL"; 19 | const NSString * const WKRecorderDuration = @"WKRecorderDuration"; 20 | const NSString * const WKRecorderAllFrames = @"WKRecorderAllFrames"; 21 | const NSString * const WKRecorderFirstFrame = @"WKRecorderFirstFrame"; 22 | 23 | static void *SessionRunningContext = &SessionRunningContext; 24 | static void *CapturingStillImageContext = &CapturingStillImageContext; 25 | static void *FocusAreaChangedContext = &FocusAreaChangedContext; 26 | 27 | typedef NS_ENUM( NSInteger, CaptureAVSetupResult ) { 28 | CaptureAVSetupResultSuccess, 29 | CaptureAVSetupResultCameraNotAuthorized, 30 | CaptureAVSetupResultSessionConfigurationFailed 31 | }; 32 | 33 | 34 | 35 | @interface WKMovieRecorder () 36 | < 37 | AVCaptureVideoDataOutputSampleBufferDelegate, 38 | AVCaptureAudioDataOutputSampleBufferDelegate, 39 | WKMovieWriterDelegate 40 | > 41 | { 42 | AVCaptureSession* _session; 43 | AVCaptureVideoPreviewLayer* _preview; 44 | 45 | WKMovieWriter* _writer; 46 | BOOL _isCapturing; 47 | BOOL _isPaused; 48 | BOOL _discont; 49 | int _currentFile; 50 | CMTime _timeOffset; 51 | CMTime _lastVideo; 52 | CMTime _lastAudio; 53 | 54 | NSTimeInterval _maxDuration; 55 | // UIImage *_lastFrame; 56 | } 57 | 58 | /** 59 | 准备完成后调用的block 60 | */ 61 | @property (nonatomic, copy) void (^prepareBlock)(void); 62 | 63 | 64 | // Session management. 
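// Threading model used throughout this class: `sessionQueue` serializes all session
// configuration, while sample buffers are delivered on `videoDataOutputQueue`, a serial
// queue targeted at the high-priority global queue (see -init below), so frame delivery
// never waits behind configuration work. A caller-side sketch of the same pattern
// (names here are illustrative):
//     dispatch_queue_t q = dispatch_queue_create("demo.videoData", DISPATCH_QUEUE_SERIAL);
//     dispatch_set_target_queue(q, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));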
65 | @property (nonatomic, strong) dispatch_queue_t sessionQueue; 66 | @property (nonatomic, strong) dispatch_queue_t videoDataOutputQueue; 67 | @property (nonatomic, strong) AVCaptureSession *session; 68 | @property (nonatomic, strong) AVCaptureDevice *captureDevice; 69 | @property (nonatomic, strong) AVCaptureDeviceInput *videoDeviceInput; 70 | @property (nonatomic, strong) AVCaptureStillImageOutput *stillImageOutput; 71 | @property (nonatomic, strong) AVCaptureConnection *videoConnection; 72 | @property (nonatomic, strong) AVCaptureConnection *audioConnection; 73 | @property (nonatomic, strong) NSDictionary *videoCompressionSettings; 74 | @property (nonatomic, strong) NSDictionary *audioCompressionSettings; 75 | @property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *adaptor; 76 | @property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput; 77 | 78 | 79 | 80 | //Utilities 81 | @property (nonatomic, strong) NSMutableArray *frames;//存储录制帧 82 | @property (nonatomic, assign) CaptureAVSetupResult result; 83 | @property (atomic, readwrite) BOOL isCapturing; 84 | @property (atomic, readwrite) BOOL isPaused; 85 | @property (nonatomic, strong) NSTimer *durationTimer; 86 | 87 | @property (nonatomic, assign) WKRecorderFinishedReason finishReason; 88 | @end 89 | 90 | @implementation WKMovieRecorder 91 | @synthesize duration = _duration; 92 | + (WKMovieRecorder *)sharedRecorder 93 | { 94 | static WKMovieRecorder *recorder; 95 | static dispatch_once_t onceToken; 96 | dispatch_once(&onceToken, ^{ 97 | recorder = [[WKMovieRecorder alloc] initWithMaxDuration:CGFLOAT_MAX]; 98 | }); 99 | 100 | return recorder; 101 | } 102 | 103 | - (instancetype)initWithMaxDuration:(NSTimeInterval)duration 104 | { 105 | if(self = [self init]){ 106 | _maxDuration = duration; 107 | _duration = 0.f; 108 | } 109 | 110 | return self; 111 | } 112 | 113 | - (instancetype)init 114 | { 115 | self = [super init]; 116 | if (self) { 117 | _maxDuration = CGFLOAT_MAX; 118 | _duration = 0.f; 119 | _sessionQueue = dispatch_queue_create("wukong.movieRecorder.queue", DISPATCH_QUEUE_SERIAL ); 120 | _videoDataOutputQueue = dispatch_queue_create( "wukong.movieRecorder.video", DISPATCH_QUEUE_SERIAL ); 121 | dispatch_set_target_queue( _videoDataOutputQueue, dispatch_get_global_queue( DISPATCH_QUEUE_PRIORITY_HIGH, 0 ) ); 122 | } 123 | return self; 124 | } 125 | 126 | - (void)dealloc 127 | { 128 | [[NSNotificationCenter defaultCenter] removeObserver:self name:nil object:self.session]; 129 | [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureSessionWasInterruptedNotification object:self.session]; 130 | [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureSessionInterruptionEndedNotification object:self.session]; 131 | if (_captureDevice.position == AVCaptureDevicePositionBack) { 132 | 133 | [_captureDevice removeObserver:self forKeyPath:@"adjustingFocus"]; 134 | } 135 | 136 | [_session beginConfiguration]; 137 | [self.session removeInput:self.videoDeviceInput]; 138 | [_session commitConfiguration]; 139 | 140 | 141 | // [_session removeInput:self.videoDeviceInput]; 142 | 143 | 144 | 145 | if ([_session isRunning]){ 146 | [_session stopRunning]; 147 | _session = nil; 148 | } 149 | 150 | NSLog(@"%s", __FUNCTION__); 151 | } 152 | 153 | - (void)setup 154 | { 155 | if (_session == nil) 156 | { 157 | NSLog(@"Starting up server"); 158 | 159 | self.isCapturing = NO; 160 | self.isPaused = NO; 161 | _currentFile = 0; 162 | _discont = NO; 163 | 164 | 165 | 166 | 167 | self.session = 
[[AVCaptureSession alloc] init]; 168 | 169 | self.result = CaptureAVSetupResultSuccess; 170 | 171 | //权限检查 172 | dispatch_group_t group = dispatch_group_create(); 173 | [self checkAuthorization:group]; 174 | 175 | dispatch_async(dispatch_get_main_queue(), ^{ 176 | dispatch_group_notify(group, dispatch_get_main_queue(), ^{ 177 | NSLog(@"notity--------"); 178 | }); 179 | }); 180 | 181 | dispatch_group_notify(group, self.sessionQueue, ^{ 182 | if (self.result != CaptureAVSetupResultSuccess) { 183 | 184 | if (self.authorizationResultBlock) { 185 | self.authorizationResultBlock(NO); 186 | } 187 | return; 188 | } 189 | 190 | AVCaptureDevice *captureDevice = [[self class] deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack]; 191 | 192 | _captureDevice = captureDevice; 193 | 194 | NSError *error = nil; 195 | _videoDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error]; 196 | 197 | if (!_videoDeviceInput) { 198 | NSLog(@"未找到设备"); 199 | } 200 | 201 | 202 | //配置会话 203 | [self.session beginConfiguration]; 204 | 205 | int frameRate; 206 | if ( [NSProcessInfo processInfo].processorCount == 1 ) 207 | { 208 | if ([self.session canSetSessionPreset:AVCaptureSessionPresetLow]) { 209 | [self.session setSessionPreset:AVCaptureSessionPresetLow]; 210 | } 211 | frameRate = 10; 212 | }else{ 213 | if ([self.session canSetSessionPreset:AVCaptureSessionPreset640x480]) { 214 | [self.session setSessionPreset:AVCaptureSessionPreset640x480]; 215 | } 216 | frameRate = 30; 217 | } 218 | 219 | CMTime frameDuration = CMTimeMake( 1, frameRate ); 220 | 221 | if ( [_captureDevice lockForConfiguration:&error] ) { 222 | _captureDevice.activeVideoMaxFrameDuration = frameDuration; 223 | _captureDevice.activeVideoMinFrameDuration = frameDuration; 224 | [_captureDevice unlockForConfiguration]; 225 | } 226 | else { 227 | NSLog( @"videoDevice lockForConfiguration returned error %@", error ); 228 | } 229 | 230 | 231 | //Video 232 | if ([self.session canAddInput:_videoDeviceInput]) { 233 | 234 | [self.session addInput:_videoDeviceInput]; 235 | self.videoDeviceInput = _videoDeviceInput; 236 | [self.session removeOutput:_videoDataOutput]; 237 | 238 | AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init]; 239 | _videoDataOutput = videoOutput; 240 | videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) }; 241 | 242 | [videoOutput setSampleBufferDelegate:self queue:_videoDataOutputQueue]; 243 | 244 | videoOutput.alwaysDiscardsLateVideoFrames = NO; 245 | 246 | if ( [_session canAddOutput:videoOutput] ) { 247 | [_session addOutput:videoOutput]; 248 | 249 | [_captureDevice addObserver:self forKeyPath:@"adjustingFocus" options:NSKeyValueObservingOptionNew context:FocusAreaChangedContext]; 250 | 251 | _videoConnection = [videoOutput connectionWithMediaType:AVMediaTypeVideo]; 252 | 253 | if(_videoConnection.isVideoStabilizationSupported){ 254 | _videoConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto; 255 | } 256 | 257 | 258 | UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation; 259 | AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientationPortrait; 260 | if ( statusBarOrientation != UIInterfaceOrientationUnknown ) { 261 | initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation; 262 | } 263 | 264 | _videoConnection.videoOrientation = initialVideoOrientation; 265 | } 266 | 267 | } 268 | else{ 269 | 
NSLog(@"无法添加视频输入到会话"); 270 | } 271 | 272 | //audio 273 | AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; 274 | AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error]; 275 | 276 | 277 | if ( ! audioDeviceInput ) { 278 | NSLog( @"Could not create audio device input: %@", error ); 279 | } 280 | 281 | if ( [self.session canAddInput:audioDeviceInput] ) { 282 | [self.session addInput:audioDeviceInput]; 283 | 284 | } 285 | else { 286 | NSLog( @"Could not add audio device input to the session" ); 287 | } 288 | 289 | 290 | 291 | AVCaptureAudioDataOutput *audioOut = [[AVCaptureAudioDataOutput alloc] init]; 292 | // Put audio on its own queue to ensure that our video processing doesn't cause us to drop audio 293 | dispatch_queue_t audioCaptureQueue = dispatch_queue_create( "wukong.movieRecorder.audio", DISPATCH_QUEUE_SERIAL ); 294 | [audioOut setSampleBufferDelegate:self queue:audioCaptureQueue]; 295 | 296 | 297 | if ( [self.session canAddOutput:audioOut] ) { 298 | [self.session addOutput:audioOut]; 299 | } 300 | _audioConnection = [audioOut connectionWithMediaType:AVMediaTypeAudio]; 301 | 302 | [self.session commitConfiguration]; 303 | 304 | if (self.prepareBlock) { 305 | if (!_session.isRunning) { 306 | [_session startRunning]; 307 | } 308 | 309 | dispatch_async(dispatch_get_main_queue(), ^{ 310 | 311 | self.prepareBlock(); 312 | }); 313 | } 314 | }); 315 | 316 | _preview = [AVCaptureVideoPreviewLayer layerWithSession:_session]; 317 | _preview.videoGravity = AVLayerVideoGravityResizeAspectFill; 318 | } 319 | 320 | 321 | 322 | 323 | [self addObservers]; 324 | } 325 | 326 | 327 | /** 328 | 权限检查 329 | */ 330 | - (void)checkAuthorization:(dispatch_group_t)group 331 | { 332 | dispatch_group_enter(group); 333 | switch ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo]) { 334 | case AVAuthorizationStatusNotDetermined: { 335 | [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) { 336 | self.result = granted ? CaptureAVSetupResultSuccess : CaptureAVSetupResultCameraNotAuthorized; 337 | dispatch_group_leave(group); 338 | }]; 339 | break; 340 | } 341 | case AVAuthorizationStatusAuthorized: { 342 | dispatch_group_leave(group); 343 | break; 344 | } 345 | default:{ 346 | self.result = CaptureAVSetupResultCameraNotAuthorized; 347 | dispatch_group_leave(group); 348 | } 349 | } 350 | 351 | if (self.result != CaptureAVSetupResultCameraNotAuthorized) { 352 | dispatch_group_enter(group); 353 | switch ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio]) { 354 | case AVAuthorizationStatusNotDetermined: { 355 | [AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:^(BOOL granted) { 356 | 357 | self.result = granted ? 
CaptureAVSetupResultSuccess : CaptureAVSetupResultCameraNotAuthorized; 358 | dispatch_group_leave(group); 359 | }]; 360 | break; 361 | } 362 | case AVAuthorizationStatusAuthorized: { 363 | dispatch_group_leave(group); 364 | break; 365 | } 366 | default:{ 367 | self.result = CaptureAVSetupResultCameraNotAuthorized; 368 | dispatch_group_leave(group); 369 | } 370 | } 371 | 372 | } 373 | } 374 | 375 | #pragma mark - Recording 376 | 377 | - (void) startCapture 378 | { 379 | @synchronized(self) 380 | { 381 | dispatch_async(_sessionQueue, ^{ 382 | 383 | 384 | if (!self.isCapturing) 385 | { 386 | if (![_session isRunning]) { 387 | [_session startRunning]; 388 | } 389 | NSLog(@"starting capture"); 390 | [self.frames removeAllObjects]; 391 | _currentFile++; 392 | // create the encoder once we have the audio params 393 | _writer = nil; 394 | self.isPaused = NO; 395 | _discont = NO; 396 | _timeOffset = CMTimeMake(0, 0); 397 | self.isCapturing = YES; 398 | 399 | dispatch_async(dispatch_get_main_queue(), ^{ 400 | 401 | _durationTimer = [NSTimer scheduledTimerWithTimeInterval:0.1f target:self selector:@selector(computeDuration:) userInfo:nil repeats:YES]; 402 | }); 403 | _duration = 0.f; 404 | } 405 | }); 406 | 407 | } 408 | } 409 | 410 | - (void)prepareCaptureWithBlock:(void (^)())block 411 | { 412 | self.prepareBlock = block; 413 | [self setup]; 414 | } 415 | 416 | - (void) stopCapture 417 | { 418 | [_session stopRunning]; 419 | [self finishCaptureWithReason:WKRecorderFinishedReasonNormal]; 420 | } 421 | 422 | - (void)cancleCaputre 423 | { 424 | [_session stopRunning]; 425 | [self finishCaptureWithReason:WKRecorderFinishedReasonCancle]; 426 | } 427 | 428 | - (void)finishCapture 429 | { 430 | [_session stopRunning]; 431 | } 432 | 433 | /** 434 | * 结束录制 435 | * 436 | * @param isCancle yes 取消 NO 正常结束 437 | */ 438 | - (void)finishCaptureWithReason:(WKRecorderFinishedReason)reason 439 | { 440 | @synchronized(self) 441 | { 442 | if (self.isCapturing) 443 | { 444 | // serialize with audio and video capture 445 | 446 | self.isCapturing = NO; 447 | [_durationTimer invalidate]; 448 | dispatch_async(_sessionQueue, ^{ 449 | switch (reason) { 450 | case WKRecorderFinishedReasonNormal:{ 451 | [_writer finishRecording]; 452 | break; 453 | 454 | } 455 | case WKRecorderFinishedReasonBeyondMaxDuration:{ 456 | [_writer finishRecording]; 457 | break; 458 | } 459 | case WKRecorderFinishedReasonCancle:{ 460 | [_writer cancleRecording]; 461 | break; 462 | } 463 | 464 | default: 465 | break; 466 | } 467 | 468 | self.finishReason = reason; 469 | }); 470 | 471 | } 472 | } 473 | 474 | } 475 | 476 | - (void) pauseCapture 477 | { 478 | @synchronized(self) 479 | { 480 | if (self.isCapturing) 481 | { 482 | NSLog(@"Pausing capture"); 483 | self.isPaused = YES; 484 | [_durationTimer invalidate]; 485 | _discont = YES; 486 | } 487 | } 488 | } 489 | 490 | - (void) resumeCapture 491 | { 492 | @synchronized(self) 493 | { 494 | if (self.isPaused) 495 | { 496 | NSLog(@"Resuming capture"); 497 | self.isPaused = NO; 498 | dispatch_async(dispatch_get_main_queue(), ^{ 499 | 500 | _durationTimer = [NSTimer scheduledTimerWithTimeInterval:0.1f target:self selector:@selector(computeDuration:) userInfo:nil repeats:YES]; 501 | }); 502 | } 503 | } 504 | } 505 | 506 | - (void) shutdown 507 | { 508 | NSLog(@"shutting down server"); 509 | if (_session) 510 | { 511 | [_session stopRunning]; 512 | _session = nil; 513 | } 514 | 515 | [_writer finishRecording]; 516 | 517 | } 518 | 519 | 520 | - (AVCaptureVideoPreviewLayer*) getPreviewLayer 521 | { 522 | 
return _preview; 523 | } 524 | 525 | 526 | #pragma mark 时常限制 527 | - (void)computeDuration:(NSTimer *)timer 528 | { 529 | if (self.isCapturing) { 530 | [self willChangeValueForKey:@"duration"]; 531 | _duration += 0.1; 532 | [self didChangeValueForKey:@"duration"]; 533 | NSLog(@"%f", _duration); 534 | if (_duration >= _maxDuration) { 535 | [self finishCaptureWithReason:WKRecorderFinishedReasonBeyondMaxDuration]; 536 | [timer invalidate]; 537 | NSLog(@"录制超时,结束录制"); 538 | } 539 | } 540 | } 541 | 542 | - (void)startSession 543 | { 544 | dispatch_async(self.sessionQueue, ^{ 545 | 546 | if (!_session.isRunning) { 547 | [_session startRunning]; 548 | } 549 | }); 550 | } 551 | 552 | 553 | - (CMSampleBufferRef) adjustTime:(CMSampleBufferRef) sample by:(CMTime) offset 554 | { 555 | CMItemCount count; 556 | CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count); 557 | CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count); 558 | CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count); 559 | for (CMItemCount i = 0; i < count; i++) 560 | { 561 | pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset); 562 | pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset); 563 | } 564 | CMSampleBufferRef sout; 565 | CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout); 566 | free(pInfo); 567 | return sout; 568 | } 569 | 570 | #pragma mark - setting 571 | 572 | - (BOOL)setScaleFactor:(CGFloat)factor 573 | { 574 | [_captureDevice lockForConfiguration:nil]; 575 | 576 | BOOL success = NO; 577 | 578 | if(_captureDevice.activeFormat.videoMaxZoomFactor > factor){ 579 | // _captureDevice.videoZoomFactor = factor; 580 | 581 | [_captureDevice rampToVideoZoomFactor:factor withRate:30.f];//平滑过渡 582 | 583 | NSLog(@"Current format: %@, max zoom factor: %f", _captureDevice.activeFormat, _captureDevice.activeFormat.videoMaxZoomFactor); 584 | success = YES; 585 | } 586 | [_captureDevice unlockForConfiguration]; 587 | 588 | return success; 589 | } 590 | 591 | - (void)changeCamera 592 | { 593 | dispatch_async( self.sessionQueue, ^{ 594 | 595 | AVCaptureDevice *currentVideoDevice = self.videoDeviceInput.device; 596 | AVCaptureDevicePosition preferredPosition = AVCaptureDevicePositionUnspecified; 597 | AVCaptureDevicePosition currentPosition = currentVideoDevice.position; 598 | 599 | switch ( currentPosition ) 600 | { 601 | case AVCaptureDevicePositionUnspecified: 602 | case AVCaptureDevicePositionFront: 603 | preferredPosition = AVCaptureDevicePositionBack; 604 | break; 605 | case AVCaptureDevicePositionBack: 606 | preferredPosition = AVCaptureDevicePositionFront; 607 | break; 608 | } 609 | 610 | if (_captureDevice.position == AVCaptureDevicePositionBack) { 611 | [_captureDevice removeObserver:self forKeyPath:@"adjustingFocus"]; 612 | } 613 | 614 | _captureDevice = [[self class] deviceWithMediaType:AVMediaTypeVideo preferringPosition:preferredPosition]; 615 | AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:nil]; 616 | 617 | [self.session beginConfiguration]; 618 | 619 | // Remove the existing device input first, since using the front and back camera simultaneously is not supported. 
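// The input swap below sits between -beginConfiguration and -commitConfiguration, so the
// removal and the add are applied to the session atomically. In skeleton form (same calls
// as the surrounding code, shown only for orientation):
//     [session beginConfiguration];
//     [session removeInput:oldInput];
//     if ([session canAddInput:newInput]) [session addInput:newInput];
//     [session commitConfiguration];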
620 | [self.session removeInput:self.videoDeviceInput]; 621 | 622 | if ( [self.session canAddInput:videoDeviceInput] ) { 623 | 624 | [self.session addInput:videoDeviceInput]; 625 | 626 | if (_captureDevice.position != AVCaptureDevicePositionFront) { 627 | [_captureDevice lockForConfiguration:nil]; 628 | _captureDevice.subjectAreaChangeMonitoringEnabled = YES; 629 | [_captureDevice addObserver:self forKeyPath:@"adjustingFocus" options:NSKeyValueObservingOptionNew context:FocusAreaChangedContext]; 630 | [_captureDevice unlockForConfiguration]; 631 | } 632 | 633 | self.videoDeviceInput = videoDeviceInput; 634 | } 635 | else { 636 | 637 | [self.session addInput:self.videoDeviceInput]; 638 | } 639 | 640 | 641 | 642 | _videoConnection = [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo]; 643 | 644 | if(_videoConnection.isVideoStabilizationSupported){ 645 | _videoConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto; 646 | } 647 | 648 | UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation; 649 | AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientationPortrait; 650 | if ( statusBarOrientation != UIInterfaceOrientationUnknown ) { 651 | initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation; 652 | } 653 | 654 | _videoConnection.videoOrientation = initialVideoOrientation; 655 | 656 | [self.session commitConfiguration]; 657 | 658 | if (![_session isRunning]) { 659 | [_session startRunning]; 660 | } 661 | } ); 662 | } 663 | 664 | 665 | 666 | #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate、AVCaptureAudioDataOutputSampleBufferDelegate 667 | - (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 668 | { 669 | BOOL bVideo = YES; 670 | 671 | @synchronized(self) 672 | { 673 | if (!self.isCapturing || self.isPaused) 674 | { 675 | return; 676 | } 677 | if (connection != _videoConnection) 678 | { 679 | bVideo = NO; 680 | } 681 | if ((_writer == nil) && !bVideo) 682 | { 683 | NSString* filename = [NSString stringWithFormat:@"capture11%d.mp4", _currentFile]; 684 | NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename]; 685 | _recordURL = [NSURL fileURLWithPath:path]; 686 | _writer = [[WKMovieWriter alloc] initWithURL:_recordURL cropSize:_cropSize]; 687 | 688 | 689 | _writer.delegate = self; 690 | 691 | } 692 | if (_discont) 693 | { 694 | if (bVideo) 695 | { 696 | return; 697 | } 698 | _discont = NO; 699 | // calc adjustment 700 | CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 701 | CMTime last = bVideo ? 
_lastVideo : _lastAudio; 702 | if (last.flags & kCMTimeFlags_Valid) 703 | { 704 | if (_timeOffset.flags & kCMTimeFlags_Valid) 705 | { 706 | pts = CMTimeSubtract(pts, _timeOffset); 707 | } 708 | CMTime offset = CMTimeSubtract(pts, last); 709 | NSLog(@"Setting offset from %s", bVideo?"video": "audio"); 710 | NSLog(@"Adding %f to %f (pts %f)", ((double)offset.value)/offset.timescale, ((double)_timeOffset.value)/_timeOffset.timescale, ((double)pts.value/pts.timescale)); 711 | 712 | // this stops us having to set a scale for _timeOffset before we see the first video time 713 | if (_timeOffset.value == 0) 714 | { 715 | _timeOffset = offset; 716 | } 717 | else 718 | { 719 | _timeOffset = CMTimeAdd(_timeOffset, offset); 720 | } 721 | } 722 | _lastVideo.flags = 0; 723 | _lastAudio.flags = 0; 724 | } 725 | 726 | // retain so that we can release either this or modified one 727 | CFRetain(sampleBuffer); 728 | 729 | if (_timeOffset.value > 0) 730 | { 731 | CFRelease(sampleBuffer); 732 | sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset]; 733 | } 734 | 735 | // record most recent time so we know the length of the pause 736 | CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 737 | CMTime dur = CMSampleBufferGetDuration(sampleBuffer); 738 | if (dur.value > 0) 739 | { 740 | pts = CMTimeAdd(pts, dur); 741 | } 742 | if (bVideo) 743 | { 744 | _lastVideo = pts; 745 | @autoreleasepool { 746 | if (_maxDuration < 20.f || self.frames.count == 0) { 747 | UIImage *frame = [WKVideoConverter convertSampleBufferRefToUIImage:sampleBuffer]; 748 | [self.frames addObject:frame]; 749 | } 750 | } 751 | // _lastFrame = [WKVideoConverter convertSampleBufferRefToUIImage:sampleBuffer]; 752 | 753 | 754 | 755 | // _lastFrame = [[UIImage alloc] init]; 756 | 757 | // CGImageRef cgImage = [WKVideoConverter convertSamepleBufferRefToCGImage:sampleBuffer]; 758 | // [self.frames addObject:((__bridge id)(cgImage))]; 759 | // CGImageRelease(cgImage); 760 | // _lastFrame = [[UIImage alloc] init]; 761 | [_writer appendVideoBuffer:sampleBuffer]; 762 | } 763 | else 764 | { 765 | _lastAudio = pts; 766 | [_writer appendAudioBuffer:sampleBuffer]; 767 | } 768 | } 769 | 770 | // pass frame to encoder 771 | CFRelease(sampleBuffer); 772 | } 773 | 774 | 775 | #pragma mark - WKMovieWriterDelegate 776 | - (void)movieWriterDidFinishRecording:(WKMovieWriter *)recorder status:(BOOL)isCancle 777 | { 778 | self.isCapturing = NO; 779 | _writer = nil; 780 | 781 | NSString* filename = [NSString stringWithFormat:@"capture11%d.mp4", _currentFile]; 782 | NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename]; 783 | #if DebugMovie 784 | ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; 785 | [library writeVideoAtPathToSavedPhotosAlbum:recorder.recordingURL completionBlock:^(NSURL *assetURL, NSError *error){ 786 | NSLog(@"save completed"); 787 | // [[NSFileManager defaultManager] removeItemAtPath:path error:nil]; 788 | }]; 789 | #endif 790 | 791 | if (self.finishBlock){ 792 | NSMutableDictionary *info = [@{WKRecorderMovieURL : [NSURL fileURLWithPath:path], 793 | WKRecorderDuration : @(_duration), 794 | } mutableCopy]; 795 | if (self.frames.count != 0) {//小视频 796 | [info setObject:[self.frames mutableCopy] forKey:WKRecorderAllFrames]; 797 | [info setObject:[self.frames firstObject] forKey:WKRecorderFirstFrame]; 798 | [info setObject:[self.frames lastObject] forKey:WKRecorderLastFrame]; 799 | } 800 | self.finishBlock(info, self.finishReason); 801 | } 802 | 803 | } 804 | 805 | 806 | 807 | #pragma mark KVO 
and Notifications 808 | - (void)addObservers 809 | { 810 | // [self.session addObserver:self forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:SessionRunningContext]; 811 | // [self.stillImageOutput addObserver:self forKeyPath:@"capturingStillImage" options:NSKeyValueObservingOptionNew context:CapturingStillImageContext]; 812 | 813 | // [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:self.videoDeviceInput.device]; 814 | // [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:self.session]; 815 | // A session can only run when the app is full screen. It will be interrupted in a multi-app layout, introduced in iOS 9, 816 | // see also the documentation of AVCaptureSessionInterruptionReason. Add observers to handle these session interruptions 817 | // and show a preview is paused message. See the documentation of AVCaptureSessionWasInterruptedNotification for other 818 | // interruption reasons. 819 | [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionWasInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:self.session]; 820 | [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionInterruptionEnded:) name:AVCaptureSessionInterruptionEndedNotification object:self.session]; 821 | } 822 | 823 | - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context 824 | { 825 | if ( context == SessionRunningContext ) { 826 | // BOOL isSessionRunning = [change[NSKeyValueChangeNewKey] boolValue]; 827 | 828 | dispatch_async( dispatch_get_main_queue(), ^{ 829 | // NSLog(@"%s", __FUNCTION__); 830 | } ); 831 | }else if (context == FocusAreaChangedContext){ 832 | 833 | if ([change[NSKeyValueChangeNewKey] integerValue] == 1) { 834 | 835 | if (self.focusAreaDidChangedBlock) { 836 | dispatch_async( dispatch_get_main_queue(), ^{ 837 | // NSLog(@"%s", __FUNCTION__); 838 | self.focusAreaDidChangedBlock(); 839 | } ); 840 | } 841 | } 842 | 843 | } 844 | else { 845 | [super observeValueForKeyPath:keyPath ofObject:object change:change context:context]; 846 | } 847 | } 848 | 849 | 850 | - (void)subjectAreaDidChange:(NSNotification *)notification 851 | { 852 | CGPoint devicePoint = CGPointMake( 0.5, 0.5 ); 853 | // [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO]; 854 | // NSLog(@"%s", __FUNCTION__); 855 | } 856 | 857 | - (void)sessionRuntimeError:(NSNotification *)notification 858 | { 859 | NSError *error = notification.userInfo[AVCaptureSessionErrorKey]; 860 | NSLog( @"Capture session runtime error: %@", error ); 861 | 862 | // Automatically try to restart the session running if media services were reset and the last start running succeeded. 863 | // Otherwise, enable the user to try to resume the session running. 
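// AVErrorMediaServicesWereReset (checked below) is the one runtime error that is safe to
// recover from automatically: the media services daemon has restarted, so calling
// -startRunning again on the session queue is usually enough. Other error codes are left
// for the user to act on.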
864 | if ( error.code == AVErrorMediaServicesWereReset ) { 865 | dispatch_async( self.sessionQueue, ^{ 866 | if ( _session.isRunning ) { 867 | [self.session startRunning]; 868 | } 869 | else { 870 | dispatch_async( dispatch_get_main_queue(), ^{ 871 | 872 | } ); 873 | } 874 | } ); 875 | } 876 | else { 877 | // self.resumeButton.hidden = NO; 878 | } 879 | } 880 | 881 | - (void)sessionWasInterrupted:(NSNotification *)notification 882 | { 883 | // In some scenarios we want to enable the user to resume the session running. 884 | // For example, if music playback is initiated via control center while using AVCam, 885 | // then the user can let AVCam resume the session running, which will stop music playback. 886 | // Note that stopping music playback in control center will not automatically resume the session running. 887 | // Also note that it is not always possible to resume, see -[resumeInterruptedSession:]. 888 | BOOL showResumeButton = NO; 889 | 890 | // In iOS 9 and later, the userInfo dictionary contains information on why the session was interrupted. 891 | if ( &AVCaptureSessionInterruptionReasonKey ) { 892 | AVCaptureSessionInterruptionReason reason = [notification.userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]; 893 | NSLog( @"Capture session was interrupted with reason %ld", (long)reason ); 894 | 895 | if ( reason == AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient || 896 | reason == AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient ) { 897 | showResumeButton = YES; 898 | } 899 | else if ( reason == AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps ) { 900 | 901 | [UIView animateWithDuration:0.25 animations:^{ 902 | 903 | }]; 904 | } 905 | } 906 | else { 907 | NSLog( @"Capture session was interrupted" ); 908 | showResumeButton = ( [UIApplication sharedApplication].applicationState == UIApplicationStateInactive ); 909 | } 910 | } 911 | 912 | - (void)sessionInterruptionEnded:(NSNotification *)notification 913 | { 914 | NSLog( @"Capture session interruption ended" ); 915 | 916 | if (!self.session.isRunning) { 917 | [self.session startRunning]; 918 | } 919 | } 920 | 921 | 922 | #pragma mark Device Configuration 923 | 924 | - (void)addVideoDeviceInput:(AVCaptureDeviceInput *)input videoDevice:(AVCaptureDevice *)device 925 | { 926 | if ( [self.session canAddInput:input] ) { 927 | 928 | 929 | 930 | [device lockForConfiguration:nil]; 931 | device.subjectAreaChangeMonitoringEnabled = YES; 932 | [device addObserver:self forKeyPath:@"adjustingFocus" options:NSKeyValueObservingOptionNew context:FocusAreaChangedContext]; 933 | [device unlockForConfiguration]; 934 | 935 | 936 | 937 | // [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:device]; 938 | 939 | [[self class] setFlashMode:AVCaptureFlashModeAuto forDevice:device]; 940 | // [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:device]; 941 | 942 | if ([self.session canAddInput:input]) { 943 | 944 | [self.session addInput:input]; 945 | } 946 | 947 | self.videoDeviceInput = input; 948 | 949 | [self.session removeOutput:_videoDataOutput]; 950 | 951 | AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init]; 952 | _videoDataOutput = videoOutput; 953 | videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) }; 954 | 955 | 
        [videoOutput setSampleBufferDelegate:self queue:_videoDataOutputQueue];
        videoOutput.alwaysDiscardsLateVideoFrames = NO;

        if ( [_session canAddOutput:videoOutput] ) {
            [_session addOutput:videoOutput];
            _videoConnection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];

            if (_videoConnection.isVideoStabilizationSupported) {
                _videoConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
            }

            // Match the initial video orientation to the UI orientation, defaulting to portrait.
            UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;
            AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientationPortrait;
            if ( statusBarOrientation != UIInterfaceOrientationUnknown ) {
                initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation;
            }

            _videoConnection.videoOrientation = initialVideoOrientation;
        }

//        videoOutput.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
//                                     [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
//                                     nil];
    }
}

/**
 *  Get the capture device for a media type, preferring a position.
 *
 *  @param mediaType media type (e.g. AVMediaTypeVideo)
 *  @param position  preferred capture device position
 *
 *  @return the matching device, or the first available one as a fallback
 */
+ (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
    AVCaptureDevice *captureDevice = devices.firstObject;

    for ( AVCaptureDevice *device in devices ) {
        if ( device.position == position ) {
            captureDevice = device;
            break;
        }
    }

    return captureDevice;
}

+ (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
    if ( device.hasFlash && [device isFlashModeSupported:flashMode] ) {
        NSError *error = nil;
        if ( [device lockForConfiguration:&error] ) {
            device.flashMode = flashMode;
            [device unlockForConfiguration];
        }
        else {
            NSLog( @"Could not lock device for configuration: %@", error );
        }
    }
}

//- (NSTimeInterval)duration
//{
//    return _duration;
//}

- (NSMutableArray *)frames
{
    if (!_frames) {
        _frames = [NSMutableArray array];
    }
    return _frames;
}

- (BOOL)isCapturing
{
    return _isCapturing;
}

@end
--------------------------------------------------------------------------------
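
A quick usage sketch for the two class helpers that close the file above. +deviceWithMediaType:preferringPosition: wraps the pre-iOS-10 device lookup (+devicesWithMediaType: was deprecated in iOS 10 in favor of AVCaptureDeviceDiscoverySession, so treat this as an iOS 9-era pattern). Hypothetical call site; it assumes the enclosing class is WKMovieRecorder and that both class methods are visible to the caller:

    // Hypothetical: pick the back camera and default its flash to auto.
    AVCaptureDevice *camera =
        [WKMovieRecorder deviceWithMediaType:AVMediaTypeVideo
                          preferringPosition:AVCaptureDevicePositionBack];
    NSError *inputError = nil;
    AVCaptureDeviceInput *input =
        [AVCaptureDeviceInput deviceInputWithDevice:camera error:&inputError];
    if (input) {
        [WKMovieRecorder setFlashMode:AVCaptureFlashModeAuto forDevice:camera];
    }
    else {
        NSLog(@"Could not create video device input: %@", inputError);
    }
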
/WeChatSightDemo/WKMovieRecorder/WKMovieWriter.h:
--------------------------------------------------------------------------------
//
//  WKMovieWriter.h
//  CapturePause
//
//  Created by 吴珂 on 16/7/7.
//  Copyright © 2016年 Geraint Davies. All rights reserved.
//

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@class WKMovieWriter;

@protocol WKMovieWriterDelegate <NSObject>

- (void)movieWriterDidFinishRecording:(WKMovieWriter *)recorder status:(BOOL)isCancle;

@end

@interface WKMovieWriter : NSObject

@property (nonatomic, weak) id<WKMovieWriterDelegate> delegate;

@property (nonatomic, strong, readonly) NSURL *recordingURL;

- (instancetype)initWithURL:(NSURL *)URL;

- (instancetype)initWithURL:(NSURL *)URL cropSize:(CGSize)cropSize;

- (void)setCropSize:(CGSize)size;

- (void)prepareRecording;

- (void)finishRecording;  // normal finish
- (void)cancleRecording;  // cancel the recording

- (void)appendAudioBuffer:(CMSampleBufferRef)sampleBuffer;

- (void)appendVideoBuffer:(CMSampleBufferRef)sampleBuffer;

@end
--------------------------------------------------------------------------------
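
The header above implies a simple lifecycle: init with a URL, feed it sample buffers, then finish or cancel. A minimal (hypothetical) wiring sketch — _movieWriter, _videoConnection, and the temp path are assumptions, not part of this repo:

    // Hypothetical call site; names with underscores are assumed ivars.
    - (void)startRecordingToTempFile
    {
        NSURL *tempURL = [NSURL fileURLWithPath:
            [NSTemporaryDirectory() stringByAppendingPathComponent:@"sight.mp4"]];
        _movieWriter = [[WKMovieWriter alloc] initWithURL:tempURL cropSize:CGSizeMake(320, 320)];
        _movieWriter.delegate = self;
    }

    // Sample-buffer callback shared by the video and audio data outputs.
    - (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection
    {
        if (connection == _videoConnection) {
            [_movieWriter appendVideoBuffer:sampleBuffer];
        }
        else {
            [_movieWriter appendAudioBuffer:sampleBuffer];
        }
    }

On stop, -finishRecording (or -cancleRecording) triggers -movieWriterDidFinishRecording:status: on the main queue.
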
/WeChatSightDemo/WKMovieRecorder/WKMovieWriter.m:
--------------------------------------------------------------------------------
//
//  WKMovieWriter.m
//  CapturePause
//
//  Created by 吴珂 on 16/7/7.
//  Copyright © 2016年 Geraint Davies. All rights reserved.
//

#define SCREEN_WIDTH  ([UIScreen mainScreen].bounds.size.width)
#define SCREEN_HEIGHT ([UIScreen mainScreen].bounds.size.height)

#import "WKMovieWriter.h"
#import <AVFoundation/AVFoundation.h>

@interface WKMovieWriter ()

@property (nonatomic, strong) AVAssetWriterInput *videoInput;
@property (nonatomic, strong) AVAssetWriterInput *audioInput;
@property (nonatomic, strong) AVAssetWriter *videoWriter;
@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *adaptor;
@property (nonatomic, assign) CMSampleBufferRef currentbuffer;
@property (nonatomic, assign) CGSize cropSize;

@end

@implementation WKMovieWriter

- (instancetype)initWithURL:(NSURL *)URL
{
    if (self = [super init]) {
        _recordingURL = URL;
        [self prepareRecording];
    }
    return self;
}

- (instancetype)initWithURL:(NSURL *)URL cropSize:(CGSize)cropSize
{
    if (self = [super init]) {
        _recordingURL = URL;

        if (cropSize.width == 0 || cropSize.height == 0) {
            // Fall back to the screen size when no usable crop size is given.
            _cropSize = [UIScreen mainScreen].bounds.size;
        }
        else {
            _cropSize = cropSize;
        }

        [self prepareRecording];
    }
    return self;
}

- (void)setCropSize:(CGSize)size
{
    _cropSize = size;
}

- (void)prepareRecording
{
    // Safety net: remove any leftover file at the target URL before writing.
    // (The original checked self.videoWriter.outputURL, which is still nil here.)
    NSString *filePath = [[_recordingURL absoluteString] stringByReplacingOccurrencesOfString:@"file://" withString:@""];
    BOOL isDirectory = NO;
    if ([[NSFileManager defaultManager] fileExistsAtPath:filePath isDirectory:&isDirectory]) {
        [[NSFileManager defaultManager] removeItemAtURL:_recordingURL error:nil];
    }

    NSString *betaCompressionDirectory = [[_recordingURL absoluteString] stringByReplacingOccurrencesOfString:@"file://" withString:@""];

    NSError *error = nil;

    unlink([betaCompressionDirectory UTF8String]);

    // -------------------------------- create the asset writer --------------------------------
    self.videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                 fileType:AVFileTypeMPEG4
                                                    error:&error];
    NSParameterAssert(self.videoWriter);

    if (error) NSLog(@"error = %@", [error localizedDescription]);
    // ------------------------------------------------------------------------------------------

    // -------------------------------- video input settings --------------------------------
    NSDictionary *videoSettings;

    if (_cropSize.height == 0 || _cropSize.width == 0) {
        _cropSize = [UIScreen mainScreen].bounds.size;
    }

//    CGFloat ratio = SCREEN_HEIGHT / SCREEN_WIDTH;
//    while (fmodf(_cropSize.width, 16) != 0) {
//        _cropSize.width ++;
//    }
//    _cropSize.height = _cropSize.width * ratio;

    videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                     AVVideoCodecH264, AVVideoCodecKey,
                     [NSNumber numberWithInt:_cropSize.width], AVVideoWidthKey,
                     [NSNumber numberWithInt:_cropSize.height], AVVideoHeightKey,
                     AVVideoScalingModeResizeAspectFill, AVVideoScalingModeKey,
                     nil];

    self.videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    NSParameterAssert(self.videoInput);
    self.videoInput.expectsMediaDataInRealTime = YES;
    // ------------------------------------------------------------------------------------------

    // -------------------------------- pixel buffer adaptor --------------------------------
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    self.adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.videoInput
                                                                                    sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];

    NSParameterAssert(self.videoInput);
    NSParameterAssert([self.videoWriter canAddInput:self.videoInput]);
    // ------------------------------------------------------------------------------------------

    // Audio input: mono AAC at 64 kbps / 44.1 kHz.
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary *audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                         [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                         [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                         [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                         [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                         [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                         nil];

    self.audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                         outputSettings:audioOutputSettings];
    self.audioInput.expectsMediaDataInRealTime = YES;

    // Attach both inputs to the writer.
    [self.videoWriter addInput:self.audioInput];
    [self.videoWriter addInput:self.videoInput];

    switch (self.videoWriter.status) {
        case AVAssetWriterStatusUnknown: {
            [self.videoWriter startWriting];
        }
            break;

        default:
            break;
    }
}

- (void)finishRecording
{
    [self finishRecordingIsCancle:NO];
}

- (void)cancleRecording
{
    [self finishRecordingIsCancle:YES];
}

- (void)finishRecordingIsCancle:(BOOL)isCancle
{
    [self.videoInput markAsFinished];

    [self.videoWriter finishWritingWithCompletionHandler:^{

        NSLog(@"Finished writing");
//        NSURL *destinationURL = [NSURL fileURLWithPath:[self appendDocumentDir:@"test.mp4"]];
//        NSError *error;
//        [[NSFileManager defaultManager] copyItemAtURL:self.recordingURL toURL:destinationURL error:&error];
//        if (error) {
//            NSLog(@"%@", [error debugDescription]);
//        }

        dispatch_async(dispatch_get_main_queue(), ^{
            if ([self.delegate respondsToSelector:@selector(movieWriterDidFinishRecording:status:)]) {
                [self.delegate movieWriterDidFinishRecording:self status:isCancle];
            }
        });
    }];
}

- (void)appendVideoBuffer:(CMSampleBufferRef)sampleBuffer
{
    // (The original compared against AVAssetExportSessionStatusUnknown; the writer's
    // own AVAssetWriterStatus enum is the correct family. Both constants are 0.)
    if (self.videoWriter.status != AVAssetWriterStatusUnknown) {
        // NOTE: startSessionAtSourceTime: should only run once per writing session;
        // see the guarded sketch after this file.
        [self.videoWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
        _currentbuffer = sampleBuffer;
        [self.videoInput appendSampleBuffer:sampleBuffer];
    }
}

- (void)appendAudioBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (self.videoWriter.status != AVAssetWriterStatusUnknown) {
        [self.videoWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
        _currentbuffer = sampleBuffer;
        [self.audioInput appendSampleBuffer:sampleBuffer];
    }
}

- (NSString *)appendDocumentDir:(NSString *)path
{
    NSString *docPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
    return [docPath stringByAppendingPathComponent:path];
}

- (void)dealloc
{
    NSLog(@"%s", __FUNCTION__);
}
@end
--------------------------------------------------------------------------------
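
One subtlety in the two append methods above: AVAssetWriter expects -startSessionAtSourceTime: exactly once after -startWriting, while this implementation calls it on every buffer. A hedged sketch of the usual guard — the didStartSession flag is an assumption, not an ivar WKMovieWriter currently has:

    // Hypothetical guarded variant; `_didStartSession` would be a new BOOL ivar.
    - (void)appendVideoBuffer:(CMSampleBufferRef)sampleBuffer
    {
        if (self.videoWriter.status == AVAssetWriterStatusWriting) {
            if (!_didStartSession) {
                // Anchor the session timeline to the first buffer's timestamp.
                [self.videoWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
                _didStartSession = YES;
            }
            if (self.videoInput.isReadyForMoreMediaData) {
                [self.videoInput appendSampleBuffer:sampleBuffer];
            }
        }
    }
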
/WeChatSightDemo/WKMovieRecorder/WKProgressView.h:
--------------------------------------------------------------------------------
//
//  WKProgressView.h
//  VideoCaptureDemo
//
//  Created by 吴珂 on 16/5/20.
//  Copyright © 2016年 吴珂. All rights reserved.
//

#import <UIKit/UIKit.h>

@interface WKProgressView : UIView

@property (nonatomic, strong) UIColor *borderColor;
@property (nonatomic, strong) UIColor *progressColor;
@property (nonatomic) float progress;       // a value between 0 and 1
@property (nonatomic) float progressWidth;

- (void)setProgress:(float)progress animated:(BOOL)animated;

@end
--------------------------------------------------------------------------------
/WeChatSightDemo/WKMovieRecorder/WKProgressView.m:
--------------------------------------------------------------------------------
//
//  WKProgressView.m
//  VideoCaptureDemo
//
//  Created by 吴珂 on 16/5/20.
//  Copyright © 2016年 吴珂. All rights reserved.
//

#import "WKProgressView.h"

#define kTestCircleRadius 30.f

#define WKUseFullSector 0

@interface WKProgressView ()

@property (nonatomic, strong) CAShapeLayer *borderLayer;
@property (nonatomic, strong) UIBezierPath *trackPath;
@property (nonatomic, strong) CAShapeLayer *progressLayer;
@property (nonatomic, strong) UIBezierPath *progressPath;

@end

@implementation WKProgressView

- (instancetype)init
{
    self = [super init];
    if (self) {
        self.userInteractionEnabled = NO;
    }
    return self;
}

- (id)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        // Default line width is 5.
        self.progressWidth = 5;
    }
    return self;
}

- (void)setTrack
{
    _trackPath = [UIBezierPath bezierPathWithArcCenter:CGPointMake(self.bounds.size.width / 2, self.bounds.size.width / 2) radius:self.bounds.size.width / 2 startAngle:0 endAngle:M_PI * 2 clockwise:YES];
    self.borderLayer.path = _trackPath.CGPath;
}

- (void)setProgress
{
    CGFloat radius;
#if WKUseFullSector
    radius = (self.bounds.size.width) / 2;
#else
    radius = (self.bounds.size.width - 5) / 2;
#endif
    // Stroking an arc whose line width equals the radius fills the circle as a pie sector.
    _progressPath = [UIBezierPath bezierPathWithArcCenter:CGPointMake(self.bounds.size.width / 2, self.bounds.size.width / 2) radius:radius / 2 startAngle:- M_PI_2 endAngle:(M_PI * 2) * _progress - M_PI_2 clockwise:YES];
    self.progressLayer.path = _progressPath.CGPath;
}

- (void)setProgressWidth:(float)progressWidth
{
    CGFloat radius;
#if WKUseFullSector
    radius = (self.bounds.size.width);
#else
    radius = (self.bounds.size.width - 5);
#endif
    _progressWidth = progressWidth;
    self.borderLayer.lineWidth = 1;
    self.progressLayer.lineWidth = radius / 2;

    [self setTrack];
    [self setProgress];
}

- (void)setBorderColor:(UIColor *)trackColor
{
    self.borderLayer.strokeColor = trackColor.CGColor;
}

- (void)setProgressColor:(UIColor *)progressColor
{
    self.progressLayer.strokeColor = progressColor.CGColor;
}

- (void)setProgress:(float)progress
{
    _progress = progress;
    dispatch_async(dispatch_get_main_queue(), ^{
        [self setProgress];
    });
}

- (void)setProgress:(float)progress animated:(BOOL)animated
{
}

#pragma mark - Lazy loading

- (CAShapeLayer *)borderLayer
{
    if (!_borderLayer) {
        _borderLayer = [CAShapeLayer new];
        [self.layer addSublayer:_borderLayer];
        _borderLayer.fillColor = nil;
        _borderLayer.lineCap = kCALineCapSquare;
    }
    return _borderLayer;
}

- (CAShapeLayer *)progressLayer
{
    if (!_progressLayer) {
        _progressLayer = [CAShapeLayer new];
        _progressLayer.fillColor = nil;
        _progressLayer.frame = self.bounds;
        [self.layer addSublayer:_progressLayer];
    }
    return _progressLayer;
}

- (void)reset
{
    self.borderLayer.path = nil;
    self.progressLayer.path = nil;
}

@end
--------------------------------------------------------------------------------
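
A minimal usage sketch for the progress ring above (the frame and colors are arbitrary):

    WKProgressView *ring = [[WKProgressView alloc] initWithFrame:CGRectMake(0, 0, 60, 60)];
    ring.borderColor   = [UIColor whiteColor];
    ring.progressColor = [UIColor greenColor];
    ring.progressWidth = 5;
    ring.progress      = 0.35f;   // the setter redraws on the main queue
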
/WeChatSightDemo/WKMovieRecorder/WKTestAudioOutput.h:
--------------------------------------------------------------------------------
//
//  WKTestAudioOutput.h
//  CenturyGuard
//
//  Created by 吴珂 on 16/7/21.
//  Copyright © 2016年 sjyt. All rights reserved.
//

#import <AVFoundation/AVFoundation.h>

@interface WKTestAudioOutput : AVCaptureAudioDataOutput

@end
--------------------------------------------------------------------------------
/WeChatSightDemo/WKMovieRecorder/WKTestAudioOutput.m:
--------------------------------------------------------------------------------
//
//  WKTestAudioOutput.m
//  CenturyGuard
//
//  Created by 吴珂 on 16/7/21.
//  Copyright © 2016年 sjyt. All rights reserved.
//

#import "WKTestAudioOutput.h"

@implementation WKTestAudioOutput

// Subclass that exists only to log its own deallocation while debugging.
- (void)dealloc
{
    NSLog(@"=-=-=-= %s", __FUNCTION__);
}

@end
--------------------------------------------------------------------------------
/WeChatSightDemo/WKMovieRecorder/WKVideoConverter.h:
--------------------------------------------------------------------------------
//
//  WKVideoConverter.h
//  VideoCaptureDemo
//
//  Created by 吴珂 on 16/5/16.
//  Copyright © 2016年 吴珂. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@class WKVideoConverter;
@protocol WKVideoConverterDelegate <NSObject>

- (void)videoConverter:(WKVideoConverter *)converter progress:(CGFloat)progress;
- (void)videoConverterFinishConvert:(WKVideoConverter *)converter;

@end

typedef void (^block)(void);

@interface WKVideoConverter : NSObject

@property (nonatomic, weak) id<WKVideoConverterDelegate> delegate;

- (void)convertVideoToImagesWithURL:(NSURL *)url finishBlock:(void (^)(id))finishBlock;    // to CGImages

- (void)convertVideoFirstFrameWithURL:(NSURL *)url finishBlock:(void (^)(id))finishBlock;  // first frame only

- (void)convertVideoUIImagesWithURL:(NSURL *)url finishBlock:(void (^)(id images, NSTimeInterval duration))finishBlock;  // to UIImages

- (void)convertVideoToGifImageWithURL:(NSURL *)url destinationUrl:(NSURL *)destinationUrl finishBlock:(void (^)(void))finishBlock;

+ (CGImageRef)convertSamepleBufferRefToCGImage:(CMSampleBufferRef)sampleBufferRef;

+ (UIImage *)convertSampleBufferRefToUIImage:(CMSampleBufferRef)sampleBufferRef;

@end
--------------------------------------------------------------------------------
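
A usage sketch for the converter API above. The URLs are placeholders; note the progress delegate only fires on the CGImage path, not during GIF conversion:

    WKVideoConverter *converter = [[WKVideoConverter alloc] init];
    converter.delegate = self;

    // Placeholder URLs — substitute a real recording and destination.
    NSURL *movieURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"sight.mp4"]];
    NSURL *gifURL   = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"sight.gif"]];

    // First frame for a thumbnail, then the whole clip as an animated GIF.
    [converter convertVideoFirstFrameWithURL:movieURL finishBlock:^(id firstFrame) {
        UIImage *thumbnail = firstFrame;
        NSLog(@"thumbnail size: %@", NSStringFromCGSize(thumbnail.size));
    }];
    [converter convertVideoToGifImageWithURL:movieURL destinationUrl:gifURL finishBlock:^{
        NSLog(@"GIF written to %@", gifURL);
    }];
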
/WeChatSightDemo/WKMovieRecorder/WKVideoConverter.m:
--------------------------------------------------------------------------------
//
//  WKVideoConverter.m
//  VideoCaptureDemo
//
//  Created by 吴珂 on 16/5/16.
//  Copyright © 2016年 吴珂. All rights reserved.
//

#import "WKVideoConverter.h"
#import <AVFoundation/AVFoundation.h>
#import <ImageIO/ImageIO.h>
#import <MobileCoreServices/MobileCoreServices.h>
#import <UIKit/UIKit.h>

@interface WKGenerateGifImageManager : NSObject

@property (nonatomic, strong) NSOperationQueue *generateQueue;

+ (instancetype)shareInstance;
- (NSOperation *)addOperationWithBlock:(void (^)(void))block;

@end

@implementation WKGenerateGifImageManager

- (instancetype)init
{
    self = [super init];
    if (self) {
        // Serial queue so GIF generation jobs run one at a time.
        _generateQueue = [[NSOperationQueue alloc] init];
        _generateQueue.maxConcurrentOperationCount = 1;
    }
    return self;
}

+ (instancetype)shareInstance
{
    static WKGenerateGifImageManager *manager = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        manager = [[WKGenerateGifImageManager alloc] init];
    });

    return manager;
}

- (NSOperation *)addOperationWithBlock:(void (^)(void))block
{
    NSOperation *operation = [NSBlockOperation blockOperationWithBlock:^{
        block();
    }];
    [_generateQueue addOperation:operation];

    return operation;
}

@end

typedef NS_ENUM(NSInteger, WKConvertType) {
    WKConvertTypeImage,
    WKConvertTypeImages
};

typedef id (^HandleBlcok)(AVAssetReaderTrackOutput *outPut, AVAssetTrack *videoTrack);

@interface WKVideoConverter ()

@property (nonatomic, strong) AVAssetReader *reader;

@property (nonatomic, strong) dispatch_semaphore_t semaphore;

@property (nonatomic, assign) NSInteger maxConcurrentNum;

@property (nonatomic, strong) dispatch_queue_t convertQueue;

@end

@implementation WKVideoConverter

- (instancetype)init
{
    self = [super init];
    if (self) {
        _maxConcurrentNum = 2;

        [self commonInit];
    }
    return self;
}

+ (instancetype)shareInstance
{
    static WKVideoConverter *instance = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        instance = [[WKVideoConverter alloc] init];
    });
    return instance;
}

- (void)commonInit
{
    // The semaphore caps how many conversions may run concurrently.
    _semaphore = dispatch_semaphore_create(_maxConcurrentNum);
    _convertQueue = dispatch_queue_create("wkConverter queue", DISPATCH_QUEUE_SERIAL);
}

- (void)convertVideoToImagesWithURL:(NSURL *)url finishBlock:(void (^)(id))finishBlock
{
    [self convertVideoFirstFrameWithURL:url type:WKConvertTypeImages finishBlock:finishBlock];
}

- (void)convertVideoFirstFrameWithURL:(NSURL *)url finishBlock:(void (^)(id))finishBlock
{
//    [self convertVideoFirstFrameWithURL:url type:WKConvertTypeImage finishBlock:finishBlock];
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
    AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
    // copyCGImageAtTime: returns a +1 reference, so release it after wrapping.
    CGImageRef cgImage = [imageGenerator copyCGImageAtTime:CMTimeMake(0, 1) actualTime:nil error:nil];
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    finishBlock(image);
}

- (void)convertVideoFirstFrameWithURL:(NSURL *)url type:(WKConvertType)type finishBlock:(void (^)(id))finishBlock
{
    @autoreleasepool {

        AVAsset *asset = [AVAsset assetWithURL:url];

        if (![self isKindOfClass:[WKVideoConverter class]]) {
            return;
        }
        __weak typeof(self) weakSelf = self;
        dispatch_async(_convertQueue, ^{
            // Throttle concurrent conversions.
            dispatch_semaphore_wait(_semaphore, DISPATCH_TIME_FOREVER);
            dispatch_queue_t backgroundQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
            dispatch_async(backgroundQueue, ^{
                __strong typeof(weakSelf) strongSelf = weakSelf;
                NSError *error = nil;
                strongSelf.reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];

                if (error) {
                    NSLog(@"%@", [error localizedDescription]);
                }

                NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];

                AVAssetTrack *videoTrack = [videoTracks firstObject];
                if (!videoTrack) {
                    dispatch_semaphore_signal(_semaphore); // don't leak the slot on early return
                    return;
                }
                int m_pixelFormatType;
                // For playback:
                m_pixelFormatType = kCVPixelFormatType_32BGRA;
                // For other uses, such as video compression:
//                m_pixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;

                NSMutableDictionary *options = [NSMutableDictionary dictionary];
                [options setObject:@(m_pixelFormatType) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
                AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:options];
                if ([strongSelf.reader canAddOutput:videoReaderOutput]) {
                    [strongSelf.reader addOutput:videoReaderOutput];
                    [strongSelf.reader startReading];
                }

                HandleBlcok handleBlock = [self handleVideoWithType:type];

                id result = handleBlock(videoReaderOutput, videoTrack);

                if (finishBlock) {
                    dispatch_async(dispatch_get_main_queue(), ^{
                        finishBlock(result);
                    });
                }

                dispatch_semaphore_signal(_semaphore);
            });
        });
    }
}

- (HandleBlcok)handleVideoWithType:(WKConvertType)type
{
    HandleBlcok block;

    switch (type) {
        case WKConvertTypeImage: {
            block = ^(AVAssetReaderOutput *videoReaderOutput, AVAssetTrack *videoTrack) {
                UIImage *image;
                // Make sure nominalFrameRate > 0; zero-frame-rate videos shot on Android have shown up before.
                while ([self.reader status] == AVAssetReaderStatusReading && videoTrack.nominalFrameRate > 0) {
                    // Read a video sample.
                    @autoreleasepool {
                        CMSampleBufferRef videoBuffer = [videoReaderOutput copyNextSampleBuffer];

                        CGImageRef cgimage = [WKVideoConverter convertSamepleBufferRefToCGImage:videoBuffer];

                        if (!(__bridge id)(cgimage)) {
                            break;
                        }

                        image = [UIImage imageWithCGImage:cgimage];

                        CGImageRelease(cgimage);
                        if (videoBuffer) {
                            CMSampleBufferInvalidate(videoBuffer);
                            CFRelease(videoBuffer);
                            videoBuffer = NULL;
                        }

                        if (image) {
                            return image;
                        }
                    }
                }
                return [[UIImage alloc] init];
            };

            break;
        }
        case WKConvertTypeImages: { // all frames
            block = ^(AVAssetReaderOutput *videoReaderOutput, AVAssetTrack *videoTrack) {
                NSMutableArray *images = [NSMutableArray array];
                CGFloat seconds = CMTimeGetSeconds(videoTrack.timeRange.duration);
                CGFloat totalFrame = videoTrack.nominalFrameRate * seconds;

                NSInteger convertedCount = 0;
                while ([self.reader status] == AVAssetReaderStatusReading && videoTrack.nominalFrameRate > 0) {
                    // Read a video sample.
                    CMSampleBufferRef videoBuffer = [videoReaderOutput copyNextSampleBuffer];
                    CGImageRef cgimage = [WKVideoConverter convertSamepleBufferRefToCGImage:videoBuffer];

                    if (!(__bridge id)(cgimage)) {
                        break;
                    }

                    [images addObject:((__bridge id)(cgimage))];

                    CGImageRelease(cgimage);
                    if (videoBuffer) {
                        CMSampleBufferInvalidate(videoBuffer);
                        CFRelease(videoBuffer);
                        videoBuffer = NULL;
                    }
                    // Sleep briefly between frames as needed; when the caller plays the frames back
                    // there is a gap between them anyway. The interval here is 0.001 s.
                    [NSThread sleepForTimeInterval:0.001];

                    CGFloat progress = ++convertedCount / totalFrame;

                    if ([self.delegate respondsToSelector:@selector(videoConverter:progress:)]) {
                        dispatch_async(dispatch_get_main_queue(), ^{
                            [self.delegate videoConverter:self progress:progress];
                        });
                    }

                    if (fmodf(progress, 1.f) == 0.f) {
                        if ([self.delegate respondsToSelector:@selector(videoConverterFinishConvert:)]) {
                            [self.delegate videoConverterFinishConvert:self];
                        }
                    }

                    if (self.reader.status == AVAssetReaderStatusCompleted) {
                        break;
                    }
                }

                return images;
            };
        }
    }

    return block;
}

// Convert a movie into an array of UIImages.
- (void)convertVideoUIImagesWithURL:(NSURL *)url finishBlock:(void (^)(id images, NSTimeInterval duration))finishBlock
{
    AVAsset *asset = [AVAsset assetWithURL:url];
    NSError *error = nil;
    self.reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];

    NSTimeInterval duration = CMTimeGetSeconds(asset.duration);
    __weak typeof(self) weakSelf = self;
    dispatch_queue_t backgroundQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
    dispatch_async(backgroundQueue, ^{
        __strong typeof(weakSelf) strongSelf = weakSelf;

        if (error) {
            NSLog(@"%@", [error localizedDescription]);
        }

        NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];

        AVAssetTrack *videoTrack = [videoTracks firstObject];
        if (!videoTrack) {
            return;
        }
        int m_pixelFormatType;
        // For playback:
        m_pixelFormatType = kCVPixelFormatType_32BGRA;
        // For other uses, such as video compression:
//        m_pixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;

        NSMutableDictionary *options = [NSMutableDictionary dictionary];
        [options setObject:@(m_pixelFormatType) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:options];

        if ([strongSelf.reader canAddOutput:videoReaderOutput]) {
            [strongSelf.reader addOutput:videoReaderOutput];
        }
        [strongSelf.reader startReading];

        NSMutableArray *images = [NSMutableArray array];
        // Make sure nominalFrameRate > 0; zero-frame-rate videos shot on Android have shown up before.
        while ([strongSelf.reader status] == AVAssetReaderStatusReading && videoTrack.nominalFrameRate > 0) {
            @autoreleasepool {
                // Read a video sample.
                CMSampleBufferRef videoBuffer = [videoReaderOutput copyNextSampleBuffer];

                if (!videoBuffer) {
                    break;
                }

                [images addObject:[WKVideoConverter convertSampleBufferRefToUIImage:videoBuffer]];

                CFRelease(videoBuffer);
            }
        }
        if (finishBlock) {
            dispatch_async(dispatch_get_main_queue(), ^{
                finishBlock(images, duration);
            });
        }
    });
}

- (void)convertVideoToGifImageWithURL:(NSURL *)url destinationUrl:(NSURL *)destinationUrl finishBlock:(void (^)(void))finishBlock
{
    [self convertVideoUIImagesWithURL:url finishBlock:^(NSArray *images, NSTimeInterval duration) {
        [[WKGenerateGifImageManager shareInstance] addOperationWithBlock:^{
            makeAnimatedGif(images, destinationUrl, duration);
            dispatch_async(dispatch_get_main_queue(), ^{
                finishBlock();
            });
        }];
    }];
}

// Create a CGImage from sample buffer data.
// Apple's reference answer: https://developer.apple.com/library/ios/qa/qa1702/_index.html
+ (CGImageRef)convertSamepleBufferRefToCGImage:(CMSampleBufferRef)sampleBufferRef
{
    @autoreleasepool {

        // Get a CMSampleBuffer's Core Video image buffer for the media data
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBufferRef);
        // Lock the base address of the pixel buffer
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        // Get the base address of the pixel buffer
        void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

        // Get the number of bytes per row for the pixel buffer
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        // Get the pixel buffer width and height
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);

        // Create a device-dependent RGB color space
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

        // Create a bitmap graphics context with the sample buffer data
        CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                     bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        // Create a Quartz image from the pixel data in the bitmap graphics context
        CGImageRef quartzImage = CGBitmapContextCreateImage(context);
        // Unlock the pixel buffer
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

        // Free up the context and color space
        CGContextRelease(context);
        CGColorSpaceRelease(colorSpace);

        // NOTE: the caller owns the returned CGImage and must CGImageRelease() it.
        return quartzImage;
    }
}

+ (UIImage *)convertSampleBufferRefToUIImage:(CMSampleBufferRef)sampleBufferRef
{
    @autoreleasepool {

        CGImageRef cgImage = [self convertSamepleBufferRefToCGImage:sampleBufferRef];
        UIImage *image;

        // Downscale to 1/5 of the original size to keep the frame array small.
        CGFloat height = CGImageGetHeight(cgImage);
        CGFloat width = CGImageGetWidth(cgImage);

        height = height / 5;
        width = width / 5;
//        UIGraphicsBeginImageContext(CGSizeMake(width, height));
        UIGraphicsBeginImageContextWithOptions(CGSizeMake(width, height), NO, [UIScreen mainScreen].scale);

#define UseUIImage 0
#if UseUIImage
        [image drawInRect:CGRectMake(0, 0, width, height)];
#else
        // Flip the context vertically so the CGImage draws right side up.
        CGContextRef context = UIGraphicsGetCurrentContext();
        CGContextTranslateCTM(context, 0, height);
        CGContextScaleCTM(context, 1.0, -1.0);
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), cgImage);
#endif
        image = UIGraphicsGetImageFromCurrentImageContext();

        UIGraphicsEndImageContext();

        CGImageRelease(cgImage);
        return image;
    }
}

// Scale a size down so it fits within targetSize while keeping its aspect ratio.
+ (CGSize)compressSize:(CGSize)originalSize targetSize:(CGSize)targetSize
{
    float actualHeight = originalSize.height;
    float actualWidth = originalSize.width;
    float maxHeight = targetSize.height;
    float maxWidth = targetSize.width;
    float imgRatio = actualWidth / actualHeight;
    float maxRatio = maxWidth / maxHeight;

    if (actualHeight > maxHeight || actualWidth > maxWidth)
    {
        if (imgRatio < maxRatio)
        {
            // adjust width according to maxHeight
            imgRatio = maxHeight / actualHeight;
            actualWidth = imgRatio * actualWidth;
            actualHeight = maxHeight;
        }
        else if (imgRatio > maxRatio)
        {
            // adjust height according to maxWidth
            imgRatio = maxWidth / actualWidth;
            actualHeight = imgRatio * actualHeight;
            actualWidth = maxWidth;
        }
        else
        {
            actualHeight = maxHeight;
            actualWidth = maxWidth;
        }
    }

    return CGSizeMake(actualWidth, actualHeight);
}

static void makeAnimatedGif(NSArray *images, NSURL *gifURL, NSTimeInterval duration) {
    NSTimeInterval perSecond = duration / images.count;

    NSDictionary *fileProperties = @{
                                     (__bridge id)kCGImagePropertyGIFDictionary: @{
                                             (__bridge id)kCGImagePropertyGIFLoopCount: @0, // 0 means loop forever
                                             }
                                     };

    NSDictionary *frameProperties = @{
                                      (__bridge id)kCGImagePropertyGIFDictionary: @{
                                              (__bridge id)kCGImagePropertyGIFDelayTime: @(perSecond), // a float (not double!) in seconds, rounded to centiseconds in the GIF data
                                              }
                                      };

    CGImageDestinationRef destination = CGImageDestinationCreateWithURL((__bridge CFURLRef)gifURL, kUTTypeGIF, images.count, NULL);
    CGImageDestinationSetProperties(destination, (__bridge CFDictionaryRef)fileProperties);

    for (UIImage *image in images) {
        @autoreleasepool {
            CGImageDestinationAddImage(destination, image.CGImage, (__bridge CFDictionaryRef)frameProperties);
        }
    }

    if (!CGImageDestinationFinalize(destination)) {
        NSLog(@"failed to finalize image destination");
    }
    CFRelease(destination);
}

- (void)dealloc
{
    NSLog(@"%s", __FUNCTION__);
}

@end
--------------------------------------------------------------------------------
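
makeAnimatedGif() above spreads the clip's duration evenly across the extracted frames, and the GIF format stores each delay rounded to centiseconds, so very high frame counts quantize. A quick sanity check with hypothetical numbers:

    // 6 s of video decimated to 90 frames -> ~0.0667 s per frame,
    // which the GIF data stores as 0.07 s (centisecond rounding).
    NSTimeInterval duration = 6.0;   // hypothetical clip length
    NSUInteger frameCount = 90;      // hypothetical frame count
    NSTimeInterval perFrame = duration / frameCount;
    NSLog(@"GIF frame delay: %.4f s", perFrame);
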
/WeChatSightDemo/main.m:
--------------------------------------------------------------------------------
//
//  main.m
//  WeChatSightDemo
//
//  Created by 吴珂 on 16/8/19.
//  Copyright © 2016年 吴珂. All rights reserved.
//

#import <UIKit/UIKit.h>
#import "AppDelegate.h"

int main(int argc, char * argv[]) {
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}
--------------------------------------------------------------------------------
/WeChatSightDemoTests/Info.plist:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>CFBundleDevelopmentRegion</key>
	<string>en</string>
	<key>CFBundleExecutable</key>
	<string>$(EXECUTABLE_NAME)</string>
	<key>CFBundleIdentifier</key>
	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
	<key>CFBundleInfoDictionaryVersion</key>
	<string>6.0</string>
	<key>CFBundleName</key>
	<string>$(PRODUCT_NAME)</string>
	<key>CFBundlePackageType</key>
	<string>BNDL</string>
	<key>CFBundleShortVersionString</key>
	<string>1.0</string>
	<key>CFBundleSignature</key>
	<string>????</string>
	<key>CFBundleVersion</key>
	<string>1</string>
</dict>
</plist>
--------------------------------------------------------------------------------
/WeChatSightDemoTests/WeChatSightDemoTests.m:
--------------------------------------------------------------------------------
//
//  WeChatSightDemoTests.m
//  WeChatSightDemoTests
//
//  Created by 吴珂 on 16/8/19.
//  Copyright © 2016年 吴珂. All rights reserved.
//

#import <XCTest/XCTest.h>

@interface WeChatSightDemoTests : XCTestCase

@end

@implementation WeChatSightDemoTests

- (void)setUp {
    [super setUp];
    // Put setup code here. This method is called before the invocation of each test method in the class.
}

- (void)tearDown {
    // Put teardown code here. This method is called after the invocation of each test method in the class.
    [super tearDown];
}

- (void)testExample {
    // This is an example of a functional test case.
    // Use XCTAssert and related functions to verify your tests produce the correct results.
}

- (void)testPerformanceExample {
    // This is an example of a performance test case.
    [self measureBlock:^{
        // Put the code you want to measure the time of here.
    }];
}

@end
--------------------------------------------------------------------------------
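
The template above ships with an empty testExample. A hedged sketch of one real assertion against WKVideoConverter's aspect-fit helper — this assumes +compressSize:targetSize: gets exposed in WKVideoConverter.h, which it currently is not:

    #import "WKVideoConverter.h"

    - (void)testCompressSizeKeepsAspectRatio {
        // 1920x1080 squeezed into 640x640 should land on 640x360.
        CGSize fitted = [WKVideoConverter compressSize:CGSizeMake(1920, 1080)
                                            targetSize:CGSizeMake(640, 640)];
        XCTAssertEqualWithAccuracy(fitted.width, 640, 0.001);
        XCTAssertEqualWithAccuracy(fitted.height, 360, 0.001);
    }
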
/WeChatSightDemoUITests/Info.plist:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>CFBundleDevelopmentRegion</key>
	<string>en</string>
	<key>CFBundleExecutable</key>
	<string>$(EXECUTABLE_NAME)</string>
	<key>CFBundleIdentifier</key>
	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
	<key>CFBundleInfoDictionaryVersion</key>
	<string>6.0</string>
	<key>CFBundleName</key>
	<string>$(PRODUCT_NAME)</string>
	<key>CFBundlePackageType</key>
	<string>BNDL</string>
	<key>CFBundleShortVersionString</key>
	<string>1.0</string>
	<key>CFBundleSignature</key>
	<string>????</string>
	<key>CFBundleVersion</key>
	<string>1</string>
</dict>
</plist>
--------------------------------------------------------------------------------
/WeChatSightDemoUITests/WeChatSightDemoUITests.m:
--------------------------------------------------------------------------------
//
//  WeChatSightDemoUITests.m
//  WeChatSightDemoUITests
//
//  Created by 吴珂 on 16/8/19.
//  Copyright © 2016年 吴珂. All rights reserved.
//

#import <XCTest/XCTest.h>

@interface WeChatSightDemoUITests : XCTestCase

@end

@implementation WeChatSightDemoUITests

- (void)setUp {
    [super setUp];

    // Put setup code here. This method is called before the invocation of each test method in the class.

    // In UI tests it is usually best to stop immediately when a failure occurs.
    self.continueAfterFailure = NO;
    // UI tests must launch the application that they test. Doing this in setup will make sure it happens for each test method.
    [[[XCUIApplication alloc] init] launch];

    // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this.
}

- (void)tearDown {
    // Put teardown code here. This method is called after the invocation of each test method in the class.
    [super tearDown];
}

- (void)testExample {
    // Use recording to get started writing UI tests.
    // Use XCTAssert and related functions to verify your tests produce the correct results.
}

@end
--------------------------------------------------------------------------------