├── .gitignore ├── ARCamera ├── ARCamera.xcodeproj │ ├── project.pbxproj │ └── xcshareddata │ │ └── xcschemes │ │ ├── Debug.xcscheme │ │ └── Release.xcscheme ├── ARCamera.xcworkspace │ ├── contents.xcworkspacedata │ └── xcshareddata │ │ ├── IDEWorkspaceChecks.plist │ │ └── WorkspaceSettings.xcsettings ├── Podfile ├── Podfile.lock ├── Pods │ ├── Manifest.lock │ ├── NextLevel │ │ ├── LICENSE │ │ ├── README.md │ │ └── Sources │ │ │ ├── NextLevel+AVFoundation.swift │ │ │ ├── NextLevel+CIContext.swift │ │ │ ├── NextLevel+CMSampleBuffer.swift │ │ │ ├── NextLevel+Foundation.swift │ │ │ ├── NextLevel+Metadata.swift │ │ │ ├── NextLevel+UIImage.swift │ │ │ ├── NextLevel.swift │ │ │ ├── NextLevelBufferRenderer.swift │ │ │ ├── NextLevelClip.swift │ │ │ ├── NextLevelConfiguration.swift │ │ │ ├── NextLevelGIFCreator.swift │ │ │ ├── NextLevelProtocols.swift │ │ │ └── NextLevelSession.swift │ ├── Pods.xcodeproj │ │ └── project.pbxproj │ ├── RPCircularProgress │ │ ├── LICENSE │ │ ├── README.md │ │ └── Source │ │ │ └── RPCircularProgress.swift │ └── Target Support Files │ │ ├── NextLevel │ │ ├── NextLevel-Info.plist │ │ ├── NextLevel-dummy.m │ │ ├── NextLevel-prefix.pch │ │ ├── NextLevel-umbrella.h │ │ ├── NextLevel.modulemap │ │ └── NextLevel.xcconfig │ │ ├── Pods-ARCamera │ │ ├── Pods-ARCamera-Info.plist │ │ ├── Pods-ARCamera-acknowledgements.markdown │ │ ├── Pods-ARCamera-acknowledgements.plist │ │ ├── Pods-ARCamera-dummy.m │ │ ├── Pods-ARCamera-frameworks.sh │ │ ├── Pods-ARCamera-umbrella.h │ │ ├── Pods-ARCamera.debug.xcconfig │ │ ├── Pods-ARCamera.modulemap │ │ └── Pods-ARCamera.release.xcconfig │ │ └── RPCircularProgress │ │ ├── RPCircularProgress-Info.plist │ │ ├── RPCircularProgress-dummy.m │ │ ├── RPCircularProgress-prefix.pch │ │ ├── RPCircularProgress-umbrella.h │ │ ├── RPCircularProgress.modulemap │ │ └── RPCircularProgress.xcconfig ├── Project │ ├── Assets.xcassets │ │ ├── AppIcon.appiconset │ │ │ └── Contents.json │ │ └── Contents.json │ ├── Base.xcconfig │ ├── Debug.xcconfig │ ├── Info.plist │ ├── LaunchScreen.storyboard │ ├── Release.xcconfig │ └── piemonte.usdz ├── Sources │ ├── AppDelegate.swift │ ├── RecordButton.swift │ └── ViewController.swift └── makefile ├── LICENSE └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | # Xcode 2 | # 3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore 4 | 5 | ## Build generated 6 | build/ 7 | DerivedData/ 8 | 9 | ## Various settings 10 | *.pbxuser 11 | !default.pbxuser 12 | *.mode1v3 13 | !default.mode1v3 14 | *.mode2v3 15 | !default.mode2v3 16 | *.perspectivev3 17 | !default.perspectivev3 18 | xcuserdata/ 19 | 20 | ## Other 21 | *.moved-aside 22 | *.xccheckout 23 | *.xcscmblueprint 24 | 25 | ## Obj-C/Swift specific 26 | *.hmap 27 | *.ipa 28 | *.dSYM.zip 29 | *.dSYM 30 | 31 | ## Playgrounds 32 | timeline.xctimeline 33 | playground.xcworkspace 34 | 35 | # Swift Package Manager 36 | # 37 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies. 38 | # Packages/ 39 | # Package.pins 40 | # Package.resolved 41 | .build/ 42 | 43 | # CocoaPods 44 | # 45 | # We recommend against adding the Pods directory to your .gitignore. 
However 46 | # you should judge for yourself, the pros and cons are mentioned at: 47 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control 48 | # 49 | # Pods/ 50 | 51 | # Carthage 52 | # 53 | # Add this line if you want to avoid checking in source code from Carthage dependencies. 54 | # Carthage/Checkouts 55 | 56 | Carthage/Build 57 | 58 | # fastlane 59 | # 60 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the 61 | # screenshots whenever they are needed. 62 | # For more information about the recommended setup visit: 63 | # https://docs.fastlane.tools/best-practices/source-control/#source-control 64 | 65 | fastlane/report.xml 66 | fastlane/Preview.html 67 | fastlane/screenshots/**/*.png 68 | fastlane/test_output 69 | -------------------------------------------------------------------------------- /ARCamera/ARCamera.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 46; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 060E82F2227A5E57001909EE /* RecordButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 060E82F1227A5E57001909EE /* RecordButton.swift */; }; 11 | 06534215227975FD00FE0E24 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 06534214227975FD00FE0E24 /* Assets.xcassets */; }; 12 | 0672140D227B66B800D1DDDA /* piemonte.usdz in Resources */ = {isa = PBXBuildFile; fileRef = 0672140C227B66B800D1DDDA /* piemonte.usdz */; }; 13 | 06AC206720A1006F00D37921 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 06AC206620A1006F00D37921 /* LaunchScreen.storyboard */; }; 14 | 06DE65A41E18E3BE00C064DE /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 06DE65A31E18E3BE00C064DE /* AppDelegate.swift */; }; 15 | 06DE65A61E18E3BE00C064DE /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 06DE65A51E18E3BE00C064DE /* ViewController.swift */; }; 16 | 6460331F200026E05E7313BC /* Pods_ARCamera.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 000E5E3FC44A0D153B7C1922 /* Pods_ARCamera.framework */; }; 17 | /* End PBXBuildFile section */ 18 | 19 | /* Begin PBXFileReference section */ 20 | 000E5E3FC44A0D153B7C1922 /* Pods_ARCamera.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_ARCamera.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 21 | 060E82F1227A5E57001909EE /* RecordButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; name = RecordButton.swift; path = Sources/RecordButton.swift; sourceTree = ""; }; 22 | 06534214227975FD00FE0E24 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 23 | 06534216227976ED00FE0E24 /* Base.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Base.xcconfig; sourceTree = ""; }; 24 | 06534217227976EE00FE0E24 /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Release.xcconfig; sourceTree = ""; }; 25 | 06534218227976EE00FE0E24 /* Debug.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Debug.xcconfig; sourceTree = ""; }; 26 | 066A17171E6F899000514A95 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path 
= Info.plist; sourceTree = ""; }; 27 | 0672140C227B66B800D1DDDA /* piemonte.usdz */ = {isa = PBXFileReference; lastKnownFileType = file; path = piemonte.usdz; sourceTree = ""; }; 28 | 06AC206620A1006F00D37921 /* LaunchScreen.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; name = LaunchScreen.storyboard; path = Project/LaunchScreen.storyboard; sourceTree = SOURCE_ROOT; }; 29 | 06DE65A01E18E3BE00C064DE /* ARCamera.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = ARCamera.app; sourceTree = BUILT_PRODUCTS_DIR; }; 30 | 06DE65A31E18E3BE00C064DE /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; name = AppDelegate.swift; path = Sources/AppDelegate.swift; sourceTree = ""; }; 31 | 06DE65A51E18E3BE00C064DE /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; name = ViewController.swift; path = Sources/ViewController.swift; sourceTree = ""; }; 32 | 2458999E72D0C8C24D21BFBD /* Pods-ARCamera.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-ARCamera.debug.xcconfig"; path = "Pods/Target Support Files/Pods-ARCamera/Pods-ARCamera.debug.xcconfig"; sourceTree = ""; }; 33 | 9AB8BAFD35E4FB4AB22FC060 /* Pods-ARCamera.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-ARCamera.release.xcconfig"; path = "Pods/Target Support Files/Pods-ARCamera/Pods-ARCamera.release.xcconfig"; sourceTree = ""; }; 34 | /* End PBXFileReference section */ 35 | 36 | /* Begin PBXFrameworksBuildPhase section */ 37 | 06DE659D1E18E3BE00C064DE /* Frameworks */ = { 38 | isa = PBXFrameworksBuildPhase; 39 | buildActionMask = 2147483647; 40 | files = ( 41 | 6460331F200026E05E7313BC /* Pods_ARCamera.framework in Frameworks */, 42 | ); 43 | runOnlyForDeploymentPostprocessing = 0; 44 | }; 45 | /* End PBXFrameworksBuildPhase section */ 46 | 47 | /* Begin PBXGroup section */ 48 | 06DE65971E18E3BE00C064DE = { 49 | isa = PBXGroup; 50 | children = ( 51 | 06DE65B81E19009B00C064DE /* App */, 52 | 06DE65B51E18FF2D00C064DE /* Supporting Files */, 53 | 06DE65A11E18E3BE00C064DE /* Products */, 54 | 534FEF23053723F0E1BF66C2 /* Pods */, 55 | 8A5CC98E88F923A95371E120 /* Frameworks */, 56 | ); 57 | sourceTree = ""; 58 | }; 59 | 06DE65A11E18E3BE00C064DE /* Products */ = { 60 | isa = PBXGroup; 61 | children = ( 62 | 06DE65A01E18E3BE00C064DE /* ARCamera.app */, 63 | ); 64 | name = Products; 65 | sourceTree = ""; 66 | }; 67 | 06DE65B51E18FF2D00C064DE /* Supporting Files */ = { 68 | isa = PBXGroup; 69 | children = ( 70 | 0672140C227B66B800D1DDDA /* piemonte.usdz */, 71 | 06534216227976ED00FE0E24 /* Base.xcconfig */, 72 | 06534218227976EE00FE0E24 /* Debug.xcconfig */, 73 | 06534217227976EE00FE0E24 /* Release.xcconfig */, 74 | 06534214227975FD00FE0E24 /* Assets.xcassets */, 75 | 06AC206620A1006F00D37921 /* LaunchScreen.storyboard */, 76 | 066A17171E6F899000514A95 /* Info.plist */, 77 | ); 78 | name = "Supporting Files"; 79 | path = Project; 80 | sourceTree = ""; 81 | }; 82 | 06DE65B81E19009B00C064DE /* App */ = { 83 | isa = PBXGroup; 84 | children = ( 85 | 06DE65A31E18E3BE00C064DE /* AppDelegate.swift */, 86 | 06DE65A51E18E3BE00C064DE /* ViewController.swift */, 87 | 060E82F1227A5E57001909EE /* RecordButton.swift */, 88 | ); 89 | name = App; 90 | sourceTree = ""; 91 | }; 92 | 534FEF23053723F0E1BF66C2 /* Pods */ = { 93 | isa = PBXGroup; 94 | children = ( 95 | 
2458999E72D0C8C24D21BFBD /* Pods-ARCamera.debug.xcconfig */, 96 | 9AB8BAFD35E4FB4AB22FC060 /* Pods-ARCamera.release.xcconfig */, 97 | ); 98 | name = Pods; 99 | sourceTree = ""; 100 | }; 101 | 8A5CC98E88F923A95371E120 /* Frameworks */ = { 102 | isa = PBXGroup; 103 | children = ( 104 | 000E5E3FC44A0D153B7C1922 /* Pods_ARCamera.framework */, 105 | ); 106 | name = Frameworks; 107 | sourceTree = ""; 108 | }; 109 | /* End PBXGroup section */ 110 | 111 | /* Begin PBXNativeTarget section */ 112 | 06DE659F1E18E3BE00C064DE /* ARCamera */ = { 113 | isa = PBXNativeTarget; 114 | buildConfigurationList = 06DE65B21E18E3BE00C064DE /* Build configuration list for PBXNativeTarget "ARCamera" */; 115 | buildPhases = ( 116 | 6C7384EB81F7FCC80DEF6D61 /* [CP] Check Pods Manifest.lock */, 117 | 06DE659C1E18E3BE00C064DE /* Sources */, 118 | 06DE659D1E18E3BE00C064DE /* Frameworks */, 119 | 06DE659E1E18E3BE00C064DE /* Resources */, 120 | 9D8217CFF194E90201EEA488 /* [CP] Embed Pods Frameworks */, 121 | ); 122 | buildRules = ( 123 | ); 124 | dependencies = ( 125 | ); 126 | name = ARCamera; 127 | productName = Connect; 128 | productReference = 06DE65A01E18E3BE00C064DE /* ARCamera.app */; 129 | productType = "com.apple.product-type.application"; 130 | }; 131 | /* End PBXNativeTarget section */ 132 | 133 | /* Begin PBXProject section */ 134 | 06DE65981E18E3BE00C064DE /* Project object */ = { 135 | isa = PBXProject; 136 | attributes = { 137 | LastSwiftUpdateCheck = 0820; 138 | LastUpgradeCheck = 1020; 139 | ORGANIZATIONNAME = "Patrick Piemonte"; 140 | TargetAttributes = { 141 | 06DE659F1E18E3BE00C064DE = { 142 | CreatedOnToolsVersion = 8.2.1; 143 | DevelopmentTeam = PW68C6MX6N; 144 | LastSwiftMigration = 1020; 145 | ProvisioningStyle = Automatic; 146 | }; 147 | }; 148 | }; 149 | buildConfigurationList = 06DE659B1E18E3BE00C064DE /* Build configuration list for PBXProject "ARCamera" */; 150 | compatibilityVersion = "Xcode 3.2"; 151 | developmentRegion = en; 152 | hasScannedForEncodings = 0; 153 | knownRegions = ( 154 | en, 155 | Base, 156 | ); 157 | mainGroup = 06DE65971E18E3BE00C064DE; 158 | productRefGroup = 06DE65A11E18E3BE00C064DE /* Products */; 159 | projectDirPath = ""; 160 | projectRoot = ""; 161 | targets = ( 162 | 06DE659F1E18E3BE00C064DE /* ARCamera */, 163 | ); 164 | }; 165 | /* End PBXProject section */ 166 | 167 | /* Begin PBXResourcesBuildPhase section */ 168 | 06DE659E1E18E3BE00C064DE /* Resources */ = { 169 | isa = PBXResourcesBuildPhase; 170 | buildActionMask = 2147483647; 171 | files = ( 172 | 06534215227975FD00FE0E24 /* Assets.xcassets in Resources */, 173 | 0672140D227B66B800D1DDDA /* piemonte.usdz in Resources */, 174 | 06AC206720A1006F00D37921 /* LaunchScreen.storyboard in Resources */, 175 | ); 176 | runOnlyForDeploymentPostprocessing = 0; 177 | }; 178 | /* End PBXResourcesBuildPhase section */ 179 | 180 | /* Begin PBXShellScriptBuildPhase section */ 181 | 6C7384EB81F7FCC80DEF6D61 /* [CP] Check Pods Manifest.lock */ = { 182 | isa = PBXShellScriptBuildPhase; 183 | buildActionMask = 2147483647; 184 | files = ( 185 | ); 186 | inputPaths = ( 187 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock", 188 | "${PODS_ROOT}/Manifest.lock", 189 | ); 190 | name = "[CP] Check Pods Manifest.lock"; 191 | outputPaths = ( 192 | "$(DERIVED_FILE_DIR)/Pods-ARCamera-checkManifestLockResult.txt", 193 | ); 194 | runOnlyForDeploymentPostprocessing = 0; 195 | shellPath = /bin/sh; 196 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? 
!= 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; 197 | showEnvVarsInLog = 0; 198 | }; 199 | 9D8217CFF194E90201EEA488 /* [CP] Embed Pods Frameworks */ = { 200 | isa = PBXShellScriptBuildPhase; 201 | buildActionMask = 2147483647; 202 | files = ( 203 | ); 204 | inputPaths = ( 205 | "${PODS_ROOT}/Target Support Files/Pods-ARCamera/Pods-ARCamera-frameworks.sh", 206 | "${BUILT_PRODUCTS_DIR}/NextLevel/NextLevel.framework", 207 | "${BUILT_PRODUCTS_DIR}/RPCircularProgress/RPCircularProgress.framework", 208 | ); 209 | name = "[CP] Embed Pods Frameworks"; 210 | outputPaths = ( 211 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/NextLevel.framework", 212 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/RPCircularProgress.framework", 213 | ); 214 | runOnlyForDeploymentPostprocessing = 0; 215 | shellPath = /bin/sh; 216 | shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-ARCamera/Pods-ARCamera-frameworks.sh\"\n"; 217 | showEnvVarsInLog = 0; 218 | }; 219 | /* End PBXShellScriptBuildPhase section */ 220 | 221 | /* Begin PBXSourcesBuildPhase section */ 222 | 06DE659C1E18E3BE00C064DE /* Sources */ = { 223 | isa = PBXSourcesBuildPhase; 224 | buildActionMask = 2147483647; 225 | files = ( 226 | 06DE65A61E18E3BE00C064DE /* ViewController.swift in Sources */, 227 | 06DE65A41E18E3BE00C064DE /* AppDelegate.swift in Sources */, 228 | 060E82F2227A5E57001909EE /* RecordButton.swift in Sources */, 229 | ); 230 | runOnlyForDeploymentPostprocessing = 0; 231 | }; 232 | /* End PBXSourcesBuildPhase section */ 233 | 234 | /* Begin XCBuildConfiguration section */ 235 | 06DE65B01E18E3BE00C064DE /* Debug */ = { 236 | isa = XCBuildConfiguration; 237 | baseConfigurationReference = 06534218227976EE00FE0E24 /* Debug.xcconfig */; 238 | buildSettings = { 239 | ALWAYS_SEARCH_USER_PATHS = NO; 240 | CLANG_ANALYZER_NONNULL = YES; 241 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 242 | CLANG_CXX_LIBRARY = "libc++"; 243 | CLANG_ENABLE_MODULES = YES; 244 | CLANG_ENABLE_OBJC_ARC = YES; 245 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 246 | CLANG_WARN_BOOL_CONVERSION = YES; 247 | CLANG_WARN_COMMA = YES; 248 | CLANG_WARN_CONSTANT_CONVERSION = YES; 249 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 250 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 251 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 252 | CLANG_WARN_EMPTY_BODY = YES; 253 | CLANG_WARN_ENUM_CONVERSION = YES; 254 | CLANG_WARN_INFINITE_RECURSION = YES; 255 | CLANG_WARN_INT_CONVERSION = YES; 256 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 257 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 258 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 259 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 260 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 261 | CLANG_WARN_STRICT_PROTOTYPES = YES; 262 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 263 | CLANG_WARN_UNREACHABLE_CODE = YES; 264 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 265 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 266 | COPY_PHASE_STRIP = NO; 267 | DEBUG_INFORMATION_FORMAT = dwarf; 268 | ENABLE_STRICT_OBJC_MSGSEND = YES; 269 | ENABLE_TESTABILITY = YES; 270 | GCC_C_LANGUAGE_STANDARD = gnu99; 271 | GCC_DYNAMIC_NO_PIC = NO; 272 | GCC_NO_COMMON_BLOCKS = YES; 273 | GCC_OPTIMIZATION_LEVEL = 0; 274 | GCC_PREPROCESSOR_DEFINITIONS = ( 275 | "DEBUG=1", 276 | "$(inherited)", 277 | 
); 278 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 279 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 280 | GCC_WARN_UNDECLARED_SELECTOR = YES; 281 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 282 | GCC_WARN_UNUSED_FUNCTION = YES; 283 | GCC_WARN_UNUSED_VARIABLE = YES; 284 | IPHONEOS_DEPLOYMENT_TARGET = 12.0; 285 | MTL_ENABLE_DEBUG_INFO = YES; 286 | ONLY_ACTIVE_ARCH = YES; 287 | SDKROOT = iphoneos; 288 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; 289 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 290 | TARGETED_DEVICE_FAMILY = "1,2"; 291 | }; 292 | name = Debug; 293 | }; 294 | 06DE65B11E18E3BE00C064DE /* Release */ = { 295 | isa = XCBuildConfiguration; 296 | baseConfigurationReference = 06534217227976EE00FE0E24 /* Release.xcconfig */; 297 | buildSettings = { 298 | ALWAYS_SEARCH_USER_PATHS = NO; 299 | CLANG_ANALYZER_NONNULL = YES; 300 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 301 | CLANG_CXX_LIBRARY = "libc++"; 302 | CLANG_ENABLE_MODULES = YES; 303 | CLANG_ENABLE_OBJC_ARC = YES; 304 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 305 | CLANG_WARN_BOOL_CONVERSION = YES; 306 | CLANG_WARN_COMMA = YES; 307 | CLANG_WARN_CONSTANT_CONVERSION = YES; 308 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 309 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 310 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 311 | CLANG_WARN_EMPTY_BODY = YES; 312 | CLANG_WARN_ENUM_CONVERSION = YES; 313 | CLANG_WARN_INFINITE_RECURSION = YES; 314 | CLANG_WARN_INT_CONVERSION = YES; 315 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 316 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 317 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 318 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 319 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 320 | CLANG_WARN_STRICT_PROTOTYPES = YES; 321 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 322 | CLANG_WARN_UNREACHABLE_CODE = YES; 323 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 324 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 325 | COPY_PHASE_STRIP = NO; 326 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 327 | ENABLE_NS_ASSERTIONS = NO; 328 | ENABLE_STRICT_OBJC_MSGSEND = YES; 329 | GCC_C_LANGUAGE_STANDARD = gnu99; 330 | GCC_NO_COMMON_BLOCKS = YES; 331 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 332 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 333 | GCC_WARN_UNDECLARED_SELECTOR = YES; 334 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 335 | GCC_WARN_UNUSED_FUNCTION = YES; 336 | GCC_WARN_UNUSED_VARIABLE = YES; 337 | IPHONEOS_DEPLOYMENT_TARGET = 12.0; 338 | MTL_ENABLE_DEBUG_INFO = NO; 339 | SDKROOT = iphoneos; 340 | SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; 341 | TARGETED_DEVICE_FAMILY = "1,2"; 342 | VALIDATE_PRODUCT = YES; 343 | }; 344 | name = Release; 345 | }; 346 | 06DE65B31E18E3BE00C064DE /* Debug */ = { 347 | isa = XCBuildConfiguration; 348 | baseConfigurationReference = 2458999E72D0C8C24D21BFBD /* Pods-ARCamera.debug.xcconfig */; 349 | buildSettings = { 350 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 351 | DEVELOPMENT_TEAM = PW68C6MX6N; 352 | INFOPLIST_FILE = "$(SRCROOT)/Project/Info.plist"; 353 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 354 | PRODUCT_BUNDLE_IDENTIFIER = com.patrickpiemonte.ARCamera; 355 | PRODUCT_NAME = "$(TARGET_NAME)"; 356 | SWIFT_VERSION = 5.0; 357 | }; 358 | name = Debug; 359 | }; 360 | 06DE65B41E18E3BE00C064DE /* Release */ = { 361 | isa = XCBuildConfiguration; 362 | baseConfigurationReference = 9AB8BAFD35E4FB4AB22FC060 /* Pods-ARCamera.release.xcconfig */; 363 | buildSettings = { 364 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 365 | 
DEVELOPMENT_TEAM = PW68C6MX6N; 366 | INFOPLIST_FILE = "$(SRCROOT)/Project/Info.plist"; 367 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 368 | PRODUCT_BUNDLE_IDENTIFIER = com.patrickpiemonte.ARCamera; 369 | PRODUCT_NAME = "$(TARGET_NAME)"; 370 | SWIFT_VERSION = 5.0; 371 | }; 372 | name = Release; 373 | }; 374 | /* End XCBuildConfiguration section */ 375 | 376 | /* Begin XCConfigurationList section */ 377 | 06DE659B1E18E3BE00C064DE /* Build configuration list for PBXProject "ARCamera" */ = { 378 | isa = XCConfigurationList; 379 | buildConfigurations = ( 380 | 06DE65B01E18E3BE00C064DE /* Debug */, 381 | 06DE65B11E18E3BE00C064DE /* Release */, 382 | ); 383 | defaultConfigurationIsVisible = 0; 384 | defaultConfigurationName = Release; 385 | }; 386 | 06DE65B21E18E3BE00C064DE /* Build configuration list for PBXNativeTarget "ARCamera" */ = { 387 | isa = XCConfigurationList; 388 | buildConfigurations = ( 389 | 06DE65B31E18E3BE00C064DE /* Debug */, 390 | 06DE65B41E18E3BE00C064DE /* Release */, 391 | ); 392 | defaultConfigurationIsVisible = 0; 393 | defaultConfigurationName = Release; 394 | }; 395 | /* End XCConfigurationList section */ 396 | }; 397 | rootObject = 06DE65981E18E3BE00C064DE /* Project object */; 398 | } 399 | -------------------------------------------------------------------------------- /ARCamera/ARCamera.xcodeproj/xcshareddata/xcschemes/Debug.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 24 | 25 | 30 | 31 | 32 | 33 | 39 | 40 | 41 | 42 | 43 | 44 | 54 | 56 | 62 | 63 | 64 | 65 | 66 | 67 | 73 | 74 | 80 | 81 | 82 | 83 | 85 | 86 | 89 | 90 | 91 | -------------------------------------------------------------------------------- /ARCamera/ARCamera.xcodeproj/xcshareddata/xcschemes/Release.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 24 | 25 | 30 | 31 | 32 | 33 | 39 | 40 | 41 | 42 | 43 | 44 | 54 | 56 | 62 | 63 | 64 | 65 | 66 | 67 | 73 | 74 | 80 | 81 | 82 | 83 | 85 | 86 | 89 | 90 | 91 | -------------------------------------------------------------------------------- /ARCamera/ARCamera.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /ARCamera/ARCamera.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /ARCamera/ARCamera.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | BuildSystemType 6 | Original 7 | 8 | 9 | -------------------------------------------------------------------------------- /ARCamera/Podfile: -------------------------------------------------------------------------------- 1 | # include public/private pods 2 | source 'git@github.com:cocoapods/Specs.git' 3 | 4 | platform :ios, '12.0' 5 | 6 | workspace './ARCamera.xcworkspace' 7 | swift_version = '5.0' 8 | use_frameworks! 
9 | 10 | target 'ARCamera' do 11 | pod 'NextLevel', '0.16.0' 12 | pod 'RPCircularProgress', '0.5.0' 13 | end 14 | 15 | post_install do |installer| 16 | installer.pods_project.targets.each do |target| 17 | # setup NextLevel for ARKit use 18 | if target.name == 'NextLevel' 19 | target.build_configurations.each do |config| 20 | config.build_settings['OTHER_SWIFT_FLAGS'] = '$(inherited) -DUSE_ARKIT' 21 | end 22 | end 23 | end 24 | end 25 | -------------------------------------------------------------------------------- /ARCamera/Podfile.lock: -------------------------------------------------------------------------------- 1 | PODS: 2 | - NextLevel (0.16.0) 3 | - RPCircularProgress (0.5.0) 4 | 5 | DEPENDENCIES: 6 | - NextLevel (= 0.16.0) 7 | - RPCircularProgress (= 0.5.0) 8 | 9 | SPEC REPOS: 10 | https://github.com/cocoapods/specs.git: 11 | - NextLevel 12 | - RPCircularProgress 13 | 14 | SPEC CHECKSUMS: 15 | NextLevel: 160b90a86bb36b6fda7c03b8f238a923df880ce5 16 | RPCircularProgress: 4499d4a1453dfbd3fe5bd8c6f344fd51cff3ad00 17 | 18 | PODFILE CHECKSUM: 78c5323aefc35d0227fd127e4367a22a63fc60e6 19 | 20 | COCOAPODS: 1.7.0.rc.2 21 | -------------------------------------------------------------------------------- /ARCamera/Pods/Manifest.lock: -------------------------------------------------------------------------------- 1 | PODS: 2 | - NextLevel (0.16.0) 3 | - RPCircularProgress (0.5.0) 4 | 5 | DEPENDENCIES: 6 | - NextLevel (= 0.16.0) 7 | - RPCircularProgress (= 0.5.0) 8 | 9 | SPEC REPOS: 10 | https://github.com/cocoapods/specs.git: 11 | - NextLevel 12 | - RPCircularProgress 13 | 14 | SPEC CHECKSUMS: 15 | NextLevel: 160b90a86bb36b6fda7c03b8f238a923df880ce5 16 | RPCircularProgress: 4499d4a1453dfbd3fe5bd8c6f344fd51cff3ad00 17 | 18 | PODFILE CHECKSUM: 78c5323aefc35d0227fd127e4367a22a63fc60e6 19 | 20 | COCOAPODS: 1.7.0.rc.2 21 | -------------------------------------------------------------------------------- /ARCamera/Pods/NextLevel/LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com), NextLevel (http://nextlevel.engineering/) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /ARCamera/Pods/NextLevel/README.md: -------------------------------------------------------------------------------- 1 |
Next Level
2 | 3 | `NextLevel` is a [Swift](https://developer.apple.com/swift/) camera system designed for easy integration, customized media capture, and image streaming in iOS. Integration can optionally leverage `AVFoundation` or `ARKit`. 4 | 5 | [![Build Status](https://travis-ci.org/NextLevel/NextLevel.svg?branch=master)](https://travis-ci.org/NextLevel/NextLevel) [![Pod Version](https://img.shields.io/cocoapods/v/NextLevel.svg?style=flat)](http://cocoadocs.org/docsets/NextLevel/) [![Swift Version](https://img.shields.io/badge/language-swift%205.0-brightgreen.svg)](https://developer.apple.com/swift) [![GitHub license](https://img.shields.io/badge/license-MIT-lightgrey.svg)](https://github.com/NextLevel/NextLevel/blob/master/LICENSE) 6 | 7 | | | Features | 8 | |:---------:|:---------------------------------------------------------------| 9 | | 🎬 | “[Vine](http://vine.co)-like” video clip recording and editing | 10 | | 🖼 | photo capture (raw, jpeg, and video frame) | 11 | | 👆 | customizable gestural interaction and interface | 12 | | 💠 | [ARKit integration](https://developer.apple.com/arkit/) (beta) | 13 | | 📷 | dual, wide angle, telephoto, & true depth support | 14 | | 🐢 | adjustable frame rate on supported hardware (ie fast/slow motion capture) | 15 | | 🎢 | depth data capture support & portrait effects matte support | 16 | | 🔍 | video zoom | 17 | | ⚖ | white balance, focus, and exposure adjustment | 18 | | 🔦 | flash and torch support | 19 | | 👯 | mirroring support | 20 | | ☀ | low light boost | 21 | | 🕶 | smooth auto-focus | 22 | | ⚙ | configurable encoding and compression settings | 23 | | 🛠 | simple media capture and editing API | 24 | | 🌀 | extensible API for image processing and CV | 25 | | 🐈 | animated GIF creator | 26 | | 😎 | face recognition; qr- and bar-codes recognition | 27 | | 🐦 | [Swift 5](https://developer.apple.com/swift/) | 28 | 29 | Need a different version of Swift? 30 | * `5.0` - Target your Podfile to the latest release or master 31 | * `4.2` - Target your Podfile to the `swift4.2` branch 32 | 33 | ## Quick Start 34 | 35 | ```ruby 36 | 37 | # CocoaPods 38 | pod "NextLevel", "~> 0.15.1" 39 | 40 | # Carthage 41 | github "nextlevel/NextLevel" ~> 0.15.1 42 | 43 | # Swift PM 44 | let package = Package( 45 | dependencies: [ 46 | .Package(url: "https://github.com/nextlevel/NextLevel", majorVersion: 0) 47 | ] 48 | ) 49 | 50 | ``` 51 | 52 | Alternatively, drop the NextLevel [source files](https://github.com/NextLevel/NextLevel/tree/master/Sources) or project file into your Xcode project. 53 | 54 | ## Important Configuration Note for ARKit and True Depth 55 | 56 | ARKit and the True Depth Camera software features are enabled with the inclusion of the Swift compiler flag `USE_ARKIT` and `USE_TRUE_DEPTH` respectively. 57 | 58 | Apple will [reject](https://github.com/NextLevel/NextLevel/issues/106) apps that link against ARKit or the True Depth Camera API and do not use them. 59 | 60 | If you use Cocoapods, you can include `-D USE_ARKIT` or `-D USE_TRUE_DEPTH` with the following `Podfile` addition or by adding it to your Xcode build settings. 61 | 62 | ```ruby 63 | installer.pods_project.targets.each do |target| 64 | # setup NextLevel for ARKit use 65 | if target.name == 'NextLevel' 66 | target.build_configurations.each do |config| 67 | config.build_settings['OTHER_SWIFT_FLAGS'] = ['$(inherited)', '-DUSE_ARKIT'] 68 | end 69 | end 70 | end 71 | ``` 72 | 73 | ## Overview 74 | 75 | Before starting, ensure that permission keys have been added to your app's `Info.plist`. 
76 | 77 | ```xml 78 | <key>NSCameraUsageDescription</key> 79 | <string>Allowing access to the camera lets you take photos and videos.</string> 80 | <key>NSMicrophoneUsageDescription</key> 81 | <string>Allowing access to the microphone lets you record audio.</string> 82 | ``` 83 | 84 | ### Recording Video Clips 85 | 86 | Import the library. 87 | 88 | ```swift 89 | import NextLevel 90 | ``` 91 | 92 | Set up the camera preview. 93 | 94 | ```swift 95 | let screenBounds = UIScreen.main.bounds 96 | self.previewView = UIView(frame: screenBounds) 97 | if let previewView = self.previewView { 98 | previewView.autoresizingMask = [.flexibleWidth, .flexibleHeight] 99 | previewView.backgroundColor = UIColor.black 100 | NextLevel.shared.previewLayer.frame = previewView.bounds 101 | previewView.layer.addSublayer(NextLevel.shared.previewLayer) 102 | self.view.addSubview(previewView) 103 | } 104 | ``` 105 | 106 | Configure the capture session. 107 | 108 | ```swift 109 | override func viewDidLoad() { 110 | NextLevel.shared.delegate = self 111 | NextLevel.shared.deviceDelegate = self 112 | NextLevel.shared.videoDelegate = self 113 | NextLevel.shared.photoDelegate = self 114 | 115 | // modify .videoConfiguration, .audioConfiguration, .photoConfiguration properties 116 | // Compression, resolution, and maximum recording time options are available 117 | NextLevel.shared.videoConfiguration.maximumCaptureDuration = CMTimeMakeWithSeconds(5, 600) 118 | NextLevel.shared.audioConfiguration.bitRate = 44000 119 | } 120 | ``` 121 | 122 | Start/stop the session when appropriate. These methods create a new "session" instance for `NextLevel.shared.session` when called. 123 | 124 | ```swift 125 | override func viewWillAppear(_ animated: Bool) { 126 | super.viewWillAppear(animated) 127 | NextLevel.shared.start() 128 | // … 129 | } 130 | ``` 131 | 132 | ```swift 133 | override func viewWillDisappear(_ animated: Bool) { 134 | super.viewWillDisappear(animated) 135 | NextLevel.shared.stop() 136 | // … 137 | } 138 | ``` 139 | 140 | Video record/pause. 141 | 142 | ```swift 143 | // record 144 | NextLevel.shared.record() 145 | 146 | // pause 147 | NextLevel.shared.pause() 148 | ``` 149 | 150 | ### Editing Recorded Clips 151 | 152 | Editing and finalizing the recorded session. 153 | ```swift 154 | 155 | if let session = NextLevel.shared.session { 156 | 157 | //.. 158 | 159 | // undo 160 | session.removeLastClip() 161 | 162 | // various editing operations can be done using the NextLevelSession methods 163 | 164 | // export 165 | session.mergeClips(usingPreset: AVAssetExportPresetHighestQuality, completionHandler: { (url: URL?, error: Error?) in 166 | if let _ = url { 167 | // 168 | } else if let _ = error { 169 | // 170 | } 171 | }) 172 | 173 | //.. 174 | 175 | } 176 | ``` 177 | Videos can also be processed using the [NextLevelSessionExporter](https://github.com/NextLevel/NextLevelSessionExporter), a media transcoding library in Swift. 178 | 179 | ## Custom Buffer Rendering 180 | 181 | `NextLevel` was designed for sample buffer analysis and custom modification in real time, alongside a rich set of camera features. 182 | 183 | Note that modifications performed on a buffer and provided back to NextLevel may affect frame rate. 184 | 185 | Enable custom rendering. 186 | 187 | ```swift 188 | NextLevel.shared.isVideoCustomContextRenderingEnabled = true 189 | ``` 190 | 191 | Optional hook that allows reading `sampleBuffer` for analysis. 192 | 193 | ```swift 194 | extension CameraViewController: NextLevelVideoDelegate { 195 | 196 | // ...
197 | 198 | // video frame processing 199 | public func nextLevel(_ nextLevel: NextLevel, willProcessRawVideoSampleBuffer sampleBuffer: CMSampleBuffer) { 200 | // Use the sampleBuffer parameter in your system for continual analysis 201 | } 202 | ``` 203 | 204 | Another optional hook, `imageBuffer`, allows reading buffers for modification. This is also the recommended place to provide the modified buffer back to NextLevel for recording. 205 | 206 | ```swift 207 | extension CameraViewController: NextLevelVideoDelegate { 208 | 209 | // ... 210 | 211 | // enabled by isVideoCustomContextRenderingEnabled 212 | public func nextLevel(_ nextLevel: NextLevel, renderToCustomContextWithImageBuffer imageBuffer: CVPixelBuffer, onQueue queue: DispatchQueue) { 213 | // provide the frame back to NextLevel for recording 214 | if let frame = self._availableFrameBuffer { 215 | nextLevel.videoCustomContextImageBuffer = frame 216 | } 217 | } 218 | ``` 219 | 220 | NextLevel will check this property when writing buffers to a destination file. This works for both video and photos with `capturePhotoFromVideo`. 221 | 222 | ```swift 223 | nextLevel.videoCustomContextImageBuffer = modifiedFrame 224 | ``` A more complete sketch combining these hooks appears in the Custom Rendering Sketch section below. 225 | 226 | ## About 227 | 228 | NextLevel was initially a weekend project that has now grown into an open community of camera platform enthusiasts. The software provides foundational components for managing media recording, camera interface customization, gestural interaction customization, and image streaming on iOS. The same capabilities can also be found in apps such as [Snapchat](http://snapchat.com), [Instagram](http://instagram.com), and [Vine](http://vine.co). 229 | 230 | The goal is to continue to provide a good foundation for quick integration (enabling projects to be taken to the next level) – allowing focus to be placed on the functionality that matters most, whether it's real-time image processing, computer vision methods, augmented reality, or [computational photography](https://om.co/2018/07/23/even-leica-loves-computational-photography/). 231 | 232 | ## ARKit 233 | 234 | NextLevel provides components for capturing ARKit video and photos. This enables a variety of new camera features while leveraging the existing recording capabilities and media management of NextLevel. 235 | 236 | If you are trying to capture frames from SceneKit for ARKit recording, check out the [examples](https://github.com/NextLevel/examples) project. 237 | 238 | ## Documentation 239 | 240 | You can find [the docs here](https://nextlevel.github.io/NextLevel). Documentation is generated with [jazzy](https://github.com/realm/jazzy) and hosted on [GitHub-Pages](https://pages.github.com). 241 | 242 | ### Stickers 243 | 244 | If you found this project to be helpful, check out the [Next Level stickers](https://www.stickermule.com/en/user/1070732101/stickers). 245 | 246 | ### Project 247 | 248 | NextLevel is a community – contributions and discussions are welcome! 249 | 250 | - Feature idea? Open an [issue](https://github.com/nextlevel/NextLevel/issues). 251 | - Found a bug? Open an [issue](https://github.com/nextlevel/NextLevel/issues). 252 | - Need help? Use [Stack Overflow](http://stackoverflow.com/questions/tagged/nextlevel) with the tag 'nextlevel'. 253 | - Questions? Use [Stack Overflow](http://stackoverflow.com/questions/tagged/nextlevel) with the tag 'nextlevel'. 254 | - Want to contribute? Submit a pull request.
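
### Custom Rendering Sketch

Tying the custom buffer rendering hooks above together, the following is a minimal, illustrative sketch rather than library code: it assumes a `CameraViewController` that owns a `ciContext` (for example, one created with `CIContext.createDefaultCIContext()` from the sources below), uses a Core Image mono filter purely as a stand-in for real processing, and omits the other `NextLevelVideoDelegate` methods a full conformance requires.

```swift
import CoreImage
import CoreMedia
import NextLevel

extension CameraViewController: NextLevelVideoDelegate {

    // ... other NextLevelVideoDelegate methods omitted for brevity

    // read-only hook: inspect every incoming frame
    public func nextLevel(_ nextLevel: NextLevel, willProcessRawVideoSampleBuffer sampleBuffer: CMSampleBuffer) {
        // e.g. feed sampleBuffer to Vision or Core ML for analysis
    }

    // called when isVideoCustomContextRenderingEnabled is true
    public func nextLevel(_ nextLevel: NextLevel, renderToCustomContextWithImageBuffer imageBuffer: CVPixelBuffer, onQueue queue: DispatchQueue) {
        // apply a Core Image filter and render the result back into the buffer
        let sourceImage = CIImage(cvPixelBuffer: imageBuffer)
        guard let filter = CIFilter(name: "CIPhotoEffectMono") else { return }
        filter.setValue(sourceImage, forKey: kCIInputImageKey)

        guard let outputImage = filter.outputImage else { return }
        // ciContext is assumed to be a CIContext owned by the view controller
        self.ciContext?.render(outputImage, to: imageBuffer)

        // hand the modified frame back so NextLevel records it
        nextLevel.videoCustomContextImageBuffer = imageBuffer
    }
}
```

Whether you render in place like this or write into a separate buffer (as with `_availableFrameBuffer` in the example above), the key step is assigning `videoCustomContextImageBuffer` so NextLevel picks up the modified frame when writing to the destination file.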
255 | 256 | ### Related Projects 257 | 258 | - [Player (Swift)](https://github.com/piemonte/player), video player in Swift 259 | - [PBJVideoPlayer (obj-c)](https://github.com/piemonte/PBJVideoPlayer), video player in obj-c 260 | - [NextLevelSessionExporter](https://github.com/NextLevel/NextLevelSessionExporter), media transcoding in Swift 261 | - [GPUImage3](https://github.com/BradLarson/GPUImage3), image processing library 262 | - [SCRecorder](https://github.com/rFlex/SCRecorder), obj-c capture library 263 | - [PBJVision](https://github.com/piemonte/PBJVision), obj-c capture library 264 | 265 | ## Resources 266 | 267 | * [iOS Device Camera Summary](https://developer.apple.com/library/archive/documentation/DeviceInformation/Reference/iOSDeviceCompatibility/Cameras/Cameras.html) 268 | * [AV Foundation Programming Guide](https://developer.apple.com/library/ios/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/00_Introduction.html) 269 | * [AV Foundation Framework Reference](https://developer.apple.com/library/ios/documentation/AVFoundation/Reference/AVFoundationFramework/) 270 | * [ARKit Framework Reference](https://developer.apple.com/documentation/arkit) 271 | * [Swift Evolution](https://github.com/apple/swift-evolution) 272 | * [objc.io Camera and Photos](http://www.objc.io/issue-21/) 273 | * [objc.io Video](http://www.objc.io/issue-23/) 274 | * [objc.io Core Image and Video](https://www.objc.io/issues/23-video/core-image-video/) 275 | * [Cameras, ecommerce and machine learning](http://ben-evans.com/benedictevans/2016/11/20/ku6omictaredoge4cao9cytspbz4jt) 276 | * [Again, iPhone is the default camera](http://om.co/2016/12/07/again-iphone-is-the-default-camera/) 277 | 278 | ## License 279 | 280 | NextLevel is available under the MIT license, see the [LICENSE](https://github.com/NextLevel/NextLevel/blob/master/LICENSE) file for more information. 281 | -------------------------------------------------------------------------------- /ARCamera/Pods/NextLevel/Sources/NextLevel+AVFoundation.swift: -------------------------------------------------------------------------------- 1 | // 2 | // NextLevel+AVFoundation.swift 3 | // NextLevel (http://nextlevel.engineering/) 4 | // 5 | // Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com) 6 | // 7 | // Permission is hereby granted, free of charge, to any person obtaining a copy 8 | // of this software and associated documentation files (the "Software"), to deal 9 | // in the Software without restriction, including without limitation the rights 10 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | // copies of the Software, and to permit persons to whom the Software is 12 | // furnished to do so, subject to the following conditions: 13 | // 14 | // The above copyright notice and this permission notice shall be included in all 15 | // copies or substantial portions of the Software. 16 | // 17 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | // SOFTWARE. 
24 | // 25 | 26 | import UIKit 27 | import Foundation 28 | import AVFoundation 29 | 30 | extension AVCaptureConnection { 31 | 32 | /// Returns the capture connection for the desired media type, otherwise nil. 33 | /// 34 | /// - Parameters: 35 | /// - mediaType: Specified media type. (i.e. AVMediaTypeVideo, AVMediaTypeAudio, etc.) 36 | /// - connections: Array of `AVCaptureConnection` objects to search 37 | /// - Returns: Capture connection for the desired media type, otherwise nil 38 | public class func connection(withMediaType mediaType: AVMediaType, fromConnections connections: [AVCaptureConnection]) -> AVCaptureConnection? { 39 | for connection: AVCaptureConnection in connections { 40 | for port: AVCaptureInput.Port in connection.inputPorts { 41 | if port.mediaType == mediaType { 42 | return connection 43 | } 44 | } 45 | } 46 | return nil 47 | } 48 | 49 | } 50 | 51 | extension AVCaptureDeviceInput { 52 | 53 | /// Returns the capture device input for the desired media type and capture session, otherwise nil. 54 | /// 55 | /// - Parameters: 56 | /// - mediaType: Specified media type. (i.e. AVMediaTypeVideo, AVMediaTypeAudio, etc.) 57 | /// - captureSession: Capture session for which to query 58 | /// - Returns: Desired capture device input for the associated media type, otherwise nil 59 | public class func deviceInput(withMediaType mediaType: AVMediaType, captureSession: AVCaptureSession) -> AVCaptureDeviceInput? { 60 | if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] { 61 | for deviceInput in inputs { 62 | if deviceInput.device.hasMediaType(mediaType) { 63 | return deviceInput 64 | } 65 | } 66 | } 67 | return nil 68 | } 69 | 70 | } 71 | 72 | extension AVCaptureDevice { 73 | 74 | // MARK: - device lookup 75 | 76 | /// Returns the capture device for the desired device type and position. 77 | /// #protip, NextLevelDevicePosition.avfoundationType can provide the AVFoundation type. 78 | /// 79 | /// - Parameters: 80 | /// - deviceType: Specified capture device type, (i.e. builtInMicrophone, builtInWideAngleCamera, etc.) 81 | /// - position: Desired position of device 82 | /// - Returns: Capture device for the specified type and position, otherwise nil 83 | public class func captureDevice(withType deviceType: AVCaptureDevice.DeviceType, forPosition position: AVCaptureDevice.Position) -> AVCaptureDevice? { 84 | let deviceTypes: [AVCaptureDevice.DeviceType] = [deviceType] 85 | if let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: deviceTypes, mediaType: AVMediaType.video, position: position).devices.first { 86 | return discoverySession 87 | } 88 | return nil 89 | } 90 | 91 | /// Returns the default wide angle video device for the desired position, otherwise nil. 92 | /// 93 | /// - Parameter position: Desired position of the device 94 | /// - Returns: Wide angle video capture device, otherwise nil 95 | public class func wideAngleVideoDevice(forPosition position: AVCaptureDevice.Position) -> AVCaptureDevice? { 96 | let deviceTypes: [AVCaptureDevice.DeviceType] = [AVCaptureDevice.DeviceType.builtInWideAngleCamera] 97 | if let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: deviceTypes, mediaType: AVMediaType.video, position: position).devices.first { 98 | return discoverySession 99 | } 100 | return nil 101 | } 102 | 103 | /// Returns the default telephoto video device for the desired position, otherwise nil. 
104 | /// 105 | /// - Parameter position: Desired position of the device 106 | /// - Returns: Telephoto video capture device, otherwise nil 107 | public class func telephotoVideoDevice(forPosition position: AVCaptureDevice.Position) -> AVCaptureDevice? { 108 | let deviceTypes: [AVCaptureDevice.DeviceType] = [AVCaptureDevice.DeviceType.builtInTelephotoCamera] 109 | if let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: deviceTypes, mediaType: AVMediaType.video, position: position).devices.first { 110 | return discoverySession 111 | } 112 | return nil 113 | } 114 | 115 | /// Returns the primary duo camera video device, if available, else the default wide angle camera, otherwise nil. 116 | /// 117 | /// - Parameter position: Desired position of the device 118 | /// - Returns: Primary video capture device found, otherwise nil 119 | public class func primaryVideoDevice(forPosition position: AVCaptureDevice.Position) -> AVCaptureDevice? { 120 | var deviceTypes: [AVCaptureDevice.DeviceType] = [AVCaptureDevice.DeviceType.builtInWideAngleCamera] 121 | if #available(iOS 11.0, *) { 122 | deviceTypes.append(.builtInDualCamera) 123 | } else { 124 | deviceTypes.append(.builtInDuoCamera) 125 | } 126 | 127 | // prioritize duo camera systems before wide angle 128 | let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: deviceTypes, mediaType: AVMediaType.video, position: position) 129 | for device in discoverySession.devices { 130 | if #available(iOS 11.0, *) { 131 | if (device.deviceType == AVCaptureDevice.DeviceType.builtInDualCamera) { 132 | return device 133 | } 134 | } else { 135 | if (device.deviceType == AVCaptureDevice.DeviceType.builtInDuoCamera) { 136 | return device 137 | } 138 | } 139 | } 140 | return discoverySession.devices.first 141 | } 142 | 143 | /// Returns the default video capture device, otherwise nil. 144 | /// 145 | /// - Returns: Default video capture device, otherwise nil 146 | public class func videoDevice() -> AVCaptureDevice? { 147 | return AVCaptureDevice.default(for: AVMediaType.video) 148 | } 149 | 150 | /// Returns the default audio capture device, otherwise nil. 151 | /// 152 | /// - Returns: Default audio capture device, otherwise nil 153 | public class func audioDevice() -> AVCaptureDevice? { 154 | return AVCaptureDevice.default(for: AVMediaType.audio) 155 | } 156 | 157 | // MARK: - utilities 158 | 159 | /// Calculates focal length and principle point camera intrinsic parameters for OpenCV. 160 | /// (see Hartley's Multiple View Geometry, Chapter 6) 161 | /// 162 | /// - Parameters: 163 | /// - focalLengthX: focal length along the x-axis 164 | /// - focalLengthY: focal length along the y-axis 165 | /// - principlePointX: principle point x-coordinate 166 | /// - principlePointY: principle point y-coordinate 167 | /// - Note: The computed values are written to the `inout` parameters; the method has no return value.
168 | public func focalLengthAndPrinciplePoint(focalLengthX: inout Float, focalLengthY: inout Float, principlePointX: inout Float, principlePointY: inout Float) { 169 | let dimensions = CMVideoFormatDescriptionGetPresentationDimensions(self.activeFormat.formatDescription, usePixelAspectRatio: true, useCleanAperture: true) 170 | 171 | principlePointX = Float(dimensions.width) * 0.5 172 | principlePointY = Float(dimensions.height) * 0.5 173 | 174 | let horizontalFieldOfView = self.activeFormat.videoFieldOfView 175 | let verticalFieldOfView = (horizontalFieldOfView / principlePointX) * principlePointY 176 | 177 | focalLengthX = abs( Float(dimensions.width) / (2.0 * tan(horizontalFieldOfView / 180.0 * .pi / 2 )) ) 178 | focalLengthY = abs( Float(dimensions.height) / (2.0 * tan(verticalFieldOfView / 180.0 * .pi / 2 )) ) 179 | } 180 | 181 | } 182 | 183 | extension AVCaptureDevice.Format { 184 | 185 | /// Returns the maximum capable framerate for the desired capture format and minimum, otherwise zero. 186 | /// 187 | /// - Parameters: 188 | /// - format: Capture format to evaluate for a specific framerate. 189 | /// - minFrameRate: Lower bound time scale or minimum desired framerate. 190 | /// - Returns: Maximum capable framerate within the desired format and minimum constraints. 191 | public class func maxFrameRate(forFormat format: AVCaptureDevice.Format, minFrameRate: CMTimeScale) -> CMTimeScale { 192 | var lowestTimeScale: CMTimeScale = 0 193 | for range in format.videoSupportedFrameRateRanges { 194 | if range.minFrameDuration.timescale >= minFrameRate && (lowestTimeScale == 0 || range.minFrameDuration.timescale < lowestTimeScale) { 195 | lowestTimeScale = range.minFrameDuration.timescale 196 | } 197 | } 198 | return lowestTimeScale 199 | } 200 | 201 | /// Checks if the specified capture device format supports a desired framerate and dimensions. 202 | /// 203 | /// - Parameters: 204 | /// - frameRate: Desired frame rate 205 | /// - dimensions: Desired video dimensions 206 | /// - Returns: `true` if the capture device format supports the given criteria, otherwise false 207 | public func isSupported(withFrameRate frameRate: CMTimeScale, dimensions: CMVideoDimensions = CMVideoDimensions(width: 0, height: 0)) -> Bool { 208 | let formatDimensions = CMVideoFormatDescriptionGetDimensions(self.formatDescription) 209 | if (formatDimensions.width >= dimensions.width && formatDimensions.height >= dimensions.height) { 210 | for frameRateRange in self.videoSupportedFrameRateRanges { 211 | if frameRateRange.minFrameDuration.timescale >= frameRate && frameRateRange.maxFrameDuration.timescale <= frameRate { 212 | return true 213 | } 214 | } 215 | } 216 | return false 217 | } 218 | 219 | } 220 | 221 | extension AVCaptureDevice.Position { 222 | 223 | /// Checks if a camera device is available for a position. 224 | /// 225 | /// - Parameter devicePosition: Camera device position to query. 226 | /// - Returns: `true` if the camera device exists, otherwise false. 
227 | public var isCameraDevicePositionAvailable: Bool { 228 | return UIImagePickerController.isCameraDeviceAvailable(self.uikitType) 229 | } 230 | 231 | /// UIKit device equivalent type 232 | public var uikitType: UIImagePickerController.CameraDevice { 233 | switch self { 234 | case .front: 235 | return .front 236 | case .unspecified: 237 | fallthrough 238 | case .back: 239 | fallthrough 240 | @unknown default: 241 | return .rear 242 | } 243 | } 244 | 245 | } 246 | 247 | extension AVCaptureDevice.WhiteBalanceGains { 248 | 249 | /// Normalize gain values for a capture device. 250 | /// 251 | /// - Parameter captureDevice: Device used for adjustment. 252 | /// - Returns: Normalized gains. 253 | public func normalize(_ captureDevice: AVCaptureDevice) -> AVCaptureDevice.WhiteBalanceGains { 254 | var newGains = self 255 | 256 | newGains.redGain = Swift.min(captureDevice.maxWhiteBalanceGain, Swift.max(1.0, newGains.redGain)) 257 | newGains.greenGain = Swift.min(captureDevice.maxWhiteBalanceGain, Swift.max(1.0, newGains.greenGain)) 258 | newGains.blueGain = Swift.min(captureDevice.maxWhiteBalanceGain, Swift.max(1.0, newGains.blueGain)) 259 | 260 | return newGains 261 | } 262 | 263 | } 264 | 265 | extension AVCaptureVideoOrientation { 266 | 267 | /// UIKit orientation equivalent type 268 | public var uikitType: UIDeviceOrientation { 269 | switch self { 270 | case .portrait: 271 | return .portrait 272 | case .landscapeLeft: 273 | return .landscapeLeft 274 | case .landscapeRight: 275 | return .landscapeRight 276 | case .portraitUpsideDown: 277 | return .portraitUpsideDown 278 | @unknown default: 279 | return .unknown 280 | } 281 | } 282 | 283 | internal static func avorientationFromUIDeviceOrientation(_ orientation: UIDeviceOrientation) -> AVCaptureVideoOrientation { 284 | var avorientation: AVCaptureVideoOrientation = .portrait 285 | switch orientation { 286 | case .portrait: 287 | break 288 | case .landscapeLeft: 289 | avorientation = .landscapeRight 290 | break 291 | case .landscapeRight: 292 | avorientation = .landscapeLeft 293 | break 294 | case .portraitUpsideDown: 295 | avorientation = .portraitUpsideDown 296 | break 297 | default: 298 | break 299 | } 300 | return avorientation 301 | } 302 | 303 | } 304 | -------------------------------------------------------------------------------- /ARCamera/Pods/NextLevel/Sources/NextLevel+CIContext.swift: -------------------------------------------------------------------------------- 1 | // 2 | // NextLevel+CoreImage.swift 3 | // NextLevel (http://nextlevel.engineering/) 4 | // 5 | // Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com) 6 | // 7 | // Permission is hereby granted, free of charge, to any person obtaining a copy 8 | // of this software and associated documentation files (the "Software"), to deal 9 | // in the Software without restriction, including without limitation the rights 10 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | // copies of the Software, and to permit persons to whom the Software is 12 | // furnished to do so, subject to the following conditions: 13 | // 14 | // The above copyright notice and this permission notice shall be included in all 15 | // copies or substantial portions of the Software. 16 | // 17 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 20 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | // SOFTWARE. 24 | // 25 | 26 | import UIKit 27 | import CoreImage 28 | import CoreMedia 29 | import Foundation 30 | 31 | extension CIContext { 32 | 33 | /// Factory for creating a CIContext using the available graphics API. 34 | /// 35 | /// - Parameter mtlDevice: Processor for computing 36 | /// - Returns: Default configuration rendering context, otherwise nil. 37 | public class func createDefaultCIContext(_ mtlDevice: MTLDevice? = nil) -> CIContext? { 38 | let options : [CIContextOption : Any] = [.outputColorSpace : CGColorSpaceCreateDeviceRGB(), 39 | .outputPremultiplied: true, 40 | .useSoftwareRenderer : NSNumber(booleanLiteral: false)] 41 | if let device = mtlDevice { 42 | return CIContext(mtlDevice: device, options: options) 43 | } else if let device = MTLCreateSystemDefaultDevice() { 44 | return CIContext(mtlDevice: device, options: options) 45 | } else if let eaglContext = EAGLContext(api: .openGLES2) { 46 | return CIContext(eaglContext: eaglContext, options: options) 47 | } else { 48 | return nil 49 | } 50 | } 51 | 52 | /// Creates a UIImage from the given sample buffer input 53 | /// 54 | /// - Parameter sampleBuffer: sample buffer input 55 | /// - Returns: UIImage from the sample buffer, otherwise nil 56 | public func uiimage(withSampleBuffer sampleBuffer: CMSampleBuffer) -> UIImage? { 57 | var sampleBufferImage: UIImage? = nil 58 | if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) { 59 | let ciimage = CIImage(cvPixelBuffer: pixelBuffer) 60 | if let cgimage = self.createCGImage(ciimage, from: CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))) { 61 | sampleBufferImage = UIImage(cgImage: cgimage) 62 | } 63 | } 64 | return sampleBufferImage 65 | } 66 | 67 | /// Creates a UIImage from the given pixel buffer input 68 | /// 69 | /// - Parameter pixelBuffer: Pixel buffer input 70 | /// - Returns: UIImage from the pixel buffer, otherwise nil 71 | public func uiimage(withPixelBuffer pixelBuffer: CVPixelBuffer) -> UIImage? { 72 | var pixelBufferImage: UIImage? = nil 73 | if CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.readOnly) == kCVReturnSuccess { 74 | let ciimage = CIImage(cvPixelBuffer: pixelBuffer) 75 | if let cgimage = self.createCGImage(ciimage, from: CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))) { 76 | pixelBufferImage = UIImage(cgImage: cgimage) 77 | } 78 | CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.readOnly) 79 | } 80 | return pixelBufferImage 81 | } 82 | 83 | /// Orient a pixel buffer using an exif orientation value. 84 | /// 85 | /// - Parameters: 86 | /// - pixelBuffer: Pixel buffer input 87 | /// - orientation: CGImage orientation for the new pixel buffer 88 | /// - pixelBufferPool: Pixel buffer pool at which to allocate the new buffer 89 | /// - Returns: Oriented pixel buffer, otherwise nil 90 | @available(iOS 11.0, *) 91 | public func createPixelBuffer(fromPixelBuffer pixelBuffer: CVPixelBuffer, withOrientation orientation: CGImagePropertyOrientation, pixelBufferPool: CVPixelBufferPool) -> CVPixelBuffer? { 92 | var updatedPixelBuffer: CVPixelBuffer? 
= nil 93 | if CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &updatedPixelBuffer) == kCVReturnSuccess { 94 | if let updatedPixelBuffer = updatedPixelBuffer { 95 | CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.readOnly) 96 | let ciImage = CIImage(cvPixelBuffer: pixelBuffer, options: nil) 97 | let orientedImage = ciImage.oriented(orientation) 98 | self.render(orientedImage, to: updatedPixelBuffer) 99 | CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.readOnly) 100 | return updatedPixelBuffer 101 | } 102 | } 103 | return nil 104 | } 105 | 106 | /// Create a pixel buffer from a MTLTexture and orientation value. 107 | /// 108 | /// - Parameters: 109 | /// - mtlTexture: Input texture to render 110 | /// - orientation: CGImage orientation for the new pixel buffer 111 | /// - pixelBufferPool: Pixel buffer pool at which to allocate the new buffer 112 | /// - Returns: Oriented pixel buffer, otherwise nil 113 | @available(iOS 11.0, *) 114 | public func createPixelBuffer(fromMTLTexture mtlTexture: MTLTexture, withOrientation orientation: CGImagePropertyOrientation, pixelBufferPool: CVPixelBufferPool) -> CVPixelBuffer? { 115 | var updatedPixelBuffer: CVPixelBuffer? = nil 116 | if CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &updatedPixelBuffer) == kCVReturnSuccess { 117 | if let updatedPixelBuffer = updatedPixelBuffer { 118 | // update orientation to match Metal's origin 119 | let ciImage = CIImage(mtlTexture: mtlTexture, options: nil) 120 | if let orientedImage = ciImage?.oriented(orientation) { 121 | CVPixelBufferLockBaseAddress(updatedPixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) 122 | self.render(orientedImage, to: updatedPixelBuffer) 123 | CVPixelBufferUnlockBaseAddress(updatedPixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) 124 | return updatedPixelBuffer 125 | } 126 | } 127 | } 128 | return nil 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /ARCamera/Pods/NextLevel/Sources/NextLevel+CMSampleBuffer.swift: -------------------------------------------------------------------------------- 1 | // 2 | // NextLevel+CMSampleBuffer.swift 3 | // NextLevel (http://nextlevel.engineering/) 4 | // 5 | // Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com) 6 | // 7 | // Permission is hereby granted, free of charge, to any person obtaining a copy 8 | // of this software and associated documentation files (the "Software"), to deal 9 | // in the Software without restriction, including without limitation the rights 10 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | // copies of the Software, and to permit persons to whom the Software is 12 | // furnished to do so, subject to the following conditions: 13 | // 14 | // The above copyright notice and this permission notice shall be included in all 15 | // copies or substantial portions of the Software. 16 | // 17 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | // SOFTWARE. 
24 | // 25 | 26 | import CoreMedia 27 | import Foundation 28 | 29 | extension CMSampleBuffer { 30 | 31 | /// Creates an offset `CMSampleBuffer` for the given time offset and duration. 32 | /// 33 | /// - Parameters: 34 | /// - sampleBuffer: Input sample buffer to copy and offset. 35 | /// - timeOffset: Time offset for the output sample buffer. 36 | /// - duration: Optional duration for the output sample buffer. 37 | /// - Returns: Sample buffer with the desired time offset and duration, otherwise nil. 38 | public class func createSampleBuffer(fromSampleBuffer sampleBuffer: CMSampleBuffer, withTimeOffset timeOffset: CMTime, duration: CMTime?) -> CMSampleBuffer? { 39 | var itemCount: CMItemCount = 0 40 | var status = CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, entryCount: 0, arrayToFill: nil, entriesNeededOut: &itemCount) 41 | if status != 0 { 42 | return nil 43 | } 44 | 45 | var timingInfo = [CMSampleTimingInfo](repeating: CMSampleTimingInfo(duration: CMTimeMake(value: 0, timescale: 0), presentationTimeStamp: CMTimeMake(value: 0, timescale: 0), decodeTimeStamp: CMTimeMake(value: 0, timescale: 0)), count: itemCount) 46 | status = CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, entryCount: itemCount, arrayToFill: &timingInfo, entriesNeededOut: &itemCount); 47 | if status != 0 { 48 | return nil 49 | } 50 | 51 | if let dur = duration { 52 | for i in 0 ..< itemCount { 53 | timingInfo[i].decodeTimeStamp = CMTimeSubtract(timingInfo[i].decodeTimeStamp, timeOffset); 54 | timingInfo[i].presentationTimeStamp = CMTimeSubtract(timingInfo[i].presentationTimeStamp, timeOffset); 55 | timingInfo[i].duration = dur 56 | } 57 | } else { 58 | for i in 0 ..< itemCount { 59 | timingInfo[i].decodeTimeStamp = CMTimeSubtract(timingInfo[i].decodeTimeStamp, timeOffset); 60 | timingInfo[i].presentationTimeStamp = CMTimeSubtract(timingInfo[i].presentationTimeStamp, timeOffset); 61 | } 62 | } 63 | 64 | var sampleBufferOffset: CMSampleBuffer? = nil 65 | CMSampleBufferCreateCopyWithNewTiming(allocator: kCFAllocatorDefault, sampleBuffer: sampleBuffer, sampleTimingEntryCount: itemCount, sampleTimingArray: &timingInfo, sampleBufferOut: &sampleBufferOffset); 66 | 67 | if let output = sampleBufferOffset { 68 | return output 69 | } else { 70 | return nil 71 | } 72 | } 73 | 74 | } 75 | -------------------------------------------------------------------------------- /ARCamera/Pods/NextLevel/Sources/NextLevel+Foundation.swift: -------------------------------------------------------------------------------- 1 | // 2 | // NextLevel+Foundation.swift 3 | // NextLevel (http://nextlevel.engineering/) 4 | // 5 | // Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com) 6 | // 7 | // Permission is hereby granted, free of charge, to any person obtaining a copy 8 | // of this software and associated documentation files (the "Software"), to deal 9 | // in the Software without restriction, including without limitation the rights 10 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | // copies of the Software, and to permit persons to whom the Software is 12 | // furnished to do so, subject to the following conditions: 13 | // 14 | // The above copyright notice and this permission notice shall be included in all 15 | // copies or substantial portions of the Software. 
16 | //
17 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
23 | // SOFTWARE.
24 | //
25 | 
26 | import Foundation
27 | import AVFoundation
28 | 
29 | // MARK: - Comparable
30 | 
31 | extension Comparable {
32 | 
33 |     public func clamped(to limits: ClosedRange<Self>) -> Self {
34 |         return min(max(self, limits.lowerBound), limits.upperBound)
35 |     }
36 | 
37 | }
38 | 
39 | // MARK: - Data
40 | 
41 | extension Data {
42 | 
43 |     /// Outputs a `Data` object with the desired metadata dictionary
44 |     ///
45 |     /// - Parameter metadata: metadata dictionary to be added
46 |     /// - Returns: JPEG formatted image data
47 |     public func jpegData(withMetadataDictionary metadata: [String: Any]) -> Data? {
48 |         var imageDataWithMetadata: Data? = nil
49 |         if let source = CGImageSourceCreateWithData(self as CFData, nil),
50 |             let sourceType = CGImageSourceGetType(source) {
51 |             let mutableData = NSMutableData()
52 |             if let destination = CGImageDestinationCreateWithData(mutableData, sourceType, 1, nil) {
53 |                 CGImageDestinationAddImageFromSource(destination, source, 0, metadata as CFDictionary?)
54 |                 let success = CGImageDestinationFinalize(destination)
55 |                 if success == true {
56 |                     imageDataWithMetadata = mutableData as Data
57 |                 } else {
58 |                     print("could not finalize image with metadata")
59 |                 }
60 |             }
61 |         }
62 |         return imageDataWithMetadata
63 |     }
64 | 
65 | }
66 | 
67 | // MARK: - Date
68 | 
69 | extension Date {
70 | 
71 |     static let dateFormatter: DateFormatter = iso8601DateFormatter()
72 |     fileprivate static func iso8601DateFormatter() -> DateFormatter {
73 |         let formatter = DateFormatter()
74 |         formatter.calendar = Calendar(identifier: .iso8601)
75 |         formatter.timeZone = TimeZone(secondsFromGMT: 0)
76 |         formatter.locale = Locale(identifier: "en_US_POSIX")
77 |         formatter.dateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSSZZZZZ"
78 |         return formatter
79 |     }
80 | 
81 |     // http://nshipster.com/nsformatter/
82 |     // http://unicode.org/reports/tr35/tr35-6.html#Date_Format_Patterns
83 |     public func iso8601() -> String {
84 |         return Date.iso8601DateFormatter().string(from: self)
85 |     }
86 | 
87 | }
88 | 
89 | // MARK: - FileManager
90 | 
91 | extension FileManager {
92 | 
93 |     /// Returns the available user-designated storage space in bytes.
94 |     ///
95 |     /// - Returns: Number of available bytes in storage.
96 |     public class func availableStorageSpaceInBytes() -> UInt64 {
97 |         do {
98 |             if let lastPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).last {
99 |                 let attributes = try FileManager.default.attributesOfFileSystem(forPath: lastPath)
100 |                 if let freeSize = attributes[FileAttributeKey.systemFreeSize] as?
UInt64 { 101 | return freeSize 102 | } 103 | } 104 | } catch { 105 | print("could not determine user attributes of file system") 106 | return 0 107 | } 108 | return 0 109 | } 110 | 111 | } 112 | -------------------------------------------------------------------------------- /ARCamera/Pods/NextLevel/Sources/NextLevel+Metadata.swift: -------------------------------------------------------------------------------- 1 | // 2 | // NextLevel+Metadata.swift 3 | // NextLevel (http://nextlevel.engineering/) 4 | // 5 | // Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com) 6 | // 7 | // Permission is hereby granted, free of charge, to any person obtaining a copy 8 | // of this software and associated documentation files (the "Software"), to deal 9 | // in the Software without restriction, including without limitation the rights 10 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | // copies of the Software, and to permit persons to whom the Software is 12 | // furnished to do so, subject to the following conditions: 13 | // 14 | // The above copyright notice and this permission notice shall be included in all 15 | // copies or substantial portions of the Software. 16 | // 17 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | // SOFTWARE. 24 | // 25 | 26 | import UIKit 27 | import Foundation 28 | import AVFoundation 29 | import CoreMedia 30 | import ImageIO 31 | 32 | extension CMSampleBuffer { 33 | 34 | /// Extracts the metadata dictionary from a `CMSampleBuffer`. 35 | /// (ie EXIF: Aperture, Brightness, Exposure, FocalLength, etc) 36 | /// 37 | /// - Parameter sampleBuffer: sample buffer to be processed 38 | /// - Returns: metadata dictionary from the provided sample buffer 39 | public func metadata() -> [String : Any]? { 40 | 41 | if let cfmetadata = CMCopyDictionaryOfAttachments(allocator: kCFAllocatorDefault, target: self, attachmentMode: kCMAttachmentMode_ShouldPropagate) { 42 | if let metadata = cfmetadata as? [String : Any] { 43 | return metadata 44 | } 45 | } 46 | return nil 47 | 48 | } 49 | 50 | /// Appends the provided metadata dictionary key/value pairs. 51 | /// 52 | /// - Parameter metadataAdditions: Metadata key/value pairs to be appended. 53 | public func append(metadataAdditions: [String: Any]) { 54 | 55 | // append tiff metadata to buffer for proagation 56 | if let tiffDict: CFDictionary = CMCopyDictionaryOfAttachments(allocator: kCFAllocatorDefault, target: kCGImagePropertyTIFFDictionary, attachmentMode: kCMAttachmentMode_ShouldPropagate) { 57 | let tiffNSDict = tiffDict as NSDictionary 58 | var metaDict: [String: Any] = [:] 59 | for (key, value) in metadataAdditions { 60 | metaDict.updateValue(value as AnyObject, forKey: key) 61 | } 62 | for (key, value) in tiffNSDict { 63 | if let keyString = key as? 
String { 64 | metaDict.updateValue(value as AnyObject, forKey: keyString) 65 | } 66 | } 67 | CMSetAttachment(self, key: kCGImagePropertyTIFFDictionary, value: metaDict as CFTypeRef?, attachmentMode: kCMAttachmentMode_ShouldPropagate) 68 | } else { 69 | CMSetAttachment(self, key: kCGImagePropertyTIFFDictionary, value: metadataAdditions as CFTypeRef?, attachmentMode: kCMAttachmentMode_ShouldPropagate) 70 | } 71 | } 72 | 73 | } 74 | 75 | fileprivate let NextLevelMetadataTitle = "NextLevel" 76 | fileprivate let NextLevelMetadataArtist = "http://nextlevel.engineering/" 77 | 78 | extension NextLevel { 79 | 80 | internal class func tiffMetadata() -> [String: Any] { 81 | return [ kCGImagePropertyTIFFSoftware as String : NextLevelMetadataTitle, 82 | kCGImagePropertyTIFFArtist as String : NextLevelMetadataArtist, 83 | kCGImagePropertyTIFFDateTime as String : Date().iso8601() ] 84 | } 85 | 86 | internal class func assetWriterMetadata() -> [AVMutableMetadataItem] { 87 | let currentDevice = UIDevice.current 88 | 89 | let modelItem = AVMutableMetadataItem() 90 | modelItem.keySpace = AVMetadataKeySpace.common 91 | modelItem.key = AVMetadataKey.commonKeyModel as (NSCopying & NSObjectProtocol) 92 | modelItem.value = currentDevice.localizedModel as (NSCopying & NSObjectProtocol) 93 | 94 | let softwareItem = AVMutableMetadataItem() 95 | softwareItem.keySpace = AVMetadataKeySpace.common 96 | softwareItem.key = AVMetadataKey.commonKeySoftware as (NSCopying & NSObjectProtocol) 97 | softwareItem.value = NextLevelMetadataTitle as (NSCopying & NSObjectProtocol) 98 | 99 | let artistItem = AVMutableMetadataItem() 100 | artistItem.keySpace = AVMetadataKeySpace.common 101 | artistItem.key = AVMetadataKey.commonKeyArtist as (NSCopying & NSObjectProtocol) 102 | artistItem.value = NextLevelMetadataArtist as (NSCopying & NSObjectProtocol) 103 | 104 | let creationDateItem = AVMutableMetadataItem() 105 | creationDateItem.keySpace = AVMetadataKeySpace.common 106 | creationDateItem.key = AVMetadataKey.commonKeyCreationDate as (NSCopying & NSObjectProtocol) 107 | creationDateItem.value = Date().iso8601() as (NSCopying & NSObjectProtocol) 108 | 109 | return [modelItem, softwareItem, artistItem, creationDateItem] 110 | } 111 | 112 | } 113 | -------------------------------------------------------------------------------- /ARCamera/Pods/NextLevel/Sources/NextLevel+UIImage.swift: -------------------------------------------------------------------------------- 1 | // 2 | // NextLevel+UIImage.swift 3 | // NextLevel (http://nextlevel.engineering/) 4 | // 5 | // Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com) 6 | // 7 | // Permission is hereby granted, free of charge, to any person obtaining a copy 8 | // of this software and associated documentation files (the "Software"), to deal 9 | // in the Software without restriction, including without limitation the rights 10 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | // copies of the Software, and to permit persons to whom the Software is 12 | // furnished to do so, subject to the following conditions: 13 | // 14 | // The above copyright notice and this permission notice shall be included in all 15 | // copies or substantial portions of the Software. 16 | // 17 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 20 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | // SOFTWARE. 24 | // 25 | 26 | import UIKit 27 | 28 | extension UIImage { 29 | 30 | // via by BrikerMan on 2017/12/23. 31 | 32 | /// EZSE: Returns cropped image from CGRect 33 | public func nx_croppedImage(to ratio: CGFloat) -> UIImage { 34 | let height = self.size.width * ratio 35 | let y = (self.size.height - height) / 2 36 | 37 | let bound = CGRect(x: 0, y: y, width: self.size.width, height: height) 38 | let scaledBounds: CGRect = CGRect(x: bound.origin.x * self.scale, 39 | y: bound.origin.y * self.scale, 40 | width: bound.width * self.scale, 41 | height: bound.height * self.scale) 42 | let imageRef = self.cgImage?.cropping(to: scaledBounds) 43 | 44 | let croppedImage: UIImage = UIImage(cgImage: imageRef!, scale: self.scale, orientation: UIImage.Orientation.up) 45 | return croppedImage 46 | } 47 | 48 | } 49 | -------------------------------------------------------------------------------- /ARCamera/Pods/NextLevel/Sources/NextLevelBufferRenderer.swift: -------------------------------------------------------------------------------- 1 | // 2 | // NextLevelBufferRenderer.swift 3 | // NextLevel (http://nextlevel.engineering/) 4 | // 5 | // Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com) 6 | // 7 | // Permission is hereby granted, free of charge, to any person obtaining a copy 8 | // of this software and associated documentation files (the "Software"), to deal 9 | // in the Software without restriction, including without limitation the rights 10 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | // copies of the Software, and to permit persons to whom the Software is 12 | // furnished to do so, subject to the following conditions: 13 | // 14 | // The above copyright notice and this permission notice shall be included in all 15 | // copies or substantial portions of the Software. 16 | // 17 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | // SOFTWARE. 24 | // 25 | 26 | import UIKit 27 | import Foundation 28 | import CoreImage 29 | import Metal 30 | #if USE_ARKIT 31 | import ARKit 32 | import SceneKit 33 | #endif 34 | 35 | /// NextLevelBufferRenderer, provides the ability to render/record SceneKit frames 36 | @available(iOS 11.0, *) 37 | public class NextLevelBufferRenderer { 38 | 39 | // MARK: - properties 40 | 41 | /// Specifies if the renderer should automatically light up scenes that have no light source. 42 | public var autoenablesDefaultLighting: Bool = true { 43 | didSet { 44 | #if USE_ARKIT 45 | self._renderer?.autoenablesDefaultLighting = self.autoenablesDefaultLighting 46 | #endif 47 | } 48 | } 49 | 50 | /// Rendering context based on the ARSCNView metal device 51 | public var ciContext: CIContext? 
{ 52 | get { 53 | return self._ciContext 54 | } 55 | } 56 | 57 | /// Pixel buffer pool for sharing 58 | public var pixelBufferPool: CVPixelBufferPool? { 59 | get { 60 | return self._pixelBufferPool 61 | } 62 | } 63 | 64 | /// Rendered video buffer output (using to write video file) 65 | public var videoBufferOutput: CVPixelBuffer? { 66 | get { 67 | return self._videoBufferOutput 68 | } 69 | } 70 | 71 | // MARK: - ivars 72 | 73 | fileprivate var _device: MTLDevice? 74 | fileprivate var _library: MTLLibrary? 75 | fileprivate var _commandQueue: MTLCommandQueue? 76 | fileprivate var _renderPassDescriptor: MTLRenderPassDescriptor = MTLRenderPassDescriptor() 77 | 78 | fileprivate var _texture: MTLTexture? 79 | 80 | fileprivate var _bufferWidth: Int = 0 81 | fileprivate var _bufferHeight: Int = 0 82 | fileprivate var _bufferFormatType: OSType = OSType(kCVPixelFormatType_32BGRA) 83 | fileprivate var _presentationFrame: CGRect = .zero 84 | 85 | fileprivate var _ciContext: CIContext? 86 | fileprivate var _pixelBufferPool: CVPixelBufferPool? 87 | fileprivate var _videoBufferOutput: CVPixelBuffer? 88 | 89 | #if USE_ARKIT 90 | fileprivate weak var _arView: ARSCNView? 91 | fileprivate var _renderer: SCNRenderer? 92 | 93 | public convenience init(view: ARSCNView) { 94 | self.init() 95 | 96 | self._arView = view 97 | self._presentationFrame = view.bounds 98 | 99 | #if !( targetEnvironment(simulator) ) 100 | self._device = view.device 101 | self._renderer = SCNRenderer(device: view.device, options: nil) 102 | self._renderer?.scene = view.scene 103 | self._renderer?.autoenablesDefaultLighting = self.autoenablesDefaultLighting 104 | #endif 105 | 106 | self._commandQueue = view.device?.makeCommandQueue() 107 | } 108 | #endif 109 | 110 | deinit { 111 | self._device = nil 112 | self._library = nil 113 | self._commandQueue = nil 114 | self._texture = nil 115 | 116 | #if USE_ARKIT 117 | self._renderer?.scene = nil 118 | self._renderer = nil 119 | self._arView = nil 120 | #endif 121 | 122 | self._ciContext = nil 123 | self._pixelBufferPool = nil 124 | self._videoBufferOutput = nil 125 | } 126 | 127 | } 128 | 129 | // MARK: - setup 130 | 131 | @available(iOS 11.0, *) 132 | extension NextLevelBufferRenderer { 133 | 134 | fileprivate func setupContextIfNecessary() { 135 | guard self._ciContext == nil else { 136 | return 137 | } 138 | 139 | guard let device = self._device else { 140 | return 141 | } 142 | 143 | self._ciContext = CIContext.createDefaultCIContext(device) 144 | } 145 | 146 | fileprivate func setupPixelBufferPoolIfNecessary(_ pixelBuffer: CVPixelBuffer, orientation: CGImagePropertyOrientation) { 147 | let width = CVPixelBufferGetWidth(pixelBuffer) 148 | let height = CVPixelBufferGetHeight(pixelBuffer) 149 | let formatType = CVPixelBufferGetPixelFormatType(pixelBuffer) 150 | 151 | // (width != self._bufferWidth) || (height != self._bufferHeight) || (formatType != self._bufferFormatType) || 152 | let bufferChanged: Bool = self._pixelBufferPool == nil 153 | if !bufferChanged { 154 | return 155 | } 156 | 157 | var renderWidth = width 158 | var renderHeight = height 159 | 160 | switch orientation { 161 | case .up: 162 | fallthrough 163 | case .upMirrored: 164 | fallthrough 165 | case .down: 166 | fallthrough 167 | case .downMirrored: 168 | renderWidth = height 169 | renderHeight = width 170 | break 171 | default: 172 | break 173 | } 174 | 175 | let poolAttributes: [String : AnyObject] = [String(kCVPixelBufferPoolMinimumBufferCountKey): NSNumber(integerLiteral: 1)] 176 | let pixelBufferAttributes: [String : 
AnyObject] = [String(kCVPixelBufferPixelFormatTypeKey) : NSNumber(integerLiteral: Int(formatType)), 177 | String(kCVPixelBufferWidthKey) : NSNumber(value: renderWidth), 178 | String(kCVPixelBufferHeightKey) : NSNumber(value: renderHeight), 179 | String(kCVPixelBufferMetalCompatibilityKey) : NSNumber(booleanLiteral: true), 180 | String(kCVPixelBufferIOSurfacePropertiesKey) : [:] as AnyObject ] 181 | 182 | var pixelBufferPool: CVPixelBufferPool? = nil 183 | if CVPixelBufferPoolCreate(kCFAllocatorDefault, poolAttributes as CFDictionary, pixelBufferAttributes as CFDictionary, &pixelBufferPool) == kCVReturnSuccess { 184 | self._bufferWidth = renderWidth 185 | self._bufferHeight = renderHeight 186 | self._bufferFormatType = formatType 187 | self._pixelBufferPool = pixelBufferPool 188 | } 189 | } 190 | 191 | } 192 | 193 | // MARK: - rendering 194 | 195 | @available(iOS 11.0, *) 196 | extension NextLevelBufferRenderer { 197 | 198 | #if USE_ARKIT 199 | 200 | /// SCNSceneRendererDelegate hook for rendering 201 | /// 202 | /// - Parameters: 203 | /// - renderer: SCNSceneRendererDelegate renderer 204 | /// - scene: SCNSceneRendererDelegate scene 205 | /// - time: SCNSceneRendererDelegate time 206 | public func renderer(_ renderer: SCNSceneRenderer, didRenderScene scene: SCNScene, atTime time: TimeInterval) { 207 | guard let arView = self._arView, 208 | let pixelBuffer = arView.session.currentFrame?.capturedImage, 209 | let pointOfView = arView.pointOfView, 210 | let device = self._device else { 211 | return 212 | } 213 | 214 | self.setupContextIfNecessary() 215 | 216 | CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly) 217 | 218 | // setup the offscreen texture now that the size was determined 219 | if self._texture == nil && self._bufferWidth > 0 && self._bufferHeight > 0 { 220 | let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .rgba8Unorm, 221 | width: self._bufferWidth, 222 | height: self._bufferHeight, 223 | mipmapped: false) 224 | textureDescriptor.usage = [.shaderRead, .renderTarget] 225 | textureDescriptor.storageMode = .private 226 | textureDescriptor.textureType = .type2D 227 | textureDescriptor.sampleCount = 1 228 | self._texture = device.makeTexture(descriptor: textureDescriptor) 229 | } 230 | 231 | if let commandBuffer = self._commandQueue?.makeCommandBuffer(), 232 | let texture = self._texture { 233 | self._renderPassDescriptor.colorAttachments[0].texture = texture 234 | self._renderPassDescriptor.colorAttachments[0].loadAction = .clear 235 | self._renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(0, 0, 0, 1.0); 236 | self._renderPassDescriptor.colorAttachments[0].storeAction = .store 237 | 238 | let presentationAspectRatio = self._presentationFrame.size.width > self._presentationFrame.size.height ? 239 | self._presentationFrame.size.width / self._presentationFrame.size.height : 240 | self._presentationFrame.size.height / self._presentationFrame.size.width 241 | 242 | let textureAspectRatio = texture.width > texture.height ? 
243 | CGFloat(texture.width) / CGFloat(texture.height) : 244 | CGFloat(texture.height) / CGFloat(texture.width) 245 | 246 | var viewport = CGRect(x: 0, y: 0, width: texture.width, height: texture.height) 247 | if presentationAspectRatio != textureAspectRatio { 248 | // aspectFill 249 | // print("texture \(texture.width) \(texture.height) \(self._presentationFrame.size.width) \(self._presentationFrame.size.height)") 250 | let scaleFactorWidth = CGFloat(CGFloat(texture.width) / self._presentationFrame.size.width) 251 | viewport = CGRect(x: 0, y: Int(CGFloat(texture.height - Int(self._presentationFrame.size.height * scaleFactorWidth)) * 0.5), 252 | width: texture.width, height: Int(self._presentationFrame.size.height * scaleFactorWidth) ) 253 | } 254 | 255 | self._renderer?.scene = scene 256 | self._renderer?.pointOfView = pointOfView 257 | self._renderer?.render(atTime: time, viewport: viewport, commandBuffer: commandBuffer, passDescriptor: self._renderPassDescriptor) 258 | 259 | commandBuffer.commit() 260 | } 261 | 262 | let orientation: CGImagePropertyOrientation = .downMirrored 263 | 264 | self.setupPixelBufferPoolIfNecessary(pixelBuffer, orientation: orientation) 265 | 266 | if let pixelBufferPool = self._pixelBufferPool, 267 | let texture = self._texture { 268 | if let pixelBufferOutput = self._ciContext?.createPixelBuffer(fromMTLTexture: texture, withOrientation: orientation, pixelBufferPool: pixelBufferPool) { 269 | self._videoBufferOutput = pixelBufferOutput 270 | } 271 | } 272 | 273 | CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.readOnly) 274 | } 275 | 276 | #endif 277 | 278 | } 279 | -------------------------------------------------------------------------------- /ARCamera/Pods/NextLevel/Sources/NextLevelClip.swift: -------------------------------------------------------------------------------- 1 | // 2 | // NextLevelClip.swift 3 | // NextLevel (http://nextlevel.engineering/) 4 | // 5 | // Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com) 6 | // 7 | // Permission is hereby granted, free of charge, to any person obtaining a copy 8 | // of this software and associated documentation files (the "Software"), to deal 9 | // in the Software without restriction, including without limitation the rights 10 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | // copies of the Software, and to permit persons to whom the Software is 12 | // furnished to do so, subject to the following conditions: 13 | // 14 | // The above copyright notice and this permission notice shall be included in all 15 | // copies or substantial portions of the Software. 16 | // 17 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | // SOFTWARE. 
24 | // 25 | 26 | import UIKit 27 | import Foundation 28 | import AVFoundation 29 | 30 | // NextLevelClip dictionary representation keys 31 | 32 | public let NextLevelClipFilenameKey = "NextLevelClipFilenameKey" 33 | public let NextLevelClipInfoDictKey = "NextLevelClipInfoDictKey" 34 | 35 | /// NextLevelClip, an object for managing a single media clip 36 | public class NextLevelClip { 37 | 38 | /// Unique identifier for a clip 39 | public var uuid: UUID { 40 | get { 41 | return self._uuid 42 | } 43 | } 44 | 45 | /// URL of the clip 46 | public var url: URL? { 47 | didSet { 48 | self._asset = nil 49 | } 50 | } 51 | 52 | /// True, if the clip's file exists 53 | public var fileExists: Bool { 54 | get { 55 | if let url = self.url { 56 | return FileManager.default.fileExists(atPath: url.path) 57 | } 58 | return false 59 | } 60 | } 61 | 62 | /// `AVAsset` of the clip 63 | public var asset: AVAsset? { 64 | get { 65 | if let url = self.url { 66 | if self._asset == nil { 67 | self._asset = AVAsset(url: url) 68 | } 69 | } 70 | return self._asset 71 | } 72 | } 73 | 74 | /// Duration of the clip, otherwise invalid. 75 | public var duration: CMTime { 76 | get { 77 | return self.asset?.duration ?? CMTime.zero 78 | } 79 | } 80 | 81 | /// Set to true if the clip's audio should be muted in the merged file 82 | public var isMutedOnMerge = false 83 | 84 | /// If it doesn't already exist, generates a thumbnail image of the clip. 85 | public var thumbnailImage: UIImage? { 86 | get { 87 | guard self._thumbnailImage == nil else { 88 | return self._thumbnailImage 89 | } 90 | 91 | if let asset = self.asset { 92 | let imageGenerator: AVAssetImageGenerator = AVAssetImageGenerator(asset: asset) 93 | imageGenerator.appliesPreferredTrackTransform = true 94 | 95 | do { 96 | let cgimage: CGImage = try imageGenerator.copyCGImage(at: CMTime.zero, actualTime: nil) 97 | let uiimage: UIImage = UIImage(cgImage: cgimage) 98 | self._thumbnailImage = uiimage 99 | } catch { 100 | print("NextLevel, unable to generate lastFrameImage for \(String(describing: self.url?.absoluteString)))") 101 | self._thumbnailImage = nil 102 | } 103 | } 104 | return self._thumbnailImage 105 | } 106 | } 107 | 108 | /// If it doesn't already exist, generates an image for the last frame of the clip. 109 | public var lastFrameImage: UIImage? { 110 | get { 111 | guard self._lastFrameImage == nil, 112 | let asset = self.asset 113 | else { 114 | return self._lastFrameImage 115 | } 116 | 117 | let imageGenerator: AVAssetImageGenerator = AVAssetImageGenerator(asset: asset) 118 | imageGenerator.appliesPreferredTrackTransform = true 119 | 120 | do { 121 | let cgimage: CGImage = try imageGenerator.copyCGImage(at: self.duration, actualTime: nil) 122 | let uiimage: UIImage = UIImage(cgImage: cgimage) 123 | self._lastFrameImage = uiimage 124 | } catch { 125 | print("NextLevel, unable to generate lastFrameImage for \(String(describing: self.url?.absoluteString))") 126 | self._lastFrameImage = nil 127 | } 128 | 129 | return self._lastFrameImage 130 | } 131 | } 132 | 133 | /// Frame rate at which the asset was recorded. 134 | public var frameRate: Float { 135 | get { 136 | if let tracks = self.asset?.tracks(withMediaType: AVMediaType.video), 137 | tracks.isEmpty == false { 138 | if let videoTrack = tracks.first { 139 | return videoTrack.nominalFrameRate 140 | } 141 | } 142 | return 0 143 | } 144 | } 145 | 146 | /// Dictionary containing metadata about the clip. 147 | public var infoDict: [String: Any]? 
{ 148 | get { 149 | return self._infoDict 150 | } 151 | } 152 | 153 | /// Dictionary containing data for re-initialization of the clip. 154 | public var representationDict: [String:Any]? { 155 | get { 156 | if let infoDict = self.infoDict, 157 | let url = self.url { 158 | return [NextLevelClipFilenameKey:url.lastPathComponent, 159 | NextLevelClipInfoDictKey:infoDict] 160 | } else if let url = self.url { 161 | return [NextLevelClipFilenameKey:url.lastPathComponent] 162 | } else { 163 | return nil 164 | } 165 | } 166 | } 167 | 168 | // MARK: - class functions 169 | 170 | /// Class method initializer for a clip URL 171 | /// 172 | /// - Parameters: 173 | /// - filename: Filename for the media asset 174 | /// - directoryPath: Directory path for the media asset 175 | /// - Returns: Returns a URL for the designated clip, otherwise nil 176 | public class func clipURL(withFilename filename: String, directoryPath: String) -> URL? { 177 | var clipURL = URL(fileURLWithPath: directoryPath) 178 | clipURL.appendPathComponent(filename) 179 | return clipURL 180 | } 181 | 182 | /// Class method initializer for a NextLevelClip 183 | /// 184 | /// - Parameters: 185 | /// - url: URL of the media asset 186 | /// - infoDict: Dictionary containing metadata about the clip 187 | /// - Returns: Returns a NextLevelClip 188 | public class func clip(withUrl url: URL?, infoDict: [String: Any]?) -> NextLevelClip { 189 | return NextLevelClip(url: url, infoDict: infoDict) 190 | } 191 | 192 | // MARK: - private instance vars 193 | 194 | internal var _uuid: UUID = UUID() 195 | internal var _asset: AVAsset? 196 | internal var _infoDict: [String : Any]? 197 | internal var _thumbnailImage: UIImage? 198 | internal var _lastFrameImage: UIImage? 199 | 200 | // MARK: - object lifecycle 201 | 202 | /// Initialize a clip from a URL and dictionary. 203 | /// 204 | /// - Parameters: 205 | /// - url: URL and filename of the specified media asset 206 | /// - infoDict: Dictionary with NextLevelClip metadata information 207 | public convenience init(url: URL?, infoDict: [String : Any]?) { 208 | self.init() 209 | self.url = url 210 | self._infoDict = infoDict 211 | } 212 | 213 | /// Initialize a clip from a dictionary representation and directory name 214 | /// 215 | /// - Parameters: 216 | /// - directoryPath: Directory where the media asset is located 217 | /// - representationDict: Dictionary containing defining metadata about the clip 218 | public convenience init(directoryPath: String, representationDict: [String : Any]?) { 219 | if let clipDict = representationDict, 220 | let filename = clipDict[NextLevelClipFilenameKey] as? String, 221 | let url: URL = NextLevelClip.clipURL(withFilename: filename, directoryPath: directoryPath) { 222 | let infoDict = clipDict[NextLevelClipInfoDictKey] as? [String : Any] 223 | self.init(url: url, infoDict: infoDict) 224 | } else { 225 | self.init() 226 | } 227 | } 228 | 229 | deinit { 230 | self._asset = nil 231 | self._infoDict = nil 232 | self._thumbnailImage = nil 233 | self._lastFrameImage = nil 234 | } 235 | 236 | // MARK: - functions 237 | 238 | /// Removes the associated file representation on disk. 
239 | public func removeFile() { 240 | do { 241 | if let url = self.url { 242 | try FileManager.default.removeItem(at: url) 243 | self.url = nil 244 | } 245 | } catch { 246 | print("NextLevel, error deleting a clip's file \(String(describing: self.url?.absoluteString))") 247 | } 248 | } 249 | 250 | } 251 | -------------------------------------------------------------------------------- /ARCamera/Pods/NextLevel/Sources/NextLevelConfiguration.swift: -------------------------------------------------------------------------------- 1 | // 2 | // NextLevelConfiguration.swift 3 | // NextLevel (http://nextlevel.engineering/) 4 | // 5 | // Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com) 6 | // 7 | // Permission is hereby granted, free of charge, to any person obtaining a copy 8 | // of this software and associated documentation files (the "Software"), to deal 9 | // in the Software without restriction, including without limitation the rights 10 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | // copies of the Software, and to permit persons to whom the Software is 12 | // furnished to do so, subject to the following conditions: 13 | // 14 | // The above copyright notice and this permission notice shall be included in all 15 | // copies or substantial portions of the Software. 16 | // 17 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | // SOFTWARE. 24 | // 25 | 26 | import UIKit 27 | import Foundation 28 | import AVFoundation 29 | #if USE_ARKIT 30 | import ARKit 31 | #endif 32 | 33 | // MARK: - MediaTypeConfiguration 34 | 35 | /// NextLevelConfiguration, media capture configuration object 36 | public class NextLevelConfiguration { 37 | 38 | // MARK: - types 39 | 40 | /// Aspect ratio, specifies dimensions for video output 41 | /// 42 | /// - active: active preset or specified dimensions (default) 43 | /// - square: 1:1 square 44 | /// - standard: 3:4 45 | /// - standardLandscape: 4:3, landscape 46 | /// - widescreen: 9:16 HD 47 | /// - widescreenLandscape: 16:9 HD landscape 48 | /// - instagram: 4:5 Instagram 49 | /// - instagramLandscape: 5:4 Instagram landscape 50 | /// - cinematic: 2.35:1 cinematic 51 | /// - custom: custom aspect ratio 52 | public enum AspectRatio: CustomStringConvertible { 53 | case active 54 | case square 55 | case standard 56 | case standardLandscape 57 | case widescreen 58 | case widescreenLandscape 59 | case instagram 60 | case instagramLandscape 61 | case cinematic 62 | case custom(w: Int, h: Int) 63 | 64 | public var dimensions: CGSize? 
{ 65 | get { 66 | switch self { 67 | case .active: 68 | return nil 69 | case .square: 70 | return CGSize(width: 1, height: 1) 71 | case .standard: 72 | return CGSize(width: 3, height: 4) 73 | case .standardLandscape: 74 | return CGSize(width: 4, height: 3) 75 | case .widescreen: 76 | return CGSize(width: 9, height: 16) 77 | case .widescreenLandscape: 78 | return CGSize(width: 16, height: 9) 79 | case .instagram: 80 | return CGSize(width: 4, height: 5) 81 | case .instagramLandscape: 82 | return CGSize(width: 5, height: 4) 83 | case .cinematic: 84 | return CGSize(width: 2.35, height: 1) 85 | case .custom(let w, let h): 86 | return CGSize(width: w, height: h) 87 | } 88 | } 89 | } 90 | 91 | public var ratio: CGFloat? { 92 | get { 93 | switch self { 94 | case .active: 95 | return nil 96 | case .square: 97 | return 1 98 | case .custom(let w, let h): 99 | return CGFloat(h) / CGFloat(w) 100 | default: 101 | if let w = self.dimensions?.width, 102 | let h = self.dimensions?.height { 103 | return h / w 104 | } else { 105 | return nil 106 | } 107 | } 108 | } 109 | } 110 | 111 | public var description: String { 112 | get { 113 | switch self { 114 | case .active: 115 | return "Active" 116 | case .square: 117 | return "1:1 Square" 118 | case .standard: 119 | return "3:4 Standard" 120 | case .standardLandscape: 121 | return "4:3 Standard Landscape" 122 | case .widescreen: 123 | return "9:16 Widescreen HD" 124 | case .widescreenLandscape: 125 | return "16:9 Widescreen Landscape HD" 126 | case .instagram: 127 | return "4:5 Instagram" 128 | case .instagramLandscape: 129 | return "5:4 Instagram Landscape" 130 | case .cinematic: 131 | return "2.35:1 Cinematic" 132 | case .custom(let w, let h): 133 | return "\(w):\(h) Custom" 134 | } 135 | } 136 | } 137 | } 138 | 139 | // MARK: - properties 140 | 141 | /// AVFoundation configuration preset, see AVCaptureSession.h 142 | public var preset: AVCaptureSession.Preset 143 | 144 | /// Setting an options dictionary overrides all other properties set on a configuration object but allows full customization 145 | public var options: [String : Any]? 146 | 147 | // MARK: - object lifecycle 148 | 149 | public init() { 150 | self.preset = AVCaptureSession.Preset.high 151 | self.options = nil 152 | } 153 | 154 | // MARK: - func 155 | 156 | /// Provides an AVFoundation friendly dictionary for configuring output. 157 | /// 158 | /// - Parameter sampleBuffer: Sample buffer for extracting configuration information 159 | /// - Returns: Configuration dictionary for AVFoundation 160 | public func avcaptureSettingsDictionary(sampleBuffer: CMSampleBuffer? = nil, pixelBuffer: CVPixelBuffer? = nil) -> [String: Any]? { 161 | return self.options 162 | } 163 | } 164 | 165 | // MARK: - VideoConfiguration 166 | 167 | /// NextLevelVideoConfiguration, video capture configuration object 168 | public class NextLevelVideoConfiguration: NextLevelConfiguration { 169 | 170 | // MARK: - types 171 | 172 | public static let VideoBitRateDefault: Int = 2000000 173 | 174 | // MARK: - properties 175 | 176 | /// Average video bit rate (bits per second), AV dictionary key AVVideoAverageBitRateKey 177 | public var bitRate: Int = NextLevelVideoConfiguration.VideoBitRateDefault 178 | 179 | /// Dimensions for video output, AV dictionary keys AVVideoWidthKey, AVVideoHeightKey 180 | public var dimensions: CGSize? 
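// A minimal usage sketch for this video configuration object (editorial addition; the
// values shown are illustrative assumptions, not library defaults):
//
//     let videoConfiguration = NextLevelVideoConfiguration()
//     videoConfiguration.bitRate = 6_000_000          // target roughly 6 Mbps average bit rate
//     videoConfiguration.aspectRatio = .widescreen    // 9:16 HD output
//     videoConfiguration.maxKeyFrameInterval = 30     // one key frame about every 30 frames
//     videoConfiguration.maximumCaptureDuration = CMTime(seconds: 15, preferredTimescale: 600)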
181 | 182 | /// Output aspect ratio automatically sizes output dimensions, `active` indicates NextLevelVideoConfiguration.preset or NextLevelVideoConfiguration.dimensions 183 | public var aspectRatio: AspectRatio = .active 184 | 185 | /// Video output transform for display 186 | public var transform: CGAffineTransform = .identity 187 | 188 | /// Codec used to encode video, AV dictionary key AVVideoCodecKey 189 | public var codec: AVVideoCodecType 190 | 191 | /// Profile level for the configuration, AV dictionary key AVVideoProfileLevelKey (H.264 codec only) 192 | public var profileLevel: String? 193 | 194 | /// Video scaling mode, AV dictionary key AVVideoScalingModeKey 195 | /// (AVVideoScalingModeResizeAspectFill, AVVideoScalingModeResizeAspect, AVVideoScalingModeResize, AVVideoScalingModeFit) 196 | public var scalingMode: String = AVVideoScalingModeResizeAspectFill 197 | 198 | /// Maximum interval between key frames, 1 meaning key frames only, AV dictionary key AVVideoMaxKeyFrameIntervalKey 199 | public var maxKeyFrameInterval: Int? 200 | 201 | /// Video time scale, value/timescale = seconds 202 | public var timescale: Float64? 203 | 204 | /// Maximum recording duration, when set, session finishes automatically 205 | public var maximumCaptureDuration: CMTime? 206 | 207 | // MARK: - object lifecycle 208 | 209 | public override init() { 210 | if #available(iOS 11.0, *) { 211 | self.codec = AVVideoCodecType.h264 212 | } else { 213 | self.codec = AVVideoCodecType(rawValue: AVVideoCodecH264) 214 | } 215 | super.init() 216 | } 217 | 218 | // MARK: - func 219 | 220 | /// Provides an AVFoundation friendly dictionary for configuring output. 221 | /// 222 | /// - Parameter sampleBuffer: Sample buffer for extracting configuration information 223 | /// - Returns: Video configuration dictionary for AVFoundation 224 | public override func avcaptureSettingsDictionary(sampleBuffer: CMSampleBuffer? = nil, pixelBuffer: CVPixelBuffer? = nil) -> [String : Any]? 
{ 225 | 226 | // if the client specified custom options, use those instead 227 | if let options = self.options { 228 | return options 229 | } 230 | 231 | var config: [String : Any] = [:] 232 | 233 | if let dimensions = self.dimensions { 234 | config[AVVideoWidthKey] = NSNumber(integerLiteral: Int(dimensions.width)) 235 | config[AVVideoHeightKey] = NSNumber(integerLiteral: Int(dimensions.height)) 236 | } else if let sampleBuffer = sampleBuffer, 237 | let formatDescription: CMFormatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) { 238 | 239 | // TODO: this is incorrect and needs to be fixed 240 | let videoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription) 241 | switch self.aspectRatio { 242 | case .standard: 243 | config[AVVideoWidthKey] = NSNumber(integerLiteral: Int(videoDimensions.width)) 244 | config[AVVideoHeightKey] = NSNumber(integerLiteral: Int(videoDimensions.width * 3 / 4)) 245 | break 246 | case .widescreen: 247 | config[AVVideoWidthKey] = NSNumber(integerLiteral: Int(videoDimensions.width)) 248 | config[AVVideoHeightKey] = NSNumber(integerLiteral: Int(videoDimensions.width * 9 / 16)) 249 | break 250 | case .square: 251 | let min = Swift.min(videoDimensions.width, videoDimensions.height) 252 | config[AVVideoWidthKey] = NSNumber(integerLiteral: Int(min)) 253 | config[AVVideoHeightKey] = NSNumber(integerLiteral: Int(min)) 254 | break 255 | case .custom(let w, let h): 256 | config[AVVideoWidthKey] = NSNumber(integerLiteral: Int(videoDimensions.width)) 257 | config[AVVideoHeightKey] = NSNumber(integerLiteral: Int(videoDimensions.width * Int32(h) / Int32(w))) 258 | break 259 | case .active: 260 | fallthrough 261 | default: 262 | config[AVVideoWidthKey] = NSNumber(integerLiteral: Int(videoDimensions.width)) 263 | config[AVVideoHeightKey] = NSNumber(integerLiteral: Int(videoDimensions.height)) 264 | break 265 | } 266 | 267 | } else if let pixelBuffer = pixelBuffer { 268 | let width = CVPixelBufferGetWidth(pixelBuffer) 269 | let height = CVPixelBufferGetHeight(pixelBuffer) 270 | config[AVVideoWidthKey] = NSNumber(integerLiteral: Int(width)) 271 | config[AVVideoHeightKey] = NSNumber(integerLiteral: Int(height)) 272 | } 273 | 274 | config = self.update(config: config) 275 | 276 | config[AVVideoCodecKey] = self.codec 277 | config[AVVideoScalingModeKey] = self.scalingMode 278 | 279 | var compressionDict: [String : Any] = [:] 280 | compressionDict[AVVideoAverageBitRateKey] = NSNumber(integerLiteral: self.bitRate) 281 | compressionDict[AVVideoAllowFrameReorderingKey] = NSNumber(booleanLiteral: false) 282 | compressionDict[AVVideoExpectedSourceFrameRateKey] = NSNumber(integerLiteral: 30) 283 | if let profileLevel = self.profileLevel { 284 | compressionDict[AVVideoProfileLevelKey] = profileLevel 285 | } 286 | if let maxKeyFrameInterval = self.maxKeyFrameInterval { 287 | compressionDict[AVVideoMaxKeyFrameIntervalKey] = NSNumber(integerLiteral: maxKeyFrameInterval) 288 | } 289 | 290 | config[AVVideoCompressionPropertiesKey] = (compressionDict as NSDictionary) 291 | return config 292 | } 293 | 294 | /// Update configuration with size values. 295 | /// With MPEG-2 and MPEG-4 (and other DCT based codecs), compression is applied to a grid of 16x16 pixel macroblocks. 296 | /// With MPEG-4 Part 10 (AVC/H.264), multiple of 4 and 8 also works, but 16 is most efficient. 297 | /// So, to prevent appearing on broken(green) pixels, the sizes of captured video must be divided by 4, 8, or 16. 
298 | /// 299 | /// - Parameters: 300 | /// - config: Input configuration dictionary 301 | /// - divisibleBy: Divisor 302 | /// - Returns: Configuration with appropriately divided sizes 303 | private func update(config: [String : Any], withSizeValuesDivisibleBy divisibleBy: Int = 16) -> [String : Any] { 304 | var config = config 305 | 306 | if let width = config[AVVideoWidthKey] as? Int { 307 | let newWidth = width - (width % divisibleBy) 308 | config[AVVideoWidthKey] = NSNumber(integerLiteral: newWidth) 309 | } 310 | if let height = config[AVVideoHeightKey] as? Int { 311 | let newHeight = height - (height % divisibleBy) 312 | config[AVVideoHeightKey] = NSNumber(integerLiteral: newHeight) 313 | } 314 | 315 | return config 316 | } 317 | 318 | } 319 | 320 | // MARK: - AudioConfiguration 321 | 322 | /// NextLevelAudioConfiguration, audio capture configuration object 323 | public class NextLevelAudioConfiguration: NextLevelConfiguration { 324 | 325 | // MARK: - types 326 | 327 | public static let AudioBitRateDefault: Int = 96000 328 | public static let AudioSampleRateDefault: Float64 = 44100 329 | public static let AudioChannelsCountDefault: Int = 2 330 | 331 | // MARK: - properties 332 | 333 | /// Audio bit rate, AV dictionary key AVEncoderBitRateKey 334 | public var bitRate: Int = NextLevelAudioConfiguration.AudioBitRateDefault 335 | 336 | /// Sample rate in hertz, AV dictionary key AVSampleRateKey 337 | public var sampleRate: Float64? 338 | 339 | /// Number of channels, AV dictionary key AVNumberOfChannelsKey 340 | public var channelsCount: Int? 341 | 342 | /// Audio data format identifier, AV dictionary key AVFormatIDKey 343 | /// https://developer.apple.com/reference/coreaudio/1613060-core_audio_data_types 344 | public var format: AudioFormatID = kAudioFormatMPEG4AAC 345 | 346 | // MARK: - object lifecycle 347 | 348 | public override init() { 349 | super.init() 350 | } 351 | 352 | // MARK: - funcs 353 | 354 | /// Provides an AVFoundation friendly dictionary for configuring output. 355 | /// 356 | /// - Parameter sampleBuffer: Sample buffer for extracting configuration information 357 | /// - Returns: Audio configuration dictionary for AVFoundation 358 | public override func avcaptureSettingsDictionary(sampleBuffer: CMSampleBuffer? = nil, pixelBuffer: CVPixelBuffer? = nil) -> [String: Any]? { 359 | // if the client specified custom options, use those instead 360 | if let options = self.options { 361 | return options 362 | } 363 | 364 | var config: [String : Any] = [AVEncoderBitRateKey : NSNumber(integerLiteral: self.bitRate)] 365 | 366 | if let sampleBuffer = sampleBuffer, let formatDescription: CMFormatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) { 367 | if let _ = self.sampleRate, let _ = self.channelsCount { 368 | // loading user provided settings after buffer use 369 | } else if let streamBasicDescription = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription) { 370 | self.sampleRate = streamBasicDescription.pointee.mSampleRate 371 | self.channelsCount = Int(streamBasicDescription.pointee.mChannelsPerFrame) 372 | } 373 | 374 | var layoutSize: Int = 0 375 | if let currentChannelLayout = CMAudioFormatDescriptionGetChannelLayout(formatDescription, sizeOut: &layoutSize) { 376 | let currentChannelLayoutData = layoutSize > 0 ? Data(bytes: currentChannelLayout, count:layoutSize) : Data() 377 | config[AVChannelLayoutKey] = currentChannelLayoutData 378 | } 379 | } 380 | 381 | if let sampleRate = self.sampleRate { 382 | config[AVSampleRateKey] = sampleRate == 0 ? 
NSNumber(value: NextLevelAudioConfiguration.AudioSampleRateDefault) : NSNumber(value: sampleRate)
383 |         } else {
384 |             config[AVSampleRateKey] = NSNumber(value: NextLevelAudioConfiguration.AudioSampleRateDefault)
385 |         }
386 | 
387 |         if let channels = self.channelsCount {
388 |             config[AVNumberOfChannelsKey] = channels == 0 ? NSNumber(integerLiteral: NextLevelAudioConfiguration.AudioChannelsCountDefault) : NSNumber(integerLiteral: channels)
389 |         } else {
390 |             config[AVNumberOfChannelsKey] = NSNumber(integerLiteral: NextLevelAudioConfiguration.AudioChannelsCountDefault)
391 |         }
392 | 
393 |         config[AVFormatIDKey] = NSNumber(value: self.format as UInt32)
394 | 
395 |         return config
396 |     }
397 | }
398 | 
399 | // MARK: - PhotoConfiguration
400 | 
401 | /// NextLevelPhotoConfiguration, photo capture configuration object
402 | public class NextLevelPhotoConfiguration : NextLevelConfiguration {
403 | 
404 |     /// Codec used to encode photo, AV dictionary key AVVideoCodecKey
405 |     public var codec: AVVideoCodecType
406 | 
407 |     /// When true, NextLevel should generate a thumbnail for the photo
408 |     public var generateThumbnail: Bool = false
409 | 
410 |     /// Enables high resolution capture
411 |     public var isHighResolutionEnabled: Bool = false
412 | 
413 |     /// Enables depth data capture with photo
414 |     #if USE_TRUE_DEPTH
415 |     public var isDepthDataEnabled: Bool = false
416 |     #endif
417 | 
418 |     /// Enables portrait effects matte output for the photo
419 |     public var isPortraitEffectsMatteEnabled: Bool = false
420 | 
421 |     // MARK: - ivars
422 | 
423 |     // change flashMode with NextLevel.flashMode
424 |     internal var flashMode: AVCaptureDevice.FlashMode = .off
425 | 
426 |     // MARK: - object lifecycle
427 | 
428 |     override init() {
429 |         if #available(iOS 11.0, *) {
430 |             self.codec = AVVideoCodecType.hevc
431 |         } else {
432 |             self.codec = AVVideoCodecType(rawValue: AVVideoCodecJPEG)
433 |         }
434 |         super.init()
435 |     }
436 | 
437 |     // MARK: - funcs
438 | 
439 |     /// Provides an AVFoundation friendly dictionary for configuring output.
440 |     ///
441 |     /// - Returns: Configuration dictionary for AVFoundation
442 |     public func avcaptureDictionary() -> [String: Any]? {
443 |         if let options = self.options {
444 |             return options
445 |         } else {
446 |             var config: [String: Any] = [AVVideoCodecKey: self.codec]
447 |             if self.generateThumbnail {
448 |                 let settings = AVCapturePhotoSettings()
449 |                 // iOS 11 GM fix
450 |                 // https://forums.developer.apple.com/thread/86810
451 |                 if settings.__availablePreviewPhotoPixelFormatTypes.count > 0 {
452 |                     if let formatType = settings.__availablePreviewPhotoPixelFormatTypes.first {
453 |                         config[kCVPixelBufferPixelFormatTypeKey as String] = formatType
454 |                     }
455 |                 }
456 |             }
457 |             return config
458 |         }
459 |     }
460 | }
461 | 
462 | // MARK: - ARConfiguration
463 | 
464 | @available(iOS 11.0, *)
465 | /// NextLevelARConfiguration, augmented reality configuration object
466 | public class NextLevelARConfiguration : NextLevelConfiguration {
467 | 
468 |     #if USE_ARKIT
469 |     /// ARKit configuration
470 |     public var config: ARConfiguration?
471 | 
472 |     /// ARKit session, note: the delegate queue will be overridden
473 |     public var session: ARSession?
474 | 
475 |     /// Session run options
476 |     public var runOptions: ARSession.RunOptions?
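// A minimal usage sketch for this AR configuration object (editorial addition; the
// ARKit session and world-tracking configuration shown are illustrative assumptions):
//
//     let arConfiguration = NextLevelARConfiguration()
//     arConfiguration.config = ARWorldTrackingConfiguration()
//     arConfiguration.session = ARSession()
//     arConfiguration.runOptions = [.resetTracking, .removeExistingAnchors]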
477 | #endif 478 | 479 | } 480 | 481 | -------------------------------------------------------------------------------- /ARCamera/Pods/NextLevel/Sources/NextLevelGIFCreator.swift: -------------------------------------------------------------------------------- 1 | // 2 | // NextLevelGIFCreator.swift 3 | // NextLevel (http://nextlevel.engineering/) 4 | // 5 | // Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com) 6 | // 7 | // Permission is hereby granted, free of charge, to any person obtaining a copy 8 | // of this software and associated documentation files (the "Software"), to deal 9 | // in the Software without restriction, including without limitation the rights 10 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | // copies of the Software, and to permit persons to whom the Software is 12 | // furnished to do so, subject to the following conditions: 13 | // 14 | // The above copyright notice and this permission notice shall be included in all 15 | // copies or substantial portions of the Software. 16 | // 17 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | // SOFTWARE. 24 | // 25 | 26 | import UIKit 27 | import Foundation 28 | import AVFoundation 29 | import ImageIO 30 | import MobileCoreServices 31 | 32 | private let NextLevelGIFCreatorQueueIdentifier = "engineering.NextLevel.GIF" 33 | 34 | public class NextLevelGIFCreator { 35 | 36 | // MARK: - properties 37 | 38 | /// Output directory where the GIF is created, default is tmp 39 | public var outputDirectory: String 40 | 41 | // MARK: - ivars 42 | 43 | fileprivate var _outputFilePath: URL? 44 | fileprivate var _fileExtension: String = "gif" 45 | 46 | fileprivate var _queue: DispatchQueue = DispatchQueue(label: NextLevelGIFCreatorQueueIdentifier, attributes: .concurrent) 47 | 48 | // MARK: - object lifecycle 49 | 50 | public init() { 51 | self.outputDirectory = NSTemporaryDirectory() 52 | } 53 | 54 | // MARK: - internal 55 | 56 | fileprivate func createOutputFilePath() -> URL? { 57 | let filename = "\(Date().iso8601())-NL.\(self._fileExtension)" 58 | 59 | var gifURL = URL(fileURLWithPath: outputDirectory, isDirectory: true) 60 | gifURL.appendPathComponent(filename) 61 | return gifURL 62 | } 63 | 64 | // MARK: - factory 65 | 66 | /// Creates an animated GIF from a sequence of images. 67 | /// 68 | /// - Parameters: 69 | /// - images: Frames for creating the sequence 70 | /// - delay: Time between each frame 71 | /// - loopCount: Number of loops built into the sequence, default is 0 72 | /// - completionHandler: Completion handler called when the operation finishes with success or failure 73 | public func create(gifWithImages images: [UIImage], delay: Float, loopCount: Int = 0, completionHandler: ((_ completed: Bool, _ gifPath: URL?) -> Void)? 
= nil) { 74 | guard let outputFilePath = self.createOutputFilePath() else { 75 | DispatchQueue.main.async { 76 | completionHandler?(false, nil) 77 | } 78 | return 79 | } 80 | 81 | self._outputFilePath = outputFilePath 82 | self._queue.async { 83 | guard let destination = CGImageDestinationCreateWithURL(outputFilePath as CFURL, kUTTypeGIF, images.count, nil) else { 84 | DispatchQueue.main.async { 85 | completionHandler?(false, nil) 86 | } 87 | return 88 | } 89 | 90 | let gifProperties: CFDictionary = [kCGImagePropertyGIFDictionary as String: [kCGImagePropertyGIFLoopCount as String: loopCount]] as CFDictionary 91 | let frameProperties: CFDictionary = [kCGImagePropertyGIFDictionary as String: [kCGImagePropertyGIFDelayTime as String: delay]] as CFDictionary 92 | 93 | CGImageDestinationSetProperties(destination, gifProperties) 94 | for image in images { 95 | if let cgImage = image.cgImage { 96 | CGImageDestinationAddImage(destination, cgImage, frameProperties) 97 | } 98 | } 99 | 100 | if CGImageDestinationFinalize(destination) { 101 | DispatchQueue.main.async { 102 | completionHandler?(true, self._outputFilePath) 103 | } 104 | } else { 105 | DispatchQueue.main.async { 106 | completionHandler?(false, nil) 107 | } 108 | } 109 | } 110 | } 111 | 112 | } 113 | -------------------------------------------------------------------------------- /ARCamera/Pods/NextLevel/Sources/NextLevelProtocols.swift: -------------------------------------------------------------------------------- 1 | // 2 | // NextLevelProtocols.swift 3 | // NextLevel (http://nextlevel.engineering/) 4 | // 5 | // Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com) 6 | // 7 | // Permission is hereby granted, free of charge, to any person obtaining a copy 8 | // of this software and associated documentation files (the "Software"), to deal 9 | // in the Software without restriction, including without limitation the rights 10 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | // copies of the Software, and to permit persons to whom the Software is 12 | // furnished to do so, subject to the following conditions: 13 | // 14 | // The above copyright notice and this permission notice shall be included in all 15 | // copies or substantial portions of the Software. 16 | // 17 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | // SOFTWARE. 
24 | // 25 | 26 | import Foundation 27 | import AVFoundation 28 | import CoreVideo 29 | #if USE_ARKIT 30 | import ARKit 31 | #endif 32 | 33 | // MARK: - NextLevelDelegate Dictionary Keys 34 | 35 | /// Delegate callback dictionary key for photo metadata 36 | public let NextLevelPhotoMetadataKey = "NextLevelPhotoMetadataKey" 37 | 38 | /// Delegate callback dictionary key for JPEG data 39 | public let NextLevelPhotoJPEGKey = "NextLevelPhotoJPEGKey" 40 | 41 | /// Delegate callback dictionary key for cropped JPEG data 42 | public let NextLevelPhotoCroppedJPEGKey = "NextLevelPhotoCroppedJPEGKey" 43 | 44 | /// Delegate callback dictionary key for raw image data 45 | public let NextLevelPhotoRawImageKey = "NextLevelPhotoRawImageKey" 46 | 47 | /// Delegate callback dictionary key for a photo thumbnail 48 | public let NextLevelPhotoThumbnailKey = "NextLevelPhotoThumbnailKey" 49 | 50 | // MARK: - NextLevelDelegate 51 | 52 | /// NextLevel delegate, provides updates for authorization, configuration changes, session state, preview state, and mode changes. 53 | public protocol NextLevelDelegate: AnyObject { 54 | 55 | // configuration 56 | func nextLevel(_ nextLevel: NextLevel, didUpdateVideoConfiguration videoConfiguration: NextLevelVideoConfiguration) 57 | func nextLevel(_ nextLevel: NextLevel, didUpdateAudioConfiguration audioConfiguration: NextLevelAudioConfiguration) 58 | 59 | // session 60 | func nextLevelSessionWillStart(_ nextLevel: NextLevel) 61 | func nextLevelSessionDidStart(_ nextLevel: NextLevel) 62 | func nextLevelSessionDidStop(_ nextLevel: NextLevel) 63 | 64 | // session interruption 65 | func nextLevelSessionWasInterrupted(_ nextLevel: NextLevel) 66 | func nextLevelSessionInterruptionEnded(_ nextLevel: NextLevel) 67 | 68 | // mode 69 | func nextLevelCaptureModeWillChange(_ nextLevel: NextLevel) 70 | func nextLevelCaptureModeDidChange(_ nextLevel: NextLevel) 71 | 72 | } 73 | 74 | /// Preview delegate, provides updates for preview start and stop. 75 | public protocol NextLevelPreviewDelegate: AnyObject { 76 | 77 | // preview 78 | func nextLevelWillStartPreview(_ nextLevel: NextLevel) 79 | func nextLevelDidStopPreview(_ nextLevel: NextLevel) 80 | 81 | } 82 | 83 | /// Device delegate, provides updates on device position, orientation, clean aperture, focus, exposure, and white balance changes.
84 | public protocol NextLevelDeviceDelegate: AnyObject { 85 | 86 | // position, orientation 87 | func nextLevelDevicePositionWillChange(_ nextLevel: NextLevel) 88 | func nextLevelDevicePositionDidChange(_ nextLevel: NextLevel) 89 | func nextLevel(_ nextLevel: NextLevel, didChangeDeviceOrientation deviceOrientation: NextLevelDeviceOrientation) 90 | 91 | // format 92 | func nextLevel(_ nextLevel: NextLevel, didChangeDeviceFormat deviceFormat: AVCaptureDevice.Format) 93 | 94 | // aperture, lens 95 | func nextLevel(_ nextLevel: NextLevel, didChangeCleanAperture cleanAperture: CGRect) 96 | func nextLevel(_ nextLevel: NextLevel, didChangeLensPosition lensPosition: Float) 97 | 98 | // focus, exposure, white balance 99 | func nextLevelWillStartFocus(_ nextLevel: NextLevel) 100 | func nextLevelDidStopFocus(_ nextLevel: NextLevel) 101 | 102 | func nextLevelWillChangeExposure(_ nextLevel: NextLevel) 103 | func nextLevelDidChangeExposure(_ nextLevel: NextLevel) 104 | 105 | func nextLevelWillChangeWhiteBalance(_ nextLevel: NextLevel) 106 | func nextLevelDidChangeWhiteBalance(_ nextLevel: NextLevel) 107 | 108 | } 109 | 110 | // MARK: - NextLevelFlashAndTorchDelegate 111 | 112 | /// Flash and torch delegate, provides updates on active flash and torch related changes. 113 | public protocol NextLevelFlashAndTorchDelegate: AnyObject { 114 | 115 | func nextLevelDidChangeFlashMode(_ nextLevel: NextLevel) 116 | func nextLevelDidChangeTorchMode(_ nextLevel: NextLevel) 117 | 118 | func nextLevelFlashActiveChanged(_ nextLevel: NextLevel) 119 | func nextLevelTorchActiveChanged(_ nextLevel: NextLevel) 120 | 121 | func nextLevelFlashAndTorchAvailabilityChanged(_ nextLevel: NextLevel) 122 | 123 | } 124 | 125 | // MARK: - NextLevelVideoDelegate 126 | 127 | /// Video delegate, provides updates on video related recording and capture functionality. 128 | /// All methods are called on the main queue with the exception of nextLevel:renderToCustomContextWithSampleBuffer:onQueue. 
129 | public protocol NextLevelVideoDelegate: AnyObject { 130 | 131 | // video zoom 132 | func nextLevel(_ nextLevel: NextLevel, didUpdateVideoZoomFactor videoZoomFactor: Float) 133 | 134 | // video processing 135 | func nextLevel(_ nextLevel: NextLevel, willProcessRawVideoSampleBuffer sampleBuffer: CMSampleBuffer, onQueue queue: DispatchQueue) 136 | func nextLevel(_ nextLevel: NextLevel, renderToCustomContextWithImageBuffer imageBuffer: CVPixelBuffer, onQueue queue: DispatchQueue) 137 | 138 | // ARKit video processing 139 | @available(iOS 11.0, *) 140 | func nextLevel(_ nextLevel: NextLevel, willProcessFrame frame: AnyObject, timestamp: TimeInterval, onQueue queue: DispatchQueue) 141 | 142 | // video recording session 143 | func nextLevel(_ nextLevel: NextLevel, didSetupVideoInSession session: NextLevelSession) 144 | func nextLevel(_ nextLevel: NextLevel, didSetupAudioInSession session: NextLevelSession) 145 | 146 | // clip start/stop 147 | func nextLevel(_ nextLevel: NextLevel, didStartClipInSession session: NextLevelSession) 148 | func nextLevel(_ nextLevel: NextLevel, didCompleteClip clip: NextLevelClip, inSession session: NextLevelSession) 149 | 150 | // clip file I/O 151 | func nextLevel(_ nextLevel: NextLevel, didAppendVideoSampleBuffer sampleBuffer: CMSampleBuffer, inSession session: NextLevelSession) 152 | func nextLevel(_ nextLevel: NextLevel, didSkipVideoSampleBuffer sampleBuffer: CMSampleBuffer, inSession session: NextLevelSession) 153 | 154 | func nextLevel(_ nextLevel: NextLevel, didAppendVideoPixelBuffer pixelBuffer: CVPixelBuffer, timestamp: TimeInterval, inSession session: NextLevelSession) 155 | func nextLevel(_ nextLevel: NextLevel, didSkipVideoPixelBuffer pixelBuffer: CVPixelBuffer, timestamp: TimeInterval, inSession session: NextLevelSession) 156 | 157 | func nextLevel(_ nextLevel: NextLevel, didAppendAudioSampleBuffer sampleBuffer: CMSampleBuffer, inSession session: NextLevelSession) 158 | func nextLevel(_ nextLevel: NextLevel, didSkipAudioSampleBuffer sampleBuffer: CMSampleBuffer, inSession session: NextLevelSession) 159 | 160 | func nextLevel(_ nextLevel: NextLevel, didCompleteSession session: NextLevelSession) 161 | 162 | // video frame photo 163 | func nextLevel(_ nextLevel: NextLevel, didCompletePhotoCaptureFromVideoFrame photoDict: [String : Any]?) 164 | 165 | } 166 | 167 | // MARK: - NextLevelPhotoDelegate 168 | 169 | /// Photo delegate, provides updates on photo related capture functionality. 
170 | public protocol NextLevelPhotoDelegate: AnyObject { 171 | 172 | func nextLevel(_ nextLevel: NextLevel, willCapturePhotoWithConfiguration photoConfiguration: NextLevelPhotoConfiguration) 173 | func nextLevel(_ nextLevel: NextLevel, didCapturePhotoWithConfiguration photoConfiguration: NextLevelPhotoConfiguration) 174 | 175 | func nextLevel(_ nextLevel: NextLevel, didProcessPhotoCaptureWith photoDict: [String: Any]?, photoConfiguration: NextLevelPhotoConfiguration) 176 | func nextLevel(_ nextLevel: NextLevel, didProcessRawPhotoCaptureWith photoDict: [String: Any]?, photoConfiguration: NextLevelPhotoConfiguration) 177 | 178 | func nextLevelDidCompletePhotoCapture(_ nextLevel: NextLevel) 179 | 180 | @available(iOS 11.0, *) 181 | func nextLevel(_ nextLevel: NextLevel, didFinishProcessingPhoto photo: AVCapturePhoto) 182 | } 183 | 184 | // MARK: - NextLevelDepthDataDelegate 185 | 186 | #if USE_TRUE_DEPTH 187 | /// Depth data delegate, provides depth data updates 188 | public protocol NextLevelDepthDataDelegate: AnyObject { 189 | 190 | @available(iOS 11.0, *) 191 | func depthDataOutput(_ nextLevel: NextLevel, didOutput depthData: AVDepthData, timestamp: CMTime) 192 | 193 | @available(iOS 11.0, *) 194 | func depthDataOutput(_ nextLevel: NextLevel, didDrop depthData: AVDepthData, timestamp: CMTime, reason: AVCaptureOutput.DataDroppedReason) 195 | 196 | } 197 | #endif 198 | 199 | // MARK: - NextLevelPortraitEffectsMatteDelegate 200 | 201 | /// Portrait Effects Matte delegate, provides portrait effects matte updates 202 | public protocol NextLevelPortraitEffectsMatteDelegate: AnyObject { 203 | 204 | @available(iOS 12.0, *) 205 | func portraitEffectsMatteOutput(_ nextLevel: NextLevel, didOutput portraitEffectsMatte: AVPortraitEffectsMatte) 206 | 207 | } 208 | 209 | // MARK: - NextLevelMetadataOutputObjectsDelegate 210 | 211 | /// Metadata Output delegate, provides objects like faces and barcodes 212 | public protocol NextLevelMetadataOutputObjectsDelegate: AnyObject { 213 | 214 | func metadataOutputObjects(_ nextLevel: NextLevel, didOutput metadataObjects: [AVMetadataObject]) 215 | } 216 | -------------------------------------------------------------------------------- /ARCamera/Pods/RPCircularProgress/LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 Rob Phillips. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
-------------------------------------------------------------------------------- /ARCamera/Pods/RPCircularProgress/README.md: -------------------------------------------------------------------------------- 1 | ## RPCircularProgress 2 | 3 | [![Coverage Status](https://coveralls.io/repos/github/iwasrobbed/RPCircularProgress/badge.svg?branch=master)](https://coveralls.io/github/iwasrobbed/RPCircularProgress?branch=master) 4 | [![Build Status](https://travis-ci.org/iwasrobbed/RPCircularProgress.svg?branch=master)](https://travis-ci.org/iwasrobbed/RPCircularProgress) 5 | [![MIT licensed](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/iwasrobbed/RPCircularProgress/blob/master/LICENSE) 6 | [![CocoaPods](https://img.shields.io/cocoapods/v/RPCircularProgress.svg?maxAge=2592000)]() 7 | [![Swift](https://img.shields.io/badge/language-Swift-blue.svg)](https://swift.org) 8 | 9 | ⚠️ To use with Swift 2.3 please ensure you are using == 0.2.3 ⚠️ 10 | 11 | ⚠️ To use with Swift 3.x please ensure you are using >= 0.3.0 ⚠️ 12 | 13 | ⚠️ To use with Swift 4.x please ensure you are using >= 0.4.0 ⚠️ 14 | 15 | ⚠️ To use with Swift 4.2 please ensure you are using >= 0.4.1 ⚠️ 16 | 17 | ⚠️ To use with Swift 5.0 please ensure you are using >= 0.5.0 ⚠️ 18 | 19 | UIView subclass written in Swift to show circular progress. 20 | 21 | ![Example Usage](ohhai.gif) 22 | 23 | Is your app using it? [Let me know!](mailto:rob@desideratalabs.co) 24 | 25 | Please see the included example app for sample usage. 26 | 27 | ### Styles 28 | 29 | * Indeterminate (spins infinitely) 30 | * Normal (set progress between 0.0 and 1.0) 31 | * Mixed (can make it indeterminate, but also animate progress to 1.0) 32 | 33 | ### Public API 34 | 35 | ```swift 36 | /** 37 | Enables or disables the indeterminate (spinning) animation 38 | 39 | - parameter enabled: Whether or not to enable the animation (defaults to `true`) 40 | - parameter completion: An optional closure to execute after the animation completes 41 | */ 42 | open func enableIndeterminate(_ enabled: Bool = true, completion: CompletionBlock? = nil) {} 43 | ``` 44 | 45 | ```swift 46 | /** 47 | Updates the progress bar to the given value with the optional properties 48 | 49 | - parameter progress: The progress to update to, pinned between `0` and `1` 50 | - parameter animated: Whether or not the update should be animated (defaults to `true`) 51 | - parameter initialDelay: Sets an initial delay before the animation begins 52 | - parameter duration: Sets the overal duration that the animation should complete within 53 | - parameter completion: An optional closure to execute after the animation completes 54 | */ 55 | open func updateProgress(_ progress: CGFloat, animated: Bool = true, initialDelay: CFTimeInterval = 0, duration: CFTimeInterval? = nil, completion: CompletionBlock? = nil) {} 56 | ``` 57 | 58 | ### Properties 59 | 60 | Note: Most properties below are `@IBInspectable`, but I don't use Interface Builder personally so let me know if you see any issues. 
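Before the property reference below, here is a minimal usage sketch (an illustrative example added for orientation, not code taken from this repo; it assumes a view controller context where `view` is available):

```swift
let progressView = RPCircularProgress()
progressView.frame = CGRect(x: 20, y: 60, width: 40, height: 40)
progressView.trackTintColor = UIColor(white: 1.0, alpha: 0.3)
progressView.progressTintColor = .white
progressView.thicknessRatio = 0.2
view.addSubview(progressView)

// Animate to 75% over one second, then switch to the indeterminate (spinning) style.
progressView.updateProgress(0.75, animated: true, duration: 1.0) {
    progressView.enableIndeterminate()
}
```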
61 | 62 | `var trackTintColor: UIColor` 63 | * The color of the empty progress track (gets drawn over) 64 | 65 | `var progressTintColor: UIColor` 66 | * The color of the progress bar 67 | 68 | `var innerTintColor: UIColor?` 69 | * The color the notched out circle within the progress area (if there is one) 70 | 71 | `var roundedCorners: Bool` 72 | * Sets whether or not the corners of the progress bar should be rounded 73 | 74 | `var thicknessRatio: CGFloat` 75 | * Sets how thick the progress bar should be (pinned between `0.01` and `1`) 76 | 77 | `var clockwiseProgress: Bool` 78 | * Sets whether or not the animation should be clockwise 79 | 80 | `var timingFunction: CAMediaTimingFunction` 81 | * A timing function defining the pacing of the animation. Defaults to ease in, ease out. 82 | 83 | `var progress: CGFloat` 84 | * Getter for the current progress (not observed from any active animations) 85 | 86 | `var indeterminateProgress: CGFloat` 87 | * Sets how much of the progress bar should be filled during an indeterminate animation, pinned between `0.05` and `0.9` 88 | * **Note:** This can be overriden / animated from by using updateProgress(...) 89 | 90 | `var indeterminateDuration: CFTimeInterval` 91 | * Controls the speed at which the indeterminate progress bar animates 92 | 93 | ### Supports 94 | Swift, ARC & iOS 8+, Autolayout or springs and struts 95 | 96 | ### A little help from my friends 97 | Please feel free to fork and create a pull request for bug fixes or improvements, being sure to maintain the general coding style, adding tests, and adding comments as necessary. 98 | 99 | ### Credit 100 | This library is effectively a Swift port of [DACircularProgress](https://github.com/danielamitay/DACircularProgress) with some minor changes to the API, so it should be fairly easy to act as a replacement. I really loved that library but unfortunately it wasn't being maintained. 101 | -------------------------------------------------------------------------------- /ARCamera/Pods/RPCircularProgress/Source/RPCircularProgress.swift: -------------------------------------------------------------------------------- 1 | // 2 | // RPCircularProgress.swift 3 | // RPCircularProgress 4 | // 5 | // Created by Rob Phillips on 4/5/16. 6 | // Copyright © 2016 Glazed Donut, LLC. All rights reserved. 7 | // 8 | // See LICENSE for full license agreement. 9 | // 10 | 11 | import UIKit 12 | 13 | open class RPCircularProgress: UIView { 14 | 15 | // MARK: - Completion 16 | 17 | public typealias CompletionBlock = () -> Void 18 | 19 | // MARK: - Public API 20 | 21 | /** 22 | The color of the empty progress track (gets drawn over) 23 | */ 24 | @IBInspectable open var trackTintColor: UIColor { 25 | get { 26 | return progressLayer.trackTintColor 27 | } 28 | set { 29 | progressLayer.trackTintColor = newValue 30 | progressLayer.setNeedsDisplay() 31 | } 32 | } 33 | 34 | /** 35 | The color of the progress bar 36 | */ 37 | @IBInspectable open var progressTintColor: UIColor { 38 | get { 39 | return progressLayer.progressTintColor 40 | } 41 | set { 42 | progressLayer.progressTintColor = newValue 43 | progressLayer.setNeedsDisplay() 44 | } 45 | } 46 | 47 | /** 48 | The color the notched out circle within the progress area (if there is one) 49 | */ 50 | @IBInspectable open var innerTintColor: UIColor? 
{ 51 | get { 52 | return progressLayer.innerTintColor 53 | } 54 | set { 55 | progressLayer.innerTintColor = newValue 56 | progressLayer.setNeedsDisplay() 57 | } 58 | } 59 | 60 | /** 61 | Sets whether or not the corners of the progress bar should be rounded 62 | */ 63 | @IBInspectable open var roundedCorners: Bool { 64 | get { 65 | return progressLayer.roundedCorners 66 | } 67 | set { 68 | progressLayer.roundedCorners = newValue 69 | progressLayer.setNeedsDisplay() 70 | } 71 | } 72 | 73 | /** 74 | Sets how thick the progress bar should be (pinned between `0.01` and `1`) 75 | */ 76 | @IBInspectable open var thicknessRatio: CGFloat { 77 | get { 78 | return progressLayer.thicknessRatio 79 | } 80 | set { 81 | progressLayer.thicknessRatio = pin(newValue, minValue: 0.01, maxValue: 1) 82 | progressLayer.setNeedsDisplay() 83 | } 84 | } 85 | 86 | /** 87 | Sets whether or not the animation should be clockwise 88 | */ 89 | @IBInspectable open var clockwiseProgress: Bool { 90 | get { 91 | return progressLayer.clockwiseProgress 92 | } 93 | set { 94 | progressLayer.clockwiseProgress = newValue 95 | progressLayer.setNeedsDisplay() 96 | } 97 | } 98 | 99 | /** 100 | A timing function defining the pacing of the animation. Defaults to ease in, ease out. 101 | */ 102 | open var timingFunction: CAMediaTimingFunction = CAMediaTimingFunction(name: CAMediaTimingFunctionName.easeInEaseOut) 103 | 104 | /** 105 | Getter for the current progress (not observed from any active animations) 106 | */ 107 | @IBInspectable open var progress: CGFloat { 108 | get { 109 | return progressLayer.progress 110 | } 111 | } 112 | 113 | /** 114 | Sets how much of the progress bar should be filled during an indeterminate animation, pinned between `0.05` and `0.9` 115 | 116 | **Note:** This can be overriden / animated from by using updateProgress(...) 117 | */ 118 | @IBInspectable open var indeterminateProgress: CGFloat { 119 | get { 120 | return progressLayer.indeterminateProgress 121 | } 122 | set { 123 | progressLayer.indeterminateProgress = pin(newValue, minValue: 0.05, maxValue: 0.9) 124 | } 125 | } 126 | 127 | /** 128 | Controls the speed at which the indeterminate progress bar animates 129 | */ 130 | @IBInspectable open var indeterminateDuration: CFTimeInterval = Defaults.indeterminateDuration 131 | 132 | // MARK: - Custom Base Layer 133 | 134 | fileprivate var progressLayer: ProgressLayer! { 135 | get { 136 | return (layer as! 
ProgressLayer) 137 | } 138 | } 139 | 140 | open override class var layerClass : AnyClass { 141 | return ProgressLayer.self 142 | } 143 | 144 | // Lifecycle 145 | 146 | /** 147 | Default initializer for the class 148 | 149 | - returns: A configured instance of self 150 | */ 151 | required public init() { 152 | super.init(frame: CGRect(x: 0, y: 0, width: 40, height: 40)) 153 | 154 | setupDefaults() 155 | } 156 | 157 | required public init?(coder aDecoder: NSCoder) { 158 | super.init(coder: aDecoder) 159 | 160 | setupDefaults() 161 | } 162 | 163 | open override func didMoveToWindow() { 164 | super.didMoveToWindow() 165 | 166 | if let window = window { 167 | progressLayer.contentsScale = window.screen.scale 168 | progressLayer.setNeedsDisplay() 169 | } 170 | } 171 | 172 | // MARK: - Indeterminate 173 | 174 | /** 175 | Enables or disables the indeterminate (spinning) animation 176 | 177 | - parameter enabled: Whether or not to enable the animation (defaults to `true`) 178 | - parameter completion: An optional closure to execute after the animation completes 179 | */ 180 | open func enableIndeterminate(_ enabled: Bool = true, completion: CompletionBlock? = nil) { 181 | if let animation = progressLayer.animation(forKey: AnimationKeys.indeterminate) { 182 | // Check if there are any closures to execute on the existing animation 183 | if let block = animation.value(forKey: AnimationKeys.completionBlock) as? CompletionBlockObject { 184 | block.action() 185 | } 186 | progressLayer.removeAnimation(forKey: AnimationKeys.indeterminate) 187 | 188 | // And notify of disabling completion 189 | completion?() 190 | } 191 | 192 | guard enabled else { return } 193 | 194 | addIndeterminateAnimation(completion) 195 | } 196 | 197 | // MARK: - Progress 198 | 199 | /** 200 | Updates the progress bar to the given value with the optional properties 201 | 202 | - parameter progress: The progress to update to, pinned between `0` and `1` 203 | - parameter animated: Whether or not the update should be animated (defaults to `true`) 204 | - parameter initialDelay: Sets an initial delay before the animation begins 205 | - parameter duration: Sets the overal duration that the animation should complete within 206 | - parameter completion: An optional closure to execute after the animation completes 207 | */ 208 | open func updateProgress(_ progress: CGFloat, animated: Bool = true, initialDelay: CFTimeInterval = 0, duration: CFTimeInterval? = nil, completion: CompletionBlock? 
= nil) { 209 | let pinnedProgress = pin(progress) 210 | if animated { 211 | 212 | // Get duration 213 | let animationDuration: CFTimeInterval 214 | if let duration = duration, duration != 0 { 215 | animationDuration = duration 216 | } else { 217 | // Same duration as UIProgressView animation 218 | animationDuration = CFTimeInterval(fabsf(Float(self.progress) - Float(pinnedProgress))) 219 | } 220 | 221 | // Get current progress (to avoid jumpy behavior) 222 | // Basic animations have their value reset to the original once the animation is finished 223 | // since only the presentation layer is animating 224 | var currentProgress: CGFloat = 0 225 | if let presentationLayer = progressLayer.presentation() { 226 | currentProgress = presentationLayer.progress 227 | } 228 | progressLayer.progress = currentProgress 229 | 230 | progressLayer.removeAnimation(forKey: AnimationKeys.progress) 231 | animate(progress, currentProgress: currentProgress, initialDelay: initialDelay, duration: animationDuration, completion: completion) 232 | } else { 233 | progressLayer.removeAnimation(forKey: AnimationKeys.progress) 234 | 235 | progressLayer.progress = pinnedProgress 236 | progressLayer.setNeedsDisplay() 237 | 238 | completion?() 239 | } 240 | } 241 | } 242 | 243 | // MARK: - Private API 244 | 245 | private extension RPCircularProgress { 246 | 247 | // MARK: - Defaults 248 | 249 | func setupDefaults() { 250 | progressLayer.trackTintColor = Defaults.trackTintColor 251 | progressLayer.progressTintColor = Defaults.progressTintColor 252 | progressLayer.innerTintColor = nil 253 | backgroundColor = Defaults.backgroundColor 254 | progressLayer.thicknessRatio = Defaults.thicknessRatio 255 | progressLayer.roundedCorners = Defaults.roundedCorners 256 | progressLayer.clockwiseProgress = Defaults.clockwiseProgress 257 | indeterminateDuration = Defaults.indeterminateDuration 258 | progressLayer.indeterminateProgress = Defaults.indeterminateProgress 259 | } 260 | 261 | // MARK: - Progress 262 | 263 | // Pin certain values between 0.0 and 1.0 264 | func pin(_ value: CGFloat, minValue: CGFloat = 0, maxValue: CGFloat = 1) -> CGFloat { 265 | return min(max(value, minValue), maxValue) 266 | } 267 | 268 | func animate(_ pinnedProgress: CGFloat, currentProgress: CGFloat, initialDelay: CFTimeInterval, duration: CFTimeInterval, completion: CompletionBlock?) { 269 | let animation = CABasicAnimation(keyPath: AnimationKeys.progress) 270 | animation.duration = duration 271 | animation.timingFunction = timingFunction 272 | animation.fromValue = currentProgress 273 | animation.fillMode = CAMediaTimingFillMode.forwards 274 | animation.isRemovedOnCompletion = false 275 | animation.toValue = pinnedProgress 276 | animation.beginTime = CACurrentMediaTime() + initialDelay 277 | animation.delegate = self 278 | if let completion = completion { 279 | let completionObject = CompletionBlockObject(action: completion) 280 | animation.setValue(completionObject, forKey: AnimationKeys.completionBlock) 281 | } 282 | progressLayer.add(animation, forKey: AnimationKeys.progress) 283 | } 284 | 285 | // MARK: - Indeterminate 286 | 287 | func addIndeterminateAnimation(_ completion: CompletionBlock?) { 288 | guard progressLayer.animation(forKey: AnimationKeys.indeterminate) == nil else { return } 289 | 290 | let animation = CABasicAnimation(keyPath: AnimationKeys.transformRotation) 291 | animation.byValue = clockwiseProgress ? 
2 * Double.pi : -2 * Double.pi 292 | animation.duration = indeterminateDuration 293 | animation.repeatCount = Float.infinity 294 | animation.isRemovedOnCompletion = false 295 | progressLayer.progress = indeterminateProgress 296 | if let completion = completion { 297 | let completionObject = CompletionBlockObject(action: completion) 298 | animation.setValue(completionObject, forKey: AnimationKeys.completionBlock) 299 | } 300 | progressLayer.add(animation, forKey: AnimationKeys.indeterminate) 301 | } 302 | 303 | // Completion 304 | 305 | class CompletionBlockObject: NSObject { 306 | var action: CompletionBlock 307 | 308 | required init(action: @escaping CompletionBlock) { 309 | self.action = action 310 | } 311 | } 312 | 313 | // MARK: - Private Classes / Structs 314 | 315 | class ProgressLayer: CALayer { 316 | @NSManaged var trackTintColor: UIColor 317 | @NSManaged var progressTintColor: UIColor 318 | @NSManaged var innerTintColor: UIColor? 319 | 320 | @NSManaged var roundedCorners: Bool 321 | @NSManaged var clockwiseProgress: Bool 322 | @NSManaged var thicknessRatio: CGFloat 323 | 324 | @NSManaged var indeterminateProgress: CGFloat 325 | // This needs to have a setter/getter for it to work with CoreAnimation 326 | @NSManaged var progress: CGFloat 327 | 328 | override class func needsDisplay(forKey key: String) -> Bool { 329 | return key == AnimationKeys.progress ? true : super.needsDisplay(forKey: key) 330 | } 331 | 332 | override func draw(in ctx: CGContext) { 333 | let rect = bounds 334 | let centerPoint = CGPoint(x: rect.size.width / 2, y: rect.size.height / 2) 335 | let radius = min(rect.size.height, rect.size.width) / 2 336 | 337 | let progress: CGFloat = min(self.progress, CGFloat(1 - Float.ulpOfOne)) 338 | var radians: CGFloat = 0 339 | if clockwiseProgress { 340 | radians = CGFloat((Double(progress) * 2 * Double.pi) - (Double.pi / 2)) 341 | } else { 342 | radians = CGFloat(3 * (Double.pi / 2) - (Double(progress) * 2 * Double.pi)) 343 | } 344 | 345 | func fillTrack() { 346 | ctx.setFillColor(trackTintColor.cgColor) 347 | let trackPath = CGMutablePath() 348 | trackPath.move(to: centerPoint) 349 | trackPath.addArc(center: centerPoint, radius: radius, startAngle: CGFloat(2 * Double.pi), endAngle: 0, clockwise: true) 350 | trackPath.closeSubpath() 351 | ctx.addPath(trackPath) 352 | ctx.fillPath() 353 | } 354 | 355 | func fillProgressIfNecessary() { 356 | if progress == 0.0 { 357 | return 358 | } 359 | 360 | func fillProgress() { 361 | ctx.setFillColor(progressTintColor.cgColor) 362 | let progressPath = CGMutablePath() 363 | progressPath.move(to: centerPoint) 364 | progressPath.addArc(center: centerPoint, radius: radius, startAngle: CGFloat(3 * (Double.pi / 2)), endAngle: radians, clockwise: !clockwiseProgress) 365 | progressPath.closeSubpath() 366 | ctx.addPath(progressPath) 367 | ctx.fillPath() 368 | } 369 | 370 | func roundCornersIfNecessary() { 371 | if !roundedCorners { 372 | return 373 | } 374 | 375 | let pathWidth = radius * thicknessRatio 376 | let xOffset = radius * (1 + ((1 - (thicknessRatio / 2)) * CGFloat(cosf(Float(radians))))) 377 | let yOffset = radius * (1 + ((1 - (thicknessRatio / 2)) * CGFloat(sinf(Float(radians))))) 378 | let endpoint = CGPoint(x: xOffset, y: yOffset) 379 | 380 | let startEllipseRect = CGRect(x: centerPoint.x - pathWidth / 2, y: 0, width: pathWidth, height: pathWidth) 381 | ctx.addEllipse(in: startEllipseRect) 382 | ctx.fillPath() 383 | 384 | let endEllipseRect = CGRect(x: endpoint.x - pathWidth / 2, y: endpoint.y - pathWidth / 2, width: pathWidth, 
height: pathWidth) 385 | ctx.addEllipse(in: endEllipseRect) 386 | ctx.fillPath() 387 | } 388 | 389 | fillProgress() 390 | roundCornersIfNecessary() 391 | } 392 | 393 | func notchCenterCircle() { 394 | ctx.setBlendMode(.clear) 395 | let innerRadius = radius * (1 - thicknessRatio) 396 | let clearRect = CGRect(x: centerPoint.x - innerRadius, y: centerPoint.y - innerRadius, width: innerRadius * 2, height: innerRadius * 2) 397 | ctx.addEllipse(in: clearRect) 398 | ctx.fillPath() 399 | 400 | func fillInnerTintIfNecessary() { 401 | if let innerTintColor = innerTintColor { 402 | ctx.setBlendMode(.normal) 403 | ctx.setFillColor(innerTintColor.cgColor) 404 | ctx.addEllipse(in: clearRect) 405 | ctx.fillPath() 406 | } 407 | } 408 | 409 | fillInnerTintIfNecessary() 410 | } 411 | 412 | fillTrack() 413 | fillProgressIfNecessary() 414 | notchCenterCircle() 415 | } 416 | } 417 | 418 | struct Defaults { 419 | static let trackTintColor = UIColor(white: 1.0, alpha: 0.3) 420 | static let progressTintColor = UIColor.white 421 | static let backgroundColor = UIColor.clear 422 | 423 | static let progress: CGFloat = 0 424 | static let thicknessRatio: CGFloat = 0.3 425 | static let roundedCorners = true 426 | static let clockwiseProgress = true 427 | static let indeterminateDuration: CFTimeInterval = 1.0 428 | static let indeterminateProgress: CGFloat = 0.3 429 | } 430 | 431 | struct AnimationKeys { 432 | static let indeterminate = "indeterminateAnimation" 433 | static let progress = "progress" 434 | static let transformRotation = "transform.rotation" 435 | static let completionBlock = "completionBlock" 436 | static let toValue = "toValue" 437 | } 438 | 439 | } 440 | 441 | // MARK: - Animation Delegate 442 | 443 | extension RPCircularProgress: CAAnimationDelegate { 444 | 445 | public func animationDidStop(_ anim: CAAnimation, finished flag: Bool) { 446 | let completedValue = anim.value(forKey: AnimationKeys.toValue) 447 | if let completedValue = completedValue as? CGFloat { 448 | progressLayer.progress = completedValue 449 | } 450 | 451 | if let block = anim.value(forKey: AnimationKeys.completionBlock) as? CompletionBlockObject { 452 | block.action() 453 | } 454 | } 455 | 456 | } 457 | 458 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/NextLevel/NextLevel-Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | ${EXECUTABLE_NAME} 9 | CFBundleIdentifier 10 | ${PRODUCT_BUNDLE_IDENTIFIER} 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | ${PRODUCT_NAME} 15 | CFBundlePackageType 16 | FMWK 17 | CFBundleShortVersionString 18 | 0.16.0 19 | CFBundleSignature 20 | ???? 
21 | CFBundleVersion 22 | ${CURRENT_PROJECT_VERSION} 23 | NSPrincipalClass 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/NextLevel/NextLevel-dummy.m: -------------------------------------------------------------------------------- 1 | #import 2 | @interface PodsDummy_NextLevel : NSObject 3 | @end 4 | @implementation PodsDummy_NextLevel 5 | @end 6 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/NextLevel/NextLevel-prefix.pch: -------------------------------------------------------------------------------- 1 | #ifdef __OBJC__ 2 | #import 3 | #else 4 | #ifndef FOUNDATION_EXPORT 5 | #if defined(__cplusplus) 6 | #define FOUNDATION_EXPORT extern "C" 7 | #else 8 | #define FOUNDATION_EXPORT extern 9 | #endif 10 | #endif 11 | #endif 12 | 13 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/NextLevel/NextLevel-umbrella.h: -------------------------------------------------------------------------------- 1 | #ifdef __OBJC__ 2 | #import 3 | #else 4 | #ifndef FOUNDATION_EXPORT 5 | #if defined(__cplusplus) 6 | #define FOUNDATION_EXPORT extern "C" 7 | #else 8 | #define FOUNDATION_EXPORT extern 9 | #endif 10 | #endif 11 | #endif 12 | 13 | 14 | FOUNDATION_EXPORT double NextLevelVersionNumber; 15 | FOUNDATION_EXPORT const unsigned char NextLevelVersionString[]; 16 | 17 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/NextLevel/NextLevel.modulemap: -------------------------------------------------------------------------------- 1 | framework module NextLevel { 2 | umbrella header "NextLevel-umbrella.h" 3 | 4 | export * 5 | module * { export * } 6 | } 7 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/NextLevel/NextLevel.xcconfig: -------------------------------------------------------------------------------- 1 | CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/NextLevel 2 | GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1 3 | OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS 4 | PODS_BUILD_DIR = ${BUILD_DIR} 5 | PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME) 6 | PODS_ROOT = ${SRCROOT} 7 | PODS_TARGET_SRCROOT = ${PODS_ROOT}/NextLevel 8 | PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier} 9 | SKIP_INSTALL = YES 10 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/Pods-ARCamera/Pods-ARCamera-Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | ${EXECUTABLE_NAME} 9 | CFBundleIdentifier 10 | ${PRODUCT_BUNDLE_IDENTIFIER} 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | ${PRODUCT_NAME} 15 | CFBundlePackageType 16 | FMWK 17 | CFBundleShortVersionString 18 | 1.0.0 19 | CFBundleSignature 20 | ???? 
21 | CFBundleVersion 22 | ${CURRENT_PROJECT_VERSION} 23 | NSPrincipalClass 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/Pods-ARCamera/Pods-ARCamera-acknowledgements.markdown: -------------------------------------------------------------------------------- 1 | # Acknowledgements 2 | This application makes use of the following third party libraries: 3 | 4 | ## NextLevel 5 | 6 | The MIT License (MIT) 7 | 8 | Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com), NextLevel (http://nextlevel.engineering/) 9 | 10 | Permission is hereby granted, free of charge, to any person obtaining a copy 11 | of this software and associated documentation files (the "Software"), to deal 12 | in the Software without restriction, including without limitation the rights 13 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 14 | copies of the Software, and to permit persons to whom the Software is 15 | furnished to do so, subject to the following conditions: 16 | 17 | The above copyright notice and this permission notice shall be included in all 18 | copies or substantial portions of the Software. 19 | 20 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 21 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 22 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 23 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 24 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 25 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 26 | SOFTWARE. 27 | 28 | 29 | ## RPCircularProgress 30 | 31 | The MIT License (MIT) 32 | 33 | Copyright (c) 2016 Rob Phillips. 34 | 35 | Permission is hereby granted, free of charge, to any person obtaining a copy 36 | of this software and associated documentation files (the "Software"), to deal 37 | in the Software without restriction, including without limitation the rights 38 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 39 | copies of the Software, and to permit persons to whom the Software is 40 | furnished to do so, subject to the following conditions: 41 | 42 | The above copyright notice and this permission notice shall be included in 43 | all copies or substantial portions of the Software. 44 | 45 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 46 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 47 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 48 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 49 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 50 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 51 | THE SOFTWARE. 
52 | Generated by CocoaPods - https://cocoapods.org 53 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/Pods-ARCamera/Pods-ARCamera-acknowledgements.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | PreferenceSpecifiers 6 | 7 | 8 | FooterText 9 | This application makes use of the following third party libraries: 10 | Title 11 | Acknowledgements 12 | Type 13 | PSGroupSpecifier 14 | 15 | 16 | FooterText 17 | The MIT License (MIT) 18 | 19 | Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com), NextLevel (http://nextlevel.engineering/) 20 | 21 | Permission is hereby granted, free of charge, to any person obtaining a copy 22 | of this software and associated documentation files (the "Software"), to deal 23 | in the Software without restriction, including without limitation the rights 24 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 25 | copies of the Software, and to permit persons to whom the Software is 26 | furnished to do so, subject to the following conditions: 27 | 28 | The above copyright notice and this permission notice shall be included in all 29 | copies or substantial portions of the Software. 30 | 31 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 32 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 33 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 34 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 35 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 36 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 37 | SOFTWARE. 38 | 39 | License 40 | MIT 41 | Title 42 | NextLevel 43 | Type 44 | PSGroupSpecifier 45 | 46 | 47 | FooterText 48 | The MIT License (MIT) 49 | 50 | Copyright (c) 2016 Rob Phillips. 51 | 52 | Permission is hereby granted, free of charge, to any person obtaining a copy 53 | of this software and associated documentation files (the "Software"), to deal 54 | in the Software without restriction, including without limitation the rights 55 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 56 | copies of the Software, and to permit persons to whom the Software is 57 | furnished to do so, subject to the following conditions: 58 | 59 | The above copyright notice and this permission notice shall be included in 60 | all copies or substantial portions of the Software. 61 | 62 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 63 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 64 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 65 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 66 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 67 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 68 | THE SOFTWARE. 
69 | License 70 | MIT 71 | Title 72 | RPCircularProgress 73 | Type 74 | PSGroupSpecifier 75 | 76 | 77 | FooterText 78 | Generated by CocoaPods - https://cocoapods.org 79 | Title 80 | 81 | Type 82 | PSGroupSpecifier 83 | 84 | 85 | StringsTable 86 | Acknowledgements 87 | Title 88 | Acknowledgements 89 | 90 | 91 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/Pods-ARCamera/Pods-ARCamera-dummy.m: -------------------------------------------------------------------------------- 1 | #import 2 | @interface PodsDummy_Pods_ARCamera : NSObject 3 | @end 4 | @implementation PodsDummy_Pods_ARCamera 5 | @end 6 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/Pods-ARCamera/Pods-ARCamera-frameworks.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | set -u 4 | set -o pipefail 5 | 6 | function on_error { 7 | echo "$(realpath -mq "${0}"):$1: error: Unexpected failure" 8 | } 9 | trap 'on_error $LINENO' ERR 10 | 11 | if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then 12 | # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy 13 | # frameworks to, so exit 0 (signalling the script phase was successful). 14 | exit 0 15 | fi 16 | 17 | echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" 18 | mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" 19 | 20 | COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}" 21 | SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" 22 | 23 | # Used as a return value for each invocation of `strip_invalid_archs` function. 24 | STRIP_BINARY_RETVAL=0 25 | 26 | # This protects against multiple targets copying the same framework dependency at the same time. The solution 27 | # was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html 28 | RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????") 29 | 30 | # Copies and strips a vendored framework 31 | install_framework() 32 | { 33 | if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then 34 | local source="${BUILT_PRODUCTS_DIR}/$1" 35 | elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then 36 | local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")" 37 | elif [ -r "$1" ]; then 38 | local source="$1" 39 | fi 40 | 41 | local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" 42 | 43 | if [ -L "${source}" ]; then 44 | echo "Symlinked..." 45 | source="$(readlink "${source}")" 46 | fi 47 | 48 | # Use filter instead of exclude so missing patterns don't throw errors. 49 | echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\"" 50 | rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}" 51 | 52 | local basename 53 | basename="$(basename -s .framework "$1")" 54 | binary="${destination}/${basename}.framework/${basename}" 55 | 56 | if ! [ -r "$binary" ]; then 57 | binary="${destination}/${basename}" 58 | elif [ -L "${binary}" ]; then 59 | echo "Destination binary is symlinked..." 
60 | dirname="$(dirname "${binary}")" 61 | binary="${dirname}/$(readlink "${binary}")" 62 | fi 63 | 64 | # Strip invalid architectures so "fat" simulator / device frameworks work on device 65 | if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then 66 | strip_invalid_archs "$binary" 67 | fi 68 | 69 | # Resign the code if required by the build settings to avoid unstable apps 70 | code_sign_if_enabled "${destination}/$(basename "$1")" 71 | 72 | # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7. 73 | if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then 74 | local swift_runtime_libs 75 | swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u) 76 | for lib in $swift_runtime_libs; do 77 | echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\"" 78 | rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}" 79 | code_sign_if_enabled "${destination}/${lib}" 80 | done 81 | fi 82 | } 83 | 84 | # Copies and strips a vendored dSYM 85 | install_dsym() { 86 | local source="$1" 87 | if [ -r "$source" ]; then 88 | # Copy the dSYM into a the targets temp dir. 89 | echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\"" 90 | rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}" 91 | 92 | local basename 93 | basename="$(basename -s .framework.dSYM "$source")" 94 | binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}" 95 | 96 | # Strip invalid architectures so "fat" simulator / device frameworks work on device 97 | if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then 98 | strip_invalid_archs "$binary" 99 | fi 100 | 101 | if [[ $STRIP_BINARY_RETVAL == 1 ]]; then 102 | # Move the stripped file into its final destination. 103 | echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\"" 104 | rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}" 105 | else 106 | # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing. 
107 | touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM" 108 | fi 109 | fi 110 | } 111 | 112 | # Copies the bcsymbolmap files of a vendored framework 113 | install_bcsymbolmap() { 114 | local bcsymbolmap_path="$1" 115 | local destination="${BUILT_PRODUCTS_DIR}" 116 | echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"" 117 | rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}" 118 | } 119 | 120 | # Signs a framework with the provided identity 121 | code_sign_if_enabled() { 122 | if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then 123 | # Use the current code_sign_identity 124 | echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}" 125 | local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'" 126 | 127 | if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then 128 | code_sign_cmd="$code_sign_cmd &" 129 | fi 130 | echo "$code_sign_cmd" 131 | eval "$code_sign_cmd" 132 | fi 133 | } 134 | 135 | # Strip invalid architectures 136 | strip_invalid_archs() { 137 | binary="$1" 138 | # Get architectures for current target binary 139 | binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)" 140 | # Intersect them with the architectures we are building for 141 | intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)" 142 | # If there are no archs supported by this binary then warn the user 143 | if [[ -z "$intersected_archs" ]]; then 144 | echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)." 145 | STRIP_BINARY_RETVAL=0 146 | return 147 | fi 148 | stripped="" 149 | for arch in $binary_archs; do 150 | if ! 
[[ "${ARCHS}" == *"$arch"* ]]; then 151 | # Strip non-valid architectures in-place 152 | lipo -remove "$arch" -output "$binary" "$binary" 153 | stripped="$stripped $arch" 154 | fi 155 | done 156 | if [[ "$stripped" ]]; then 157 | echo "Stripped $binary of architectures:$stripped" 158 | fi 159 | STRIP_BINARY_RETVAL=1 160 | } 161 | 162 | 163 | if [[ "$CONFIGURATION" == "Debug" ]]; then 164 | install_framework "${BUILT_PRODUCTS_DIR}/NextLevel/NextLevel.framework" 165 | install_framework "${BUILT_PRODUCTS_DIR}/RPCircularProgress/RPCircularProgress.framework" 166 | fi 167 | if [[ "$CONFIGURATION" == "Release" ]]; then 168 | install_framework "${BUILT_PRODUCTS_DIR}/NextLevel/NextLevel.framework" 169 | install_framework "${BUILT_PRODUCTS_DIR}/RPCircularProgress/RPCircularProgress.framework" 170 | fi 171 | if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then 172 | wait 173 | fi 174 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/Pods-ARCamera/Pods-ARCamera-umbrella.h: -------------------------------------------------------------------------------- 1 | #ifdef __OBJC__ 2 | #import 3 | #else 4 | #ifndef FOUNDATION_EXPORT 5 | #if defined(__cplusplus) 6 | #define FOUNDATION_EXPORT extern "C" 7 | #else 8 | #define FOUNDATION_EXPORT extern 9 | #endif 10 | #endif 11 | #endif 12 | 13 | 14 | FOUNDATION_EXPORT double Pods_ARCameraVersionNumber; 15 | FOUNDATION_EXPORT const unsigned char Pods_ARCameraVersionString[]; 16 | 17 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/Pods-ARCamera/Pods-ARCamera.debug.xcconfig: -------------------------------------------------------------------------------- 1 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES 2 | FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/NextLevel" "${PODS_CONFIGURATION_BUILD_DIR}/RPCircularProgress" 3 | GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1 4 | HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/NextLevel/NextLevel.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/RPCircularProgress/RPCircularProgress.framework/Headers" 5 | LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks' 6 | OTHER_LDFLAGS = $(inherited) -framework "NextLevel" -framework "RPCircularProgress" 7 | OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS 8 | PODS_BUILD_DIR = ${BUILD_DIR} 9 | PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME) 10 | PODS_PODFILE_DIR_PATH = ${SRCROOT}/. 
11 | PODS_ROOT = ${SRCROOT}/Pods 12 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/Pods-ARCamera/Pods-ARCamera.modulemap: -------------------------------------------------------------------------------- 1 | framework module Pods_ARCamera { 2 | umbrella header "Pods-ARCamera-umbrella.h" 3 | 4 | export * 5 | module * { export * } 6 | } 7 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/Pods-ARCamera/Pods-ARCamera.release.xcconfig: -------------------------------------------------------------------------------- 1 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES 2 | FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/NextLevel" "${PODS_CONFIGURATION_BUILD_DIR}/RPCircularProgress" 3 | GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1 4 | HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/NextLevel/NextLevel.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/RPCircularProgress/RPCircularProgress.framework/Headers" 5 | LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks' 6 | OTHER_LDFLAGS = $(inherited) -framework "NextLevel" -framework "RPCircularProgress" 7 | OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS 8 | PODS_BUILD_DIR = ${BUILD_DIR} 9 | PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME) 10 | PODS_PODFILE_DIR_PATH = ${SRCROOT}/. 11 | PODS_ROOT = ${SRCROOT}/Pods 12 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/RPCircularProgress/RPCircularProgress-Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | ${EXECUTABLE_NAME} 9 | CFBundleIdentifier 10 | ${PRODUCT_BUNDLE_IDENTIFIER} 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | ${PRODUCT_NAME} 15 | CFBundlePackageType 16 | FMWK 17 | CFBundleShortVersionString 18 | 0.5.0 19 | CFBundleSignature 20 | ???? 
21 | CFBundleVersion 22 | ${CURRENT_PROJECT_VERSION} 23 | NSPrincipalClass 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/RPCircularProgress/RPCircularProgress-dummy.m: -------------------------------------------------------------------------------- 1 | #import 2 | @interface PodsDummy_RPCircularProgress : NSObject 3 | @end 4 | @implementation PodsDummy_RPCircularProgress 5 | @end 6 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/RPCircularProgress/RPCircularProgress-prefix.pch: -------------------------------------------------------------------------------- 1 | #ifdef __OBJC__ 2 | #import 3 | #else 4 | #ifndef FOUNDATION_EXPORT 5 | #if defined(__cplusplus) 6 | #define FOUNDATION_EXPORT extern "C" 7 | #else 8 | #define FOUNDATION_EXPORT extern 9 | #endif 10 | #endif 11 | #endif 12 | 13 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/RPCircularProgress/RPCircularProgress-umbrella.h: -------------------------------------------------------------------------------- 1 | #ifdef __OBJC__ 2 | #import 3 | #else 4 | #ifndef FOUNDATION_EXPORT 5 | #if defined(__cplusplus) 6 | #define FOUNDATION_EXPORT extern "C" 7 | #else 8 | #define FOUNDATION_EXPORT extern 9 | #endif 10 | #endif 11 | #endif 12 | 13 | 14 | FOUNDATION_EXPORT double RPCircularProgressVersionNumber; 15 | FOUNDATION_EXPORT const unsigned char RPCircularProgressVersionString[]; 16 | 17 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/RPCircularProgress/RPCircularProgress.modulemap: -------------------------------------------------------------------------------- 1 | framework module RPCircularProgress { 2 | umbrella header "RPCircularProgress-umbrella.h" 3 | 4 | export * 5 | module * { export * } 6 | } 7 | -------------------------------------------------------------------------------- /ARCamera/Pods/Target Support Files/RPCircularProgress/RPCircularProgress.xcconfig: -------------------------------------------------------------------------------- 1 | CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/RPCircularProgress 2 | GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1 3 | OTHER_SWIFT_FLAGS = $(inherited) -D COCOAPODS 4 | PODS_BUILD_DIR = ${BUILD_DIR} 5 | PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME) 6 | PODS_ROOT = ${SRCROOT} 7 | PODS_TARGET_SRCROOT = ${PODS_ROOT}/RPCircularProgress 8 | PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier} 9 | SKIP_INSTALL = YES 10 | -------------------------------------------------------------------------------- /ARCamera/Project/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | 
"scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | }, 88 | { 89 | "idiom" : "ios-marketing", 90 | "size" : "1024x1024", 91 | "scale" : "1x" 92 | } 93 | ], 94 | "info" : { 95 | "version" : 1, 96 | "author" : "xcode" 97 | } 98 | } -------------------------------------------------------------------------------- /ARCamera/Project/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /ARCamera/Project/Base.xcconfig: -------------------------------------------------------------------------------- 1 | // 2 | // Base.xcconfig 3 | // 4 | // Created by Patrick Piemonte on 3/26/13. 5 | // 6 | 7 | PRODUCT_NAME = $(TARGET_NAME) 8 | //INFOPLIST_FILE = $(TARGET_NAME)/Info.plist 9 | SWIFT_VERSION = 5.0 10 | 11 | CURRENT_PROJECT_VERSION = 0.2.2 12 | DYLIB_CURRENT_VERSION = $(CURRENT_PROJECT_VERSION) 13 | 14 | ARCHS[sdk=iphoneos*] = $(ARCHS_STANDARD_INCLUDING_64_BIT) 15 | IPHONEOS_DEPLOYMENT_TARGET = 11.0 16 | 17 | SKIP_INSTALL = NO 18 | DEAD_CODE_STRIPPING = YES; 19 | 20 | OTHER_CFLAGS = -fconstant-cfstrings 21 | DEBUG_INFORMATION_FORMAT = dwarf-with-dsym 22 | 23 | GCC_VERSION = com.apple.compilers.llvm.clang.1_0 24 | GCC_C_LANGUAGE_STANDARD = c99 25 | CLANG_CXX_LANGUAGE_STANDARD = c++11 26 | CLANG_CXX_LIBRARY = libc++ 27 | 28 | GCC_PRECOMPILE_PREFIX_HEADER = YES 29 | GCC_PREFIX_HEADER = $(TARGET_NAME)/$(TARGET_NAME)-Prefix.pch 30 | 31 | CLANG_ENABLE_OBJC_ARC = YES 32 | 33 | GCC_DYNAMIC_NO_PIC = NO 34 | GCC_INLINES_ARE_PRIVATE_EXTERN = YES 35 | GCC_SYMBOLS_PRIVATE_EXTERN = NO 36 | 37 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES 38 | CLANG_WARN_EMPTY_BODY = YES 39 | CLANG_WARN_CONSTANT_CONVERSION = YES 40 | CLANG_WARN_ENUM_CONVERSION = YES 41 | CLANG_WARN_INT_CONVERSION = YES 42 | CLANG_WARN_BOOL_CONVERSION = YES 43 | 44 | GCC_ENABLE_OBJC_EXCEPTIONS = NO 45 | -------------------------------------------------------------------------------- /ARCamera/Project/Debug.xcconfig: -------------------------------------------------------------------------------- 1 | // 2 | // Debug.xcconfig 3 | // 4 | // Created by Patrick Piemonte on 3/26/13. 
5 | // 6 | 7 | #include "Base.xcconfig" 8 | 9 | SWIFT_OPTIMIZATION_LEVEL = -Onone 10 | GCC_OPTIMIZATION_LEVEL = 0 11 | GCC_PREPROCESSOR_DEFINITIONS = $(INHERITED) DEBUG=1 12 | 13 | GCC_TREAT_WARNINGS_AS_ERRORS = YES 14 | GCC_WARN_ABOUT_MISSING_PROTOTYPES[sdk=iphone*] = YES 15 | GCC_WARN_ABOUT_RETURN_TYPE = YES 16 | GCC_WARN_SHADOW[sdk=iphone*] = YES 17 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES 18 | GCC_WARN_UNDECLARED_SELECTOR = YES 19 | GCC_WARN_UNINITIALIZED_AUTOS = YES 20 | GCC_WARN_ABOUT_DEPRECATED_FUNCTIONS = YES 21 | GCC_WARN_MULTIPLE_DEFINITION_TYPES_FOR_SELECTOR = YES 22 | GCC_WARN_CHECK_SWITCH_STATEMENTS = YES 23 | GCC_WARN_INITIALIZER_NOT_FULLY_BRACKETED = YES 24 | GCC_WARN_MISSING_PARENTHESES = YES 25 | GCC_WARN_ABOUT_MISSING_FIELD_INITIALIZERS = YES 26 | GCC_WARN_SIGN_COMPARE = YES 27 | GCC_WARN_ABOUT_MISSING_NEWLINE = YES 28 | GCC_WARN_UNUSED_VARIABLE = YES 29 | GCC_WARN_UNUSED_FUNCTION = YES 30 | GCC_WARN_UNUSED_LABEL = YES 31 | GCC_WARN_UNUSED_VALUE = YES 32 | GCC_WARN_UNUSED_VARIABLE = YES 33 | GCC_WARN_TYPECHECK_CALLS_TO_PRINTF = YES 34 | GCC_WARN_HIDDEN_VIRTUAL_FUNCTIONS = YES 35 | GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO = YES 36 | GCC_WARN_NON_VIRTUAL_DESTRUCTOR = YES 37 | 38 | CLANG_WARN_SUSPICIOUS_IMPLICIT_CONVERSION = YES 39 | CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS[sdk=iphone*] = YES 40 | CLANG_WARN_OBJC_EXPLICIT_OWNERSHIP_TYPE = YES 41 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES 42 | CLANG_WARN_IMPLICIT_SIGN_CONVERSION = YES 43 | 44 | CLANG_WARN_CXX0X_EXTENSIONS = NO 45 | 46 | CLANG_ANALYZER_SECURITY_INSECUREAPI_STRCPY = YES 47 | CLANG_ANALYZER_SECURITY_FLOATLOOPCOUNTER = YES 48 | 49 | ALWAYS_SEARCH_USER_PATHS = NO 50 | COPY_PHASE_STRIP = NO 51 | ONLY_ACTIVE_ARCH = YES 52 | 53 | OTHER_CFLAGS[sdk=iphone*] = $(OTHER_CFLAGS) -Wall -Wconversion -Wundeclared-selector -Wobjc-autosynthesis-property-ivar-name-match 54 | 55 |
-------------------------------------------------------------------------------- /ARCamera/Project/Info.plist: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8"?> 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> 3 | <plist version="1.0"> 4 | <dict> 5 | <key>CFBundleDevelopmentRegion</key> 6 | <string>en</string> 7 | <key>CFBundleExecutable</key> 8 | <string>$(EXECUTABLE_NAME)</string> 9 | <key>CFBundleIcons</key> 10 | <dict/> 11 | <key>CFBundleIcons~ipad</key> 12 | <dict/> 13 | <key>CFBundleIdentifier</key> 14 | <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string> 15 | <key>CFBundleInfoDictionaryVersion</key> 16 | <string>6.0</string> 17 | <key>CFBundleName</key> 18 | <string>$(PRODUCT_NAME)</string> 19 | <key>CFBundlePackageType</key> 20 | <string>APPL</string> 21 | <key>CFBundleShortVersionString</key> 22 | <string>1.0.0</string> 23 | <key>CFBundleVersion</key> 24 | <string>0.0.1</string> 25 | <key>LSRequiresIPhoneOS</key> 26 | <true/> 27 | <key>NSCameraUsageDescription</key> 28 | <string>Allowing access to the camera lets you view and record.</string> 29 | <key>NSMicrophoneUsageDescription</key> 30 | <string>Allowing access to the microphone lets you hear and record sounds.</string> 31 | <key>NSPhotoLibraryUsageDescription</key> 32 | <string>NextLevel saves photos and videos to your library.</string>
33 | <key>UILaunchStoryboardName</key> 34 | <string>LaunchScreen</string> 35 | <key>UIRequiredDeviceCapabilities</key> 36 | <array> 37 | <string>armv7</string> 38 | <string>arkit</string> 39 | </array> 40 | <key>UISupportedInterfaceOrientations</key> 41 | <array> 42 | <string>UIInterfaceOrientationPortrait</string> 43 | </array> 44 | <key>UISupportedInterfaceOrientations~ipad</key> 45 | <array> 46 | <string>UIInterfaceOrientationPortrait</string> 47 | <string>UIInterfaceOrientationPortraitUpsideDown</string> 48 | <string>UIInterfaceOrientationLandscapeLeft</string> 49 | <string>UIInterfaceOrientationLandscapeRight</string> 50 | </array> 51 | </dict> 52 | </plist> 53 |
-------------------------------------------------------------------------------- /ARCamera/Project/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 |
-------------------------------------------------------------------------------- /ARCamera/Project/Release.xcconfig: -------------------------------------------------------------------------------- 1 | // 2 | // Release.xcconfig 3 | // 4 | // Created by Patrick Piemonte on 3/26/13. 5 | // 6 | 7 | #include "Base.xcconfig" 8 | 9 | DEAD_CODE_STRIPPING = YES 10 | COPY_PHASE_STRIP = YES 11 | VALIDATE_PRODUCT = YES 12 | 13 | SWIFT_OPTIMIZATION_LEVEL = -Owholemodule 14 | GCC_OPTIMIZATION_LEVEL = s 15 | 16 | GCC_PREPROCESSOR_DEFINITIONS = _LIBCPP_VISIBLE= NDEBUG=1 NS_BLOCK_ASSERTIONS=1 17 | OTHER_CFLAGS[sdk=iphone*] = $(OTHER_CFLAGS) -DNS_BLOCK_ASSERTIONS=1 18 |
-------------------------------------------------------------------------------- /ARCamera/Project/piemonte.usdz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NextLevel/examples/a61c1b58ca14ece28f13a7bd18b3580ff96fea24/ARCamera/Project/piemonte.usdz
-------------------------------------------------------------------------------- /ARCamera/Sources/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.swift 3 | // 4 | // Copyright (c) 2018-present patrick piemonte (http://patrickpiemonte.com) 5 | // 6 | // Permission is hereby granted, free of charge, to any person obtaining a copy 7 | // of this software and associated documentation files (the "Software"), to deal 8 | // in the Software without restriction, including without limitation the rights 9 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | // copies of the Software, and to permit persons to whom the Software is 11 | // furnished to do so, subject to the following conditions: 12 | // 13 | // The above copyright notice and this permission notice shall be included in all 14 | // copies or substantial portions of the Software. 15 | // 16 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | // SOFTWARE. 23 | // 24 | 25 | 26 | import UIKit 27 | 28 | @UIApplicationMain 29 | class AppDelegate: UIResponder, UIApplicationDelegate { 30 | 31 | var window: UIWindow?
32 | 33 | // MARK: - UIApplicationDelegate 34 | 35 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { 36 | 37 | // root view controller setup 38 | self.window = UIWindow(frame:UIScreen.main.bounds) 39 | self.window?.backgroundColor = UIColor.black 40 | 41 | let viewController = ViewController() 42 | self.window?.rootViewController = viewController 43 | self.window?.makeKeyAndVisible() 44 | return true 45 | } 46 | 47 | func applicationWillResignActive(_ application: UIApplication) { 48 | } 49 | 50 | func applicationDidEnterBackground(_ application: UIApplication) { 51 | } 52 | 53 | func applicationWillEnterForeground(_ application: UIApplication) { 54 | } 55 | 56 | func applicationDidBecomeActive(_ application: UIApplication) { 57 | } 58 | 59 | func applicationWillTerminate(_ application: UIApplication) { 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /ARCamera/Sources/RecordButton.swift: -------------------------------------------------------------------------------- 1 | // 2 | // RecordButton.swift 3 | // 4 | // Copyright (c) 2018-present patrick piemonte (http://patrickpiemonte.com) 5 | // 6 | // Permission is hereby granted, free of charge, to any person obtaining a copy 7 | // of this software and associated documentation files (the "Software"), to deal 8 | // in the Software without restriction, including without limitation the rights 9 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | // copies of the Software, and to permit persons to whom the Software is 11 | // furnished to do so, subject to the following conditions: 12 | // 13 | // The above copyright notice and this permission notice shall be included in all 14 | // copies or substantial portions of the Software. 15 | // 16 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | // SOFTWARE. 
23 | // 24 | 25 | import UIKit 26 | import Foundation 27 | import RPCircularProgress 28 | 29 | /// quick hacky progress "button" 30 | public class RecordButton: UIView { 31 | 32 | internal lazy var _recordIndicatorProgressBackground: UIView = { 33 | let view = UIView() 34 | view.backgroundColor = .clear 35 | return view 36 | }() 37 | internal lazy var _recordIndicatorProgress: RPCircularProgress = { 38 | let indicatorProgress = RPCircularProgress() 39 | indicatorProgress.roundedCorners = false 40 | indicatorProgress.thicknessRatio = 1 41 | indicatorProgress.trackTintColor = UIColor.white 42 | indicatorProgress.progressTintColor = UIColor.red 43 | indicatorProgress.isUserInteractionEnabled = false 44 | return indicatorProgress 45 | }() 46 | 47 | public override init(frame: CGRect) { 48 | super.init(frame: frame) 49 | 50 | self.isUserInteractionEnabled = true 51 | self.backgroundColor = .clear 52 | 53 | self._recordIndicatorProgressBackground.isUserInteractionEnabled = false 54 | self._recordIndicatorProgressBackground.frame = self.bounds.insetBy(dx: 10.0, dy: 10.0) 55 | self._recordIndicatorProgressBackground.layer.cornerRadius = self._recordIndicatorProgressBackground.frame.size.height * 0.5 56 | self._recordIndicatorProgressBackground.layer.borderWidth = 2.0 57 | self._recordIndicatorProgressBackground.layer.borderColor = UIColor.white.cgColor 58 | 59 | self._recordIndicatorProgressBackground.center = self.center 60 | self.addSubview(self._recordIndicatorProgressBackground) 61 | 62 | self._recordIndicatorProgress.frame = self._recordIndicatorProgressBackground.bounds 63 | self._recordIndicatorProgress.center = self.center 64 | self.addSubview(self._recordIndicatorProgress) 65 | } 66 | 67 | public required init?(coder aDecoder: NSCoder) { 68 | fatalError("\(#function) has not been implemented") 69 | } 70 | 71 | } 72 | 73 | extension RecordButton { 74 | 75 | public func startRecordingAnimation() { 76 | UIView.animate(withDuration: 0.2, delay: 0, options: .curveEaseInOut, animations: { 77 | self._recordIndicatorProgress.transform = CGAffineTransform(scaleX: 1.65, y: 1.65) 78 | }) { (completed: Bool) in 79 | } 80 | } 81 | 82 | public func stopRecordingAnimation() { 83 | UIView.animate(withDuration: 0.15, delay: 0, options: .curveEaseOut, animations: { 84 | self._recordIndicatorProgress.transform = .identity 85 | }) { (completed: Bool) in 86 | } 87 | } 88 | 89 | public func updateProgress(progress: Float, animated: Bool) { 90 | self._recordIndicatorProgress.updateProgress(CGFloat(progress), animated: animated, completion: nil) 91 | } 92 | 93 | public func reset() { 94 | self._recordIndicatorProgress.transform = .identity 95 | self._recordIndicatorProgress.updateProgress(0) 96 | } 97 | 98 | } 99 | -------------------------------------------------------------------------------- /ARCamera/makefile: -------------------------------------------------------------------------------- 1 | # setup pods 2 | setup: 3 | @echo [installing cocoapods] 4 | @sudo /usr/bin/gem install -n /usr/local/bin cocoapods --pre 5 | @pod setup 6 | 7 | pods: 8 | @echo [updating pods] 9 | @echo ensure you have the latest cocoapods, run make setup 10 | @echo 11 | @-rm Podfile.lock 12 | @-rm -rf Pods 13 | @pod install 14 | 15 | cleanpods: 16 | @echo [removing local Pods caches] 17 | @echo /Users/$$USER/.cocoapods/* 18 | @echo ensure to re-run make setup! 
19 | @-rm -rf /Users/$$USER/.cocoapods/* 20 | 21 | .PHONY: setup pods cleanpods 22 |
-------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016-present patrick piemonte (http://patrickpiemonte.com), NextLevel (http://nextlevel.engineering/) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 |
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | `NextLevel` is a set of [Swift](https://developer.apple.com/swift/) camera system components designed for easy integration, customized media capture, and image streaming in iOS. Integration can optionally leverage `AVFoundation` or `ARKit`. 2 | 3 | This repository collects examples of the various ways to integrate `NextLevel` into a project. Feel free to contribute! 4 | 5 | ## Examples 6 | 7 | - [ARCamera](https://github.com/NextLevel/examples/tree/master/ARCamera), capture and record ARKit videos and photos 8 | 9 | ## Resources 10 | 11 | * [iOS Device Camera Summary](https://developer.apple.com/library/archive/documentation/DeviceInformation/Reference/iOSDeviceCompatibility/Cameras/Cameras.html) 12 | * [AV Foundation Programming Guide](https://developer.apple.com/library/ios/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/00_Introduction.html) 13 | * [AV Foundation Framework Reference](https://developer.apple.com/library/ios/documentation/AVFoundation/Reference/AVFoundationFramework/) 14 | * [ARKit Framework Reference](https://developer.apple.com/documentation/arkit) 15 | * [NextLevel](https://github.com/NextLevel/NextLevel), media capture in Swift 16 | * [NextLevelSessionExporter](https://github.com/NextLevel/NextLevelSessionExporter), media transcoding in Swift 17 | 18 | ## License 19 | 20 | `examples` is available under the MIT license; see the [LICENSE](https://github.com/NextLevel/examples/blob/master/LICENSE) file for more information. 21 | --------------------------------------------------------------------------------
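
The `RecordButton` in `ARCamera/Sources/RecordButton.swift` exposes a small control surface: `startRecordingAnimation()`, `stopRecordingAnimation()`, `updateProgress(progress:animated:)`, and `reset()`. A minimal sketch of how a view controller might drive it with a press-and-hold gesture follows; the view controller name, gesture wiring, and layout values here are illustrative and are not taken from the ARCamera sources.

```swift
import UIKit

final class CaptureViewController: UIViewController {

    // Hypothetical sizing; the ARCamera example lays out its own button.
    private let recordButton = RecordButton(frame: CGRect(x: 0, y: 0, width: 80, height: 80))

    override func viewDidLoad() {
        super.viewDidLoad()
        recordButton.center = CGPoint(x: view.bounds.midX, y: view.bounds.maxY - 120)
        view.addSubview(recordButton)

        // Press and hold to record, release to stop.
        let press = UILongPressGestureRecognizer(target: self, action: #selector(handlePress(_:)))
        press.minimumPressDuration = 0.05
        recordButton.addGestureRecognizer(press)
    }

    @objc private func handlePress(_ gesture: UILongPressGestureRecognizer) {
        switch gesture.state {
        case .began:
            recordButton.startRecordingAnimation()   // scales up the red progress ring
            // start capture here (e.g. via NextLevel)
        case .ended, .cancelled, .failed:
            recordButton.stopRecordingAnimation()    // scales back to identity
            // stop capture here, then clear the ring once the clip is finalized
            recordButton.reset()
        default:
            break
        }
    }

    // Call periodically while recording, with fraction in 0...1
    // (for example, elapsed duration divided by maximum duration).
    func captureDidProgress(_ fraction: Float) {
        recordButton.updateProgress(progress: fraction, animated: true)
    }
}
```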
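
The app's `Info.plist` declares `NSCameraUsageDescription`, `NSMicrophoneUsageDescription`, and `NSPhotoLibraryUsageDescription`; iOS shows those strings only when the matching authorization is actually requested at runtime. Below is a hedged sketch of requesting camera and microphone access with plain AVFoundation — the ARCamera example may handle authorization differently, photo-library access would be requested separately through the Photos framework, and the function name is illustrative.

```swift
import Foundation
import AVFoundation

/// Requests camera and microphone access and reports whether both were granted.
/// The system permission alerts display the usage strings from Info.plist.
func requestCaptureAuthorization(_ completion: @escaping (Bool) -> Void) {
    AVCaptureDevice.requestAccess(for: .video) { videoGranted in
        AVCaptureDevice.requestAccess(for: .audio) { audioGranted in
            DispatchQueue.main.async {
                completion(videoGranted && audioGranted)
            }
        }
    }
}
```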