├── .gitignore ├── LICENSE ├── ObjectTracker ├── ObjectTracker.xcodeproj │ ├── project.pbxproj │ └── project.xcworkspace │ │ └── contents.xcworkspacedata └── ObjectTracker │ ├── AppDelegate.swift │ ├── Assets.xcassets │ └── AppIcon.appiconset │ │ └── Contents.json │ ├── Base.lproj │ ├── LaunchScreen.storyboard │ └── Main.storyboard │ ├── Info.plist │ └── ViewController.swift ├── README.md ├── demo_screenshot.png ├── demo_video.gif └── demo_video.mp4 /.gitignore: -------------------------------------------------------------------------------- 1 | # Xcode 2 | # 3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore 4 | 5 | ## Build generated 6 | build/ 7 | DerivedData/ 8 | 9 | ## Various settings 10 | *.pbxuser 11 | !default.pbxuser 12 | *.mode1v3 13 | !default.mode1v3 14 | *.mode2v3 15 | !default.mode2v3 16 | *.perspectivev3 17 | !default.perspectivev3 18 | xcuserdata/ 19 | 20 | ## Other 21 | *.moved-aside 22 | *.xccheckout 23 | *.xcscmblueprint 24 | 25 | ## Obj-C/Swift specific 26 | *.hmap 27 | *.ipa 28 | *.dSYM.zip 29 | *.dSYM 30 | 31 | ## Playgrounds 32 | timeline.xctimeline 33 | playground.xcworkspace 34 | 35 | # Swift Package Manager 36 | # 37 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies. 38 | # Packages/ 39 | # Package.pins 40 | .build/ 41 | 42 | # CocoaPods 43 | # 44 | # We recommend against adding the Pods directory to your .gitignore. However 45 | # you should judge for yourself, the pros and cons are mentioned at: 46 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control 47 | # 48 | # Pods/ 49 | 50 | # Carthage 51 | # 52 | # Add this line if you want to avoid checking in source code from Carthage dependencies. 53 | # Carthage/Checkouts 54 | 55 | Carthage/Build 56 | 57 | # fastlane 58 | # 59 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the 60 | # screenshots whenever they are needed. 61 | # For more information about the recommended setup visit: 62 | # https://docs.fastlane.tools/best-practices/source-control/#source-control 63 | 64 | fastlane/report.xml 65 | fastlane/Preview.html 66 | fastlane/screenshots 67 | fastlane/test_output 68 | .DS_Store 69 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 jeffreybergier 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /ObjectTracker/ObjectTracker.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 48; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 503D3A381EE9AE7B009EB3BB /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 503D3A371EE9AE7B009EB3BB /* AppDelegate.swift */; }; 11 | 503D3A3A1EE9AE7B009EB3BB /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 503D3A391EE9AE7B009EB3BB /* ViewController.swift */; }; 12 | 503D3A3D1EE9AE7B009EB3BB /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 503D3A3B1EE9AE7B009EB3BB /* Main.storyboard */; }; 13 | 503D3A3F1EE9AE7B009EB3BB /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 503D3A3E1EE9AE7B009EB3BB /* Assets.xcassets */; }; 14 | 503D3A421EE9AE7B009EB3BB /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 503D3A401EE9AE7B009EB3BB /* LaunchScreen.storyboard */; }; 15 | 5052171B1EEB15F7007F5524 /* README.md in Sources */ = {isa = PBXBuildFile; fileRef = 505217191EEB15F6007F5524 /* README.md */; }; 16 | 5052171C1EEB15F7007F5524 /* LICENSE in Resources */ = {isa = PBXBuildFile; fileRef = 5052171A1EEB15F6007F5524 /* LICENSE */; }; 17 | /* End PBXBuildFile section */ 18 | 19 | /* Begin PBXFileReference section */ 20 | 503D3A341EE9AE7B009EB3BB /* ObjectTracker.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = ObjectTracker.app; sourceTree = BUILT_PRODUCTS_DIR; }; 21 | 503D3A371EE9AE7B009EB3BB /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 22 | 503D3A391EE9AE7B009EB3BB /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; 23 | 503D3A3C1EE9AE7B009EB3BB /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 24 | 503D3A3E1EE9AE7B009EB3BB /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 25 | 503D3A411EE9AE7B009EB3BB /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 26 | 503D3A431EE9AE7B009EB3BB /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 27 | 505217191EEB15F6007F5524 /* README.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = ""; }; 28 | 5052171A1EEB15F6007F5524 /* LICENSE */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; name = LICENSE; path = ../LICENSE; sourceTree = ""; }; 29 | /* End PBXFileReference section */ 30 | 31 | /* Begin PBXFrameworksBuildPhase section */ 32 | 503D3A311EE9AE7B009EB3BB /* Frameworks */ = { 33 | isa = PBXFrameworksBuildPhase; 34 | buildActionMask = 
2147483647; 35 | files = ( 36 | ); 37 | runOnlyForDeploymentPostprocessing = 0; 38 | }; 39 | /* End PBXFrameworksBuildPhase section */ 40 | 41 | /* Begin PBXGroup section */ 42 | 503D3A2B1EE9AE7B009EB3BB = { 43 | isa = PBXGroup; 44 | children = ( 45 | 5052171A1EEB15F6007F5524 /* LICENSE */, 46 | 505217191EEB15F6007F5524 /* README.md */, 47 | 503D3A361EE9AE7B009EB3BB /* ObjectTracker */, 48 | 503D3A351EE9AE7B009EB3BB /* Products */, 49 | ); 50 | sourceTree = ""; 51 | }; 52 | 503D3A351EE9AE7B009EB3BB /* Products */ = { 53 | isa = PBXGroup; 54 | children = ( 55 | 503D3A341EE9AE7B009EB3BB /* ObjectTracker.app */, 56 | ); 57 | name = Products; 58 | sourceTree = ""; 59 | }; 60 | 503D3A361EE9AE7B009EB3BB /* ObjectTracker */ = { 61 | isa = PBXGroup; 62 | children = ( 63 | 503D3A371EE9AE7B009EB3BB /* AppDelegate.swift */, 64 | 503D3A391EE9AE7B009EB3BB /* ViewController.swift */, 65 | 503D3A3B1EE9AE7B009EB3BB /* Main.storyboard */, 66 | 503D3A3E1EE9AE7B009EB3BB /* Assets.xcassets */, 67 | 503D3A401EE9AE7B009EB3BB /* LaunchScreen.storyboard */, 68 | 503D3A431EE9AE7B009EB3BB /* Info.plist */, 69 | ); 70 | path = ObjectTracker; 71 | sourceTree = ""; 72 | }; 73 | /* End PBXGroup section */ 74 | 75 | /* Begin PBXNativeTarget section */ 76 | 503D3A331EE9AE7B009EB3BB /* ObjectTracker */ = { 77 | isa = PBXNativeTarget; 78 | buildConfigurationList = 503D3A461EE9AE7B009EB3BB /* Build configuration list for PBXNativeTarget "ObjectTracker" */; 79 | buildPhases = ( 80 | 503D3A301EE9AE7B009EB3BB /* Sources */, 81 | 503D3A311EE9AE7B009EB3BB /* Frameworks */, 82 | 503D3A321EE9AE7B009EB3BB /* Resources */, 83 | ); 84 | buildRules = ( 85 | ); 86 | dependencies = ( 87 | ); 88 | name = ObjectTracker; 89 | productName = ObjectTracker; 90 | productReference = 503D3A341EE9AE7B009EB3BB /* ObjectTracker.app */; 91 | productType = "com.apple.product-type.application"; 92 | }; 93 | /* End PBXNativeTarget section */ 94 | 95 | /* Begin PBXProject section */ 96 | 503D3A2C1EE9AE7B009EB3BB /* Project object */ = { 97 | isa = PBXProject; 98 | attributes = { 99 | LastSwiftUpdateCheck = 0900; 100 | LastUpgradeCheck = 0900; 101 | ORGANIZATIONNAME = "Saturday Apps"; 102 | TargetAttributes = { 103 | 503D3A331EE9AE7B009EB3BB = { 104 | CreatedOnToolsVersion = 9.0; 105 | }; 106 | }; 107 | }; 108 | buildConfigurationList = 503D3A2F1EE9AE7B009EB3BB /* Build configuration list for PBXProject "ObjectTracker" */; 109 | compatibilityVersion = "Xcode 8.0"; 110 | developmentRegion = en; 111 | hasScannedForEncodings = 0; 112 | knownRegions = ( 113 | en, 114 | Base, 115 | ); 116 | mainGroup = 503D3A2B1EE9AE7B009EB3BB; 117 | productRefGroup = 503D3A351EE9AE7B009EB3BB /* Products */; 118 | projectDirPath = ""; 119 | projectRoot = ""; 120 | targets = ( 121 | 503D3A331EE9AE7B009EB3BB /* ObjectTracker */, 122 | ); 123 | }; 124 | /* End PBXProject section */ 125 | 126 | /* Begin PBXResourcesBuildPhase section */ 127 | 503D3A321EE9AE7B009EB3BB /* Resources */ = { 128 | isa = PBXResourcesBuildPhase; 129 | buildActionMask = 2147483647; 130 | files = ( 131 | 503D3A421EE9AE7B009EB3BB /* LaunchScreen.storyboard in Resources */, 132 | 503D3A3F1EE9AE7B009EB3BB /* Assets.xcassets in Resources */, 133 | 5052171C1EEB15F7007F5524 /* LICENSE in Resources */, 134 | 503D3A3D1EE9AE7B009EB3BB /* Main.storyboard in Resources */, 135 | ); 136 | runOnlyForDeploymentPostprocessing = 0; 137 | }; 138 | /* End PBXResourcesBuildPhase section */ 139 | 140 | /* Begin PBXSourcesBuildPhase section */ 141 | 503D3A301EE9AE7B009EB3BB /* Sources */ = { 142 | isa = PBXSourcesBuildPhase; 
143 | buildActionMask = 2147483647; 144 | files = ( 145 | 5052171B1EEB15F7007F5524 /* README.md in Sources */, 146 | 503D3A3A1EE9AE7B009EB3BB /* ViewController.swift in Sources */, 147 | 503D3A381EE9AE7B009EB3BB /* AppDelegate.swift in Sources */, 148 | ); 149 | runOnlyForDeploymentPostprocessing = 0; 150 | }; 151 | /* End PBXSourcesBuildPhase section */ 152 | 153 | /* Begin PBXVariantGroup section */ 154 | 503D3A3B1EE9AE7B009EB3BB /* Main.storyboard */ = { 155 | isa = PBXVariantGroup; 156 | children = ( 157 | 503D3A3C1EE9AE7B009EB3BB /* Base */, 158 | ); 159 | name = Main.storyboard; 160 | sourceTree = ""; 161 | }; 162 | 503D3A401EE9AE7B009EB3BB /* LaunchScreen.storyboard */ = { 163 | isa = PBXVariantGroup; 164 | children = ( 165 | 503D3A411EE9AE7B009EB3BB /* Base */, 166 | ); 167 | name = LaunchScreen.storyboard; 168 | sourceTree = ""; 169 | }; 170 | /* End PBXVariantGroup section */ 171 | 172 | /* Begin XCBuildConfiguration section */ 173 | 503D3A441EE9AE7B009EB3BB /* Debug */ = { 174 | isa = XCBuildConfiguration; 175 | buildSettings = { 176 | ALWAYS_SEARCH_USER_PATHS = NO; 177 | CLANG_ANALYZER_NONNULL = YES; 178 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 179 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 180 | CLANG_CXX_LIBRARY = "libc++"; 181 | CLANG_ENABLE_MODULES = YES; 182 | CLANG_ENABLE_OBJC_ARC = YES; 183 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 184 | CLANG_WARN_BOOL_CONVERSION = YES; 185 | CLANG_WARN_COMMA = YES; 186 | CLANG_WARN_CONSTANT_CONVERSION = YES; 187 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 188 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 189 | CLANG_WARN_EMPTY_BODY = YES; 190 | CLANG_WARN_ENUM_CONVERSION = YES; 191 | CLANG_WARN_INFINITE_RECURSION = YES; 192 | CLANG_WARN_INT_CONVERSION = YES; 193 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 194 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 195 | CLANG_WARN_STRICT_PROTOTYPES = YES; 196 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 197 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 198 | CLANG_WARN_UNREACHABLE_CODE = YES; 199 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 200 | CODE_SIGN_IDENTITY = "iPhone Developer"; 201 | COPY_PHASE_STRIP = NO; 202 | DEBUG_INFORMATION_FORMAT = dwarf; 203 | ENABLE_STRICT_OBJC_MSGSEND = YES; 204 | ENABLE_TESTABILITY = YES; 205 | GCC_C_LANGUAGE_STANDARD = gnu11; 206 | GCC_DYNAMIC_NO_PIC = NO; 207 | GCC_NO_COMMON_BLOCKS = YES; 208 | GCC_OPTIMIZATION_LEVEL = 0; 209 | GCC_PREPROCESSOR_DEFINITIONS = ( 210 | "DEBUG=1", 211 | "$(inherited)", 212 | ); 213 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 214 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 215 | GCC_WARN_UNDECLARED_SELECTOR = YES; 216 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 217 | GCC_WARN_UNUSED_FUNCTION = YES; 218 | GCC_WARN_UNUSED_VARIABLE = YES; 219 | IPHONEOS_DEPLOYMENT_TARGET = 11.0; 220 | MTL_ENABLE_DEBUG_INFO = YES; 221 | ONLY_ACTIVE_ARCH = YES; 222 | SDKROOT = iphoneos; 223 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; 224 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 225 | }; 226 | name = Debug; 227 | }; 228 | 503D3A451EE9AE7B009EB3BB /* Release */ = { 229 | isa = XCBuildConfiguration; 230 | buildSettings = { 231 | ALWAYS_SEARCH_USER_PATHS = NO; 232 | CLANG_ANALYZER_NONNULL = YES; 233 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 234 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 235 | CLANG_CXX_LIBRARY = "libc++"; 236 | CLANG_ENABLE_MODULES = YES; 237 | CLANG_ENABLE_OBJC_ARC = YES; 238 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 239 | CLANG_WARN_BOOL_CONVERSION = YES; 240 | CLANG_WARN_COMMA = YES; 241 | 
CLANG_WARN_CONSTANT_CONVERSION = YES; 242 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 243 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 244 | CLANG_WARN_EMPTY_BODY = YES; 245 | CLANG_WARN_ENUM_CONVERSION = YES; 246 | CLANG_WARN_INFINITE_RECURSION = YES; 247 | CLANG_WARN_INT_CONVERSION = YES; 248 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 249 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 250 | CLANG_WARN_STRICT_PROTOTYPES = YES; 251 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 252 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 253 | CLANG_WARN_UNREACHABLE_CODE = YES; 254 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 255 | CODE_SIGN_IDENTITY = "iPhone Developer"; 256 | COPY_PHASE_STRIP = NO; 257 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 258 | ENABLE_NS_ASSERTIONS = NO; 259 | ENABLE_STRICT_OBJC_MSGSEND = YES; 260 | GCC_C_LANGUAGE_STANDARD = gnu11; 261 | GCC_NO_COMMON_BLOCKS = YES; 262 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 263 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 264 | GCC_WARN_UNDECLARED_SELECTOR = YES; 265 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 266 | GCC_WARN_UNUSED_FUNCTION = YES; 267 | GCC_WARN_UNUSED_VARIABLE = YES; 268 | IPHONEOS_DEPLOYMENT_TARGET = 11.0; 269 | MTL_ENABLE_DEBUG_INFO = NO; 270 | SDKROOT = iphoneos; 271 | SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; 272 | VALIDATE_PRODUCT = YES; 273 | }; 274 | name = Release; 275 | }; 276 | 503D3A471EE9AE7B009EB3BB /* Debug */ = { 277 | isa = XCBuildConfiguration; 278 | buildSettings = { 279 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 280 | DEVELOPMENT_TEAM = V6ESYGU6CV; 281 | INFOPLIST_FILE = ObjectTracker/Info.plist; 282 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 283 | PRODUCT_BUNDLE_IDENTIFIER = com.saturdayapps.ObjectTracker; 284 | PRODUCT_NAME = "$(TARGET_NAME)"; 285 | SWIFT_VERSION = 4.0; 286 | TARGETED_DEVICE_FAMILY = "1,2"; 287 | }; 288 | name = Debug; 289 | }; 290 | 503D3A481EE9AE7B009EB3BB /* Release */ = { 291 | isa = XCBuildConfiguration; 292 | buildSettings = { 293 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 294 | DEVELOPMENT_TEAM = V6ESYGU6CV; 295 | INFOPLIST_FILE = ObjectTracker/Info.plist; 296 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 297 | PRODUCT_BUNDLE_IDENTIFIER = com.saturdayapps.ObjectTracker; 298 | PRODUCT_NAME = "$(TARGET_NAME)"; 299 | SWIFT_VERSION = 4.0; 300 | TARGETED_DEVICE_FAMILY = "1,2"; 301 | }; 302 | name = Release; 303 | }; 304 | /* End XCBuildConfiguration section */ 305 | 306 | /* Begin XCConfigurationList section */ 307 | 503D3A2F1EE9AE7B009EB3BB /* Build configuration list for PBXProject "ObjectTracker" */ = { 308 | isa = XCConfigurationList; 309 | buildConfigurations = ( 310 | 503D3A441EE9AE7B009EB3BB /* Debug */, 311 | 503D3A451EE9AE7B009EB3BB /* Release */, 312 | ); 313 | defaultConfigurationIsVisible = 0; 314 | defaultConfigurationName = Release; 315 | }; 316 | 503D3A461EE9AE7B009EB3BB /* Build configuration list for PBXNativeTarget "ObjectTracker" */ = { 317 | isa = XCConfigurationList; 318 | buildConfigurations = ( 319 | 503D3A471EE9AE7B009EB3BB /* Debug */, 320 | 503D3A481EE9AE7B009EB3BB /* Release */, 321 | ); 322 | defaultConfigurationIsVisible = 0; 323 | defaultConfigurationName = Release; 324 | }; 325 | /* End XCConfigurationList section */ 326 | }; 327 | rootObject = 503D3A2C1EE9AE7B009EB3BB /* Project object */; 328 | } 329 | -------------------------------------------------------------------------------- /ObjectTracker/ObjectTracker.xcodeproj/project.xcworkspace/contents.xcworkspacedata: 
-------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /ObjectTracker/ObjectTracker/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.swift 3 | // ObjectTracker 4 | // 5 | // Created by Jeffrey Bergier on 6/8/17. 6 | // Copyright © 2017 Saturday Apps. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | @UIApplicationMain 12 | class AppDelegate: UIResponder, UIApplicationDelegate { 13 | 14 | var window: UIWindow? 15 | 16 | 17 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool { 18 | // Override point for customization after application launch. 19 | return true 20 | } 21 | 22 | func applicationWillResignActive(_ application: UIApplication) { 23 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 24 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 25 | } 26 | 27 | func applicationDidEnterBackground(_ application: UIApplication) { 28 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 29 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 30 | } 31 | 32 | func applicationWillEnterForeground(_ application: UIApplication) { 33 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 34 | } 35 | 36 | func applicationDidBecomeActive(_ application: UIApplication) { 37 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 38 | } 39 | 40 | func applicationWillTerminate(_ application: UIApplication) { 41 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 
42 | } 43 | 44 | 45 | } 46 | 47 | -------------------------------------------------------------------------------- /ObjectTracker/ObjectTracker/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | } 88 | ], 89 | "info" : { 90 | "version" : 1, 91 | "author" : "xcode" 92 | } 93 | } -------------------------------------------------------------------------------- /ObjectTracker/ObjectTracker/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /ObjectTracker/ObjectTracker/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | -------------------------------------------------------------------------------- /ObjectTracker/ObjectTracker/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | NSCameraUsageDescription 24 | "" 25 | UILaunchStoryboardName 26 | LaunchScreen 27 | UIMainStoryboardFile 28 | Main 29 | UIRequiredDeviceCapabilities 30 | 31 | armv7 32 | 33 | 
UISupportedInterfaceOrientations 34 | 35 | UIInterfaceOrientationPortrait 36 | UIInterfaceOrientationLandscapeLeft 37 | UIInterfaceOrientationLandscapeRight 38 | 39 | UISupportedInterfaceOrientations~ipad 40 | 41 | UIInterfaceOrientationPortrait 42 | UIInterfaceOrientationPortraitUpsideDown 43 | UIInterfaceOrientationLandscapeLeft 44 | UIInterfaceOrientationLandscapeRight 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /ObjectTracker/ObjectTracker/ViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.swift 3 | // ObjectTracker 4 | // 5 | // Created by Jeffrey Bergier on 6/8/17. 6 | // Copyright © 2017 Saturday Apps. All rights reserved. 7 | // 8 | 9 | import AVFoundation 10 | import Vision 11 | import UIKit 12 | 13 | class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate { 14 | 15 | @IBOutlet private weak var cameraView: UIView? 16 | @IBOutlet private weak var highlightView: UIView? { 17 | didSet { 18 | self.highlightView?.layer.borderColor = UIColor.red.cgColor 19 | self.highlightView?.layer.borderWidth = 4 20 | self.highlightView?.backgroundColor = .clear 21 | } 22 | } 23 | 24 | private let visionSequenceHandler = VNSequenceRequestHandler() 25 | private lazy var cameraLayer: AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession) 26 | private lazy var captureSession: AVCaptureSession = { 27 | let session = AVCaptureSession() 28 | session.sessionPreset = AVCaptureSession.Preset.photo 29 | guard 30 | let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back), 31 | let input = try? AVCaptureDeviceInput(device: backCamera) 32 | else { return session } 33 | session.addInput(input) 34 | return session 35 | }() 36 | 37 | override func viewDidLoad() { 38 | super.viewDidLoad() 39 | 40 | // hide the red focus area on load 41 | self.highlightView?.frame = .zero 42 | 43 | // make the camera appear on the screen 44 | self.cameraView?.layer.addSublayer(self.cameraLayer) 45 | 46 | // register to receive buffers from the camera 47 | let videoOutput = AVCaptureVideoDataOutput() 48 | videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "MyQueue")) 49 | self.captureSession.addOutput(videoOutput) 50 | 51 | // begin the session 52 | self.captureSession.startRunning() 53 | } 54 | 55 | override func viewDidLayoutSubviews() { 56 | super.viewDidLayoutSubviews() 57 | 58 | // make sure the layer is the correct size 59 | self.cameraLayer.frame = self.cameraView?.bounds ?? .zero 60 | } 61 | 62 | private var lastObservation: VNDetectedObjectObservation? 
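// AVCaptureVideoDataOutputSampleBufferDelegate callback: runs for every frame the camera delivers and feeds it, along with the most recent observation, into the Vision tracker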
63 | 64 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 65 | guard 66 | // make sure the pixel buffer can be converted 67 | let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), 68 | // make sure that there is a previous observation we can feed into the request 69 | let lastObservation = self.lastObservation 70 | else { return } 71 | 72 | // create the request 73 | let request = VNTrackObjectRequest(detectedObjectObservation: lastObservation, completionHandler: self.handleVisionRequestUpdate) 74 | // set the accuracy to high 75 | // this is slower, but it works a lot better 76 | request.trackingLevel = .accurate 77 | 78 | // perform the request 79 | do { 80 | try self.visionSequenceHandler.perform([request], on: pixelBuffer) 81 | } catch { 82 | print("Throws: \(error)") 83 | } 84 | } 85 | 86 | private func handleVisionRequestUpdate(_ request: VNRequest, error: Error?) { 87 | // Dispatch to the main queue because we are touching non-atomic, non-thread safe properties of the view controller 88 | DispatchQueue.main.async { 89 | // make sure we have an actual result 90 | guard let newObservation = request.results?.first as? VNDetectedObjectObservation else { return } 91 | 92 | // prepare for next loop 93 | self.lastObservation = newObservation 94 | 95 | // check the confidence level before updating the UI 96 | guard newObservation.confidence >= 0.3 else { 97 | // hide the rectangle when we lose accuracy so the user knows something is wrong 98 | self.highlightView?.frame = .zero 99 | return 100 | } 101 | 102 | // calculate view rect 103 | var transformedRect = newObservation.boundingBox 104 | transformedRect.origin.y = 1 - transformedRect.origin.y 105 | let convertedRect = self.cameraLayer.layerRectConverted(fromMetadataOutputRect: transformedRect) 106 | 107 | // move the highlight view 108 | self.highlightView?.frame = convertedRect 109 | } 110 | } 111 | 112 | @IBAction private func userTapped(_ sender: UITapGestureRecognizer) { 113 | // get the center of the tap 114 | self.highlightView?.frame.size = CGSize(width: 120, height: 120) 115 | self.highlightView?.center = sender.location(in: self.view) 116 | 117 | // convert the rect for the initial observation 118 | let originalRect = self.highlightView?.frame ?? .zero 119 | var convertedRect = self.cameraLayer.metadataOutputRectConverted(fromLayerRect: originalRect) 120 | convertedRect.origin.y = 1 - convertedRect.origin.y 121 | 122 | // set the observation 123 | let newObservation = VNDetectedObjectObservation(boundingBox: convertedRect) 124 | self.lastObservation = newObservation 125 | } 126 | 127 | @IBAction private func resetTapped(_ sender: UIBarButtonItem) { 128 | self.lastObservation = nil 129 | self.highlightView?.frame = .zero 130 | } 131 | } 132 | 133 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Blog: Getting Started with Vision [![Find me on Twitter](https://img.shields.io/twitter/url/http/shields.io.svg?style=social)](https://twitter.com/jeffburg) 2 | ![Xcode 9 beta 1](https://img.shields.io/badge/Xcode-9%20beta%201-0080FF.svg) ![Swift 4](https://img.shields.io/badge/Swift-4-yellow.svg) ![iOS 11 beta 1](https://img.shields.io/badge/iOS-11%20beta%201-green.svg) 3 | 4 | ![Tracking Screenshot](demo_screenshot.png) [![Tracking Video](demo_video.gif)](demo_video.mp4) 5 | 6 | ## What is Vision? 
7 | Vision is a new framework from Apple for iOS 11 and other Apple platforms. Vision works hand in hand with the [Core ML](https://developer.apple.com/machine-learning/) framework. Core ML is the new framework that makes it really easy to take a machine learning model and run your data through it to get predictions. The Vision framework helps you feed machine learning models that expect images. Using the Vision framework, it's easy to process a live feed from the camera and extract information from each frame using both built-in and external machine learning models. 8 | 9 | ## Built-in Features 10 | Vision has a number of built-in features. Some of them work on still images, some on video, and most work on both. 11 | 12 | - Face Detection 13 | - Individual feature detection, such as the nose, mouth, left eye, etc. 14 | - Horizon detection 15 | - Rectangle detection 16 | - Character detection 17 | - Object tracking 18 | - Object recognition 19 |   - via external machine learning models 20 | 21 | ## Getting Started with Object Tracking 22 | 23 | We're going to build a simple project where the user taps on an object on the screen and the Vision system then tracks that object. As the user moves the phone, we would expect the object to stay tracked in the video frame. Also, if the object moves on its own, it should be tracked by the Vision framework. 24 | 25 | Note that the code below does not represent best practices in terms of reducing the complexity of your view controllers. It's just an easy place to get started. Ideally, you would abstract most of this code into a custom object that the view controller uses. 26 | 27 | Also note that this tutorial assumes you are comfortable with using storyboards to hook up basic views and gesture recognizers. 28 | 29 | ### Project Overview 30 | 1. Start the AVCaptureSession 31 | 1. Configure the AVCaptureSession 32 | 1. Configure the Vision system. 33 | 1. Seed the Vision system with an 'Observation' when the user taps the screen. 34 | 1. Update the rectangle on the screen as the Vision system returns new 'Observations.' 35 | 36 | ### 1. Start the AVCaptureSession 37 | 38 | This is not new code, so I'm not going to go into detail. We're going to add some lazy properties to our view controller. They just give us access to the `AVCaptureSession` as well as the `AVCaptureVideoPreviewLayer` so the user can see the video feed on the screen. The IBOutlet here is just a view that is the same width and height as the view controller's view. I did this so it was easy to put the Highlight view on top of the video output. 39 | 40 | At this point, you should be able to launch the app and see camera output on the screen. 41 | 42 | ``` swift 43 | import AVFoundation 44 | import Vision 45 | import UIKit 46 | 47 | class ViewController: UIViewController { 48 | 49 | @IBOutlet private weak var cameraView: UIView? 50 | 51 | private lazy var cameraLayer: AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession) 52 | private lazy var captureSession: AVCaptureSession = { 53 | let session = AVCaptureSession() 54 | session.sessionPreset = AVCaptureSession.Preset.photo 55 | guard 56 | let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back), 57 | let input = try?
AVCaptureDeviceInput(device: backCamera) 58 | else { return session } 59 | session.addInput(input) 60 | return session 61 | }() 62 | 63 | override func viewDidLoad() { 64 | super.viewDidLoad() 65 | 66 | // make the camera appear on the screen 67 | self.cameraView?.layer.addSublayer(self.cameraLayer) 68 | 69 | // begin the session 70 | self.captureSession.startRunning() 71 | } 72 | 73 | override func viewDidLayoutSubviews() { 74 | super.viewDidLayoutSubviews() 75 | 76 | // make sure the layer is the correct size 77 | self.cameraLayer.frame = self.cameraView?.bounds ?? .zero 78 | } 79 | } 80 | ``` 81 | 82 | ### 2. Configure AVCaptureSession 83 | 84 | In order to get video buffers from the AVCaptureSession into the vision system we need to tell the `AVCaptureSession` that we want to be a delegate of its video feed. In `viewDidLoad:` add the following code. 85 | 86 | ``` swift 87 | override func viewDidLoad() { 88 | // ... 89 | // make the camera appear on the screen... 90 | 91 | // register to receive buffers from the camera 92 | let videoOutput = AVCaptureVideoDataOutput() 93 | videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "MyQueue")) 94 | self.captureSession.addOutput(videoOutput) 95 | 96 | // begin the session... 97 | } 98 | ``` 99 | 100 | In order to receive the frames, we need to conform to the `AVCaptureVideoDataOutputSampleBufferDelegate` and implement the appropriate method. 101 | 102 | Add a print statement into the method below and run the app. You should see the console grow rapidly. The AVCaptureSession returns data often. 103 | 104 | ``` swift 105 | class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate { 106 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { } 107 | } 108 | ``` 109 | 110 | ### 3. Configure the Vision System 111 | 112 | In this project, we're streaming video data into the vision system. This means that the Vision handler object is a long lived object in our view controller. So we're going to add another property for the `VNSequenceRequestHandler`. 113 | 114 | ``` swift 115 | private let visionSequenceHandler = VNSequenceRequestHandler() 116 | ``` 117 | 118 | The vision sequence system works in a loop. You provide a "seed" observation, then feed that into the vision system. The vision system then outputs a new observation. That new observation then needs to be fed back into the vision system when the camera has new data. In order to accomplish this, we need another property on our view controller. This property will store the seed observation. It will also store the observations returned by the vision system. Remember that the AVCaptureSession is creating the loop for us by calling the delegate method over and over. 119 | 120 | ``` swift 121 | private var lastObservation: VNDetectedObjectObservation? 122 | ``` 123 | 124 | In the `captureOutput:didOutput:from:` method, we need to do a few things: 125 | 126 | 1. We need to get the `CVPixelBuffer` out of the `CMSampleBuffer` that is passed in. 127 | 1. We need to make sure we have an observation saved in the property we created in the above step. 128 | 1. Then we need to create and configure a `VNTrackObjectRequest`. 129 | 1. Lastly, we need to ask the `VNSequenceRequestHandler` to perform the request. 130 | 131 | Note that the request takes a completion handler and we have passed in `nil` for that. Thats OK. we're going to write the completion handler later in the tutorial. 
132 | 133 | ``` swift 134 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 135 | guard 136 | // get the CVPixelBuffer out of the CMSampleBuffer 137 | let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), 138 | // make sure that there is a previous observation we can feed into the request 139 | let lastObservation = self.lastObservation 140 | else { return } 141 | 142 | // create the request 143 | let request = VNTrackObjectRequest(detectedObjectObservation: lastObservation, completionHandler: nil) 144 | // set the accuracy to high 145 | // this is slower, but it works a lot better 146 | request.trackingLevel = .accurate 147 | 148 | // perform the request 149 | do { 150 | try self.visionSequenceHandler.perform([request], on: pixelBuffer) 151 | } catch { 152 | print("Throws: \(error)") 153 | } 154 | } 155 | ``` 156 | 157 | ### 4. Seed the Vision system with an 'Observation' when the user taps the screen. 158 | 159 | When the user taps the screen, we want to find out where the user tapped, then pass that into the Vision system as the seed observation. We also want to draw a red box around it so the user can see what we are tracking. To do this, add an `@IBOutlet` property to your view controller for the Highlight view. Add a UIView to the storyboard and wire it up to the outlet. Don't configure any Auto Layout constraints on it because we will be managing its frame directly. 160 | 161 | ``` swift 162 | @IBOutlet private weak var highlightView: UIView? { 163 | didSet { 164 | self.highlightView?.layer.borderColor = UIColor.red.cgColor 165 | self.highlightView?.layer.borderWidth = 4 166 | self.highlightView?.backgroundColor = .clear 167 | } 168 | } 169 | ``` 170 | 171 | In order to receive the tap, we're going to use a `UITapGestureRecognizer` on the main view of the view controller. Once that's in the storyboard, wire it up to an `@IBAction` in the view controller. Below is the code to receive that tap from the gesture recognizer and then draw a red box around it using the Highlight view. 172 | 173 | Note that the size I picked is arbitrary. Also note that the Vision system is sensitive to the width and height of the rectangle we pass in. The more closely the rectangle surrounds the object, the better the Vision system will be able to track it. 174 | 175 | ``` swift 176 | @IBAction private func userTapped(_ sender: UITapGestureRecognizer) { 177 | // get the center of the tap 178 | self.highlightView?.frame.size = CGSize(width: 120, height: 120) 179 | self.highlightView?.center = sender.location(in: self.view) 180 | } 181 | ``` 182 | 183 | Unfortunately, we can't pass this CGRect directly into the Vision system. There are 3 different coordinate systems we have to convert between. 184 | 1. UIKit coordinate space 185 | - Origin in the top left corner 186 | - Max height and width values of the screen size in points (320 x 568 on iPhone SE) 187 | 1. AVFoundation coordinate space 188 | - Origin in the top left 189 | - Max height and width of 1 190 | 1. Vision coordinate space 191 | - Origin in the bottom left 192 | - Max height and width of 1 193 | 194 | Luckily, the `AVCaptureVideoPreviewLayer` has helper methods that convert between UIKit coordinates and AVFoundation coordinates. Once we have AVFoundation values, we can invert the Y origin to convert to Vision coordinates.
195 | 196 | ``` swift 197 | @IBAction private func userTapped(_ sender: UITapGestureRecognizer) { 198 | // get the center of the tap 199 | // .. 200 | 201 | // convert the rect for the initial observation 202 | let originalRect = self.highlightView?.frame ?? .zero 203 | var convertedRect = self.cameraLayer.metadataOutputRectConverted(fromLayerRect: originalRect) 204 | convertedRect.origin.y = 1 - convertedRect.origin.y 205 | } 206 | ``` 207 | 208 | Once we have the correct CGRect to pass to the vision system, we can create our seed observation and store it in the property we created earlier. 209 | 210 | ``` swift 211 | @IBAction private func userTapped(_ sender: UITapGestureRecognizer) { 212 | // .. 213 | // convert the rect for the initial observation 214 | // .. 215 | 216 | // set the observation 217 | let newObservation = VNDetectedObjectObservation(boundingBox: convertedRect) 218 | self.lastObservation = newObservation 219 | } 220 | ``` 221 | 222 | Now if you run the app and tap the screen, you should see a red box appear around where you touched. Also, unknown to you, the vision system is running and it is performing object tracking. However, we never added the completion handler to our request. So the results of the object tracking are not doing anything. 223 | 224 | ### 5. Update the rectangle on the screen as the vision system returns new 'Observations.' 225 | 226 | We're going to add a new method to the view controller. We'll use this method as the completion handler for our object tracking request. 227 | 228 | ``` swift 229 | private func handleVisionRequestUpdate(_ request: VNRequest, error: Error?) { 230 | // Dispatch to the main queue because we are touching non-atomic, non-thread safe properties of the view controller 231 | DispatchQueue.main.async { 232 | } 233 | } 234 | ``` 235 | 236 | Make sure to adjust the request object to take this method as a completion handler 237 | 238 | ``` swift 239 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 240 | // .. 241 | // create the request 242 | let request = VNTrackObjectRequest(detectedObjectObservation: lastObservation, completionHandler: self.handleVisionRequestUpdate) 243 | //.. 244 | } 245 | ``` 246 | 247 | In the completion handler, there are 3 things we need to do: 248 | 249 | 1. Check that the observation object is the correct kind of observation. 250 | 1. Replace the `lastObservation` property with the new observation. 251 | - That way its ready next time the camera has a new frame for us. 252 | 1. Update the UI to draw the Highlight view around the new observation. This way the user can see the tracking as it happens. 253 | 254 | Below, is the guard statement that allows us to check we have the correct observation type and store it in our property for next time. 255 | 256 | ``` swift 257 | private func handleVisionRequestUpdate(_ request: VNRequest, error: Error?) { 258 | // Dispatch to the main queue because we are touching non-atomic, non-thread safe properties of the view controller 259 | DispatchQueue.main.async { 260 | // make sure we have an actual result 261 | guard let newObservation = request.results?.first as? VNDetectedObjectObservation else { return } 262 | 263 | // prepare for next loop 264 | self.lastObservation = newObservation 265 | } 266 | } 267 | ``` 268 | 269 | Now we need to take the `boundingBox` of the observation and convert it from Vision space to UIKit space. 
To do this, we do the opposite of what we did in the tap gesture `@IBAction`. We take the original, flip the Y coordinate to convert to AVFoundation coordinates. Then we use the `AVCaptureVideoPreviewLayer` to convert from AVFoundation coordinates to UIKit coordinates. Then we set the frame on the Highlight view. 270 | 271 | ``` swift 272 | private func handleVisionRequestUpdate(_ request: VNRequest, error: Error?) { 273 | // Dispatch to the main queue because we are touching non-atomic, non-thread safe properties of the view controller 274 | DispatchQueue.main.async { 275 | // .. 276 | // prepare for next loop 277 | // .. 278 | 279 | // calculate view rect 280 | var transformedRect = newObservation.boundingBox 281 | transformedRect.origin.y = 1 - transformedRect.origin.y 282 | let convertedRect = self.cameraLayer.layerRectConverted(fromMetadataOutputRect: transformedRect) 283 | 284 | // move the highlight view 285 | self.highlightView?.frame = convertedRect 286 | } 287 | } 288 | ``` 289 | 290 | Now when you run the app, you can tap on something and you should be able to slowly pan the phone and see the red Highlight view stay on that object. To add a tiny amount of polish, we can check the `confidence` property on the observation. This property tells us how confident the model is about whether its the correct object being tracked or not. Confidence is a value between 0 and 1. In my testing, 0.3 seemed to be about the cut off where things were getting bad. Here is the final completion handler: 291 | 292 | ``` swift 293 | private func handleVisionRequestUpdate(_ request: VNRequest, error: Error?) { 294 | // Dispatch to the main queue because we are touching non-atomic, non-thread safe properties of the view controller 295 | DispatchQueue.main.async { 296 | // make sure we have an actual result 297 | guard let newObservation = request.results?.first as? VNDetectedObjectObservation else { return } 298 | 299 | // prepare for next loop 300 | self.lastObservation = newObservation 301 | 302 | // check the confidence level before updating the UI 303 | guard newObservation.confidence >= 0.3 else { 304 | // hide the rectangle when we lose accuracy so the user knows something is wrong 305 | self.highlightView?.frame = .zero 306 | return 307 | } 308 | 309 | // calculate view rect 310 | var transformedRect = newObservation.boundingBox 311 | transformedRect.origin.y = 1 - transformedRect.origin.y 312 | let convertedRect = self.cameraLayer.layerRectConverted(fromMetadataOutputRect: transformedRect) 313 | 314 | // move the highlight view 315 | self.highlightView?.frame = convertedRect 316 | } 317 | } 318 | ``` 319 | 320 | ## Summary [![Find me on Twitter](https://img.shields.io/twitter/url/http/shields.io.svg?style=social)](https://twitter.com/jeffburg) 321 | 322 | Now you have an object tracker working with a live video feed. Note that the techniques we used here work with almost all the Vision framework request types. You use the AVCaptureSession delegate callbacks to feed new `CVPixelBuffer`s and new requests to the `VNSequenceRequestHandler`. 323 | 324 | Also note that the request handler can perform many requests simultaneously. The request handler takes an Array of request objects. So you can make several of them that all do different things and pass them into the request handler. Two primary use cases come to mind for why you would want to do this. 325 | 326 | 1. Use the `VNDetectFaceRectanglesRequest` object to detect faces. 
Once you find a face, make a new `VNTrackObjectRequest` for each face so that you can keep track of which face is which as they move around the camera. 327 | 2. Use the `VNTrackObjectRequest` to track an object the user is interested in (like in this tutorial), and then create a `VNCoreMLRequest` to use a machine learning model to attempt to identify what is in the `boundingBox` of the `VNDetectedObjectObservation`. Note that image-based requests (`VNImageBasedRequest` and its subclasses, which include `VNCoreMLRequest`) have a `regionOfInterest` property. Set this to tell the handler which part of the `CVPixelBuffer` it should look at. This is how you can easily go from the `boundingBox` of an observation to detecting what is inside that part of the image (see the short sketch at the end of this document). 328 | 329 | 330 | -------------------------------------------------------------------------------- /demo_screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jeffreybergier/Blog-Getting-Started-with-Vision/bb68c7d7d3def3413fb773d4b3208b9a6072836f/demo_screenshot.png -------------------------------------------------------------------------------- /demo_video.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jeffreybergier/Blog-Getting-Started-with-Vision/bb68c7d7d3def3413fb773d4b3208b9a6072836f/demo_video.gif -------------------------------------------------------------------------------- /demo_video.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jeffreybergier/Blog-Getting-Started-with-Vision/bb68c7d7d3def3413fb773d4b3208b9a6072836f/demo_video.mp4 --------------------------------------------------------------------------------
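To make the second idea above concrete, here is a minimal sketch (not part of the project above) of classifying only the region that the tracker is currently following. `MyObjectClassifier` is a hypothetical stand-in for whatever Core ML model class Xcode generated for your model, and you might call something like this from the tracking request's completion handler, passing in the current pixel buffer and the newest observation.

``` swift
import CoreML
import Vision

// minimal sketch: classify only the region that the tracker is currently following
// `MyObjectClassifier` is a hypothetical Xcode-generated Core ML model class
func classifyTrackedRegion(in pixelBuffer: CVPixelBuffer,
                           observation: VNDetectedObjectObservation) {
    guard let visionModel = try? VNCoreMLModel(for: MyObjectClassifier().model) else { return }

    let request = VNCoreMLRequest(model: visionModel) { request, error in
        // take the top classification for the cropped region, if there is one
        guard let best = request.results?.first as? VNClassificationObservation else { return }
        print("Tracked object looks like \(best.identifier) (confidence \(best.confidence))")
    }
    // only look at the part of the frame the tracker says contains our object
    request.regionOfInterest = observation.boundingBox

    // run the Core ML request against the same pixel buffer the tracker used
    let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
    do {
        try handler.perform([request])
    } catch {
        print("Throws: \(error)")
    }
}
```

Classification is heavier than tracking, so in practice you would probably throttle how often you run it rather than classifying every frame.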