├── .gitignore ├── HelloVision.xcodeproj ├── project.pbxproj ├── project.xcworkspace │ └── contents.xcworkspacedata └── xcuserdata │ └── peng.xcuserdatad │ └── xcschemes │ └── xcschememanagement.plist ├── HelloVision ├── AppDelegate.swift ├── Assets.xcassets │ └── AppIcon.appiconset │ │ └── Contents.json ├── BarcodesDetectViewController.swift ├── Base.lproj │ ├── LaunchScreen.storyboard │ └── Main.storyboard ├── FaceDetectionViewController.swift ├── FaceLandMarksViewController.swift ├── ImageChooser.swift ├── Info.plist ├── MobileNet.mlmodel ├── ObjectTrackingViewController.swift ├── QRCode.png ├── TextDetectionViewController.swift ├── ViewController.swift ├── face.jpg ├── faces.jpg ├── image.jpg └── text.jpg └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | # OS X 2 | .DS_Store 3 | 4 | # Xcode 5 | build/ 6 | *.pbxuser 7 | !default.pbxuser 8 | *.mode1v3 9 | !default.mode1v3 10 | *.mode2v3 11 | !default.mode2v3 12 | *.perspectivev3 13 | !default.perspectivev3 14 | xcuserdata 15 | *.xccheckout 16 | profile 17 | *.moved-aside 18 | DerivedData 19 | *.hmap 20 | *.xccheckout 21 | # AppCode 22 | .idea/ 23 | 24 | Carthage 25 | 26 | Demo/Pods 27 | .ruby-version 28 | .ruby-gemset 29 | # Swift Package Manager 30 | .build 31 | Packages 32 | -------------------------------------------------------------------------------- /HelloVision.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 48; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 24FBEE6D1EFECC9200D6020E /* ImageChooser.swift in Sources */ = {isa = PBXBuildFile; fileRef = 24FBEE6C1EFECC9200D6020E /* ImageChooser.swift */; }; 11 | B8597FB51EE94B7800B71E00 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8597FB41EE94B7800B71E00 /* AppDelegate.swift */; }; 12 | B8597FB71EE94B7800B71E00 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8597FB61EE94B7800B71E00 /* ViewController.swift */; }; 13 | B8597FBA1EE94B7800B71E00 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = B8597FB81EE94B7800B71E00 /* Main.storyboard */; }; 14 | B8597FBC1EE94B7800B71E00 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = B8597FBB1EE94B7800B71E00 /* Assets.xcassets */; }; 15 | B8597FBF1EE94B7800B71E00 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = B8597FBD1EE94B7800B71E00 /* LaunchScreen.storyboard */; }; 16 | B8597FC71EE9578E00B71E00 /* MobileNet.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = B8597FC61EE9578E00B71E00 /* MobileNet.mlmodel */; }; 17 | B8597FDC1EE9639600B71E00 /* image.jpg in Resources */ = {isa = PBXBuildFile; fileRef = B8597FDB1EE9639000B71E00 /* image.jpg */; }; 18 | B8597FE01EE970F100B71E00 /* FaceDetectionViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8597FDF1EE970F100B71E00 /* FaceDetectionViewController.swift */; }; 19 | B8BC8B7D1EEA6A1A0029579B /* faces.jpg in Resources */ = {isa = PBXBuildFile; fileRef = B8BC8B7C1EEA6A120029579B /* faces.jpg */; }; 20 | B8BC8B831EEA8AAD0029579B /* FaceLandMarksViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8BC8B821EEA8AAD0029579B /* FaceLandMarksViewController.swift */; }; 21 | B8F62C5E1EEBC69E008BE757 /* face.jpg in Resources */ = {isa = PBXBuildFile; fileRef = B8F62C5D1EEBC693008BE757 /* face.jpg */; }; 22 | 
B8F62C601EEBEC21008BE757 /* BarcodesDetectViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8F62C5F1EEBEC21008BE757 /* BarcodesDetectViewController.swift */; }; 23 | B8F62C621EEBEE7E008BE757 /* QRCode.png in Resources */ = {isa = PBXBuildFile; fileRef = B8F62C611EEBEE74008BE757 /* QRCode.png */; }; 24 | B8F62C641EECE672008BE757 /* TextDetectionViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8F62C631EECE672008BE757 /* TextDetectionViewController.swift */; }; 25 | B8F62C6C1EECF0DA008BE757 /* text.jpg in Resources */ = {isa = PBXBuildFile; fileRef = B8F62C6B1EECF0D2008BE757 /* text.jpg */; }; 26 | B8F62C6E1EEE6A55008BE757 /* ObjectTrackingViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8F62C6D1EEE6A55008BE757 /* ObjectTrackingViewController.swift */; }; 27 | /* End PBXBuildFile section */ 28 | 29 | /* Begin PBXFileReference section */ 30 | 24FBEE6C1EFECC9200D6020E /* ImageChooser.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageChooser.swift; sourceTree = ""; }; 31 | B8597FB11EE94B7800B71E00 /* HelloVision.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = HelloVision.app; sourceTree = BUILT_PRODUCTS_DIR; }; 32 | B8597FB41EE94B7800B71E00 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 33 | B8597FB61EE94B7800B71E00 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; 34 | B8597FB91EE94B7800B71E00 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 35 | B8597FBB1EE94B7800B71E00 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 36 | B8597FBE1EE94B7800B71E00 /* Base */ = {isa = 
PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 37 | B8597FC01EE94B7800B71E00 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 38 | B8597FC61EE9578E00B71E00 /* MobileNet.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; name = MobileNet.mlmodel; path = ../../../../Desktop/MobileNet.mlmodel; sourceTree = ""; }; 39 | B8597FDB1EE9639000B71E00 /* image.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = image.jpg; sourceTree = ""; }; 40 | B8597FDF1EE970F100B71E00 /* FaceDetectionViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceDetectionViewController.swift; sourceTree = ""; }; 41 | B8BC8B7C1EEA6A120029579B /* faces.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = faces.jpg; sourceTree = ""; }; 42 | B8BC8B821EEA8AAD0029579B /* FaceLandMarksViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceLandMarksViewController.swift; sourceTree = ""; }; 43 | B8F62C5D1EEBC693008BE757 /* face.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = face.jpg; sourceTree = ""; }; 44 | B8F62C5F1EEBEC21008BE757 /* BarcodesDetectViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BarcodesDetectViewController.swift; sourceTree = ""; }; 45 | B8F62C611EEBEE74008BE757 /* QRCode.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = QRCode.png; sourceTree = ""; }; 46 | B8F62C631EECE672008BE757 /* TextDetectionViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TextDetectionViewController.swift; sourceTree = ""; }; 47 | B8F62C6B1EECF0D2008BE757 /* text.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = text.jpg; sourceTree = ""; }; 48 | 
B8F62C6D1EEE6A55008BE757 /* ObjectTrackingViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ObjectTrackingViewController.swift; sourceTree = ""; }; 49 | /* End PBXFileReference section */ 50 | 51 | /* Begin PBXFrameworksBuildPhase section */ 52 | B8597FAE1EE94B7800B71E00 /* Frameworks */ = { 53 | isa = PBXFrameworksBuildPhase; 54 | buildActionMask = 2147483647; 55 | files = ( 56 | ); 57 | runOnlyForDeploymentPostprocessing = 0; 58 | }; 59 | /* End PBXFrameworksBuildPhase section */ 60 | 61 | /* Begin PBXGroup section */ 62 | B8597FA81EE94B7800B71E00 = { 63 | isa = PBXGroup; 64 | children = ( 65 | B8597FB31EE94B7800B71E00 /* HelloVision */, 66 | B8597FB21EE94B7800B71E00 /* Products */, 67 | ); 68 | sourceTree = ""; 69 | }; 70 | B8597FB21EE94B7800B71E00 /* Products */ = { 71 | isa = PBXGroup; 72 | children = ( 73 | B8597FB11EE94B7800B71E00 /* HelloVision.app */, 74 | ); 75 | name = Products; 76 | sourceTree = ""; 77 | }; 78 | B8597FB31EE94B7800B71E00 /* HelloVision */ = { 79 | isa = PBXGroup; 80 | children = ( 81 | B8597FB41EE94B7800B71E00 /* AppDelegate.swift */, 82 | B8597FB61EE94B7800B71E00 /* ViewController.swift */, 83 | B8597FDF1EE970F100B71E00 /* FaceDetectionViewController.swift */, 84 | B8BC8B821EEA8AAD0029579B /* FaceLandMarksViewController.swift */, 85 | B8F62C5F1EEBEC21008BE757 /* BarcodesDetectViewController.swift */, 86 | B8F62C631EECE672008BE757 /* TextDetectionViewController.swift */, 87 | B8F62C6D1EEE6A55008BE757 /* ObjectTrackingViewController.swift */, 88 | B8597FC61EE9578E00B71E00 /* MobileNet.mlmodel */, 89 | 24FBEE6C1EFECC9200D6020E /* ImageChooser.swift */, 90 | B8597FDB1EE9639000B71E00 /* image.jpg */, 91 | B8BC8B7C1EEA6A120029579B /* faces.jpg */, 92 | B8F62C5D1EEBC693008BE757 /* face.jpg */, 93 | B8F62C611EEBEE74008BE757 /* QRCode.png */, 94 | B8F62C6B1EECF0D2008BE757 /* text.jpg */, 95 | B8597FB81EE94B7800B71E00 /* Main.storyboard */, 96 | B8597FBB1EE94B7800B71E00 /* Assets.xcassets */, 97 | 
B8597FBD1EE94B7800B71E00 /* LaunchScreen.storyboard */, 98 | B8597FC01EE94B7800B71E00 /* Info.plist */, 99 | ); 100 | path = HelloVision; 101 | sourceTree = ""; 102 | }; 103 | /* End PBXGroup section */ 104 | 105 | /* Begin PBXNativeTarget section */ 106 | B8597FB01EE94B7800B71E00 /* HelloVision */ = { 107 | isa = PBXNativeTarget; 108 | buildConfigurationList = B8597FC31EE94B7800B71E00 /* Build configuration list for PBXNativeTarget "HelloVision" */; 109 | buildPhases = ( 110 | B8597FAD1EE94B7800B71E00 /* Sources */, 111 | B8597FAE1EE94B7800B71E00 /* Frameworks */, 112 | B8597FAF1EE94B7800B71E00 /* Resources */, 113 | ); 114 | buildRules = ( 115 | ); 116 | dependencies = ( 117 | ); 118 | name = HelloVision; 119 | productName = HelloVision; 120 | productReference = B8597FB11EE94B7800B71E00 /* HelloVision.app */; 121 | productType = "com.apple.product-type.application"; 122 | }; 123 | /* End PBXNativeTarget section */ 124 | 125 | /* Begin PBXProject section */ 126 | B8597FA91EE94B7800B71E00 /* Project object */ = { 127 | isa = PBXProject; 128 | attributes = { 129 | LastSwiftUpdateCheck = 0900; 130 | LastUpgradeCheck = 0900; 131 | ORGANIZATIONNAME = "Peng Guo"; 132 | TargetAttributes = { 133 | B8597FB01EE94B7800B71E00 = { 134 | CreatedOnToolsVersion = 9.0; 135 | ProvisioningStyle = Automatic; 136 | }; 137 | }; 138 | }; 139 | buildConfigurationList = B8597FAC1EE94B7800B71E00 /* Build configuration list for PBXProject "HelloVision" */; 140 | compatibilityVersion = "Xcode 8.0"; 141 | developmentRegion = en; 142 | hasScannedForEncodings = 0; 143 | knownRegions = ( 144 | en, 145 | Base, 146 | ); 147 | mainGroup = B8597FA81EE94B7800B71E00; 148 | productRefGroup = B8597FB21EE94B7800B71E00 /* Products */; 149 | projectDirPath = ""; 150 | projectRoot = ""; 151 | targets = ( 152 | B8597FB01EE94B7800B71E00 /* HelloVision */, 153 | ); 154 | }; 155 | /* End PBXProject section */ 156 | 157 | /* Begin PBXResourcesBuildPhase section */ 158 | B8597FAF1EE94B7800B71E00 /* Resources */ = 
{ 159 | isa = PBXResourcesBuildPhase; 160 | buildActionMask = 2147483647; 161 | files = ( 162 | B8F62C6C1EECF0DA008BE757 /* text.jpg in Resources */, 163 | B8F62C621EEBEE7E008BE757 /* QRCode.png in Resources */, 164 | B8F62C5E1EEBC69E008BE757 /* face.jpg in Resources */, 165 | B8BC8B7D1EEA6A1A0029579B /* faces.jpg in Resources */, 166 | B8597FDC1EE9639600B71E00 /* image.jpg in Resources */, 167 | B8597FBF1EE94B7800B71E00 /* LaunchScreen.storyboard in Resources */, 168 | B8597FBC1EE94B7800B71E00 /* Assets.xcassets in Resources */, 169 | B8597FBA1EE94B7800B71E00 /* Main.storyboard in Resources */, 170 | ); 171 | runOnlyForDeploymentPostprocessing = 0; 172 | }; 173 | /* End PBXResourcesBuildPhase section */ 174 | 175 | /* Begin PBXSourcesBuildPhase section */ 176 | B8597FAD1EE94B7800B71E00 /* Sources */ = { 177 | isa = PBXSourcesBuildPhase; 178 | buildActionMask = 2147483647; 179 | files = ( 180 | B8F62C601EEBEC21008BE757 /* BarcodesDetectViewController.swift in Sources */, 181 | B8597FB71EE94B7800B71E00 /* ViewController.swift in Sources */, 182 | B8597FB51EE94B7800B71E00 /* AppDelegate.swift in Sources */, 183 | B8BC8B831EEA8AAD0029579B /* FaceLandMarksViewController.swift in Sources */, 184 | B8597FC71EE9578E00B71E00 /* MobileNet.mlmodel in Sources */, 185 | B8597FE01EE970F100B71E00 /* FaceDetectionViewController.swift in Sources */, 186 | B8F62C6E1EEE6A55008BE757 /* ObjectTrackingViewController.swift in Sources */, 187 | B8F62C641EECE672008BE757 /* TextDetectionViewController.swift in Sources */, 188 | 24FBEE6D1EFECC9200D6020E /* ImageChooser.swift in Sources */, 189 | ); 190 | runOnlyForDeploymentPostprocessing = 0; 191 | }; 192 | /* End PBXSourcesBuildPhase section */ 193 | 194 | /* Begin PBXVariantGroup section */ 195 | B8597FB81EE94B7800B71E00 /* Main.storyboard */ = { 196 | isa = PBXVariantGroup; 197 | children = ( 198 | B8597FB91EE94B7800B71E00 /* Base */, 199 | ); 200 | name = Main.storyboard; 201 | sourceTree = ""; 202 | }; 203 | B8597FBD1EE94B7800B71E00 
/* LaunchScreen.storyboard */ = { 204 | isa = PBXVariantGroup; 205 | children = ( 206 | B8597FBE1EE94B7800B71E00 /* Base */, 207 | ); 208 | name = LaunchScreen.storyboard; 209 | sourceTree = ""; 210 | }; 211 | /* End PBXVariantGroup section */ 212 | 213 | /* Begin XCBuildConfiguration section */ 214 | B8597FC11EE94B7800B71E00 /* Debug */ = { 215 | isa = XCBuildConfiguration; 216 | buildSettings = { 217 | ALWAYS_SEARCH_USER_PATHS = NO; 218 | CLANG_ANALYZER_NONNULL = YES; 219 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 220 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 221 | CLANG_CXX_LIBRARY = "libc++"; 222 | CLANG_ENABLE_MODULES = YES; 223 | CLANG_ENABLE_OBJC_ARC = YES; 224 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 225 | CLANG_WARN_BOOL_CONVERSION = YES; 226 | CLANG_WARN_COMMA = YES; 227 | CLANG_WARN_CONSTANT_CONVERSION = YES; 228 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 229 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 230 | CLANG_WARN_EMPTY_BODY = YES; 231 | CLANG_WARN_ENUM_CONVERSION = YES; 232 | CLANG_WARN_INFINITE_RECURSION = YES; 233 | CLANG_WARN_INT_CONVERSION = YES; 234 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 235 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 236 | CLANG_WARN_STRICT_PROTOTYPES = YES; 237 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 238 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 239 | CLANG_WARN_UNREACHABLE_CODE = YES; 240 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 241 | CODE_SIGN_IDENTITY = "iPhone Developer"; 242 | COPY_PHASE_STRIP = NO; 243 | DEBUG_INFORMATION_FORMAT = dwarf; 244 | ENABLE_STRICT_OBJC_MSGSEND = YES; 245 | ENABLE_TESTABILITY = YES; 246 | GCC_C_LANGUAGE_STANDARD = gnu11; 247 | GCC_DYNAMIC_NO_PIC = NO; 248 | GCC_NO_COMMON_BLOCKS = YES; 249 | GCC_OPTIMIZATION_LEVEL = 0; 250 | GCC_PREPROCESSOR_DEFINITIONS = ( 251 | "DEBUG=1", 252 | "$(inherited)", 253 | ); 254 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 255 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 256 | GCC_WARN_UNDECLARED_SELECTOR = YES; 257 | 
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 258 | GCC_WARN_UNUSED_FUNCTION = YES; 259 | GCC_WARN_UNUSED_VARIABLE = YES; 260 | IPHONEOS_DEPLOYMENT_TARGET = 11.0; 261 | MTL_ENABLE_DEBUG_INFO = YES; 262 | ONLY_ACTIVE_ARCH = YES; 263 | SDKROOT = iphoneos; 264 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; 265 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 266 | }; 267 | name = Debug; 268 | }; 269 | B8597FC21EE94B7800B71E00 /* Release */ = { 270 | isa = XCBuildConfiguration; 271 | buildSettings = { 272 | ALWAYS_SEARCH_USER_PATHS = NO; 273 | CLANG_ANALYZER_NONNULL = YES; 274 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 275 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 276 | CLANG_CXX_LIBRARY = "libc++"; 277 | CLANG_ENABLE_MODULES = YES; 278 | CLANG_ENABLE_OBJC_ARC = YES; 279 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 280 | CLANG_WARN_BOOL_CONVERSION = YES; 281 | CLANG_WARN_COMMA = YES; 282 | CLANG_WARN_CONSTANT_CONVERSION = YES; 283 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 284 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 285 | CLANG_WARN_EMPTY_BODY = YES; 286 | CLANG_WARN_ENUM_CONVERSION = YES; 287 | CLANG_WARN_INFINITE_RECURSION = YES; 288 | CLANG_WARN_INT_CONVERSION = YES; 289 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 290 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 291 | CLANG_WARN_STRICT_PROTOTYPES = YES; 292 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 293 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 294 | CLANG_WARN_UNREACHABLE_CODE = YES; 295 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 296 | CODE_SIGN_IDENTITY = "iPhone Developer"; 297 | COPY_PHASE_STRIP = NO; 298 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 299 | ENABLE_NS_ASSERTIONS = NO; 300 | ENABLE_STRICT_OBJC_MSGSEND = YES; 301 | GCC_C_LANGUAGE_STANDARD = gnu11; 302 | GCC_NO_COMMON_BLOCKS = YES; 303 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 304 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 305 | GCC_WARN_UNDECLARED_SELECTOR = YES; 306 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 307 | 
GCC_WARN_UNUSED_FUNCTION = YES; 308 | GCC_WARN_UNUSED_VARIABLE = YES; 309 | IPHONEOS_DEPLOYMENT_TARGET = 11.0; 310 | MTL_ENABLE_DEBUG_INFO = NO; 311 | SDKROOT = iphoneos; 312 | SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; 313 | VALIDATE_PRODUCT = YES; 314 | }; 315 | name = Release; 316 | }; 317 | B8597FC41EE94B7800B71E00 /* Debug */ = { 318 | isa = XCBuildConfiguration; 319 | buildSettings = { 320 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 321 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 322 | CODE_SIGN_STYLE = Automatic; 323 | DEVELOPMENT_TEAM = 5K8UAH3FUM; 324 | INFOPLIST_FILE = HelloVision/Info.plist; 325 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 326 | PRODUCT_BUNDLE_IDENTIFIER = xyz.pengguo.HelloVision; 327 | PRODUCT_NAME = "$(TARGET_NAME)"; 328 | PROVISIONING_PROFILE = ""; 329 | PROVISIONING_PROFILE_SPECIFIER = ""; 330 | SWIFT_VERSION = 4.0; 331 | TARGETED_DEVICE_FAMILY = "1,2"; 332 | }; 333 | name = Debug; 334 | }; 335 | B8597FC51EE94B7800B71E00 /* Release */ = { 336 | isa = XCBuildConfiguration; 337 | buildSettings = { 338 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 339 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 340 | CODE_SIGN_STYLE = Automatic; 341 | DEVELOPMENT_TEAM = 5K8UAH3FUM; 342 | INFOPLIST_FILE = HelloVision/Info.plist; 343 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 344 | PRODUCT_BUNDLE_IDENTIFIER = xyz.pengguo.HelloVision; 345 | PRODUCT_NAME = "$(TARGET_NAME)"; 346 | PROVISIONING_PROFILE = ""; 347 | PROVISIONING_PROFILE_SPECIFIER = ""; 348 | SWIFT_VERSION = 4.0; 349 | TARGETED_DEVICE_FAMILY = "1,2"; 350 | }; 351 | name = Release; 352 | }; 353 | /* End XCBuildConfiguration section */ 354 | 355 | /* Begin XCConfigurationList section */ 356 | B8597FAC1EE94B7800B71E00 /* Build configuration list for PBXProject "HelloVision" */ = { 357 | isa = XCConfigurationList; 358 | buildConfigurations = ( 359 | B8597FC11EE94B7800B71E00 /* Debug */, 360 | 
B8597FC21EE94B7800B71E00 /* Release */, 361 | ); 362 | defaultConfigurationIsVisible = 0; 363 | defaultConfigurationName = Release; 364 | }; 365 | B8597FC31EE94B7800B71E00 /* Build configuration list for PBXNativeTarget "HelloVision" */ = { 366 | isa = XCConfigurationList; 367 | buildConfigurations = ( 368 | B8597FC41EE94B7800B71E00 /* Debug */, 369 | B8597FC51EE94B7800B71E00 /* Release */, 370 | ); 371 | defaultConfigurationIsVisible = 0; 372 | defaultConfigurationName = Release; 373 | }; 374 | /* End XCConfigurationList section */ 375 | }; 376 | rootObject = B8597FA91EE94B7800B71E00 /* Project object */; 377 | } 378 | -------------------------------------------------------------------------------- /HelloVision.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /HelloVision.xcodeproj/xcuserdata/peng.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | HelloVision.xcscheme 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /HelloVision/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.swift 3 | // HelloVision 4 | // 5 | // Created by 郭朋 on 08/06/2017. 6 | // Copyright © 2017 Peng Guo. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | @UIApplicationMain 12 | class AppDelegate: UIResponder, UIApplicationDelegate { 13 | 14 | var window: UIWindow? 15 | 16 | 17 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool { 18 | // Override point for customization after application launch. 
19 | return true 20 | } 21 | 22 | func applicationWillResignActive(_ application: UIApplication) { 23 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 24 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 25 | } 26 | 27 | func applicationDidEnterBackground(_ application: UIApplication) { 28 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 29 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 30 | } 31 | 32 | func applicationWillEnterForeground(_ application: UIApplication) { 33 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 34 | } 35 | 36 | func applicationDidBecomeActive(_ application: UIApplication) { 37 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 38 | } 39 | 40 | func applicationWillTerminate(_ application: UIApplication) { 41 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 
42 | } 43 | 44 | 45 | } 46 | 47 | -------------------------------------------------------------------------------- /HelloVision/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | } 88 | ], 89 | "info" : { 90 | "version" : 1, 91 | "author" : "xcode" 92 | } 93 | } -------------------------------------------------------------------------------- /HelloVision/BarcodesDetectViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // 
BarcodesDetectViewController.swift 3 | // HelloVision 4 | // 5 | // Created by 郭朋 on 10/06/2017. 6 | // Copyright © 2017 Peng Guo. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import Vision 11 | 12 | class BarcodesDetectViewController: UIViewController, ImageChooserDelegate { 13 | 14 | @IBOutlet weak var result: UILabel! 15 | @IBOutlet weak var imageView: UIImageView! 16 | 17 | var barcodeImage = UIImage(named: "QRCode.png")! 18 | let imageChooser = ImageChooser() 19 | 20 | override func viewDidLoad() { 21 | super.viewDidLoad() 22 | 23 | self.imageChooser.delegate = self 24 | 25 | self.analyze() 26 | } 27 | 28 | func analyze() { 29 | 30 | let barcodeRequest = VNDetectBarcodesRequest(completionHandler: {(request, error) in 31 | 32 | for result in request.results! { 33 | 34 | if let barcode = result as? VNBarcodeObservation { 35 | 36 | if let desc = barcode.barcodeDescriptor as? CIQRCodeDescriptor { 37 | print(desc.symbolVersion) 38 | let content = String(data: desc.errorCorrectedPayload, encoding: .utf8) 39 | 40 | let resultStr = """ 41 | Symbology: \(barcode.symbology.rawValue)\n 42 | Payload: \(String(describing: content))\n 43 | Error-Correction-Level:\(desc.errorCorrectionLevel)\n 44 | Symbol-Version: \(desc.symbolVersion)\n 45 | """ 46 | DispatchQueue.main.async { 47 | self.result.text = resultStr 48 | } 49 | } 50 | } 51 | } 52 | }) 53 | 54 | let handler = VNImageRequestHandler(cgImage: barcodeImage.cgImage!, options: [:]) 55 | 56 | guard let _ = try? 
handler.perform([barcodeRequest]) else { 57 | return print("error") 58 | } 59 | 60 | } 61 | 62 | @IBAction func chooseImage(_ sender: Any) { 63 | self.imageChooser.choose(viewController: self) 64 | } 65 | 66 | func imageChooser(picked: UIImage) { 67 | self.barcodeImage = picked 68 | self.imageView.image = picked 69 | self.analyze() 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /HelloVision/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /HelloVision/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 
| 213 | 214 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | 234 | 235 | 236 | 237 | 238 | 239 | 240 | 241 | 242 | 243 | 244 | 245 | 246 | 247 | 248 | 249 | 250 | 251 | 252 | 253 | 254 | 255 | 256 | 257 | 258 | 259 | 260 | 261 | 262 | 263 | 264 | 265 | 266 | 267 | 268 | 269 | 270 | 271 | 272 | 273 | 274 | 275 | 276 | 277 | 278 | 279 | 280 | 281 | 282 | 283 | 284 | 285 | 286 | 287 | 294 | 295 | 296 | 297 | 298 | 299 | 300 | 301 | 302 | 303 | 304 | 305 | 306 | 307 | 314 | 315 | 316 | 317 | 318 | 319 | 320 | 321 | 322 | 323 | 324 | 325 | 326 | 327 | 334 | 335 | 336 | 337 | 338 | 339 | 340 | 341 | 342 | 343 | 344 | 345 | 346 | 347 | 354 | 355 | 356 | 357 | 358 | 359 | 360 | 361 | 362 | 363 | 364 | 365 | 366 | 367 | 374 | 375 | 376 | 377 | 378 | 379 | 380 | 381 | 382 | 383 | 384 | 385 | 386 | 387 | 394 | 395 | 396 | 397 | 398 | 399 | 400 | 401 | 402 | 403 | 404 | 405 | 406 | 407 | 408 | 409 | 410 | 411 | 412 | 413 | 414 | 415 | 416 | 417 | 418 | 419 | 420 | 421 | 422 | 423 | 424 | 425 | 426 | 427 | 428 | 429 | 430 | 431 | 432 | 433 | 434 | 435 | 436 | 437 | 438 | 439 | -------------------------------------------------------------------------------- /HelloVision/FaceDetectionViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // FaceDetectionViewController.swift 3 | // HelloVision 4 | // 5 | // Created by 郭朋 on 08/06/2017. 6 | // Copyright © 2017 Peng Guo. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import Vision 11 | import AVFoundation 12 | 13 | class FaceDetectionViewController: UIViewController, ImageChooserDelegate { 14 | 15 | @IBOutlet weak var facesImageView: UIImageView! 16 | 17 | var facesImage = UIImage(named: "faces.jpg")! 
18 | let imageChooser = ImageChooser() 19 | 20 | override func viewDidLoad() { 21 | super.viewDidLoad() 22 | 23 | self.imageChooser.delegate = self 24 | self.analyze() 25 | } 26 | 27 | func analyze() { 28 | guard let facesCIImage = CIImage(image: facesImage) 29 | else { fatalError("can't create CIImage from UIImage") } 30 | let detectFaceRequest: VNDetectFaceRectanglesRequest = VNDetectFaceRectanglesRequest(completionHandler: self.handleFaces) 31 | let detectFaceRequestHandler = VNImageRequestHandler(ciImage: facesCIImage, options: [:]) 32 | 33 | do { 34 | try detectFaceRequestHandler.perform([detectFaceRequest]) 35 | } catch { 36 | print(error) 37 | } 38 | } 39 | 40 | func handleFaces(request: VNRequest, error: Error?) { 41 | guard let observations = request.results as? [VNFaceObservation] 42 | else { fatalError("unexpected result type from VNDetectFaceRectanglesRequest") } 43 | 44 | self.addShapesToFace(forObservations: observations) 45 | } 46 | 47 | func addShapesToFace(forObservations observations: [VNFaceObservation]) { 48 | 49 | if let sublayers = facesImageView.layer.sublayers { 50 | for layer in sublayers { 51 | layer.removeFromSuperlayer() 52 | } 53 | } 54 | 55 | let imageRect = AVMakeRect(aspectRatio: facesImage.size, insideRect: facesImageView.bounds) 56 | 57 | let layers: [CAShapeLayer] = observations.map { observation in 58 | 59 | let w = observation.boundingBox.size.width * imageRect.width 60 | let h = observation.boundingBox.size.height * imageRect.height 61 | let x = observation.boundingBox.origin.x * imageRect.width 62 | let y = imageRect.maxY - (observation.boundingBox.origin.y * imageRect.height) - h 63 | 64 | print("----") 65 | print("W: ", w) 66 | print("H: ", h) 67 | print("X: ", x) 68 | print("Y: ", y) 69 | 70 | let layer = CAShapeLayer() 71 | layer.frame = CGRect(x: x , y: y, width: w, height: h) 72 | layer.borderColor = UIColor.red.cgColor 73 | layer.borderWidth = 2 74 | layer.cornerRadius = 3 75 | return layer 76 | } 77 | 78 | for layer in 
layers { 79 | facesImageView.layer.addSublayer(layer) 80 | } 81 | } 82 | 83 | @IBAction func chooseImage(_ sender: Any) { 84 | self.imageChooser.choose(viewController: self) 85 | } 86 | 87 | func imageChooser(picked: UIImage) { 88 | self.facesImage = picked 89 | self.facesImageView.image = picked 90 | self.analyze() 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /HelloVision/FaceLandMarksViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // FaceLandMarksViewController.swift 3 | // HelloVision 4 | // 5 | // Created by 郭朋 on 09/06/2017. 6 | // Copyright © 2017 Peng Guo. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import Vision 11 | 12 | class FaceLandMarksViewController: UIViewController, ImageChooserDelegate { 13 | 14 | @IBOutlet weak var faceImageView: UIImageView! 15 | 16 | var faceImage = UIImage(named: "face.jpg")! 17 | let imageChooser = ImageChooser() 18 | 19 | override func viewDidLoad() { 20 | super.viewDidLoad() 21 | 22 | self.imageChooser.delegate = self 23 | self.analyze() 24 | } 25 | 26 | func analyze() { 27 | DispatchQueue.global().async { 28 | self.highlightFaces(for: self.faceImage) { (resultImage) in 29 | DispatchQueue.main.async { 30 | self.faceImageView.image = resultImage 31 | } 32 | } 33 | } 34 | } 35 | 36 | open func highlightFaces(for source: UIImage, complete: @escaping (UIImage) -> Void) { 37 | var resultImage = source 38 | let detectFaceRequest = VNDetectFaceLandmarksRequest { (request, error) in 39 | if error == nil { 40 | if let results = request.results as? 
[VNFaceObservation] { 41 | print("Found \(results.count) faces") 42 | 43 | for faceObservation in results { 44 | guard let landmarks = faceObservation.landmarks else { 45 | continue 46 | } 47 | let boundingRect = faceObservation.boundingBox 48 | var landmarkRegions: [VNFaceLandmarkRegion2D] = [] 49 | if let faceContour = landmarks.faceContour { 50 | landmarkRegions.append(faceContour) 51 | } 52 | if let leftEye = landmarks.leftEye { 53 | landmarkRegions.append(leftEye) 54 | } 55 | if let rightEye = landmarks.rightEye { 56 | landmarkRegions.append(rightEye) 57 | } 58 | if let nose = landmarks.nose { 59 | landmarkRegions.append(nose) 60 | } 61 | if let noseCrest = landmarks.noseCrest { 62 | landmarkRegions.append(noseCrest) 63 | } 64 | if let medianLine = landmarks.medianLine { 65 | landmarkRegions.append(medianLine) 66 | } 67 | if let outerLips = landmarks.outerLips { 68 | landmarkRegions.append(outerLips) 69 | } 70 | 71 | if let leftEyebrow = landmarks.leftEyebrow { 72 | landmarkRegions.append(leftEyebrow) 73 | } 74 | if let rightEyebrow = landmarks.rightEyebrow { 75 | landmarkRegions.append(rightEyebrow) 76 | } 77 | 78 | if let innerLips = landmarks.innerLips { 79 | landmarkRegions.append(innerLips) 80 | } 81 | if let leftPupil = landmarks.leftPupil { 82 | landmarkRegions.append(leftPupil) 83 | } 84 | if let rightPupil = landmarks.rightPupil { 85 | landmarkRegions.append(rightPupil) 86 | } 87 | 88 | resultImage = self.drawOnImage(source: resultImage, 89 | boundingRect: boundingRect, 90 | faceLandmarkRegions: landmarkRegions) 91 | 92 | 93 | } 94 | } 95 | } else { 96 | print(error!.localizedDescription) 97 | } 98 | complete(resultImage) 99 | } 100 | 101 | let vnImage = VNImageRequestHandler(cgImage: source.cgImage!, options: [:]) 102 | try? 
vnImage.perform([detectFaceRequest]) 103 | } 104 | 105 | fileprivate func drawOnImage(source: UIImage, 106 | boundingRect: CGRect, 107 | faceLandmarkRegions: [VNFaceLandmarkRegion2D]) -> UIImage { 108 | UIGraphicsBeginImageContextWithOptions(source.size, false, 1) 109 | let context = UIGraphicsGetCurrentContext()! 110 | context.translateBy(x: 0, y: source.size.height) 111 | context.scaleBy(x: 1.0, y: -1.0) 112 | context.setBlendMode(CGBlendMode.colorBurn) 113 | context.setLineJoin(.round) 114 | context.setLineCap(.round) 115 | context.setShouldAntialias(true) 116 | context.setAllowsAntialiasing(true) 117 | 118 | let rectWidth = source.size.width * boundingRect.size.width 119 | let rectHeight = source.size.height * boundingRect.size.height 120 | 121 | //draw image 122 | let rect = CGRect(x: 0, y:0, width: source.size.width, height: source.size.height) 123 | context.draw(source.cgImage!, in: rect) 124 | 125 | 126 | //draw bound rect 127 | var fillColor = UIColor.green 128 | fillColor.setFill() 129 | context.addRect(CGRect(x: boundingRect.origin.x * source.size.width, y:boundingRect.origin.y * source.size.height, width: rectWidth, height: rectHeight)) 130 | context.drawPath(using: CGPathDrawingMode.stroke) 131 | 132 | //draw overlay 133 | fillColor = UIColor.red 134 | fillColor.setStroke() 135 | context.setLineWidth(2.0) 136 | for faceLandmarkRegion in faceLandmarkRegions { 137 | var points: [CGPoint] = [] 138 | for i in 0.. 
2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | UILaunchStoryboardName 24 | LaunchScreen 25 | UIMainStoryboardFile 26 | Main 27 | UIRequiredDeviceCapabilities 28 | 29 | armv7 30 | 31 | UISupportedInterfaceOrientations 32 | 33 | UIInterfaceOrientationPortrait 34 | UIInterfaceOrientationLandscapeLeft 35 | UIInterfaceOrientationLandscapeRight 36 | 37 | NSCameraUsageDescription 38 | Try to track object in camera. 39 | UISupportedInterfaceOrientations~ipad 40 | 41 | UIInterfaceOrientationPortrait 42 | UIInterfaceOrientationPortraitUpsideDown 43 | UIInterfaceOrientationLandscapeLeft 44 | UIInterfaceOrientationLandscapeRight 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /HelloVision/MobileNet.mlmodel: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SwiftBrain/HelloVision/bd3e825ab266944a3bd805e304bea23d7d728cd0/HelloVision/MobileNet.mlmodel -------------------------------------------------------------------------------- /HelloVision/ObjectTrackingViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ObjectTrackingViewController.swift 3 | // HelloVision 4 | // 5 | // Created by 郭朋 on 12/06/2017. 6 | // Copyright © 2017 Peng Guo. All rights reserved. 
7 | // 8 | 9 | import UIKit 10 | import AVFoundation 11 | import Vision 12 | 13 | // this is from https://github.com/jeffreybergier/Blog-Getting-Started-with-Vision 14 | class ObjectTrackingViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate { 15 | 16 | @IBOutlet weak var cameraView: UIView? 17 | @IBOutlet weak var highlightView: UIView? { 18 | didSet { 19 | self.highlightView?.layer.borderColor = UIColor.red.cgColor 20 | self.highlightView?.layer.borderWidth = 4 21 | self.highlightView?.backgroundColor = .clear 22 | } 23 | } 24 | 25 | private let visionSequenceHandler = VNSequenceRequestHandler() 26 | private lazy var cameraLayer: AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession) 27 | private lazy var captureSession: AVCaptureSession = { 28 | let session = AVCaptureSession() 29 | session.sessionPreset = AVCaptureSession.Preset.photo 30 | guard 31 | let backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back), 32 | let input = try? AVCaptureDeviceInput(device: backCamera) 33 | else { return session } 34 | session.addInput(input) 35 | return session 36 | }() 37 | 38 | override func viewDidLoad() { 39 | super.viewDidLoad() 40 | 41 | // hide the red focus area on load 42 | self.highlightView?.frame = .zero 43 | 44 | // make the camera appear on the screen 45 | self.cameraView?.layer.addSublayer(self.cameraLayer) 46 | 47 | // register to receive buffers from the camera 48 | let videoOutput = AVCaptureVideoDataOutput() 49 | videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "MyQueue")) 50 | self.captureSession.addOutput(videoOutput) 51 | 52 | // begin the session 53 | self.captureSession.startRunning() 54 | 55 | } 56 | 57 | override func viewDidLayoutSubviews() { 58 | super.viewDidLayoutSubviews() 59 | 60 | // make sure the layer is the correct size 61 | self.cameraLayer.frame = self.cameraView?.bounds ?? 
.zero 62 | } 63 | 64 | private var lastObservation: VNDetectedObjectObservation? 65 | 66 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 67 | guard 68 | // make sure the pixel buffer can be converted 69 | let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), 70 | // make sure that there is a previous observation we can feed into the request 71 | let lastObservation = self.lastObservation 72 | else { return } 73 | 74 | // create the request 75 | let request = VNTrackObjectRequest(detectedObjectObservation: lastObservation, completionHandler: self.handleVisionRequestUpdate) 76 | // set the accuracy to high 77 | // this is slower, but it works a lot better 78 | request.trackingLevel = .accurate 79 | 80 | // perform the request 81 | do { 82 | try self.visionSequenceHandler.perform([request], on: pixelBuffer) 83 | } catch { 84 | print("Throws: \(error)") 85 | } 86 | } 87 | 88 | private func handleVisionRequestUpdate(_ request: VNRequest, error: Error?) { 89 | // Dispatch to the main queue because we are touching non-atomic, non-thread safe properties of the view controller 90 | DispatchQueue.main.async { 91 | // make sure we have an actual result 92 | guard let newObservation = request.results?.first as? 
VNDetectedObjectObservation else { return } 93 | 94 | // prepare for next loop 95 | self.lastObservation = newObservation 96 | 97 | // check the confidence level before updating the UI 98 | guard newObservation.confidence >= 0.3 else { 99 | // hide the rectangle when we lose accuracy so the user knows something is wrong 100 | self.highlightView?.frame = .zero 101 | return 102 | } 103 | 104 | // calculate view rect 105 | var transformedRect = newObservation.boundingBox 106 | transformedRect.origin.y = 1 - transformedRect.origin.y 107 | let convertedRect = self.cameraLayer.layerRectConverted(fromMetadataOutputRect: transformedRect) 108 | 109 | // move the highlight view 110 | self.highlightView?.frame = convertedRect 111 | } 112 | } 113 | 114 | @IBAction func userTapped(_ sender: UITapGestureRecognizer) { 115 | // get the center of the tap 116 | self.highlightView?.frame.size = CGSize(width: 120, height: 120) 117 | self.highlightView?.center = sender.location(in: self.view) 118 | 119 | // convert the rect for the initial observation 120 | let originalRect = self.highlightView?.frame ?? 
.zero 121 | var convertedRect = self.cameraLayer.metadataOutputRectConverted(fromLayerRect: originalRect) 122 | convertedRect.origin.y = 1 - convertedRect.origin.y 123 | 124 | // set the observation 125 | let newObservation = VNDetectedObjectObservation(boundingBox: convertedRect) 126 | self.lastObservation = newObservation 127 | 128 | } 129 | @IBAction func resetTapped(_ sender: UIBarButtonItem) { 130 | self.lastObservation = nil 131 | self.highlightView?.frame = .zero 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /HelloVision/QRCode.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SwiftBrain/HelloVision/bd3e825ab266944a3bd805e304bea23d7d728cd0/HelloVision/QRCode.png -------------------------------------------------------------------------------- /HelloVision/TextDetectionViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // TextDetectionViewController.swift 3 | // HelloVision 4 | // 5 | // Created by 郭朋 on 11/06/2017. 6 | // Copyright © 2017 Peng Guo. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import Vision 11 | import AVFoundation 12 | 13 | class TextDetectionViewController: UIViewController, ImageChooserDelegate { 14 | 15 | @IBOutlet weak var textImageView: UIImageView! 16 | 17 | let imageChooser = ImageChooser() 18 | var textImage = UIImage(named: "text.jpg")! 19 | 20 | override func viewDidLoad() { 21 | super.viewDidLoad() 22 | 23 | self.imageChooser.delegate = self 24 | 25 | self.analyze() 26 | } 27 | 28 | func analyze() { 29 | 30 | let textDetectionRequest = VNDetectTextRectanglesRequest(completionHandler: {(request, error) in 31 | 32 | guard let observations = request.results as? 
[VNTextObservation] 33 | else { fatalError("unexpected result type from VNDetectTextRectanglesRequest") } 34 | 35 | self.addShapesToText(forObservations: observations) 36 | }) 37 | 38 | let handler = VNImageRequestHandler(cgImage: textImage.cgImage!, options: [:]) 39 | 40 | guard let _ = try? handler.perform([textDetectionRequest]) else { 41 | return print("Could not perform text Detection Request!") 42 | } 43 | 44 | } 45 | 46 | func addShapesToText(forObservations observations: [VNTextObservation]) { 47 | 48 | if let layers = self.textImageView.layer.sublayers { 49 | for layer in layers { 50 | layer.removeFromSuperlayer() 51 | } 52 | } 53 | 54 | let imageRect = AVMakeRect(aspectRatio: textImage.size, insideRect: textImageView.bounds) 55 | 56 | let layers: [CAShapeLayer] = observations.map { observation in 57 | 58 | 59 | let w = observation.boundingBox.size.width * imageRect.width 60 | let h = observation.boundingBox.size.height * imageRect.height 61 | let x = observation.boundingBox.origin.x * imageRect.width + imageRect.origin.x 62 | let y = imageRect.maxY - (observation.boundingBox.origin.y * imageRect.height) - h 63 | 64 | print("----") 65 | print("W: ", w) 66 | print("H: ", h) 67 | print("X: ", x) 68 | print("Y: ", y) 69 | 70 | let layer = CAShapeLayer() 71 | layer.frame = CGRect(x: x , y: y, width: w, height: h) 72 | layer.borderColor = UIColor.red.cgColor 73 | layer.borderWidth = 2 74 | layer.cornerRadius = 3 75 | return layer 76 | } 77 | 78 | for layer in layers { 79 | textImageView.layer.addSublayer(layer) 80 | } 81 | } 82 | 83 | @IBAction func chooseImage(_ sender: Any) { 84 | self.imageChooser.choose(viewController: self) 85 | } 86 | 87 | func imageChooser(picked: UIImage) { 88 | self.textImage = picked 89 | self.textImageView.image = picked 90 | self.analyze() 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /HelloVision/ViewController.swift: 
-------------------------------------------------------------------------------- 1 | // 2 | // ViewController.swift 3 | // HelloVision 4 | // 5 | // Created by 郭朋 on 08/06/2017. 6 | // Copyright © 2017 Peng Guo. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import Vision 11 | 12 | class ViewController: UIViewController, ImageChooserDelegate { 13 | 14 | @IBOutlet weak var result: UILabel! 15 | @IBOutlet weak var imageView: UIImageView! 16 | 17 | let model = MobileNet() 18 | var image = UIImage(named: "image.jpg")! 19 | let imageChooser = ImageChooser() 20 | var request: VNCoreMLRequest? 21 | 22 | override func viewDidLoad() { 23 | super.viewDidLoad() 24 | 25 | guard let visionModel = try? VNCoreMLModel(for: model.model) else { 26 | fatalError("Error") 27 | } 28 | 29 | self.request = VNCoreMLRequest(model: visionModel) { request, error in 30 | 31 | if let observations = request.results as? [VNClassificationObservation] { 32 | let top5 = observations.prefix(through: 4) 33 | .map { ($0.identifier, Double($0.confidence)) } 34 | self.show(results: top5) 35 | } 36 | } 37 | 38 | self.analyze() 39 | 40 | imageChooser.delegate = self 41 | } 42 | 43 | func analyze() { 44 | guard let request = self.request else { 45 | return 46 | } 47 | let handler = VNImageRequestHandler(cgImage: image.cgImage!) 48 | try? 
handler.perform([request]) 49 | } 50 | 51 | // MARK: - UI stuff 52 | 53 | typealias Prediction = (String, Double) 54 | 55 | func show(results: [Prediction]) { 56 | var s: [String] = [] 57 | for (i, pred) in results.enumerated() { 58 | s.append(String(format: "%d: %@ (%3.2f%%)", i + 1, pred.0, pred.1 * 100)) 59 | } 60 | result.text = s.joined(separator: "\n") 61 | } 62 | 63 | @IBAction func chooseImage(_ sender: Any) { 64 | imageChooser.choose(viewController: self) 65 | } 66 | 67 | func imageChooser(picked: UIImage) { 68 | self.image = picked 69 | self.imageView.image = picked 70 | self.analyze() 71 | } 72 | } 73 | 74 | -------------------------------------------------------------------------------- /HelloVision/face.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SwiftBrain/HelloVision/bd3e825ab266944a3bd805e304bea23d7d728cd0/HelloVision/face.jpg -------------------------------------------------------------------------------- /HelloVision/faces.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SwiftBrain/HelloVision/bd3e825ab266944a3bd805e304bea23d7d728cd0/HelloVision/faces.jpg -------------------------------------------------------------------------------- /HelloVision/image.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SwiftBrain/HelloVision/bd3e825ab266944a3bd805e304bea23d7d728cd0/HelloVision/image.jpg -------------------------------------------------------------------------------- /HelloVision/text.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SwiftBrain/HelloVision/bd3e825ab266944a3bd805e304bea23d7d728cd0/HelloVision/text.jpg -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | # HelloVision 2 | Vision framework example for my article [Swift World: What’s new in iOS 11 — Vision](https://medium.com/compileswift/swift-world-whats-new-in-ios-11-vision-456ba4156bad). 3 | 4 | ### 1. Machine Learning Image Analysis 5 | 6 | ![Machine Learning Image Analysis](https://cdn-images-1.medium.com/max/1600/1*WCLNya0ku09EIz_zf7Hv_Q.png) 7 | 8 | ### 2. Face Detection 9 | 10 | ![Face Detection](https://cdn-images-1.medium.com/max/1600/1*449izZmyHOWoW_P7IAdzSQ.png) 11 | 12 | ### 3. Face Landmarks Detection 13 | 14 | ![Face Landmarks Detection](https://cdn-images-1.medium.com/max/1600/1*7Ay3HUhV4zfYywsqlHE8EA.png) 15 | 16 | ### 4. Text Detection 17 | 18 | ![Text Detection](https://cdn-images-1.medium.com/max/1600/1*fNSvBzouHJAVciOiFkbTrA.png) 19 | 20 | ### 5. Object Tracking 21 | 22 | [![Object Tracking](https://img.youtube.com/vi/QzM0je58-cY/0.jpg)](https://www.youtube.com/watch?v=QzM0je58-cY "Object Tracking") 23 | --------------------------------------------------------------------------------